/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "cfgexpand.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "asan.h"
/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;

/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}
/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}
/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}
/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (edge_var_maps)
    edge_var_maps->empty ();
}
/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi.phi ();
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
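
/* A typical caller pairs the two helpers above (a sketch, not taken
   verbatim from any one caller): ssa_redirect_edge queues the PHI
   arguments it removes from the old destination, and, once PHI nodes
   exist in the new destination, flush_pending_stmts replays them there:

     e = ssa_redirect_edge (e, new_dest);
     ... create or reuse the PHI nodes in new_dest ...
     flush_pending_stmts (e);  */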
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}
/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            SET_DECL_MODE (vexpr, DECL_MODE (value));
          else
            SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }
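
  /* At this point VALUE is either NULL (the definition could not be
     preserved and debug information will be dropped), an expression that
     can be propagated directly, or the DEBUG_EXPR_DECL created above;
     rewrite every debug bind that uses VAR accordingly.  */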
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    {
      unsigned to_remove_bit = -1U;
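      /* TO_REMOVE_BIT remembers the SSA version whose DEF was released in
         the previous iteration of the walk below; it is cleared from
         TOREMOVE lazily so the bitmap is not modified while
         EXECUTE_IF_SET_IN_BITMAP is iterating over it.  */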
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
        {
          if (to_remove_bit != -1U)
            {
              bitmap_clear_bit (toremove, to_remove_bit);
              to_remove_bit = -1U;
            }

          bool remove_now = true;
          tree var = ssa_name (j);
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                BREAK_FROM_IMM_USE_STMT (uit);
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }

              to_remove_bit = j;
            }
        }
      if (to_remove_bit != -1U)
        bitmap_clear_bit (toremove, to_remove_bit);
    }
}
/* Verify virtual SSA form.  */
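/* BB is the block to verify, CURRENT_VDEF the virtual definition that is
   live on entry to BB, and VISITED the set of blocks already checked; the
   walk recurses along successor edges, starting (from verify_ssa below)
   at the entry block with the default definition of the virtual operand.  */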
bool
verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
{
  bool err = false;

  if (bitmap_bit_p (visited, bb->index))
    return false;

  bitmap_set_bit (visited, bb->index);

  /* Pick up the single virtual PHI def.  */
  gphi *phi = NULL;
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      tree res = gimple_phi_result (si.phi ());
      if (virtual_operand_p (res))
        {
          if (phi)
            {
              error ("multiple virtual PHI nodes in BB %d", bb->index);
              print_gimple_stmt (stderr, phi, 0);
              print_gimple_stmt (stderr, si.phi (), 0);
              err = true;
            }
          else
            phi = si.phi ();
        }
    }
  if (phi)
    {
      current_vdef = gimple_phi_result (phi);
      if (TREE_CODE (current_vdef) != SSA_NAME)
        {
          error ("virtual definition is not an SSA name");
          print_gimple_stmt (stderr, phi, 0);
          err = true;
        }
    }

  /* Verify stmts.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree vuse = gimple_vuse (stmt);
      if (vuse)
        {
          if (vuse != current_vdef)
            {
              error ("stmt with wrong VUSE");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              fprintf (stderr, "expected ");
              print_generic_expr (stderr, current_vdef);
              fprintf (stderr, "\n");
              err = true;
            }
          tree vdef = gimple_vdef (stmt);
          if (vdef)
            {
              current_vdef = vdef;
              if (TREE_CODE (current_vdef) != SSA_NAME)
                {
                  error ("virtual definition is not an SSA name");
                  print_gimple_stmt (stderr, phi, 0);
                  err = true;
                }
            }
        }
    }

  /* Verify destination PHI uses and recurse.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      gphi *phi = get_virtual_phi (e->dest);
      if (phi
          && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
        {
          error ("PHI node with wrong VUSE on edge from BB %d",
                 e->src->index);
          print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
          fprintf (stderr, "expected ");
          print_generic_expr (stderr, current_vdef);
          fprintf (stderr, "\n");
          err = true;
        }

      /* Recurse.  */
      err |= verify_vssa (e->dest, current_vdef, visited);
    }

  return err;
}
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;
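  /* NAMES_DEFINED_IN_BB tracks which SSA versions have already been
     defined earlier in the block currently being walked, so verify_use
     can flag a same-block use that precedes its definition.  */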

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  {
    /* Keep track of SSA names present in the IL.  */
    size_t i;
    tree name;
    hash_map <void *, tree> ssa_info;
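    /* Pointer-info and range-info objects must not be shared between SSA
       names; SSA_INFO maps each info object to the first name it was seen
       on so a second sighting can be reported below.  */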

    FOR_EACH_SSA_NAME (i, name, cfun)
      {
        gimple *stmt;
        TREE_VISITED (name) = 0;

        verify_ssa_name (name, virtual_operand_p (name));

        stmt = SSA_NAME_DEF_STMT (name);
        if (!gimple_nop_p (stmt))
          {
            basic_block bb = gimple_bb (stmt);
            if (verify_def (bb, definition_block,
                            name, stmt, virtual_operand_p (name)))
              goto err;
          }

        void *info = NULL;
        if (POINTER_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_PTR_INFO (name);
        else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_RANGE_INFO (name);
        if (info)
          {
            bool existed;
            tree &val = ssa_info.get_or_insert (info, &existed);
            if (existed)
              {
                error ("shared SSA name info");
                print_generic_expr (stderr, val);
                fprintf (stderr, " and ");
                print_generic_expr (stderr, name);
                fprintf (stderr, "\n");
                goto err;
              }
            else
              val = name;
          }
      }
  }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                       ssa_default_def (cfun, gimple_vop (cfun)), visited))
        goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}
/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */
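/* For example (not an exhaustive characterization), a NOP_EXPR that only
   adds or drops type qualifiers, or that converts between pointer types
   in the same address space, is useless in this sense; the actual test is
   delegated to useless_type_conversion_p.  */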
bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
             (TREE_TYPE (expr),
              TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
/* Return true if T, as SSA_NAME, has an implicit default defined value.  */

bool
ssa_defined_default_def_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return true;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return true;
  /* Hard register variables get their initial value from the ether.  */
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    return true;

  return false;
}
/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */
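/* A partially undefined value arises, for instance, from a COMPLEX_EXPR
   whose real or imaginary operand is itself an undefined SSA name, which
   is exactly what the PARTIAL check below looks for.  */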
bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;

  if (ssa_defined_default_def_p (t))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check if the complex was not only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}
/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA name.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
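      /* Depending on SYM's type and the access, lower the MEM_REF to a
         BIT_FIELD_REF (vector element or other in-bounds integral piece),
         a REALPART_EXPR/IMAGPART_EXPR (complex component), a
         VIEW_CONVERT_EXPR (same-size reinterpretation), or SYM itself.  */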
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
               && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
      else if (DECL_SIZE (sym)
               && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
               && mem_ref_offset (*tp) >= 0
               && wi::leu_p (mem_ref_offset (*tp)
                             + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
                             wi::to_offset (DECL_SIZE_UNIT (sym)))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
                   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
                       == TYPE_PRECISION (TREE_TYPE (*tp))))
               && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
                                  BITS_PER_UNIT) == 0)
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (*tp)
                                          << LOG2_BITS_PER_UNIT));
        }
    }
}
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
        return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
          || VOID_TYPE_P (TREE_TYPE (base)))
        return decl;
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && wi::fits_uhwi_p (mem_ref_offset (base))
          && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                        mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
        return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
          && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
          && mem_ref_offset (base) >= 0
          && wi::leu_p (mem_ref_offset (base)
                        + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
                        wi::to_offset (DECL_SIZE_UNIT (decl)))
          /* ??? We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
              || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
                  == TYPE_PRECISION (TREE_TYPE (base))))
          && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
                             BITS_PER_UNIT) == 0)
        return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
         it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
          && DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          /* If the dynamic type of the decl has larger precision than
             the decl itself we can't use the decls type for SSA rewriting.  */
          && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
               || compare_tree_int (DECL_SIZE (decl),
                                    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
              || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                  && (TYPE_PRECISION (TREE_TYPE (decl))
                      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
          /* Make sure we are not re-writing non-float copying into float
             copying as that can incur normalization.  */
          && (! FLOAT_TYPE_P (TREE_TYPE (decl))
              || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
         using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
          && VECTOR_TYPE_P (TREE_TYPE (decl))
          && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
          && types_compatible_p (TREE_TYPE (lhs),
                                 TREE_TYPE (TREE_TYPE (decl)))
          && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
          && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                              TYPE_SIZE_UNIT (TREE_TYPE (decl)))
          && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
              % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
        return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
                             TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
          % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
/* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
   bit on VAR and mark it for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var);
          fprintf (dump_file, "\n");
        }
    }
}
/* Return true when STMT is ASAN mark where second argument is an address
   of a local variable.  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
                            DECL_ATTRIBUTES (var)))
        return false;

      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
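/* This works in two steps: the first walk collects, per local variable,
   whether its address is really taken (ADDRESSES_TAKEN) or whether some
   access forces it to remain a non-register (NOT_REG_NEEDS); the second
   walk then clears TREE_ADDRESSABLE or sets DECL_GIMPLE_REG_P where
   possible and rewrites the remaining accesses so those variables can be
   renamed into SSA.  */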
void
execute_update_addresses_taken (void)
{
  basic_block bb;
  auto_bitmap addresses_taken;
  auto_bitmap not_reg_needs;
  auto_bitmap suitable_for_renaming;
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          if (code == GIMPLE_CALL)
            {
              if (optimize_atomic_compare_exchange_p (stmt))
                {
                  /* For __atomic_compare_exchange_N if the second argument
                     is &var, don't mark var addressable;
                     if it becomes non-addressable, we'll rewrite it into
                     ATOMIC_COMPARE_EXCHANGE call.  */
                  tree arg = gimple_call_arg (stmt, 1);
                  gimple_call_set_arg (stmt, 1, null_pointer_node);
                  gimple_ior_addresses_taken (addresses_taken, stmt);
                  gimple_call_set_arg (stmt, 1, arg);
                }
              else if (is_asan_mark_p (stmt)
                       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
                ;
              else
                gimple_ior_addresses_taken (addresses_taken, stmt);
            }
          else
            /* Note all addresses taken by the stmt.  */
            gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
                      || non_rewritable_lvalue_p (lhs)))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }
          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
          else if (code == GIMPLE_ASM)
            {
              gasm *asm_stmt = as_a <gasm *> (stmt);
              for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_output_op (asm_stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_input_op (asm_stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          size_t i;
          gphi *phi = gsi.phi ();

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
        for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* Rewrite LHS IMAG/REALPART_EXPR similar to
                   gimplify_modify_expr_complex_part.  */
                if ((TREE_CODE (lhs) == IMAGPART_EXPR
                     || TREE_CODE (lhs) == REALPART_EXPR)
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0))))
                  {
                    tree other = make_ssa_name (TREE_TYPE (lhs));
                    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
                                        ? REALPART_EXPR : IMAGPART_EXPR,
                                        TREE_TYPE (other),
                                        TREE_OPERAND (lhs, 0));
                    gimple *load = gimple_build_assign (other, lrhs);
                    location_t loc = gimple_location (stmt);
                    gimple_set_location (load, loc);
                    gimple_set_vuse (load, gimple_vuse (stmt));
                    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
                    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
                    gimple_assign_set_rhs_with_ops
                      (&gsi, COMPLEX_EXPR,
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? other : gimple_assign_rhs1 (stmt),
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == BIT_FIELD_REF
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0)))
                    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
                    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE
                                                        (TREE_OPERAND (lhs, 0))))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
                        % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
                  {
                    tree var = TREE_OPERAND (lhs, 0);
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos = TREE_OPERAND (lhs, 2);
                    gimple_assign_set_lhs (stmt, var);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* Rewrite a vector insert using a MEM_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
                    && VECTOR_TYPE_P (TREE_TYPE (sym))
                    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
                    && types_compatible_p (TREE_TYPE (lhs),
                                           TREE_TYPE (TREE_TYPE (sym)))
                    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
                    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                                        TYPE_SIZE_UNIT (TREE_TYPE (sym)))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
                        % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
                  {
                    tree val = gimple_assign_rhs1 (stmt);
                    tree bitpos
                      = wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (lhs) * BITS_PER_UNIT);
                    gimple_assign_set_lhs (stmt, sym);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  {
                    if (gimple_clobber_p (stmt))
                      {
                        rhs = build_constructor (TREE_TYPE (lhs), NULL);
                        TREE_THIS_VOLATILE (rhs) = 1;
                      }
                    else
                      rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                         TREE_TYPE (lhs), rhs);
                  }
                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }
            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                if (optimize_atomic_compare_exchange_p (stmt))
                  {
                    tree expected = gimple_call_arg (stmt, 1);
                    if (bitmap_bit_p (suitable_for_renaming,
                                      DECL_UID (TREE_OPERAND (expected, 0))))
                      {
                        fold_builtin_atomic_compare_exchange (&gsi);
                        continue;
                      }
                  }
                else if (is_asan_mark_p (stmt))
                  {
                    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
                    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
                      {
                        unlink_stmt_vdef (stmt);
                        if (asan_mark_p (stmt, ASAN_MARK_POISON))
                          {
                            gcall *call
                              = gimple_build_call_internal (IFN_ASAN_POISON, 0);
                            gimple_call_set_lhs (call, var);
                            gsi_replace (&gsi, call, GSI_SAME_STMT);
                          }
                        else
                          {
                            /* In ASAN_MARK (UNPOISON, &b, ...) the variable
                               is uninitialized.  Avoid dependencies on
                               previous out of scope value.  */
                            tree clobber
                              = build_constructor (TREE_TYPE (var), NULL);
                            TREE_THIS_VOLATILE (clobber) = 1;
                            gimple *g = gimple_build_assign (var, clobber);
                            gsi_replace (&gsi, g, GSI_SAME_STMT);
                          }
                        continue;
                      }
                  }
                else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
                  for (i = 1; i < gimple_call_num_args (stmt); i++)
                    {
                      tree *argp = gimple_call_arg_ptr (stmt, i);
                      if (*argp == null_pointer_node)
                        continue;
                      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
                                  && VAR_P (TREE_OPERAND (*argp, 0)));
                      tree var = TREE_OPERAND (*argp, 0);
                      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
                        *argp = null_pointer_node;
                    }
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }
            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }
            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  timevar_pop (TV_ADDRESS_TAKEN);
}
namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}