/* gcc/tree-ssa.c — from official-gcc.git,
   blob 28020b003f83375ba91f3542c67d9eec9ffc5902.  */
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "asan.h"
46 /* Pointer map of variable mappings, keyed by edge. */
47 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
50 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
52 void
53 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
55 edge_var_map new_node;
57 if (edge_var_maps == NULL)
58 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
60 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
61 new_node.def = def;
62 new_node.result = result;
63 new_node.locus = locus;
65 slot.safe_push (new_node);
69 /* Clear the var mappings in edge E. */
71 void
72 redirect_edge_var_map_clear (edge e)
74 if (!edge_var_maps)
75 return;
77 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
79 if (head)
80 head->release ();
84 /* Duplicate the redirected var mappings in OLDE in NEWE.
86 This assumes a hash_map can have multiple edges mapping to the same
87 var_map (many to one mapping), since we don't remove the previous mappings.
90 void
91 redirect_edge_var_map_dup (edge newe, edge olde)
93 if (!edge_var_maps)
94 return;
96 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
97 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
98 if (!old_head)
99 return;
101 new_head->safe_splice (*old_head);
105 /* Return the variable mappings for a given edge. If there is none, return
106 NULL. */
108 vec<edge_var_map> *
109 redirect_edge_var_map_vector (edge e)
111 /* Hey, what kind of idiot would... you'd be surprised. */
112 if (!edge_var_maps)
113 return NULL;
115 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
116 if (!slot)
117 return NULL;
119 return slot;
122 /* Clear the edge variable mappings. */
124 void
125 redirect_edge_var_map_empty (void)
127 if (edge_var_maps)
128 edge_var_maps->empty ();
132 /* Remove the corresponding arguments from the PHI nodes in E's
133 destination block and redirect it to DEST. Return redirected edge.
134 The list of removed arguments is stored in a vector accessed
135 through edge_var_maps. */
137 edge
138 ssa_redirect_edge (edge e, basic_block dest)
140 gphi_iterator gsi;
141 gphi *phi;
143 redirect_edge_var_map_clear (e);
145 /* Remove the appropriate PHI arguments in E's destination block. */
146 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
148 tree def;
149 source_location locus ;
151 phi = gsi.phi ();
152 def = gimple_phi_arg_def (phi, e->dest_idx);
153 locus = gimple_phi_arg_location (phi, e->dest_idx);
155 if (def == NULL_TREE)
156 continue;
158 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
161 e = redirect_edge_succ_nodup (e, dest);
163 return e;
167 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
168 E->dest. */
170 void
171 flush_pending_stmts (edge e)
173 gphi *phi;
174 edge_var_map *vm;
175 int i;
176 gphi_iterator gsi;
178 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
179 if (!v)
180 return;
182 for (gsi = gsi_start_phis (e->dest), i = 0;
183 !gsi_end_p (gsi) && v->iterate (i, &vm);
184 gsi_next (&gsi), i++)
186 tree def;
188 phi = gsi.phi ();
189 def = redirect_edge_var_map_def (vm);
190 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
193 redirect_edge_var_map_clear (e);
196 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
197 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
198 expression with a different value.
200 This will update any annotations (say debug bind stmts) referring
201 to the original LHS, so that they use the RHS instead. This is
202 done even if NLHS and LHS are the same, for it is understood that
203 the RHS will be modified afterwards, and NLHS will not be assigned
204 an equivalent value.
206 Adjusting any non-annotation uses of the LHS, if needed, is a
207 responsibility of the caller.
209 The effect of this call should be pretty much the same as that of
210 inserting a copy of STMT before STMT, and then removing the
211 original stmt, at which time gsi_remove() would have update
212 annotations, but using this function saves all the inserting,
213 copying and removing. */
215 void
216 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
218 if (MAY_HAVE_DEBUG_STMTS)
220 tree lhs = gimple_get_lhs (stmt);
222 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
224 insert_debug_temp_for_var_def (NULL, lhs);
227 gimple_set_lhs (stmt, nlhs);
231 /* Given a tree for an expression for which we might want to emit
232 locations or values in debug information (generally a variable, but
233 we might deal with other kinds of trees in the future), return the
234 tree that should be used as the variable of a DEBUG_BIND STMT or
235 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
237 tree
238 target_for_debug_bind (tree var)
240 if (!MAY_HAVE_DEBUG_STMTS)
241 return NULL_TREE;
243 if (TREE_CODE (var) == SSA_NAME)
245 var = SSA_NAME_VAR (var);
246 if (var == NULL_TREE)
247 return NULL_TREE;
250 if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
251 && TREE_CODE (var) != PARM_DECL)
252 return NULL_TREE;
254 if (DECL_HAS_VALUE_EXPR_P (var))
255 return target_for_debug_bind (DECL_VALUE_EXPR (var));
257 if (DECL_IGNORED_P (var))
258 return NULL_TREE;
260 /* var-tracking only tracks registers. */
261 if (!is_gimple_reg_type (TREE_TYPE (var)))
262 return NULL_TREE;
264 return var;
267 /* Called via walk_tree, look for SSA_NAMEs that have already been
268 released. */
270 static tree
271 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
273 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
275 if (wi && wi->is_lhs)
276 return NULL_TREE;
278 if (TREE_CODE (*tp) == SSA_NAME)
280 if (SSA_NAME_IN_FREE_LIST (*tp))
281 return *tp;
283 *walk_subtrees = 0;
285 else if (IS_TYPE_OR_DECL_P (*tp))
286 *walk_subtrees = 0;
288 return NULL_TREE;
291 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
292 by other DEBUG stmts, and replace uses of the DEF with the
293 newly-created debug temp. */
295 void
296 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
298 imm_use_iterator imm_iter;
299 use_operand_p use_p;
300 gimple *stmt;
301 gimple *def_stmt = NULL;
302 int usecount = 0;
303 tree value = NULL;
305 if (!MAY_HAVE_DEBUG_STMTS)
306 return;
308 /* If this name has already been registered for replacement, do nothing
309 as anything that uses this name isn't in SSA form. */
310 if (name_registered_for_update_p (var))
311 return;
313 /* Check whether there are debug stmts that reference this variable and,
314 if there are, decide whether we should use a debug temp. */
315 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
317 stmt = USE_STMT (use_p);
319 if (!gimple_debug_bind_p (stmt))
320 continue;
322 if (usecount++)
323 break;
325 if (gimple_debug_bind_get_value (stmt) != var)
327 /* Count this as an additional use, so as to make sure we
328 use a temp unless VAR's definition has a SINGLE_RHS that
329 can be shared. */
330 usecount++;
331 break;
335 if (!usecount)
336 return;
338 if (gsi)
339 def_stmt = gsi_stmt (*gsi);
340 else
341 def_stmt = SSA_NAME_DEF_STMT (var);
343 /* If we didn't get an insertion point, and the stmt has already
344 been removed, we won't be able to insert the debug bind stmt, so
345 we'll have to drop debug information. */
346 if (gimple_code (def_stmt) == GIMPLE_PHI)
348 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
349 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
350 value = NULL;
351 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
352 to. */
353 else if (value == error_mark_node)
354 value = NULL;
356 else if (is_gimple_assign (def_stmt))
358 bool no_value = false;
360 if (!dom_info_available_p (CDI_DOMINATORS))
362 struct walk_stmt_info wi;
364 memset (&wi, 0, sizeof (wi));
366 /* When removing blocks without following reverse dominance
367 order, we may sometimes encounter SSA_NAMEs that have
368 already been released, referenced in other SSA_DEFs that
369 we're about to release. Consider:
371 <bb X>:
372 v_1 = foo;
374 <bb Y>:
375 w_2 = v_1 + bar;
376 # DEBUG w => w_2
378 If we deleted BB X first, propagating the value of w_2
379 won't do us any good. It's too late to recover their
380 original definition of v_1: when it was deleted, it was
381 only referenced in other DEFs, it couldn't possibly know
382 it should have been retained, and propagating every
383 single DEF just in case it might have to be propagated
384 into a DEBUG STMT would probably be too wasteful.
386 When dominator information is not readily available, we
387 check for and accept some loss of debug information. But
388 if it is available, there's no excuse for us to remove
389 blocks in the wrong order, so we don't even check for
390 dead SSA NAMEs. SSA verification shall catch any
391 errors. */
392 if ((!gsi && !gimple_bb (def_stmt))
393 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
394 no_value = true;
397 if (!no_value)
398 value = gimple_assign_rhs_to_tree (def_stmt);
401 if (value)
403 /* If there's a single use of VAR, and VAR is the entire debug
404 expression (usecount would have been incremented again
405 otherwise), and the definition involves only constants and
406 SSA names, then we can propagate VALUE into this single use,
407 avoiding the temp.
409 We can also avoid using a temp if VALUE can be shared and
410 propagated into all uses, without generating expressions that
411 wouldn't be valid gimple RHSs.
413 Other cases that would require unsharing or non-gimple RHSs
414 are deferred to a debug temp, although we could avoid temps
415 at the expense of duplication of expressions. */
417 if (CONSTANT_CLASS_P (value)
418 || gimple_code (def_stmt) == GIMPLE_PHI
419 || (usecount == 1
420 && (!gimple_assign_single_p (def_stmt)
421 || is_gimple_min_invariant (value)))
422 || is_gimple_reg (value))
424 else
426 gdebug *def_temp;
427 tree vexpr = make_node (DEBUG_EXPR_DECL);
429 def_temp = gimple_build_debug_bind (vexpr,
430 unshare_expr (value),
431 def_stmt);
433 DECL_ARTIFICIAL (vexpr) = 1;
434 TREE_TYPE (vexpr) = TREE_TYPE (value);
435 if (DECL_P (value))
436 SET_DECL_MODE (vexpr, DECL_MODE (value));
437 else
438 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));
440 if (gsi)
441 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
442 else
444 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
445 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
448 value = vexpr;
452 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
454 if (!gimple_debug_bind_p (stmt))
455 continue;
457 if (value)
459 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
460 /* unshare_expr is not needed here. vexpr is either a
461 SINGLE_RHS, that can be safely shared, some other RHS
462 that was unshared when we found it had a single debug
463 use, or a DEBUG_EXPR_DECL, that can be safely
464 shared. */
465 SET_USE (use_p, unshare_expr (value));
466 /* If we didn't replace uses with a debug decl fold the
467 resulting expression. Otherwise we end up with invalid IL. */
468 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
470 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
471 fold_stmt_inplace (&gsi);
474 else
475 gimple_debug_bind_reset_value (stmt);
477 update_stmt (stmt);
482 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
483 other DEBUG stmts, and replace uses of the DEF with the
484 newly-created debug temp. */
486 void
487 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
489 gimple *stmt;
490 ssa_op_iter op_iter;
491 def_operand_p def_p;
493 if (!MAY_HAVE_DEBUG_STMTS)
494 return;
496 stmt = gsi_stmt (*gsi);
498 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
500 tree var = DEF_FROM_PTR (def_p);
502 if (TREE_CODE (var) != SSA_NAME)
503 continue;
505 insert_debug_temp_for_var_def (gsi, var);
509 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
511 void
512 reset_debug_uses (gimple *stmt)
514 ssa_op_iter op_iter;
515 def_operand_p def_p;
516 imm_use_iterator imm_iter;
517 gimple *use_stmt;
519 if (!MAY_HAVE_DEBUG_STMTS)
520 return;
522 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
524 tree var = DEF_FROM_PTR (def_p);
526 if (TREE_CODE (var) != SSA_NAME)
527 continue;
529 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
531 if (!gimple_debug_bind_p (use_stmt))
532 continue;
534 gimple_debug_bind_reset_value (use_stmt);
535 update_stmt (use_stmt);
540 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
541 dominated stmts before their dominators, so that release_ssa_defs
542 stands a chance of propagating DEFs into debug bind stmts. */
544 void
545 release_defs_bitset (bitmap toremove)
547 unsigned j;
548 bitmap_iterator bi;
550 /* Performing a topological sort is probably overkill, this will
551 most likely run in slightly superlinear time, rather than the
552 pathological quadratic worst case. */
553 while (!bitmap_empty_p (toremove))
555 unsigned to_remove_bit = -1U;
556 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
558 if (to_remove_bit != -1U)
560 bitmap_clear_bit (toremove, to_remove_bit);
561 to_remove_bit = -1U;
564 bool remove_now = true;
565 tree var = ssa_name (j);
566 gimple *stmt;
567 imm_use_iterator uit;
569 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
571 ssa_op_iter dit;
572 def_operand_p def_p;
574 /* We can't propagate PHI nodes into debug stmts. */
575 if (gimple_code (stmt) == GIMPLE_PHI
576 || is_gimple_debug (stmt))
577 continue;
579 /* If we find another definition to remove that uses
580 the one we're looking at, defer the removal of this
581 one, so that it can be propagated into debug stmts
582 after the other is. */
583 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
585 tree odef = DEF_FROM_PTR (def_p);
587 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
589 remove_now = false;
590 break;
594 if (!remove_now)
595 BREAK_FROM_IMM_USE_STMT (uit);
598 if (remove_now)
600 gimple *def = SSA_NAME_DEF_STMT (var);
601 gimple_stmt_iterator gsi = gsi_for_stmt (def);
603 if (gimple_code (def) == GIMPLE_PHI)
604 remove_phi_node (&gsi, true);
605 else
607 gsi_remove (&gsi, true);
608 release_defs (def);
611 to_remove_bit = j;
614 if (to_remove_bit != -1U)
615 bitmap_clear_bit (toremove, to_remove_bit);
620 /* Verify virtual SSA form. */
622 bool
623 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
625 bool err = false;
627 if (bitmap_bit_p (visited, bb->index))
628 return false;
630 bitmap_set_bit (visited, bb->index);
632 /* Pick up the single virtual PHI def. */
633 gphi *phi = NULL;
634 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
635 gsi_next (&si))
637 tree res = gimple_phi_result (si.phi ());
638 if (virtual_operand_p (res))
640 if (phi)
642 error ("multiple virtual PHI nodes in BB %d", bb->index);
643 print_gimple_stmt (stderr, phi, 0, 0);
644 print_gimple_stmt (stderr, si.phi (), 0, 0);
645 err = true;
647 else
648 phi = si.phi ();
651 if (phi)
653 current_vdef = gimple_phi_result (phi);
654 if (TREE_CODE (current_vdef) != SSA_NAME)
656 error ("virtual definition is not an SSA name");
657 print_gimple_stmt (stderr, phi, 0, 0);
658 err = true;
662 /* Verify stmts. */
663 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
664 gsi_next (&gsi))
666 gimple *stmt = gsi_stmt (gsi);
667 tree vuse = gimple_vuse (stmt);
668 if (vuse)
670 if (vuse != current_vdef)
672 error ("stmt with wrong VUSE");
673 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
674 fprintf (stderr, "expected ");
675 print_generic_expr (stderr, current_vdef, 0);
676 fprintf (stderr, "\n");
677 err = true;
679 tree vdef = gimple_vdef (stmt);
680 if (vdef)
682 current_vdef = vdef;
683 if (TREE_CODE (current_vdef) != SSA_NAME)
685 error ("virtual definition is not an SSA name");
686 print_gimple_stmt (stderr, phi, 0, 0);
687 err = true;
693 /* Verify destination PHI uses and recurse. */
694 edge_iterator ei;
695 edge e;
696 FOR_EACH_EDGE (e, ei, bb->succs)
698 gphi *phi = get_virtual_phi (e->dest);
699 if (phi
700 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
702 error ("PHI node with wrong VUSE on edge from BB %d",
703 e->src->index);
704 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
705 fprintf (stderr, "expected ");
706 print_generic_expr (stderr, current_vdef, 0);
707 fprintf (stderr, "\n");
708 err = true;
711 /* Recurse. */
712 err |= verify_vssa (e->dest, current_vdef, visited);
715 return err;
718 /* Return true if SSA_NAME is malformed and mark it visited.
720 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
721 operand. */
723 static bool
724 verify_ssa_name (tree ssa_name, bool is_virtual)
726 if (TREE_CODE (ssa_name) != SSA_NAME)
728 error ("expected an SSA_NAME object");
729 return true;
732 if (SSA_NAME_IN_FREE_LIST (ssa_name))
734 error ("found an SSA_NAME that had been released into the free pool");
735 return true;
738 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
739 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
741 error ("type mismatch between an SSA_NAME and its symbol");
742 return true;
745 if (is_virtual && !virtual_operand_p (ssa_name))
747 error ("found a virtual definition for a GIMPLE register");
748 return true;
751 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
753 error ("virtual SSA name for non-VOP decl");
754 return true;
757 if (!is_virtual && virtual_operand_p (ssa_name))
759 error ("found a real definition for a non-register");
760 return true;
763 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
764 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
766 error ("found a default name with a non-empty defining statement");
767 return true;
770 return false;
774 /* Return true if the definition of SSA_NAME at block BB is malformed.
776 STMT is the statement where SSA_NAME is created.
778 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
779 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
780 it means that the block in that array slot contains the
781 definition of SSA_NAME.
783 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
785 static bool
786 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
787 gimple *stmt, bool is_virtual)
789 if (verify_ssa_name (ssa_name, is_virtual))
790 goto err;
792 if (SSA_NAME_VAR (ssa_name)
793 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
794 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
796 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
797 goto err;
800 if (definition_block[SSA_NAME_VERSION (ssa_name)])
802 error ("SSA_NAME created in two different blocks %i and %i",
803 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
804 goto err;
807 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
809 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
811 error ("SSA_NAME_DEF_STMT is wrong");
812 fprintf (stderr, "Expected definition statement:\n");
813 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
814 fprintf (stderr, "\nActual definition statement:\n");
815 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
816 goto err;
819 return false;
821 err:
822 fprintf (stderr, "while verifying SSA_NAME ");
823 print_generic_expr (stderr, ssa_name, 0);
824 fprintf (stderr, " in statement\n");
825 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
827 return true;
831 /* Return true if the use of SSA_NAME at statement STMT in block BB is
832 malformed.
834 DEF_BB is the block where SSA_NAME was found to be created.
836 IDOM contains immediate dominator information for the flowgraph.
838 CHECK_ABNORMAL is true if the caller wants to check whether this use
839 is flowing through an abnormal edge (only used when checking PHI
840 arguments).
842 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
843 that are defined before STMT in basic block BB. */
845 static bool
846 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
847 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
849 bool err = false;
850 tree ssa_name = USE_FROM_PTR (use_p);
852 if (!TREE_VISITED (ssa_name))
853 if (verify_imm_links (stderr, ssa_name))
854 err = true;
856 TREE_VISITED (ssa_name) = 1;
858 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
859 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
860 ; /* Default definitions have empty statements. Nothing to do. */
861 else if (!def_bb)
863 error ("missing definition");
864 err = true;
866 else if (bb != def_bb
867 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
869 error ("definition in block %i does not dominate use in block %i",
870 def_bb->index, bb->index);
871 err = true;
873 else if (bb == def_bb
874 && names_defined_in_bb != NULL
875 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
877 error ("definition in block %i follows the use", def_bb->index);
878 err = true;
881 if (check_abnormal
882 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
884 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
885 err = true;
888 /* Make sure the use is in an appropriate list by checking the previous
889 element to make sure it's the same. */
890 if (use_p->prev == NULL)
892 error ("no immediate_use list");
893 err = true;
895 else
897 tree listvar;
898 if (use_p->prev->use == NULL)
899 listvar = use_p->prev->loc.ssa_name;
900 else
901 listvar = USE_FROM_PTR (use_p->prev);
902 if (listvar != ssa_name)
904 error ("wrong immediate use list");
905 err = true;
909 if (err)
911 fprintf (stderr, "for SSA_NAME: ");
912 print_generic_expr (stderr, ssa_name, TDF_VOPS);
913 fprintf (stderr, " in statement:\n");
914 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
917 return err;
921 /* Return true if any of the arguments for PHI node PHI at block BB is
922 malformed.
924 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
925 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
926 it means that the block in that array slot contains the
927 definition of SSA_NAME. */
929 static bool
930 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
932 edge e;
933 bool err = false;
934 size_t i, phi_num_args = gimple_phi_num_args (phi);
936 if (EDGE_COUNT (bb->preds) != phi_num_args)
938 error ("incoming edge count does not match number of PHI arguments");
939 err = true;
940 goto error;
943 for (i = 0; i < phi_num_args; i++)
945 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
946 tree op = USE_FROM_PTR (op_p);
948 e = EDGE_PRED (bb, i);
950 if (op == NULL_TREE)
952 error ("PHI argument is missing for edge %d->%d",
953 e->src->index,
954 e->dest->index);
955 err = true;
956 goto error;
959 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
961 error ("PHI argument is not SSA_NAME, or invariant");
962 err = true;
965 if (TREE_CODE (op) == SSA_NAME)
967 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
968 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
969 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
972 if (TREE_CODE (op) == ADDR_EXPR)
974 tree base = TREE_OPERAND (op, 0);
975 while (handled_component_p (base))
976 base = TREE_OPERAND (base, 0);
977 if ((VAR_P (base)
978 || TREE_CODE (base) == PARM_DECL
979 || TREE_CODE (base) == RESULT_DECL)
980 && !TREE_ADDRESSABLE (base))
982 error ("address taken, but ADDRESSABLE bit not set");
983 err = true;
987 if (e->dest != bb)
989 error ("wrong edge %d->%d for PHI argument",
990 e->src->index, e->dest->index);
991 err = true;
994 if (err)
996 fprintf (stderr, "PHI argument\n");
997 print_generic_stmt (stderr, op, TDF_VOPS);
998 goto error;
1002 error:
1003 if (err)
1005 fprintf (stderr, "for PHI node\n");
1006 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1010 return err;
1014 /* Verify common invariants in the SSA web.
1015 TODO: verify the variable annotations. */
1017 DEBUG_FUNCTION void
1018 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1020 basic_block bb;
1021 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1022 ssa_op_iter iter;
1023 tree op;
1024 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1025 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
1027 gcc_assert (!need_ssa_update_p (cfun));
1029 timevar_push (TV_TREE_SSA_VERIFY);
1032 /* Keep track of SSA names present in the IL. */
1033 size_t i;
1034 tree name;
1035 hash_map <void *, tree> ssa_info;
1037 FOR_EACH_SSA_NAME (i, name, cfun)
1039 gimple *stmt;
1040 TREE_VISITED (name) = 0;
1042 verify_ssa_name (name, virtual_operand_p (name));
1044 stmt = SSA_NAME_DEF_STMT (name);
1045 if (!gimple_nop_p (stmt))
1047 basic_block bb = gimple_bb (stmt);
1048 if (verify_def (bb, definition_block,
1049 name, stmt, virtual_operand_p (name)))
1050 goto err;
1053 void *info = NULL;
1054 if (POINTER_TYPE_P (TREE_TYPE (name)))
1055 info = SSA_NAME_PTR_INFO (name);
1056 else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
1057 info = SSA_NAME_RANGE_INFO (name);
1058 if (info)
1060 bool existed;
1061 tree &val = ssa_info.get_or_insert (info, &existed);
1062 if (existed)
1064 error ("shared SSA name info");
1065 print_generic_expr (stderr, val, 0);
1066 fprintf (stderr, " and ");
1067 print_generic_expr (stderr, name, 0);
1068 fprintf (stderr, "\n");
1069 goto err;
1071 else
1072 val = name;
1077 calculate_dominance_info (CDI_DOMINATORS);
1079 /* Now verify all the uses and make sure they agree with the definitions
1080 found in the previous pass. */
1081 FOR_EACH_BB_FN (bb, cfun)
1083 edge e;
1084 edge_iterator ei;
1086 /* Make sure that all edges have a clear 'aux' field. */
1087 FOR_EACH_EDGE (e, ei, bb->preds)
1089 if (e->aux)
1091 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1092 e->dest->index);
1093 goto err;
1097 /* Verify the arguments for every PHI node in the block. */
1098 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1100 gphi *phi = gsi.phi ();
1101 if (verify_phi_args (phi, bb, definition_block))
1102 goto err;
1104 bitmap_set_bit (names_defined_in_bb,
1105 SSA_NAME_VERSION (gimple_phi_result (phi)));
1108 /* Now verify all the uses and vuses in every statement of the block. */
1109 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1110 gsi_next (&gsi))
1112 gimple *stmt = gsi_stmt (gsi);
1113 use_operand_p use_p;
1115 if (check_modified_stmt && gimple_modified_p (stmt))
1117 error ("stmt (%p) marked modified after optimization pass: ",
1118 (void *)stmt);
1119 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1120 goto err;
1123 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1125 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1126 goto err;
1129 if (gimple_debug_bind_p (stmt)
1130 && !gimple_debug_bind_has_value_p (stmt))
1131 continue;
1133 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1135 op = USE_FROM_PTR (use_p);
1136 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1137 use_p, stmt, false, names_defined_in_bb))
1138 goto err;
1141 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1143 if (SSA_NAME_DEF_STMT (op) != stmt)
1145 error ("SSA_NAME_DEF_STMT is wrong");
1146 fprintf (stderr, "Expected definition statement:\n");
1147 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1148 fprintf (stderr, "\nActual definition statement:\n");
1149 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1150 4, TDF_VOPS);
1151 goto err;
1153 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1157 bitmap_clear (names_defined_in_bb);
1160 free (definition_block);
1162 if (gimple_vop (cfun)
1163 && ssa_default_def (cfun, gimple_vop (cfun)))
1165 auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1166 bitmap_clear (visited);
1167 if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1168 ssa_default_def (cfun, gimple_vop (cfun)), visited))
1169 goto err;
1172 /* Restore the dominance information to its prior known state, so
1173 that we do not perturb the compiler's subsequent behavior. */
1174 if (orig_dom_state == DOM_NONE)
1175 free_dominance_info (CDI_DOMINATORS);
1176 else
1177 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1179 BITMAP_FREE (names_defined_in_bb);
1180 timevar_pop (TV_TREE_SSA_VERIFY);
1181 return;
1183 err:
1184 internal_error ("verify_ssa failed");
1188 /* Initialize global DFA and SSA structures. */
1190 void
1191 init_tree_ssa (struct function *fn)
1193 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1194 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1195 pt_solution_reset (&fn->gimple_df->escaped);
1196 init_ssanames (fn, 0);
1199 /* Deallocate memory associated with SSA data structures for FNDECL. */
1201 void
1202 delete_tree_ssa (struct function *fn)
1204 fini_ssanames (fn);
1206 /* We no longer maintain the SSA operand cache at this point. */
1207 if (ssa_operands_active (fn))
1208 fini_ssa_operands (fn);
1210 fn->gimple_df->default_defs->empty ();
1211 fn->gimple_df->default_defs = NULL;
1212 pt_solution_reset (&fn->gimple_df->escaped);
1213 if (fn->gimple_df->decls_to_pointers != NULL)
1214 delete fn->gimple_df->decls_to_pointers;
1215 fn->gimple_df->decls_to_pointers = NULL;
1216 fn->gimple_df = NULL;
1218 /* We no longer need the edge variable maps. */
1219 redirect_edge_var_map_empty ();
1222 /* Return true if EXPR is a useless type conversion, otherwise return
1223 false. */
1225 bool
1226 tree_ssa_useless_type_conversion (tree expr)
1228 /* If we have an assignment that merely uses a NOP_EXPR to change
1229 the top of the RHS to the type of the LHS and the type conversion
1230 is "safe", then strip away the type conversion so that we can
1231 enter LHS = RHS into the const_and_copies table. */
1232 if (CONVERT_EXPR_P (expr)
1233 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1234 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1235 return useless_type_conversion_p
1236 (TREE_TYPE (expr),
1237 TREE_TYPE (TREE_OPERAND (expr, 0)));
1239 return false;
1242 /* Strip conversions from EXP according to
1243 tree_ssa_useless_type_conversion and return the resulting
1244 expression. */
1246 tree
1247 tree_ssa_strip_useless_type_conversions (tree exp)
1249 while (tree_ssa_useless_type_conversion (exp))
1250 exp = TREE_OPERAND (exp, 0);
1251 return exp;
1255 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1256 should be returned if the value is only partially undefined. */
1258 bool
1259 ssa_undefined_value_p (tree t, bool partial)
1261 gimple *def_stmt;
1262 tree var = SSA_NAME_VAR (t);
1264 if (!var)
1266 /* Parameters get their initial value from the function entry. */
1267 else if (TREE_CODE (var) == PARM_DECL)
1268 return false;
1269 /* When returning by reference the return address is actually a hidden
1270 parameter. */
1271 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1272 return false;
1273 /* Hard register variables get their initial value from the ether. */
1274 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1275 return false;
1277 /* The value is undefined iff its definition statement is empty. */
1278 def_stmt = SSA_NAME_DEF_STMT (t);
1279 if (gimple_nop_p (def_stmt))
1280 return true;
1282 /* Check if the complex was not only partially defined. */
1283 if (partial && is_gimple_assign (def_stmt)
1284 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1286 tree rhs1, rhs2;
1288 rhs1 = gimple_assign_rhs1 (def_stmt);
1289 rhs2 = gimple_assign_rhs2 (def_stmt);
1290 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1291 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1293 return false;
1297 /* Return TRUE iff STMT, a gimple statement, references an undefined
1298 SSA name. */
1300 bool
1301 gimple_uses_undefined_value_p (gimple *stmt)
1303 ssa_op_iter iter;
1304 tree op;
1306 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1307 if (ssa_undefined_value_p (op))
1308 return true;
1310 return false;
1315 /* If necessary, rewrite the base of the reference tree *TP from
1316 a MEM_REF to a plain or converted symbol. */
1318 static void
1319 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1321 tree sym;
/* Walk down handled components to the innermost base reference.  */
1323 while (handled_component_p (*tp))
1324 tp = &TREE_OPERAND (*tp, 0);
/* Only a MEM_REF of the form MEM[&SYM + OFF], where SYM is a
   non-addressable decl recorded in SUITABLE_FOR_RENAMING and the
   access has non-void register type, can be rewritten.  */
1325 if (TREE_CODE (*tp) == MEM_REF
1326 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1327 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1328 && DECL_P (sym)
1329 && !TREE_ADDRESSABLE (sym)
1330 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1331 && is_gimple_reg_type (TREE_TYPE (*tp))
1332 && ! VOID_TYPE_P (TREE_TYPE (*tp)))
/* Element extract from a vector SYM at an element-aligned offset:
   lower to a BIT_FIELD_REF, scaling the byte offset to bits.  */
1334 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1335 && useless_type_conversion_p (TREE_TYPE (*tp),
1336 TREE_TYPE (TREE_TYPE (sym)))
1337 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1338 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1340 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1341 TYPE_SIZE (TREE_TYPE (*tp)),
1342 int_const_binop (MULT_EXPR,
1343 bitsize_int (BITS_PER_UNIT),
1344 TREE_OPERAND (*tp, 1)));
/* Real or imaginary part of a complex SYM, selected by whether the
   offset is zero.  */
1346 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1347 && useless_type_conversion_p (TREE_TYPE (*tp),
1348 TREE_TYPE (TREE_TYPE (sym))))
1350 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1351 ? REALPART_EXPR : IMAGPART_EXPR,
1352 TREE_TYPE (*tp), sym);
/* Full-size access at offset zero: use SYM directly, wrapped in a
   VIEW_CONVERT_EXPR when the access type is not compatible.  */
1354 else if (integer_zerop (TREE_OPERAND (*tp, 1))
1355 && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
1357 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1358 TREE_TYPE (sym)))
1359 *tp = build1 (VIEW_CONVERT_EXPR,
1360 TREE_TYPE (*tp), sym);
1361 else
1362 *tp = sym;
/* In-bounds partial access of whole-byte size whose integral type
   has no excess (bit-field) precision: lower to a BIT_FIELD_REF at
   the byte offset scaled to bits.  */
1364 else if (DECL_SIZE (sym)
1365 && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
1366 && mem_ref_offset (*tp) >= 0
1367 && wi::leu_p (mem_ref_offset (*tp)
1368 + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
1369 wi::to_offset (DECL_SIZE_UNIT (sym)))
1370 && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
1371 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
1372 == TYPE_PRECISION (TREE_TYPE (*tp))))
1373 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
1374 BITS_PER_UNIT) == 0)
1376 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1377 TYPE_SIZE (TREE_TYPE (*tp)),
1378 wide_int_to_tree (bitsizetype,
1379 mem_ref_offset (*tp)
1380 << LOG2_BITS_PER_UNIT));
1385 /* For a tree REF return its base if it is the base of a MEM_REF
1386 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1388 static tree
1389 non_rewritable_mem_ref_base (tree ref)
1391 tree base;
1393 /* A plain decl does not need it set. */
1394 if (DECL_P (ref))
1395 return NULL_TREE;
/* REF is not an invariant reference chain; fall back to its base
   address.  A decl base then blocks rewriting.  */
1397 if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
1399 base = get_base_address (ref);
1400 if (DECL_P (base))
1401 return base;
1402 return NULL_TREE;
1405 /* But watch out for MEM_REFs we cannot lower to a
1406 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1407 if (TREE_CODE (base) == MEM_REF
1408 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1410 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1411 if (! DECL_P (decl))
1412 return NULL_TREE;
/* Non-register or void access types can never be rewritten.  */
1413 if (! is_gimple_reg_type (TREE_TYPE (base))
1414 || VOID_TYPE_P (TREE_TYPE (base)))
1415 return decl;
/* A vector/complex element access, in bounds and aligned to the
   element size, is lowerable (see maybe_rewrite_mem_ref_base).  */
1416 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1417 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1418 && useless_type_conversion_p (TREE_TYPE (base),
1419 TREE_TYPE (TREE_TYPE (decl)))
1420 && wi::fits_uhwi_p (mem_ref_offset (base))
1421 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1422 mem_ref_offset (base))
1423 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1424 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1425 return NULL_TREE;
1426 /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */
1427 if (integer_zerop (TREE_OPERAND (base, 1))
1428 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
1429 return NULL_TREE;
1430 /* For integral typed extracts we can use a BIT_FIELD_REF. */
1431 if (DECL_SIZE (decl)
1432 && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
1433 && mem_ref_offset (base) >= 0
1434 && wi::leu_p (mem_ref_offset (base)
1435 + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
1436 wi::to_offset (DECL_SIZE_UNIT (decl)))
1437 /* ??? We can't handle bitfield precision extracts without
1438 either using an alternate type for the BIT_FIELD_REF and
1439 then doing a conversion or possibly adjusting the offset
1440 according to endianness. */
1441 && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
1442 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
1443 == TYPE_PRECISION (TREE_TYPE (base))))
1444 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
1445 BITS_PER_UNIT) == 0)
1446 return NULL_TREE;
/* No lowering strategy applies: DECL must stay in memory.  */
1447 return decl;
1450 return NULL_TREE;
1453 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1454 Otherwise return false. */
1456 static bool
1457 non_rewritable_lvalue_p (tree lhs)
1459 /* A plain decl is always rewritable. */
1460 if (DECL_P (lhs))
1461 return false;
1463 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1464 a reasonably efficient manner... */
1465 if ((TREE_CODE (lhs) == REALPART_EXPR
1466 || TREE_CODE (lhs) == IMAGPART_EXPR)
1467 && DECL_P (TREE_OPERAND (lhs, 0)))
1468 return false;
1470 /* ??? The following could be relaxed allowing component
1471 references that do not change the access size. */
1472 if (TREE_CODE (lhs) == MEM_REF
1473 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
1475 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1477 /* A decl that is wrapped inside a MEM-REF that covers
1478 it full is also rewritable. */
1479 if (integer_zerop (TREE_OPERAND (lhs, 1))
1480 && DECL_P (decl)
1481 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1482 /* If the dynamic type of the decl has larger precision than
1483 the decl itself we can't use the decls type for SSA rewriting. */
1484 && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
1485 || compare_tree_int (DECL_SIZE (decl),
1486 TYPE_PRECISION (TREE_TYPE (decl))) == 0)
1487 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1488 && (TYPE_PRECISION (TREE_TYPE (decl))
1489 >= TYPE_PRECISION (TREE_TYPE (lhs)))))
1490 /* Make sure we are not re-writing non-float copying into float
1491 copying as that can incur normalization. */
1492 && (! FLOAT_TYPE_P (TREE_TYPE (decl))
1493 || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
1494 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1495 return false;
1497 /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1498 using a BIT_INSERT_EXPR. */
1499 if (DECL_P (decl)
1500 && VECTOR_TYPE_P (TREE_TYPE (decl))
1501 && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
1502 && types_compatible_p (TREE_TYPE (lhs),
1503 TREE_TYPE (TREE_TYPE (decl)))
1504 && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1505 && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1506 TYPE_SIZE_UNIT (TREE_TYPE (decl)))
1507 && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1508 % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1509 return false;
1512 /* A vector-insert using a BIT_FIELD_REF is rewritable using
1513 BIT_INSERT_EXPR. */
1514 if (TREE_CODE (lhs) == BIT_FIELD_REF
1515 && DECL_P (TREE_OPERAND (lhs, 0))
1516 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1517 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1518 && types_compatible_p (TREE_TYPE (lhs),
1519 TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
1520 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1521 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
1522 return false;
/* No known rewrite strategy matched.  */
1524 return true;
1527 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1528 mark the variable VAR for conversion into SSA by recording it in
1529 SUITABLE_FOR_RENAMING. */
1531 static void
1532 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1533 bitmap suitable_for_renaming)
1535 /* Global Variables, result decls cannot be changed. */
1536 if (is_global_var (var)
1537 || TREE_CODE (var) == RESULT_DECL
1538 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1539 return;
1541 if (TREE_ADDRESSABLE (var)
1542 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1543 a non-register. Otherwise we are confused and forget to
1544 add virtual operands for it. */
1545 && (!is_gimple_reg_type (TREE_TYPE (var))
1546 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1547 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1548 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1550 TREE_ADDRESSABLE (var) = 0;
1551 if (is_gimple_reg (var))
1552 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1553 if (dump_file)
1555 fprintf (dump_file, "No longer having address taken: ");
1556 print_generic_expr (dump_file, var, 0);
1557 fprintf (dump_file, "\n");
/* A complex or vector variable not otherwise required to live in
   memory can be promoted to a gimple register.  */
1561 if (!DECL_GIMPLE_REG_P (var)
1562 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1563 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1564 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1565 && !TREE_THIS_VOLATILE (var)
1566 && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
1568 DECL_GIMPLE_REG_P (var) = 1;
1569 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1570 if (dump_file)
1572 fprintf (dump_file, "Now a gimple register: ");
1573 print_generic_expr (dump_file, var, 0);
1574 fprintf (dump_file, "\n");
1579 /* Return true when STMT is ASAN mark where second argument is an address
1580 of a local variable. */
1582 static bool
1583 is_asan_mark_p (gimple *stmt)
1585 if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1586 return false;
1588 tree addr = get_base_address (gimple_call_arg (stmt, 1));
1589 if (TREE_CODE (addr) == ADDR_EXPR
1590 && VAR_P (TREE_OPERAND (addr, 0)))
1592 tree var = TREE_OPERAND (addr, 0);
1593 if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1594 DECL_ATTRIBUTES (var)))
1595 return false;
1597 unsigned addressable = TREE_ADDRESSABLE (var);
1598 TREE_ADDRESSABLE (var) = 0;
1599 bool r = is_gimple_reg (var);
1600 TREE_ADDRESSABLE (var) = addressable;
1601 return r;
1604 return false;
1607 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1609 void
1610 execute_update_addresses_taken (void)
1612 basic_block bb;
1613 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1614 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1615 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1616 tree var;
1617 unsigned i;
1619 timevar_push (TV_ADDRESS_TAKEN);
1621 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1622 the function body.  NOT_REG_NEEDS collects decls that must stay
1623 non-registers even if their address is no longer taken. */
1623 FOR_EACH_BB_FN (bb, cfun)
1625 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1626 gsi_next (&gsi))
1628 gimple *stmt = gsi_stmt (gsi);
1629 enum gimple_code code = gimple_code (stmt);
1630 tree decl;
1632 if (code == GIMPLE_CALL)
1634 if (optimize_atomic_compare_exchange_p (stmt))
1636 /* For __atomic_compare_exchange_N if the second argument
1637 is &var, don't mark var addressable;
1638 if it becomes non-addressable, we'll rewrite it into
1639 ATOMIC_COMPARE_EXCHANGE call. */
1640 tree arg = gimple_call_arg (stmt, 1);
1641 gimple_call_set_arg (stmt, 1, null_pointer_node);
1642 gimple_ior_addresses_taken (addresses_taken, stmt);
1643 gimple_call_set_arg (stmt, 1, arg);
1645 else if (is_asan_mark_p (stmt))
1647 else
1648 gimple_ior_addresses_taken (addresses_taken, stmt);
1650 else
1651 /* Note all addresses taken by the stmt. */
1652 gimple_ior_addresses_taken (addresses_taken, stmt);
1654 /* If we have a call or an assignment, see if the lhs contains
1655 a local decl that requires not to be a gimple register. */
1656 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1658 tree lhs = gimple_get_lhs (stmt);
1659 if (lhs
1660 && TREE_CODE (lhs) != SSA_NAME
1661 && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1662 || non_rewritable_lvalue_p (lhs)))
1664 decl = get_base_address (lhs);
1665 if (DECL_P (decl))
1666 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
/* RHS memory references that cannot be lowered keep their base decl
   as a non-register, too.  */
1670 if (gimple_assign_single_p (stmt))
1672 tree rhs = gimple_assign_rhs1 (stmt);
1673 if ((decl = non_rewritable_mem_ref_base (rhs)))
1674 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1677 else if (code == GIMPLE_CALL)
1679 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1681 tree arg = gimple_call_arg (stmt, i);
1682 if ((decl = non_rewritable_mem_ref_base (arg)))
1683 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1687 else if (code == GIMPLE_ASM)
1689 gasm *asm_stmt = as_a <gasm *> (stmt);
1690 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1692 tree link = gimple_asm_output_op (asm_stmt, i);
1693 tree lhs = TREE_VALUE (link);
1694 if (TREE_CODE (lhs) != SSA_NAME)
1696 decl = get_base_address (lhs);
1697 if (DECL_P (decl)
1698 && (non_rewritable_lvalue_p (lhs)
1699 /* We cannot move required conversions from
1700 the lhs to the rhs in asm statements, so
1701 require we do not need any. */
1702 || !useless_type_conversion_p
1703 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1704 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1707 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1709 tree link = gimple_asm_input_op (asm_stmt, i);
1710 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1711 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
/* Addresses taken by ADDR_EXPR PHI arguments also count.  */
1716 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1717 gsi_next (&gsi))
1719 size_t i;
1720 gphi *phi = gsi.phi ();
1722 for (i = 0; i < gimple_phi_num_args (phi); i++)
1724 tree op = PHI_ARG_DEF (phi, i), var;
1725 if (TREE_CODE (op) == ADDR_EXPR
1726 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1727 && DECL_P (var))
1728 bitmap_set_bit (addresses_taken, DECL_UID (var));
1733 /* We cannot iterate over all referenced vars because that can contain
1734 unused vars from BLOCK trees, which causes code generation differences
1735 for -g vs. -g0. */
1736 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1737 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1738 suitable_for_renaming);
1740 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1741 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1742 suitable_for_renaming);
1744 /* Operand caches need to be recomputed for operands referencing the updated
1745 variables and operands need to be rewritten to expose bare symbols. */
1746 if (!bitmap_empty_p (suitable_for_renaming))
1748 FOR_EACH_BB_FN (bb, cfun)
1749 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1751 gimple *stmt = gsi_stmt (gsi);
1753 /* Re-write TARGET_MEM_REFs of symbols we want to
1754 rewrite into SSA form. */
1755 if (gimple_assign_single_p (stmt))
1757 tree lhs = gimple_assign_lhs (stmt);
1758 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1759 tree sym;
1761 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1762 gimplify_modify_expr_complex_part. */
1763 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1764 || TREE_CODE (lhs) == REALPART_EXPR)
1765 && DECL_P (TREE_OPERAND (lhs, 0))
1766 && bitmap_bit_p (suitable_for_renaming,
1767 DECL_UID (TREE_OPERAND (lhs, 0))))
1769 tree other = make_ssa_name (TREE_TYPE (lhs));
1770 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1771 ? REALPART_EXPR : IMAGPART_EXPR,
1772 TREE_TYPE (other),
1773 TREE_OPERAND (lhs, 0));
1774 gimple *load = gimple_build_assign (other, lrhs);
1775 location_t loc = gimple_location (stmt);
1776 gimple_set_location (load, loc);
1777 gimple_set_vuse (load, gimple_vuse (stmt));
1778 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1779 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1780 gimple_assign_set_rhs_with_ops
1781 (&gsi, COMPLEX_EXPR,
1782 TREE_CODE (lhs) == IMAGPART_EXPR
1783 ? other : gimple_assign_rhs1 (stmt),
1784 TREE_CODE (lhs) == IMAGPART_EXPR
1785 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1786 stmt = gsi_stmt (gsi);
1787 unlink_stmt_vdef (stmt);
1788 update_stmt (stmt);
1789 continue;
1792 /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1793 into a BIT_INSERT_EXPR. */
1794 if (TREE_CODE (lhs) == BIT_FIELD_REF
1795 && DECL_P (TREE_OPERAND (lhs, 0))
1796 && bitmap_bit_p (suitable_for_renaming,
1797 DECL_UID (TREE_OPERAND (lhs, 0)))
1798 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1799 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1800 && types_compatible_p (TREE_TYPE (lhs),
1801 TREE_TYPE (TREE_TYPE
1802 (TREE_OPERAND (lhs, 0))))
1803 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1804 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1806 tree var = TREE_OPERAND (lhs, 0);
1807 tree val = gimple_assign_rhs1 (stmt);
1808 tree bitpos = TREE_OPERAND (lhs, 2);
1809 gimple_assign_set_lhs (stmt, var);
1810 gimple_assign_set_rhs_with_ops
1811 (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1812 stmt = gsi_stmt (gsi);
1813 unlink_stmt_vdef (stmt);
1814 update_stmt (stmt);
1815 continue;
1818 /* Rewrite a vector insert using a MEM_REF on the LHS
1819 into a BIT_INSERT_EXPR. */
1820 if (TREE_CODE (lhs) == MEM_REF
1821 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1822 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1823 && DECL_P (sym)
1824 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1825 && VECTOR_TYPE_P (TREE_TYPE (sym))
1826 && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1827 && types_compatible_p (TREE_TYPE (lhs),
1828 TREE_TYPE (TREE_TYPE (sym)))
1829 && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1830 && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1831 TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1832 && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1833 % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1835 tree val = gimple_assign_rhs1 (stmt);
1836 tree bitpos
1837 = wide_int_to_tree (bitsizetype,
1838 mem_ref_offset (lhs) * BITS_PER_UNIT);
1839 gimple_assign_set_lhs (stmt, sym);
1840 gimple_assign_set_rhs_with_ops
1841 (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1842 stmt = gsi_stmt (gsi);
1843 unlink_stmt_vdef (stmt);
1844 update_stmt (stmt);
1845 continue;
1848 /* We shouldn't have any fancy wrapping of
1849 component-refs on the LHS, but look through
1850 VIEW_CONVERT_EXPRs as that is easy. */
1851 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1852 lhs = TREE_OPERAND (lhs, 0)
1853 if (TREE_CODE (lhs) == MEM_REF
1854 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1855 && integer_zerop (TREE_OPERAND (lhs, 1))
1856 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1857 && DECL_P (sym)
1858 && !TREE_ADDRESSABLE (sym)
1859 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1860 lhs = sym;
1861 else
1862 lhs = gimple_assign_lhs (stmt);
1864 /* Rewrite the RHS and make sure the resulting assignment
1865 is validly typed. */
1866 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1867 rhs = gimple_assign_rhs1 (stmt);
1868 if (gimple_assign_lhs (stmt) != lhs
1869 && !useless_type_conversion_p (TREE_TYPE (lhs),
1870 TREE_TYPE (rhs)))
1872 if (gimple_clobber_p (stmt))
1874 rhs = build_constructor (TREE_TYPE (lhs), NULL);
1875 TREE_THIS_VOLATILE (rhs) = 1;
1877 else
1878 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1879 TREE_TYPE (lhs), rhs);
1881 if (gimple_assign_lhs (stmt) != lhs)
1882 gimple_assign_set_lhs (stmt, lhs);
1884 if (gimple_assign_rhs1 (stmt) != rhs)
1886 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1887 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1891 else if (gimple_code (stmt) == GIMPLE_CALL)
1893 unsigned i;
1894 if (optimize_atomic_compare_exchange_p (stmt))
1896 tree expected = gimple_call_arg (stmt, 1);
1897 if (bitmap_bit_p (suitable_for_renaming,
1898 DECL_UID (TREE_OPERAND (expected, 0))))
1900 fold_builtin_atomic_compare_exchange (&gsi);
1901 continue;
1904 else if (is_asan_mark_p (stmt))
1906 tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1907 if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1909 unlink_stmt_vdef (stmt);
1910 if (asan_mark_p (stmt, ASAN_MARK_POISON))
1912 gcall *call
1913 = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1914 gimple_call_set_lhs (call, var);
1915 gsi_replace (&gsi, call, GSI_SAME_STMT);
1917 else
1919 /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1920 is uninitialized. Avoid dependencies on
1921 previous out of scope value. */
1922 tree clobber
1923 = build_constructor (TREE_TYPE (var), NULL);
1924 TREE_THIS_VOLATILE (clobber) = 1;
1925 gimple *g = gimple_build_assign (var, clobber);
1926 gsi_replace (&gsi, g, GSI_SAME_STMT);
1928 continue;
1931 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1933 tree *argp = gimple_call_arg_ptr (stmt, i);
1934 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1938 else if (gimple_code (stmt) == GIMPLE_ASM)
1940 gasm *asm_stmt = as_a <gasm *> (stmt);
1941 unsigned i;
1942 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1944 tree link = gimple_asm_output_op (asm_stmt, i);
1945 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1946 suitable_for_renaming);
1948 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1950 tree link = gimple_asm_input_op (asm_stmt, i);
1951 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1952 suitable_for_renaming);
1956 else if (gimple_debug_bind_p (stmt)
1957 && gimple_debug_bind_has_value_p (stmt))
1959 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1960 tree decl;
1961 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1962 decl = non_rewritable_mem_ref_base (*valuep);
1963 if (decl
1964 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1965 gimple_debug_bind_reset_value (stmt);
/* Rebuild operand caches for statements touching memory or debug
   binds, which may reference the rewritten symbols.  */
1968 if (gimple_references_memory_p (stmt)
1969 || is_gimple_debug (stmt))
1970 update_stmt (stmt);
1972 gsi_next (&gsi);
1975 /* Update SSA form here, we are called as non-pass as well. */
1976 if (number_of_loops (cfun) > 1
1977 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1978 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1979 else
1980 update_ssa (TODO_update_ssa);
1983 BITMAP_FREE (not_reg_needs);
1984 BITMAP_FREE (addresses_taken);
1985 BITMAP_FREE (suitable_for_renaming);
1986 timevar_pop (TV_ADDRESS_TAKEN);
1989 namespace {
/* Pass descriptor for the "addressables" pass.  */
1991 const pass_data pass_data_update_address_taken =
1993 GIMPLE_PASS, /* type */
1994 "addressables", /* name */
1995 OPTGROUP_NONE, /* optinfo_flags */
1996 TV_ADDRESS_TAKEN, /* tv_id */
1997 PROP_ssa, /* properties_required */
1998 0, /* properties_provided */
1999 0, /* properties_destroyed */
2000 0, /* todo_flags_start */
2001 TODO_update_address_taken, /* todo_flags_finish */
/* The pass class overrides no execute method; all of its work is
   triggered through the TODO_update_address_taken finish flag.  */
2004 class pass_update_address_taken : public gimple_opt_pass
2006 public:
2007 pass_update_address_taken (gcc::context *ctxt)
2008 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
2011 /* opt_pass methods: */
2013 }; // class pass_update_address_taken
2015 } // anon namespace
/* Factory function used by the pass manager to instantiate the
   "addressables" pass in context CTXT.  */
2017 gimple_opt_pass *
2018 make_pass_update_address_taken (gcc::context *ctxt)
2020 return new pass_update_address_taken (ctxt);