* lib/scanasm.exp (hidden-scan-for): Add XCOFF support.
[official-gcc.git] / gcc / tree-ssa.c
blob135952bc33949afe1a7e4faca6a09170d456ab3d
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
/* Pointer map of variable mappings, keyed by edge.  Allocated lazily on
   first use by redirect_edge_var_map_add; individual entries are released
   by redirect_edge_var_map_clear and the whole table is emptied by
   redirect_edge_var_map_empty.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
49 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
51 void
52 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
54 edge_var_map new_node;
56 if (edge_var_maps == NULL)
57 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
59 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
60 new_node.def = def;
61 new_node.result = result;
62 new_node.locus = locus;
64 slot.safe_push (new_node);
68 /* Clear the var mappings in edge E. */
70 void
71 redirect_edge_var_map_clear (edge e)
73 if (!edge_var_maps)
74 return;
76 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
78 if (head)
79 head->release ();
83 /* Duplicate the redirected var mappings in OLDE in NEWE.
85 This assumes a hash_map can have multiple edges mapping to the same
86 var_map (many to one mapping), since we don't remove the previous mappings.
89 void
90 redirect_edge_var_map_dup (edge newe, edge olde)
92 if (!edge_var_maps)
93 return;
95 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
96 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
97 if (!old_head)
98 return;
100 new_head->safe_splice (*old_head);
104 /* Return the variable mappings for a given edge. If there is none, return
105 NULL. */
107 vec<edge_var_map> *
108 redirect_edge_var_map_vector (edge e)
110 /* Hey, what kind of idiot would... you'd be surprised. */
111 if (!edge_var_maps)
112 return NULL;
114 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
115 if (!slot)
116 return NULL;
118 return slot;
121 /* Clear the edge variable mappings. */
123 void
124 redirect_edge_var_map_empty (void)
126 if (edge_var_maps)
127 edge_var_maps->empty ();
131 /* Remove the corresponding arguments from the PHI nodes in E's
132 destination block and redirect it to DEST. Return redirected edge.
133 The list of removed arguments is stored in a vector accessed
134 through edge_var_maps. */
136 edge
137 ssa_redirect_edge (edge e, basic_block dest)
139 gphi_iterator gsi;
140 gphi *phi;
142 redirect_edge_var_map_clear (e);
144 /* Remove the appropriate PHI arguments in E's destination block. */
145 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
147 tree def;
148 source_location locus ;
150 phi = gsi.phi ();
151 def = gimple_phi_arg_def (phi, e->dest_idx);
152 locus = gimple_phi_arg_location (phi, e->dest_idx);
154 if (def == NULL_TREE)
155 continue;
157 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
160 e = redirect_edge_succ_nodup (e, dest);
162 return e;
166 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
167 E->dest. */
169 void
170 flush_pending_stmts (edge e)
172 gphi *phi;
173 edge_var_map *vm;
174 int i;
175 gphi_iterator gsi;
177 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
178 if (!v)
179 return;
181 for (gsi = gsi_start_phis (e->dest), i = 0;
182 !gsi_end_p (gsi) && v->iterate (i, &vm);
183 gsi_next (&gsi), i++)
185 tree def;
187 phi = gsi.phi ();
188 def = redirect_edge_var_map_def (vm);
189 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
192 redirect_edge_var_map_clear (e);
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have update
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      /* The current LHS must be an SSA name defined by STMT itself.  */
      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      /* Preserve debug info for the old LHS before it is replaced.  */
      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
230 /* Given a tree for an expression for which we might want to emit
231 locations or values in debug information (generally a variable, but
232 we might deal with other kinds of trees in the future), return the
233 tree that should be used as the variable of a DEBUG_BIND STMT or
234 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
236 tree
237 target_for_debug_bind (tree var)
239 if (!MAY_HAVE_DEBUG_STMTS)
240 return NULL_TREE;
242 if (TREE_CODE (var) == SSA_NAME)
244 var = SSA_NAME_VAR (var);
245 if (var == NULL_TREE)
246 return NULL_TREE;
249 if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
250 && TREE_CODE (var) != PARM_DECL)
251 return NULL_TREE;
253 if (DECL_HAS_VALUE_EXPR_P (var))
254 return target_for_debug_bind (DECL_VALUE_EXPR (var));
256 if (DECL_IGNORED_P (var))
257 return NULL_TREE;
259 /* var-tracking only tracks registers. */
260 if (!is_gimple_reg_type (TREE_TYPE (var)))
261 return NULL_TREE;
263 return var;
266 /* Called via walk_tree, look for SSA_NAMEs that have already been
267 released. */
269 static tree
270 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
272 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
274 if (wi && wi->is_lhs)
275 return NULL_TREE;
277 if (TREE_CODE (*tp) == SSA_NAME)
279 if (SSA_NAME_IN_FREE_LIST (*tp))
280 return *tp;
282 *walk_subtrees = 0;
284 else if (IS_TYPE_OR_DECL_P (*tp))
285 *walk_subtrees = 0;
287 return NULL_TREE;
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.

   GSI, if non-NULL, is the insertion point and also names the defining
   statement; otherwise the definition is looked up via
   SSA_NAME_DEF_STMT.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      /* A second debug use is enough to decide; stop scanning.  */
      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  /* No debug uses at all: nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

               <bb X>:
               v_1 = foo;

               <bb Y>:
               w_2 = v_1 + bar;
               # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover their
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */
      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;  /* VALUE can be propagated directly; no temp needed.  */
      else
        {
          /* Build a DEBUG_EXPR_DECL temp bound to VALUE and use that
             in the debug stmts instead.  */
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            DECL_MODE (vexpr) = DECL_MODE (value);
          else
            DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  /* Rewrite every debug bind that references VAR: substitute VALUE if
     we have one, otherwise reset the bind (drop the value).  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
481 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
482 other DEBUG stmts, and replace uses of the DEF with the
483 newly-created debug temp. */
485 void
486 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
488 gimple *stmt;
489 ssa_op_iter op_iter;
490 def_operand_p def_p;
492 if (!MAY_HAVE_DEBUG_STMTS)
493 return;
495 stmt = gsi_stmt (*gsi);
497 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
499 tree var = DEF_FROM_PTR (def_p);
501 if (TREE_CODE (var) != SSA_NAME)
502 continue;
504 insert_debug_temp_for_var_def (gsi, var);
508 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
510 void
511 reset_debug_uses (gimple *stmt)
513 ssa_op_iter op_iter;
514 def_operand_p def_p;
515 imm_use_iterator imm_iter;
516 gimple *use_stmt;
518 if (!MAY_HAVE_DEBUG_STMTS)
519 return;
521 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
523 tree var = DEF_FROM_PTR (def_p);
525 if (TREE_CODE (var) != SSA_NAME)
526 continue;
528 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
530 if (!gimple_debug_bind_p (use_stmt))
531 continue;
533 gimple_debug_bind_reset_value (use_stmt);
534 update_stmt (use_stmt);
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    {
      /* Bits are cleared lazily (one iteration late) so the bitmap is
         not modified under the feet of EXECUTE_IF_SET_IN_BITMAP.  */
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
        {
          if (to_remove_bit != -1U)
            {
              bitmap_clear_bit (toremove, to_remove_bit);
              to_remove_bit = -1U;
            }

          bool remove_now = true;
          tree var = ssa_name (j);
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                BREAK_FROM_IMM_USE_STMT (uit);
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }

              /* Remember J; it is cleared at the top of the next
                 iteration (or after the loop, below).  */
              to_remove_bit = j;
            }
        }
      if (to_remove_bit != -1U)
        bitmap_clear_bit (toremove, to_remove_bit);
    }
}
619 /* Verify virtual SSA form. */
621 bool
622 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
624 bool err = false;
626 if (bitmap_bit_p (visited, bb->index))
627 return false;
629 bitmap_set_bit (visited, bb->index);
631 /* Pick up the single virtual PHI def. */
632 gphi *phi = NULL;
633 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
634 gsi_next (&si))
636 tree res = gimple_phi_result (si.phi ());
637 if (virtual_operand_p (res))
639 if (phi)
641 error ("multiple virtual PHI nodes in BB %d", bb->index);
642 print_gimple_stmt (stderr, phi, 0, 0);
643 print_gimple_stmt (stderr, si.phi (), 0, 0);
644 err = true;
646 else
647 phi = si.phi ();
650 if (phi)
652 current_vdef = gimple_phi_result (phi);
653 if (TREE_CODE (current_vdef) != SSA_NAME)
655 error ("virtual definition is not an SSA name");
656 print_gimple_stmt (stderr, phi, 0, 0);
657 err = true;
661 /* Verify stmts. */
662 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
663 gsi_next (&gsi))
665 gimple *stmt = gsi_stmt (gsi);
666 tree vuse = gimple_vuse (stmt);
667 if (vuse)
669 if (vuse != current_vdef)
671 error ("stmt with wrong VUSE");
672 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
673 fprintf (stderr, "expected ");
674 print_generic_expr (stderr, current_vdef, 0);
675 fprintf (stderr, "\n");
676 err = true;
678 tree vdef = gimple_vdef (stmt);
679 if (vdef)
681 current_vdef = vdef;
682 if (TREE_CODE (current_vdef) != SSA_NAME)
684 error ("virtual definition is not an SSA name");
685 print_gimple_stmt (stderr, phi, 0, 0);
686 err = true;
692 /* Verify destination PHI uses and recurse. */
693 edge_iterator ei;
694 edge e;
695 FOR_EACH_EDGE (e, ei, bb->succs)
697 gphi *phi = get_virtual_phi (e->dest);
698 if (phi
699 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
701 error ("PHI node with wrong VUSE on edge from BB %d",
702 e->src->index);
703 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
704 fprintf (stderr, "expected ");
705 print_generic_expr (stderr, current_vdef, 0);
706 fprintf (stderr, "\n");
707 err = true;
710 /* Recurse. */
711 err |= verify_vssa (e->dest, current_vdef, visited);
714 return err;
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  Checks run in order; only the first failing check is
   reported.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* An SSA name must share the type of its underlying symbol, if any.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default definitions must have an empty (GIMPLE_NOP) def stmt.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  /* Each SSA name may be defined in exactly one block.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  /* The recorded defining stmt must agree with where we found the def.  */
  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  /* Common diagnostic trailer for every failure above.  */
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Check the immediate-use links once per name (first sighting).  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A NULL use on the previous element marks the list head, which
	 stores the SSA name itself.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* Argument I must flow in through predecessor edge I.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      /* An address-taken base inside a PHI argument must be marked
	 addressable.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
		 e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.

   CHECK_MODIFIED_STMT requests a check that no stmt is left marked
   modified after a pass; CHECK_SSA_OPERANDS additionally verifies the
   operand caches.  Aborts via internal_error on any failure.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  size_t i;
  tree name;

  /* Pass 1: check each name and record its defining block.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      gimple *stmt;
      TREE_VISITED (name) = 0;

      verify_ssa_name (name, virtual_operand_p (name));

      stmt = SSA_NAME_DEF_STMT (name);
      if (!gimple_nop_p (stmt))
	{
	  basic_block bb = gimple_bb (stmt);
	  if (verify_def (bb, definition_block,
			  name, stmt, virtual_operand_p (name)))
	    goto err;
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		     e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Valueless debug binds reference nothing; skip them.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Verify the virtual SSA web if there is a virtual operand at all.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1162 /* Initialize global DFA and SSA structures. */
1164 void
1165 init_tree_ssa (struct function *fn)
1167 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1168 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1169 pt_solution_reset (&fn->gimple_df->escaped);
1170 init_ssanames (fn, 0);
1173 /* Deallocate memory associated with SSA data structures for FNDECL. */
1175 void
1176 delete_tree_ssa (struct function *fn)
1178 fini_ssanames (fn);
1180 /* We no longer maintain the SSA operand cache at this point. */
1181 if (ssa_operands_active (fn))
1182 fini_ssa_operands (fn);
1184 fn->gimple_df->default_defs->empty ();
1185 fn->gimple_df->default_defs = NULL;
1186 pt_solution_reset (&fn->gimple_df->escaped);
1187 if (fn->gimple_df->decls_to_pointers != NULL)
1188 delete fn->gimple_df->decls_to_pointers;
1189 fn->gimple_df->decls_to_pointers = NULL;
1190 fn->gimple_df = NULL;
1192 /* We no longer need the edge variable maps. */
1193 redirect_edge_var_map_empty ();
1196 /* Return true if EXPR is a useless type conversion, otherwise return
1197 false. */
1199 bool
1200 tree_ssa_useless_type_conversion (tree expr)
1202 /* If we have an assignment that merely uses a NOP_EXPR to change
1203 the top of the RHS to the type of the LHS and the type conversion
1204 is "safe", then strip away the type conversion so that we can
1205 enter LHS = RHS into the const_and_copies table. */
1206 if (CONVERT_EXPR_P (expr)
1207 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1208 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1209 return useless_type_conversion_p
1210 (TREE_TYPE (expr),
1211 TREE_TYPE (TREE_OPERAND (expr, 0)));
1213 return false;
1216 /* Strip conversions from EXP according to
1217 tree_ssa_useless_type_conversion and return the resulting
1218 expression. */
1220 tree
1221 tree_ssa_strip_useless_type_conversions (tree exp)
1223 while (tree_ssa_useless_type_conversion (exp))
1224 exp = TREE_OPERAND (exp, 0);
1225 return exp;
1229 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1230 should be returned if the value is only partially undefined. */
1232 bool
1233 ssa_undefined_value_p (tree t, bool partial)
1235 gimple *def_stmt;
1236 tree var = SSA_NAME_VAR (t);
/* An anonymous SSA name has no underlying decl to consult; fall
   through to the definition-statement check below.  */
1238 if (!var)
1240 /* Parameters get their initial value from the function entry. */
1241 else if (TREE_CODE (var) == PARM_DECL)
1242 return false;
1243 /* When returning by reference the return address is actually a hidden
1244 parameter. */
1245 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1246 return false;
1247 /* Hard register variables get their initial value from the ether. */
1248 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1249 return false;
1251 /* The value is undefined iff its definition statement is empty. */
1252 def_stmt = SSA_NAME_DEF_STMT (t);
1253 if (gimple_nop_p (def_stmt))
1254 return true;
1256 /* Check if the complex was not only partially defined. */
1257 if (partial && is_gimple_assign (def_stmt)
1258 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1260 tree rhs1, rhs2;
1262 rhs1 = gimple_assign_rhs1 (def_stmt);
1263 rhs2 = gimple_assign_rhs2 (def_stmt);
/* T is a COMPLEX_EXPR; it is (partially) undefined if either the
   real or the imaginary operand is an undefined SSA name.
   NOTE(review): the recursive calls pass only one argument and so
   rely on PARTIAL defaulting to false in the declaration -- confirm
   against the header.  */
1264 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1265 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1267 return false;
1271 /* Return TRUE iff STMT, a gimple statement, references an undefined
1272 SSA name. */
1274 bool
1275 gimple_uses_undefined_value_p (gimple *stmt)
1277 ssa_op_iter iter;
1278 tree op;
1280 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1281 if (ssa_undefined_value_p (op))
1282 return true;
1284 return false;
1289 /* If necessary, rewrite the base of the reference tree *TP from
1290 a MEM_REF to a plain or converted symbol. */
1292 static void
1293 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1295 tree sym;
/* Strip handled components (ARRAY_REF, COMPONENT_REF, ...) so TP
   points at the innermost base of the reference.  */
1297 while (handled_component_p (*tp))
1298 tp = &TREE_OPERAND (*tp, 0);
/* Only rewrite a MEM_REF of a non-addressable decl selected for
   renaming, accessed with a non-void register type.  */
1299 if (TREE_CODE (*tp) == MEM_REF
1300 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1301 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1302 && DECL_P (sym)
1303 && !TREE_ADDRESSABLE (sym)
1304 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1305 && is_gimple_reg_type (TREE_TYPE (*tp))
1306 && ! VOID_TYPE_P (TREE_TYPE (*tp)))
/* Case 1: element extract from a vector decl at an element-aligned
   byte offset -> BIT_FIELD_REF, with the byte offset scaled to
   bits.  */
1308 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1309 && useless_type_conversion_p (TREE_TYPE (*tp),
1310 TREE_TYPE (TREE_TYPE (sym)))
1311 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1312 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1314 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1315 TYPE_SIZE (TREE_TYPE (*tp)),
1316 int_const_binop (MULT_EXPR,
1317 bitsize_int (BITS_PER_UNIT),
1318 TREE_OPERAND (*tp, 1)));
/* Case 2: element extract from a complex decl -> REALPART_EXPR for
   offset zero, IMAGPART_EXPR otherwise.  */
1320 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1321 && useless_type_conversion_p (TREE_TYPE (*tp),
1322 TREE_TYPE (TREE_TYPE (sym))))
1324 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1325 ? REALPART_EXPR : IMAGPART_EXPR,
1326 TREE_TYPE (*tp), sym);
/* Case 3: whole-decl access at offset zero -> the bare decl,
   wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
1328 else if (integer_zerop (TREE_OPERAND (*tp, 1))
1329 && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
1331 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1332 TREE_TYPE (sym)))
1333 *tp = build1 (VIEW_CONVERT_EXPR,
1334 TREE_TYPE (*tp), sym);
1335 else
1336 *tp = sym;
/* Case 4: any other in-bounds, byte-aligned access of the
   constant-sized SYM -> BIT_FIELD_REF.  Integral accessed types must
   have precision equal to their size (no bit-field precision), as
   BIT_FIELD_REF cannot express the padding bits otherwise.  */
1338 else if (DECL_SIZE (sym)
1339 && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
1340 && mem_ref_offset (*tp) >= 0
1341 && wi::leu_p (mem_ref_offset (*tp)
1342 + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
1343 wi::to_offset (DECL_SIZE_UNIT (sym)))
1344 && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
1345 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
1346 == TYPE_PRECISION (TREE_TYPE (*tp))))
1347 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
1348 BITS_PER_UNIT) == 0)
1350 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1351 TYPE_SIZE (TREE_TYPE (*tp)),
1352 wide_int_to_tree (bitsizetype,
1353 mem_ref_offset (*tp)
1354 << LOG2_BITS_PER_UNIT));
1359 /* For a tree REF return its base if it is the base of a MEM_REF
1360 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1362 static tree
1363 non_rewritable_mem_ref_base (tree ref)
1365 tree base;
1367 /* A plain decl does not need it set. */
1368 if (DECL_P (ref))
1369 return NULL_TREE;
/* If REF has no invariant address, fall back to the plain base: a
   decl base blocks rewriting, anything else is fine.  */
1371 if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
1373 base = get_base_address (ref);
1374 if (DECL_P (base))
1375 return base;
1376 return NULL_TREE;
1379 /* But watch out for MEM_REFs we cannot lower to a
1380 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1381 if (TREE_CODE (base) == MEM_REF
1382 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1384 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1385 if (! DECL_P (decl))
1386 return NULL_TREE;
1387 if (! is_gimple_reg_type (TREE_TYPE (base))
1388 || VOID_TYPE_P (TREE_TYPE (base)))
1389 return decl;
/* An in-bounds, element-aligned element access of a vector or
   complex decl is rewritable (see maybe_rewrite_mem_ref_base).  */
1390 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1391 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1392 && useless_type_conversion_p (TREE_TYPE (base),
1393 TREE_TYPE (TREE_TYPE (decl)))
1394 && wi::fits_uhwi_p (mem_ref_offset (base))
1395 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1396 mem_ref_offset (base))
1397 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1398 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1399 return NULL_TREE;
1400 /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */
1401 if (integer_zerop (TREE_OPERAND (base, 1))
1402 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
1403 return NULL_TREE;
1404 /* For integral typed extracts we can use a BIT_FIELD_REF. */
1405 if (DECL_SIZE (decl)
1406 && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
1407 && mem_ref_offset (base) >= 0
1408 && wi::leu_p (mem_ref_offset (base)
1409 + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
1410 wi::to_offset (DECL_SIZE_UNIT (decl)))
1411 /* ??? We can't handle bitfield precision extracts without
1412 either using an alternate type for the BIT_FIELD_REF and
1413 then doing a conversion or possibly adjusting the offset
1414 according to endianess. */
1415 && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
1416 || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
1417 == TYPE_PRECISION (TREE_TYPE (base))))
1418 && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
1419 BITS_PER_UNIT) == 0)
1420 return NULL_TREE;
/* None of the lowerable patterns matched: DECL blocks rewriting.  */
1421 return decl;
1424 return NULL_TREE;
1427 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1428 Otherwise return false. */
1430 static bool
1431 non_rewritable_lvalue_p (tree lhs)
1433 /* A plain decl is always rewritable. */
1434 if (DECL_P (lhs))
1435 return false;
1437 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1438 a reasonably efficient manner... */
1439 if ((TREE_CODE (lhs) == REALPART_EXPR
1440 || TREE_CODE (lhs) == IMAGPART_EXPR)
1441 && DECL_P (TREE_OPERAND (lhs, 0)))
1442 return false;
1444 /* ??? The following could be relaxed allowing component
1445 references that do not change the access size. */
1446 if (TREE_CODE (lhs) == MEM_REF
1447 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
1449 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1451 /* A decl that is wrapped inside a MEM-REF that covers
1452 it full is also rewritable. */
1453 if (integer_zerop (TREE_OPERAND (lhs, 1))
1454 && DECL_P (decl)
1455 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1456 /* If the dynamic type of the decl has larger precision than
1457 the decl itself we can't use the decls type for SSA rewriting. */
1458 && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
1459 || compare_tree_int (DECL_SIZE (decl),
1460 TYPE_PRECISION (TREE_TYPE (decl))) == 0)
1461 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1462 && (TYPE_PRECISION (TREE_TYPE (decl))
1463 >= TYPE_PRECISION (TREE_TYPE (lhs)))))
1464 /* Make sure we are not re-writing non-float copying into float
1465 copying as that can incur normalization. */
1466 && (! FLOAT_TYPE_P (TREE_TYPE (decl))
1467 || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
1468 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1469 return false;
1471 /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1472 using a BIT_INSERT_EXPR. */
1473 if (DECL_P (decl)
1474 && VECTOR_TYPE_P (TREE_TYPE (decl))
1475 && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
1476 && types_compatible_p (TREE_TYPE (lhs),
1477 TREE_TYPE (TREE_TYPE (decl)))
1478 && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1479 && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1480 TYPE_SIZE_UNIT (TREE_TYPE (decl)))
1481 && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1482 % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1483 return false;
1486 /* A vector-insert using a BIT_FIELD_REF is rewritable using
1487 BIT_INSERT_EXPR. */
1488 if (TREE_CODE (lhs) == BIT_FIELD_REF
1489 && DECL_P (TREE_OPERAND (lhs, 0))
1490 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1491 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1492 && types_compatible_p (TREE_TYPE (lhs),
1493 TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
1494 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1495 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
1496 return false;
/* No rewritable pattern matched: the lvalue blocks SSA rewriting.  */
1498 return true;
1501 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1502 mark the variable VAR for conversion into SSA by recording its DECL_UID
1503 in SUITABLE_FOR_RENAMING.  ADDRESSES_TAKEN and NOT_REG_NEEDS are the
bitmaps collected by the caller; VAR is left untouched when either
forbids the change. */
1505 static void
1506 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1507 bitmap suitable_for_renaming)
1509 /* Global Variables, result decls cannot be changed. */
1510 if (is_global_var (var)
1511 || TREE_CODE (var) == RESULT_DECL
1512 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1513 return;
/* First transformation: drop TREE_ADDRESSABLE when no address of VAR
   survives in the function.  */
1515 if (TREE_ADDRESSABLE (var)
1516 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1517 a non-register. Otherwise we are confused and forget to
1518 add virtual operands for it. */
1519 && (!is_gimple_reg_type (TREE_TYPE (var))
1520 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1521 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1522 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1524 TREE_ADDRESSABLE (var) = 0;
1525 if (is_gimple_reg (var))
1526 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1527 if (dump_file)
1529 fprintf (dump_file, "No longer having address taken: ");
1530 print_generic_expr (dump_file, var, 0);
1531 fprintf (dump_file, "\n");
/* Second transformation: promote a non-volatile complex or vector
   variable (not a hard register) to a gimple register when nothing
   requires it to stay in memory.  */
1535 if (!DECL_GIMPLE_REG_P (var)
1536 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1537 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1538 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1539 && !TREE_THIS_VOLATILE (var)
1540 && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
1542 DECL_GIMPLE_REG_P (var) = 1;
1543 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1544 if (dump_file)
1546 fprintf (dump_file, "Now a gimple register: ");
1547 print_generic_expr (dump_file, var, 0);
1548 fprintf (dump_file, "\n");
1553 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1555 void
1556 execute_update_addresses_taken (void)
1558 basic_block bb;
1559 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1560 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1561 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1562 tree var;
1563 unsigned i;
1565 timevar_push (TV_ADDRESS_TAKEN);
1567 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1568 the function body. */
1569 FOR_EACH_BB_FN (bb, cfun)
1571 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1572 gsi_next (&gsi))
1574 gimple *stmt = gsi_stmt (gsi);
1575 enum gimple_code code = gimple_code (stmt);
1576 tree decl;
1578 if (code == GIMPLE_CALL
1579 && optimize_atomic_compare_exchange_p (stmt))
1581 /* For __atomic_compare_exchange_N if the second argument
1582 is &var, don't mark var addressable;
1583 if it becomes non-addressable, we'll rewrite it into
1584 ATOMIC_COMPARE_EXCHANGE call. */
1585 tree arg = gimple_call_arg (stmt, 1);
1586 gimple_call_set_arg (stmt, 1, null_pointer_node);
1587 gimple_ior_addresses_taken (addresses_taken, stmt);
1588 gimple_call_set_arg (stmt, 1, arg);
1590 else
1591 /* Note all addresses taken by the stmt. */
1592 gimple_ior_addresses_taken (addresses_taken, stmt);
1594 /* If we have a call or an assignment, see if the lhs contains
1595 a local decl that requires not to be a gimple register. */
1596 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1598 tree lhs = gimple_get_lhs (stmt);
1599 if (lhs
1600 && TREE_CODE (lhs) != SSA_NAME
1601 && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1602 || non_rewritable_lvalue_p (lhs)))
1604 decl = get_base_address (lhs);
1605 if (DECL_P (decl))
1606 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
/* Likewise check the RHS of loads/copies and all call arguments
   for MEM_REF bases that cannot be rewritten.  */
1610 if (gimple_assign_single_p (stmt))
1612 tree rhs = gimple_assign_rhs1 (stmt);
1613 if ((decl = non_rewritable_mem_ref_base (rhs)))
1614 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1617 else if (code == GIMPLE_CALL)
1619 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1621 tree arg = gimple_call_arg (stmt, i);
1622 if ((decl = non_rewritable_mem_ref_base (arg)))
1623 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1627 else if (code == GIMPLE_ASM)
1629 gasm *asm_stmt = as_a <gasm *> (stmt);
1630 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1632 tree link = gimple_asm_output_op (asm_stmt, i);
1633 tree lhs = TREE_VALUE (link);
1634 if (TREE_CODE (lhs) != SSA_NAME)
1636 decl = get_base_address (lhs);
1637 if (DECL_P (decl)
1638 && (non_rewritable_lvalue_p (lhs)
1639 /* We cannot move required conversions from
1640 the lhs to the rhs in asm statements, so
1641 require we do not need any. */
1642 || !useless_type_conversion_p
1643 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1644 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1647 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1649 tree link = gimple_asm_input_op (asm_stmt, i);
1650 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1651 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
/* PHI arguments may contain ADDR_EXPRs; record their bases as having
   their address taken as well.  */
1656 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1657 gsi_next (&gsi))
1659 size_t i;
1660 gphi *phi = gsi.phi ();
1662 for (i = 0; i < gimple_phi_num_args (phi); i++)
1664 tree op = PHI_ARG_DEF (phi, i), var;
1665 if (TREE_CODE (op) == ADDR_EXPR
1666 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1667 && DECL_P (var))
1668 bitmap_set_bit (addresses_taken, DECL_UID (var));
1673 /* We cannot iterate over all referenced vars because that can contain
1674 unused vars from BLOCK trees, which causes code generation differences
1675 for -g vs. -g0. */
1676 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1677 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1678 suitable_for_renaming);
1680 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1681 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1682 suitable_for_renaming);
1684 /* Operand caches need to be recomputed for operands referencing the updated
1685 variables and operands need to be rewritten to expose bare symbols. */
1686 if (!bitmap_empty_p (suitable_for_renaming))
1688 FOR_EACH_BB_FN (bb, cfun)
1689 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1691 gimple *stmt = gsi_stmt (gsi);
1693 /* Re-write TARGET_MEM_REFs of symbols we want to
1694 rewrite into SSA form. */
1695 if (gimple_assign_single_p (stmt))
1697 tree lhs = gimple_assign_lhs (stmt);
1698 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1699 tree sym;
1701 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1702 gimplify_modify_expr_complex_part. */
1703 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1704 || TREE_CODE (lhs) == REALPART_EXPR)
1705 && DECL_P (TREE_OPERAND (lhs, 0))
1706 && bitmap_bit_p (suitable_for_renaming,
1707 DECL_UID (TREE_OPERAND (lhs, 0))))
1709 tree other = make_ssa_name (TREE_TYPE (lhs));
1710 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1711 ? REALPART_EXPR : IMAGPART_EXPR,
1712 TREE_TYPE (other),
1713 TREE_OPERAND (lhs, 0));
1714 gimple *load = gimple_build_assign (other, lrhs);
1715 location_t loc = gimple_location (stmt);
1716 gimple_set_location (load, loc);
1717 gimple_set_vuse (load, gimple_vuse (stmt));
1718 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1719 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1720 gimple_assign_set_rhs_with_ops
1721 (&gsi, COMPLEX_EXPR,
1722 TREE_CODE (lhs) == IMAGPART_EXPR
1723 ? other : gimple_assign_rhs1 (stmt),
1724 TREE_CODE (lhs) == IMAGPART_EXPR
1725 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1726 stmt = gsi_stmt (gsi);
1727 unlink_stmt_vdef (stmt);
1728 update_stmt (stmt);
1729 continue;
1732 /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1733 into a BIT_INSERT_EXPR. */
1734 if (TREE_CODE (lhs) == BIT_FIELD_REF
1735 && DECL_P (TREE_OPERAND (lhs, 0))
1736 && bitmap_bit_p (suitable_for_renaming,
1737 DECL_UID (TREE_OPERAND (lhs, 0)))
1738 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1739 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1740 && types_compatible_p (TREE_TYPE (lhs),
1741 TREE_TYPE (TREE_TYPE
1742 (TREE_OPERAND (lhs, 0))))
1743 && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1744 % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1746 tree var = TREE_OPERAND (lhs, 0);
1747 tree val = gimple_assign_rhs1 (stmt);
1748 tree bitpos = TREE_OPERAND (lhs, 2);
1749 gimple_assign_set_lhs (stmt, var);
1750 gimple_assign_set_rhs_with_ops
1751 (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1752 stmt = gsi_stmt (gsi);
1753 unlink_stmt_vdef (stmt);
1754 update_stmt (stmt);
1755 continue;
1758 /* Rewrite a vector insert using a MEM_REF on the LHS
1759 into a BIT_INSERT_EXPR. */
1760 if (TREE_CODE (lhs) == MEM_REF
1761 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1762 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1763 && DECL_P (sym)
1764 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1765 && VECTOR_TYPE_P (TREE_TYPE (sym))
1766 && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1767 && types_compatible_p (TREE_TYPE (lhs),
1768 TREE_TYPE (TREE_TYPE (sym)))
1769 && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1770 && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1771 TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1772 && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1773 % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1775 tree val = gimple_assign_rhs1 (stmt);
1776 tree bitpos
1777 = wide_int_to_tree (bitsizetype,
1778 mem_ref_offset (lhs) * BITS_PER_UNIT);
1779 gimple_assign_set_lhs (stmt, sym);
1780 gimple_assign_set_rhs_with_ops
1781 (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1782 stmt = gsi_stmt (gsi);
1783 unlink_stmt_vdef (stmt);
1784 update_stmt (stmt);
1785 continue;
1788 /* We shouldn't have any fancy wrapping of
1789 component-refs on the LHS, but look through
1790 VIEW_CONVERT_EXPRs as that is easy. */
1791 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1792 lhs = TREE_OPERAND (lhs, 0)
1793 if (TREE_CODE (lhs) == MEM_REF
1794 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1795 && integer_zerop (TREE_OPERAND (lhs, 1))
1796 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1797 && DECL_P (sym)
1798 && !TREE_ADDRESSABLE (sym)
1799 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1800 lhs = sym;
1801 else
1802 lhs = gimple_assign_lhs (stmt);
1804 /* Rewrite the RHS and make sure the resulting assignment
1805 is validly typed. */
1806 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1807 rhs = gimple_assign_rhs1 (stmt);
1808 if (gimple_assign_lhs (stmt) != lhs
1809 && !useless_type_conversion_p (TREE_TYPE (lhs),
1810 TREE_TYPE (rhs)))
1812 if (gimple_clobber_p (stmt))
1814 rhs = build_constructor (TREE_TYPE (lhs), NULL);
1815 TREE_THIS_VOLATILE (rhs) = 1;
1817 else
1818 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1819 TREE_TYPE (lhs), rhs);
1821 if (gimple_assign_lhs (stmt) != lhs)
1822 gimple_assign_set_lhs (stmt, lhs);
1824 if (gimple_assign_rhs1 (stmt) != rhs)
1826 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1827 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1831 else if (gimple_code (stmt) == GIMPLE_CALL)
1833 unsigned i;
/* NOTE(review): the second call argument is dereferenced with
   TREE_OPERAND (expected, 0), i.e. assumed to be an ADDR_EXPR --
   presumably guaranteed by optimize_atomic_compare_exchange_p;
   confirm before changing this.  */
1834 if (optimize_atomic_compare_exchange_p (stmt))
1836 tree expected = gimple_call_arg (stmt, 1);
1837 if (bitmap_bit_p (suitable_for_renaming,
1838 DECL_UID (TREE_OPERAND (expected, 0))))
1840 fold_builtin_atomic_compare_exchange (&gsi);
1841 continue;
1844 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1846 tree *argp = gimple_call_arg_ptr (stmt, i);
1847 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1851 else if (gimple_code (stmt) == GIMPLE_ASM)
1853 gasm *asm_stmt = as_a <gasm *> (stmt);
1854 unsigned i;
1855 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1857 tree link = gimple_asm_output_op (asm_stmt, i);
1858 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1859 suitable_for_renaming);
1861 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1863 tree link = gimple_asm_input_op (asm_stmt, i);
1864 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1865 suitable_for_renaming);
1869 else if (gimple_debug_bind_p (stmt)
1870 && gimple_debug_bind_has_value_p (stmt))
1872 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1873 tree decl;
1874 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1875 decl = non_rewritable_mem_ref_base (*valuep);
1876 if (decl
1877 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1878 gimple_debug_bind_reset_value (stmt);
/* Recompute the operand cache for statements that (still) reference
   memory and for debug statements.  */
1881 if (gimple_references_memory_p (stmt)
1882 || is_gimple_debug (stmt))
1883 update_stmt (stmt);
1885 gsi_next (&gsi);
1888 /* Update SSA form here, we are called as non-pass as well. */
1889 if (number_of_loops (cfun) > 1
1890 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1891 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1892 else
1893 update_ssa (TODO_update_ssa);
1896 BITMAP_FREE (not_reg_needs);
1897 BITMAP_FREE (addresses_taken);
1898 BITMAP_FREE (suitable_for_renaming);
1899 timevar_pop (TV_ADDRESS_TAKEN);
1902 namespace {
/* Descriptor for the "addressables" pass; it requires SSA form and
   requests the actual work through TODO_update_address_taken in
   todo_flags_finish.  */
1904 const pass_data pass_data_update_address_taken =
1906 GIMPLE_PASS, /* type */
1907 "addressables", /* name */
1908 OPTGROUP_NONE, /* optinfo_flags */
1909 TV_ADDRESS_TAKEN, /* tv_id */
1910 PROP_ssa, /* properties_required */
1911 0, /* properties_provided */
1912 0, /* properties_destroyed */
1913 0, /* todo_flags_start */
1914 TODO_update_address_taken, /* todo_flags_finish */
/* Pass wrapper class.  NOTE(review): no gate or execute overrides are
   visible here; the work appears to be triggered solely via the
   TODO_update_address_taken finish flag above -- confirm.  */
1917 class pass_update_address_taken : public gimple_opt_pass
1919 public:
1920 pass_update_address_taken (gcc::context *ctxt)
1921 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1924 /* opt_pass methods: */
1926 }; // class pass_update_address_taken
1928 } // anon namespace
1930 gimple_opt_pass *
1931 make_pass_update_address_taken (gcc::context *ctxt)
1933 return new pass_update_address_taken (ctxt);