/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-inline.h"
#include "varray.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"

/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}
83 /* Clear the var mappings in edge E. */
85 void
86 redirect_edge_var_map_clear (edge e)
88 void **slot;
89 edge_var_map_vector head;
91 if (!edge_var_maps)
92 return;
94 slot = pointer_map_contains (edge_var_maps, e);
96 if (slot)
98 head = *slot;
99 VEC_free (edge_var_map, heap, head);
100 *slot = NULL;
/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}
/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}
/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}
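
/* Illustrative sketch only (not part of the original file): the expected
   lifecycle of the edge var map API above.  A caller records a PHI argument
   mapping for an edge, later walks the recorded mappings, and finally clears
   them.  The helper name and its arguments are hypothetical.  */
#if 0
static void
example_edge_var_map_usage (edge e, tree phi_result, tree phi_def)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;

  /* Record that PHI_RESULT received PHI_DEF along edge E.  */
  redirect_edge_var_map_add (e, phi_result, phi_def);

  /* Walk the recorded mappings for E.  */
  v = redirect_edge_var_map_vector (e);
  for (i = 0; VEC_iterate (edge_var_map, v, i, vm); i++)
    print_generic_expr (stderr, redirect_edge_var_map_def (vm), 0);

  /* Drop the mappings for E once they have been consumed.  */
  redirect_edge_var_map_clear (e);
}
#endif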

/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  tree phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
    {
      tree def = PHI_ARG_DEF (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, PHI_RESULT (phi), def);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  tree phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (phi = phi_nodes (e->dest), i = 0;
       phi && VEC_iterate (edge_var_map, v, i, vm);
       phi = PHI_CHAIN (phi), i++)
    {
      tree def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e);
    }

  redirect_edge_var_map_clear (e);
}
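
/* Illustrative sketch only (an assumption about typical usage, not code from
   this file): a CFG transformation that retargets edge E and wants the PHI
   arguments carried over to the new destination pairs the two functions
   above.  The helper name is hypothetical.  */
#if 0
static edge
example_redirect_with_phis (edge e, basic_block dest)
{
  /* Queue the PHI arguments E currently feeds, then redirect.  */
  e = ssa_redirect_edge (e, dest);

  /* Re-create the queued arguments on the PHI nodes of the new
     destination block.  */
  flush_pending_stmts (e);
  return e;
}
#endif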

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}

/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            tree stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_generic_stmt (stderr, stmt, TDF_VOPS);

  return true;
}

/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->stmt;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
    }

  return err;
}

/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  unsigned i, phi_num_args = PHI_NUM_ARGS (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}

static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      unsigned int j;
      bitmap aliases;
      tree alias;
      bitmap_iterator bi;

      if (!MTAG_P (var) || !MTAG_ALIASES (var))
        continue;

      aliases = MTAG_ALIASES (var);

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
        {
          alias = referenced_var (j);

          if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
              && !may_be_aliased (alias))
            {
              error ("non-addressable variable inside an alias set");
              debug_variable (alias);
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (var);
  internal_error ("verify_flow_insensitive_alias_info failed");
}

static void
verify_flow_sensitive_alias_info (void)
{
  size_t i;
  tree ptr;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree var;
      var_ann_t ann;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (!ptr)
        continue;

      /* We only care for pointers that are actually referenced in the
         program.  */
      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
        continue;

      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
         is only written to once, in the return statement.
         Otherwise, aggregate RESULT_DECLs may be written to more than
         once in virtual operands.  */
      var = SSA_NAME_VAR (ptr);
      if (TREE_CODE (var) == RESULT_DECL
          && is_gimple_reg (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi == NULL)
        continue;

      ann = var_ann (var);
      if (pi->is_dereferenced && !pi->name_mem_tag && !ann->symbol_mem_tag)
        {
          error ("dereferenced pointers should have a name or a symbol tag");
          goto err;
        }

      if (pi->name_mem_tag
          && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
        {
          error ("pointers with a memory tag should have points-to sets");
          goto err;
        }

      if (pi->value_escapes_p && pi->name_mem_tag)
        {
          tree t = memory_partition (pi->name_mem_tag);
          if (t == NULL_TREE)
            t = pi->name_mem_tag;

          if (!is_call_clobbered (t))
            {
              error ("pointer escapes but its name tag is not call-clobbered");
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (ptr);
  internal_error ("verify_flow_sensitive_alias_info failed");
}

/* Verify the consistency of call clobbering information.  */

static void
verify_call_clobbering (void)
{
  unsigned int i;
  bitmap_iterator bi;
  tree var;
  referenced_var_iterator rvi;

  /* At all times, the result of the call_clobbered flag should
     match the result of the call_clobbered_vars bitmap.  Verify both
     that everything in call_clobbered_vars is marked
     call_clobbered, and that everything marked
     call_clobbered is in call_clobbered_vars.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      var = referenced_var (i);

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
        {
          error ("variable in call_clobbered_vars but not marked "
                 "call_clobbered");
          debug_variable (var);
          goto err;
        }
    }

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_gimple_reg (var))
        continue;

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var)
          && var_ann (var)->call_clobbered
          && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
        {
          error ("variable marked call_clobbered but not in "
                 "call_clobbered_vars bitmap.");
          debug_variable (var);
          goto err;
        }
    }

  return;

err:
  internal_error ("verify_call_clobbering failed");
}

/* Verify invariants in memory partitions.  */

static void
verify_memory_partitions (void)
{
  unsigned i;
  tree mpt;
  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  struct pointer_set_t *partitioned_syms = pointer_set_create ();

  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      unsigned j;
      bitmap_iterator bj;

      if (MPT_SYMBOLS (mpt) == NULL)
        {
          error ("Memory partitions should have at least one symbol");
          debug_variable (mpt);
          goto err;
        }

      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
        {
          tree var = referenced_var (j);
          if (pointer_set_insert (partitioned_syms, var))
            {
              error ("Partitioned symbols should belong to exactly one "
                     "partition");
              debug_variable (var);
              goto err;
            }
        }
    }

  pointer_set_destroy (partitioned_syms);

  return;

err:
  internal_error ("verify_memory_partitions failed");
}

/* Verify the consistency of aliasing information.  */

static void
verify_alias_info (void)
{
  verify_flow_sensitive_alias_info ();
  verify_call_clobbering ();
  verify_flow_insensitive_alias_info ();
  verify_memory_partitions ();
}

/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p ());

  verify_stmts ();

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          tree stmt;
          TREE_VISITED (name) = 0;

          stmt = SSA_NAME_DEF_STMT (name);
          if (!IS_EMPTY_STMT (stmt))
            {
              basic_block bb = bb_for_stmt (stmt);
              verify_def (bb, definition_block,
                          name, stmt, !is_gimple_reg (name));
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      tree phi;
      edge_iterator ei;
      block_stmt_iterator bsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (PHI_RESULT (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);
          use_operand_p use_p;

          if (check_modified_stmt && stmt_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_generic_stmt (stderr, stmt, TDF_VOPS);
              goto err;
            }

          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
              && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
            {
              tree lhs, base_address;

              lhs = GIMPLE_STMT_OPERAND (stmt, 0);
              base_address = get_base_address (lhs);

              if (base_address
                  && gimple_aliases_computed_p (cfun)
                  && SSA_VAR_P (base_address)
                  && !stmt_ann (stmt)->has_volatile_ops
                  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
                {
                  error ("statement makes a memory store, but has no VDEFS");
                  print_generic_stmt (stderr, stmt, TDF_VOPS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
            {
              if (verify_ssa_name (op, true))
                {
                  error ("in statement");
                  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
            {
              if (verify_ssa_name (op, false))
                {
                  error ("in statement");
                  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
        }

      bitmap_clear (names_defined_in_bb);
    }

  /* Finally, verify alias information.  */
  if (gimple_aliases_computed_p (cfun))
    verify_alias_info ();

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
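
/* Illustrative sketch only (an assumption about typical callers, not code
   from this file): checking builds run the verifier between SSA passes,
   roughly like this, so any web corruption is reported close to the pass
   that introduced it.  */
#if 0
static void
example_checking_hook (void)
{
#ifdef ENABLE_CHECKING
  /* Verify the SSA web, including that no statement is still marked
     modified after the pass finished.  */
  verify_ssa (true);
#endif
}
#endif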

/* Return true if the uid in both int tree maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *)item)->uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree)item)->decl_minimal.uid;
}
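
/* Illustrative sketch only (assumed usage, mirroring how referenced
   variables are looked up elsewhere in the middle end; not code from this
   file): the hash/eq pairs above plug directly into libiberty hash tables
   keyed by DECL_UID, so a decl itself can serve as the lookup key.  */
#if 0
static tree
example_lookup_referenced_var (tree var)
{
  /* The referenced_vars table is created with uid_decl_map_hash and
     uid_decl_map_eq (see init_tree_ssa below).  */
  return (tree) htab_find_with_hash (gimple_referenced_vars (cfun),
                                     var, DECL_UID (var));
}
#endif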

/* Return true if the UID recorded in the static variable annotation VA
   matches the DECL_UID of tree VB.  */

static int
var_ann_eq (const void *va, const void *vb)
{
  const struct static_var_ann_d *a = (const struct static_var_ann_d *) va;
  const_tree const b = (const_tree) vb;
  return (a->uid == DECL_UID (b));
}

/* Hash a static variable annotation by its UID.  */

static unsigned int
var_ann_hash (const void *item)
{
  return ((const struct static_var_ann_d *)item)->uid;
}

/* Return true if the DECL_UIDs of the variables behind both SSA names
   are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash an SSA name by the DECL_UID of its variable.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
}

/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = GGC_CNEW (struct gimple_df);
  fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
                                                    uid_decl_map_eq, NULL);
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                 uid_ssaname_map_eq, NULL);
  fn->gimple_df->var_anns = htab_create_ggc (20, var_ann_hash,
                                             var_ann_eq, NULL);
  fn->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
  fn->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
  init_ssanames (fn, 0);
  init_phinodes ();
}

/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (void)
{
  size_t i;
  basic_block bb;
  block_stmt_iterator bsi;
  referenced_var_iterator rvi;
  tree var;

  /* Release any ssa_names still in use.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var && TREE_CODE (var) == SSA_NAME)
        {
          SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
          SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
        }
      release_ssa_name (var);
    }

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);
          stmt_ann_t ann = get_stmt_ann (stmt);

          free_ssa_operands (&ann->operands);
          ann->addresses_taken = 0;
          mark_stmt_modified (stmt);
        }
      set_phi_nodes (bb, NULL);
    }

  /* Remove annotations from every referenced variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (var->base.ann)
        ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();
  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  cfun->gimple_df->global_var = NULL_TREE;

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  htab_delete (cfun->gimple_df->var_anns);
  cfun->gimple_df->var_anns = NULL;
  cfun->gimple_df->call_clobbered_vars = NULL;
  cfun->gimple_df->addressable_vars = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  if (gimple_aliases_computed_p (cfun))
    {
      delete_alias_heapvars ();
      gcc_assert (!need_ssa_update_p ());
    }
  cfun->gimple_df->aliases_computed_p = false;
  delete_mem_ref_stats (cfun);

  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}

/* Helper function for useless_type_conversion_p.  */

static bool
useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
  /* Qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Conversions from a non-base to a base type are not useless.
         This way we preserve the invariant to do arithmetic in
         base types only.  */
      if (TREE_TYPE (inner_type)
          && TREE_TYPE (inner_type) != inner_type
          && (TREE_TYPE (outer_type) == outer_type
              || TREE_TYPE (outer_type) == NULL_TREE))
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum value in general as these do not generate code
         unless the types' precisions are different.  */

      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
         qualified types.  Doing so would result in changing the semantics
         of later accesses.  */
      if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
           != TYPE_VOLATILE (TREE_TYPE (inner_type)))
          && TYPE_VOLATILE (TREE_TYPE (outer_type)))
        return false;

      /* Do not lose casts between pointers with different
         TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
           != TYPE_REF_CAN_ALIAS_ALL (outer_type))
          || (get_alias_set (TREE_TYPE (inner_type))
              != get_alias_set (TREE_TYPE (outer_type))))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
           != TYPE_RESTRICT (inner_type))
          && TYPE_RESTRICT (outer_type))
        return false;

      /* Otherwise pointers/references are equivalent if their pointed
         to types are effectively the same.  We can strip qualifiers
         on pointed-to types for further comparison, which is done in
         the callee.  */
      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                          TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));

  /* For aggregates we may need to fall back to structural equality
     checks.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && AGGREGATE_TYPE_P (outer_type))
    {
      /* Different types of aggregates are incompatible.  */
      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
        return false;

      /* ??? Add structural equivalence check.  */

      /* ??? This should eventually just return false.  */
      return lang_hooks.types_compatible_p (inner_type, outer_type);
    }

  return false;
}

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* If the outer type is (void *), then the conversion is not
     necessary.  We have to make sure to not apply this while
     recursing though.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type)
      && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
    return true;

  return useless_type_conversion_p_1 (outer_type, inner_type);
}

/* Return true if a conversion from either type of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
          || (useless_type_conversion_p (type1, type2)
              && useless_type_conversion_p (type2, type1)));
}
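
/* Illustrative sketch only (an assumed consequence of the rules documented
   above, not code from this file): the predicate is deliberately asymmetric
   for pointer types, so only the direction towards void* is useless.  */
#if 0
static void
example_type_conversion_queries (void)
{
  tree char_ptr = build_pointer_type (char_type_node);
  tree void_ptr = ptr_type_node;

  /* Converting a char* value to void* loses no operations...  */
  gcc_assert (useless_type_conversion_p (void_ptr, char_ptr));

  /* ...but the reverse direction is a real conversion, so the two types
     are not considered compatible.  */
  gcc_assert (!types_compatible_p (void_ptr, char_ptr));
}
#endif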

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
       around known bugs with GIMPLE_MODIFY_STMTs appearing in places
       they shouldn't.  See PR 30391.  */
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}
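
/* Illustrative sketch only (an assumed usage pattern, not code from this
   file): callers typically peel useless conversion wrappers off an
   expression before inspecting it.  The helper name is hypothetical.  */
#if 0
static tree
example_strip_useless_conversions (tree expr)
{
  while (tree_ssa_useless_type_conversion (expr))
    expr = TREE_OPERAND (expr, 0);
  return expr;
}
#endif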

/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
   infinite loops.  We used to have a bitmap for this to just mark
   SSA versions we had visited.  But non-sparse bitmaps are way too
   expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
   when visiting PHI nodes.  A DFS will visit each PHI argument and
   call FN after each one.  Otherwise, all the arguments are
   visited first and then FN is called with each of the visited
   arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
                       struct pointer_set_t *visited, bool is_dfs)
{
  tree def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (TREE_CODE (def_stmt) != PHI_NODE)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      int i;

      /* When doing a breadth-first search, call FN before following the
         use-def links for each argument.  */
      if (!is_dfs)
        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
            return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
        {
          tree arg = PHI_ARG_DEF (def_stmt, i);

          /* ARG may be NULL for newly introduced PHI nodes.  */
          if (arg
              && TREE_CODE (arg) == SSA_NAME
              && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
            return true;
        }

      /* When doing a depth-first search, call FN after following the
         use-def links for each argument.  */
      if (is_dfs)
        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
            return true;
    }

  return false;
}

/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN is able to stop the walk by returning true, otherwise
   in order to continue the walk, FN should return false.

   Note, that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

     1- walk the use-def chains for all the PHI arguments, and,
     2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
                     bool is_dfs)
{
  tree def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (TREE_CODE (def_stmt) != PHI_NODE)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}
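
/* Illustrative sketch only (not code from this file): a callback matching
   walk_use_def_chains_fn that dumps every reaching definition statement of
   an SSA name and never stops the walk.  The names are hypothetical.  */
#if 0
static bool
example_dump_reaching_def (tree var, tree def_stmt, void *data ATTRIBUTE_UNUSED)
{
  fprintf (stderr, "reaching definition of ");
  print_generic_expr (stderr, var, 0);
  fprintf (stderr, ":\n");
  print_generic_stmt (stderr, def_stmt, 0);
  return false;  /* Keep walking.  */
}

/* Depth-first walk from NAME, visiting PHI arguments before the callback
   is invoked on them.  */
static void
example_walk (tree name)
{
  walk_use_def_chains (name, example_dump_reaching_def, NULL, true);
}
#endif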

/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  /* Parameters get their initial value from the function entry.  */
  if (TREE_CODE (var) == PARM_DECL)
    return false;

  /* Hard register variables get their initial value from the ether.  */
  if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
}
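
/* Illustrative sketch only (not code from this file): a predicate that asks
   whether any incoming PHI argument may carry an undefined value, which is
   the kind of query the late uninitialized-use warning below builds on.  */
#if 0
static bool
example_phi_has_undefined_arg (tree phi)
{
  int i;
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) == SSA_NAME && ssa_undefined_value_p (arg))
        return true;
    }
  return false;
}
#endif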

/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */

/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in MSGID and LOCUS may contain a location or be null.  */

static void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  tree context = (tree) data;
  location_t *locus;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  locus = (context != NULL && EXPR_HAS_LOCATION (context)
           ? EXPR_LOCUS (context)
           : &DECL_SOURCE_LOCATION (var));
  warning (OPT_Wuninitialized, gmsgid, locus, var);
  xloc = expand_location (*locus);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (xloc.file != floc.file
      || xloc.line < floc.line
      || xloc.line > LOCATION_LINE (cfun->function_end_locus))
    inform ("%J%qD was declared here", var, var);

  TREE_NO_WARNING (var) = 1;
}

struct walk_data {
  tree stmt;
  bool always_executed;
};

/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_data *data = (struct walk_data *) data_;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
         can warn about.  */
      if (data->always_executed)
        warn_uninit (t, "%H%qD is used uninitialized in this function",
                     data->stmt);
      else
        warn_uninit (t, "%H%qD may be used uninitialized in this function",
                     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
         creates uninitialized variable uses.  If all is well, these will
         be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
        *walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (t))
        *walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}

/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
   and warn about them.  */

static void
warn_uninitialized_phi (tree phi)
{
  int i, n = PHI_NUM_ARGS (phi);

  /* Don't look at memory tags.  */
  if (!is_gimple_reg (PHI_RESULT (phi)))
    return;

  for (i = 0; i < n; ++i)
    {
      tree op = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (op) == SSA_NAME)
        warn_uninit (op, "%H%qD may be used uninitialized in this function",
                     NULL);
    }
}

static unsigned int
execute_early_warn_uninitialized (void)
{
  block_stmt_iterator bsi;
  basic_block bb;
  struct walk_data data;

  calculate_dominance_info (CDI_POST_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
                                             single_succ (ENTRY_BLOCK_PTR), bb);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          data.stmt = bsi_stmt (bsi);
          walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
                     &data, NULL);
        }
    }
  return 0;
}

static unsigned int
execute_late_warn_uninitialized (void)
{
  basic_block bb;
  tree phi;

  /* Re-do the plain uninitialized variable check, as optimization may have
     straightened control flow.  Do this first so that we don't accidentally
     get a "may be" warning when we'd have seen an "is" warning later.  */
  execute_early_warn_uninitialized ();

  FOR_EACH_BB (bb)
    for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
      warn_uninitialized_phi (phi);
  return 0;
}

static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}

struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_early_warn_uninitialized,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_late_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_late_warn_uninitialized,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Compute TREE_ADDRESSABLE for local variables.  */

static unsigned int
execute_update_addresses_taken (void)
{
  tree var;
  referenced_var_iterator rvi;
  block_stmt_iterator bsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap vars_updated = BITMAP_ALLOC (NULL);
  bool update_vops = false;
  tree phi;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));

          if (s_ann->addresses_taken)
            bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
        }
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
          for (i = 0; i < phi_num_args; i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
     SSA.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (!is_global_var (var)
        && TREE_CODE (var) != RESULT_DECL
        && TREE_ADDRESSABLE (var)
        && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
      {
        TREE_ADDRESSABLE (var) = 0;
        if (is_gimple_reg (var))
          mark_sym_for_renaming (var);
        update_vops = true;
        bitmap_set_bit (vars_updated, DECL_UID (var));
        if (dump_file)
          {
            fprintf (dump_file, "No longer having address taken ");
            print_generic_expr (dump_file, var, 0);
            fprintf (dump_file, "\n");
          }
      }

  /* Operand caches need to be recomputed for operands referencing the updated
     variables.  */
  if (update_vops)
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree stmt = bsi_stmt (bsi);

          if ((LOADED_SYMS (stmt)
               && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
              || (STORED_SYMS (stmt)
                  && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
            update_stmt (stmt);
        }
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (vars_updated);
  return 0;
}

struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",                       /* name */
  NULL,                                 /* gate */
  execute_update_addresses_taken,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};