/* Data flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_var_anns;
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};

/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);
static tree find_vars_r (tree *, int *, void *);
/*---------------------------------------------------------------------------
                        Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/
/* Find all the variables referenced in the function.  This function
   builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.

   Note that this function does not look for statement operands, it simply
   determines what variables are referenced in the program and detects
   various attributes for each variable used by alias analysis and the
   optimizer.  */

static unsigned int
find_referenced_vars (void)
{
  basic_block bb;
  gimple_stmt_iterator si;

  FOR_EACH_BB (bb)
    {
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);
          if (is_gimple_debug (stmt))
            continue;
          find_referenced_vars_in (gsi_stmt (si));
        }

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        find_referenced_vars_in (gsi_stmt (si));
    }

  return 0;
}
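
/* Gimple pass wrapper around find_referenced_vars.  Once the pass has run,
   clients can walk the collected set with the iterator idiom used further
   down in this file:

	tree var;
	referenced_var_iterator rvi;

	FOR_EACH_REFERENCED_VAR (var, rvi)
	  ...inspect VAR...  */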
struct gimple_opt_pass pass_referenced_vars =
{
 {
  GIMPLE_PASS,
  "*referenced_vars",			/* name */
  NULL,					/* gate */
  find_referenced_vars,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_FIND_REFERENCED_VARS,		/* tv_id */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_referenced_vars,			/* properties_provided */
  0,					/* properties_destroyed */
  TODO_dump_func,			/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
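
/* The NULL gate above means the pass always executes; passes that need the
   referenced-variable information request it by listing PROP_referenced_vars
   in their properties_required field.  */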
/*---------------------------------------------------------------------------
			    Manage annotations
---------------------------------------------------------------------------*/
/* Create a new annotation for a _DECL node T.  */

var_ann_t
create_var_ann (tree t)
{
  var_ann_t ann;

  gcc_assert (t);
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == PARM_DECL
	      || TREE_CODE (t) == RESULT_DECL);

  ann = ggc_alloc_cleared_var_ann_d ();
  *DECL_VAR_ANN_PTR (t) = ann;

  return ann;
}
/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (void)
{
  basic_block bb;

  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
    }
}
/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;

      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
    }
}
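
/* After either renumbering routine the statement UIDs are dense, running
   from 0 up to gimple_stmt_max_uid (cfun) - 1, so they can be used directly
   as indices into per-statement arrays or bitmaps.  */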
/* Build a temporary.  Make sure and register it to be renamed.  */

tree
make_rename_temp (tree type, const char *prefix)
{
  tree t = create_tmp_reg (type, prefix);

  if (gimple_referenced_vars (cfun))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  return t;
}
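
/* Illustrative use from a pass (VAL and the "tmp" prefix are made up for
   the example):

	tree tmp = make_rename_temp (TREE_TYPE (val), "tmp");
	gimple copy = gimple_build_assign (tmp, val);

   The temporary is already registered and marked, so a later update_ssa
   call rewrites it into SSA form like any other symbol.  */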
/*---------------------------------------------------------------------------
			      Debugging functions
---------------------------------------------------------------------------*/
/* Dump the list of all the referenced variables in the current function to
   FILE.  */

void
dump_referenced_vars (FILE *file)
{
  tree var;
  referenced_var_iterator rvi;

  fprintf (file, "\nReferenced variables in %s: %u\n\n",
	   get_name (current_function_decl), (unsigned) num_referenced_vars);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      fprintf (file, "Variable: ");
      dump_variable (file, var);
    }

  fprintf (file, "\n");
}
/* Dump the list of all the referenced variables to stderr.  */

DEBUG_FUNCTION void
debug_referenced_vars (void)
{
  dump_referenced_vars (stderr);
}
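
/* The debug_* entry points in this file are meant to be called from a
   debugger session, e.g. "call debug_referenced_vars ()" under gdb.  */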
/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && gimple_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = num_referenced_vars * sizeof (tree);
  total += size;
  fprintf (file, fmt_str_1, "Referenced variables",
	   (unsigned long) num_referenced_vars, SCALE (size), LABEL (size));

  size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SCALE (total), LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;
  referenced_var_iterator vi;
  tree var;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Count all the variable annotations.  */
  FOR_EACH_REFERENCED_VAR (var, vi)
    if (var_ann (var))
      dfa_stats_p->num_var_anns++;

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple phi = gsi_stmt (si);
          dfa_stats_p->num_phis++;
          dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
          if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
            dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
        }

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);
          dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
          dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
          dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
          dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
        }
    }
}
/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/
/* Callback for walk_tree.  Used to collect variables referenced in
   the function.  */

static tree
find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  /* If we are reading the lto info back in, we need to rescan the
     referenced vars.  */
  if (TREE_CODE (*tp) == SSA_NAME)
    add_referenced_var (SSA_NAME_VAR (*tp));

  /* If T is a regular variable that the optimizers are interested
     in, add it to the list of variables.  */
  else if (SSA_VAR_P (*tp))
    add_referenced_var (*tp);

  /* Type, _DECL and constant nodes have no interesting children.
     Ignore them.  */
  else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
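
/* walk_tree invokes find_vars_r on every operand sub-tree; returning
   NULL_TREE continues the walk, and clearing *WALK_SUBTREES prunes it below
   the current node, which is why type, _DECL and constant nodes cut the
   recursion short above.  */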
/* Find referenced variables in STMT.  In contrast with
   find_new_referenced_vars, this function will not mark newly found
   variables for renaming.  */

void
find_referenced_vars_in (gimple stmt)
{
  size_t i;

  if (gimple_code (stmt) != GIMPLE_PHI)
    {
      for (i = 0; i < gimple_num_ops (stmt); i++)
	walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
    }
  else
    {
      walk_tree (gimple_phi_result_ptr (stmt), find_vars_r, NULL, NULL);

      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree arg = gimple_phi_arg_def (stmt, i);
	  walk_tree (&arg, find_vars_r, NULL, NULL);
	}
    }
}
/* Lookup UID in the referenced_vars hashtable and return the associated
   variable.  */

tree
referenced_var_lookup (unsigned int uid)
{
  tree h;
  struct tree_decl_minimal in;

  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  return h;
}
/* Check if TO is in the referenced_vars hash table and insert it if not.
   Return true if it required insertion.  */

bool
referenced_var_check_and_insert (tree to)
{
  tree h, *loc;
  struct tree_decl_minimal in;
  unsigned int uid = DECL_UID (to);

  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  if (h)
    {
      /* DECL_UID has already been entered in the table.  Verify that it is
	 the same entry as TO.  See PR 27793.  */
      gcc_assert (h == to);
      return false;
    }

  loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
					   &in, uid, INSERT);
  *loc = to;
  return true;
}
/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  */

tree
gimple_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
}
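
/* The DEFAULT_DEFS table stores SSA_NAMEs hashed by the DECL_UID of their
   underlying variable, so the lookup above probes it with a dummy
   tree_ssa_name whose var field points at a dummy decl carrying VAR's UID;
   that is all the hash and equality callbacks inspect.  */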
/* Insert the pair VAR's UID, DEF into the default_defs hashtable.  */

void
set_default_def (tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  void **loc;

  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
				      DECL_UID (var), INSERT);
      htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
      return;
    }

  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
				  DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
  *(tree *) loc = def;

  /* Mark DEF as the default definition for VAR.  */
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
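
/* Passing NULL_TREE as DEF removes VAR's current default definition;
   otherwise DEF must be an SSA_NAME whose SSA_NAME_VAR is VAR, as the
   assertion above enforces.  */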
/* Add VAR to the list of referenced variables if it isn't already there.  */

bool
add_referenced_var (tree var)
{
  gcc_assert (DECL_P (var));

  /* Insert VAR into the referenced_vars hash table if it isn't present.  */
  if (referenced_var_check_and_insert (var))
    {
      /* Scan DECL_INITIAL for pointer variables as they may contain
	 address arithmetic referencing the address of other
	 variables.  As we are only interested in directly referenced
	 globals or referenced locals restrict this to initializers
	 that can refer to local variables.  */
      if (DECL_INITIAL (var)
	  && DECL_CONTEXT (var) == current_function_decl)
	walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);

      return true;
    }

  return false;
}
/* Remove VAR from the list of referenced variables.  */

void
remove_referenced_var (tree var)
{
  var_ann_t v_ann;
  struct tree_decl_minimal in;
  void **loc;
  unsigned int uid = DECL_UID (var);

  /* Preserve var_anns of globals.  */
  if (!is_global_var (var)
      && (v_ann = var_ann (var)))
    {
      ggc_free (v_ann);
      *DECL_VAR_ANN_PTR (var) = NULL;
    }

  gcc_assert (DECL_P (var));
  in.uid = uid;
  loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
				  NO_INSERT);
  htab_clear_slot (gimple_referenced_vars (cfun), loc);
}
/* Return the virtual variable associated to the non-scalar variable VAR.  */

tree
get_virtual_var (tree var)
{
  STRIP_NOPS (var);

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR
	 || handled_component_p (var))
    var = TREE_OPERAND (var, 0);

  /* Treating GIMPLE registers as virtual variables makes no sense.
     Also complain if we couldn't extract a _DECL out of the original
     expression.  */
  gcc_assert (SSA_VAR_P (var));
  gcc_assert (!is_gimple_reg (var));

  return var;
}
/* Mark all the naked symbols in STMT for SSA renaming.  */

void
mark_symbols_for_renaming (gimple stmt)
{
  tree op;
  ssa_op_iter iter;

  update_stmt (stmt);

  /* Mark all the operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
    if (DECL_P (op))
      mark_sym_for_renaming (op);
}
/* Find all variables within the gimplified statement that were not
   previously visible to the function and add them to the referenced
   variables list.  */

static tree
find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
			    void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Find any new referenced variables in STMT.  */

void
find_new_referenced_vars (gimple stmt)
{
  walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
}
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size)
{
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT maxsize = -1;
  tree size_tree = NULL_TREE;
  HOST_WIDE_INT bit_offset = 0;
  bool seen_variable_array_ref = false;
  /* First get the final access size from just the outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (! host_integerp (size_tree, 1))
	bitsize = -1;
      else
	bitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
          break;
        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST
                && host_integerp (this_offset, 0))
              {
                HOST_WIDE_INT hthis_offset = TREE_INT_CST_LOW (this_offset);
                hthis_offset *= BITS_PER_UNIT;
                hthis_offset
                  += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
                bit_offset += hthis_offset;

                /* If we had seen a variable array ref already and we just
                   referenced the last field of a struct or a union member
                   then we have to adjust maxsize by the padding at the end
                   of our field.  */
                if (seen_variable_array_ref
                    && maxsize != -1)
                  {
                    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
                    tree next = DECL_CHAIN (field);
                    while (next && TREE_CODE (next) != FIELD_DECL)
                      next = DECL_CHAIN (next);
                    if (!next
                        || TREE_CODE (stype) != RECORD_TYPE)
                      {
                        tree fsize = DECL_SIZE_UNIT (field);
                        tree ssize = TYPE_SIZE_UNIT (stype);
                        if (host_integerp (fsize, 0)
                            && host_integerp (ssize, 0))
                          maxsize += ((TREE_INT_CST_LOW (ssize)
                                       - TREE_INT_CST_LOW (fsize))
                                      * BITS_PER_UNIT
                                      - hthis_offset);
                        else
                          maxsize = -1;
                      }
                  }
              }
            else
              {
                tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
                /* We need to adjust maxsize to the whole structure bitsize.
                   But we can subtract any constant offset seen so far,
                   because that would get us out of the structure otherwise.  */
                if (maxsize != -1 && csize && host_integerp (csize, 1))
                  maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
                else
                  maxsize = -1;
              }
          }
          break;
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && host_integerp (index, 0)
                && (low_bound = array_ref_low_bound (exp),
                    host_integerp (low_bound, 0))
                && (unit_size = array_ref_element_size (exp),
                    host_integerp (unit_size, 1)))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                hindex *= BITS_PER_UNIT;
                bit_offset += hindex;

                /* An array ref with a constant index up in the structure
                   hierarchy will constrain the size of any variable array ref
                   lower in the access hierarchy.  */
                seen_variable_array_ref = false;
              }
            else
              {
                tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
                /* We need to adjust maxsize to the whole array bitsize.
                   But we can subtract any constant offset seen so far,
                   because that would get us outside of the array otherwise.  */
                if (maxsize != -1 && asize && host_integerp (asize, 1))
                  maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
                else
                  maxsize = -1;

                /* Remember that we have seen an array ref with a variable
                   index.  */
                seen_variable_array_ref = true;
              }
          }
          break;
        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          bit_offset += bitsize;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
            {
              if (integer_zerop (TREE_OPERAND (exp, 1)))
                exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
              else
                {
                  double_int off = mem_ref_offset (exp);
                  off = double_int_lshift (off,
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true);
                  off = double_int_add (off, shwi_to_double_int (bit_offset));
                  if (double_int_fits_in_shwi_p (off))
                    {
                      bit_offset = double_int_to_shwi (off);
                      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
                    }
                }
            }
          goto done;
        case TARGET_MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
            {
              /* Via the variable index or index2 we can reach the
                 whole object.  */
              if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                {
                  exp = TREE_OPERAND (TMR_BASE (exp), 0);
                  bit_offset = 0;
                  maxsize = -1;
                  goto done;
                }
              if (integer_zerop (TMR_OFFSET (exp)))
                exp = TREE_OPERAND (TMR_BASE (exp), 0);
              else
                {
                  double_int off = mem_ref_offset (exp);
                  off = double_int_lshift (off,
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true);
                  off = double_int_add (off, shwi_to_double_int (bit_offset));
                  if (double_int_fits_in_shwi_p (off))
                    {
                      bit_offset = double_int_to_shwi (off);
                      exp = TREE_OPERAND (TMR_BASE (exp), 0);
                    }
                }
            }
          goto done;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* We need to deal with variable arrays ending structures such as
       struct { int length; int a[1]; } x;           x.a[d]
       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
     where we do not know maxsize for variable index accesses to
     the array.  The simplest way to conservatively deal with this
     is to punt in the case that offset + maxsize reaches the
     base type boundary.  This needs to include possible trailing padding
     that is there for alignment purposes.

     That is of course only true if the base object is not a decl.  */

  if (DECL_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      if (maxsize == -1
	  && host_integerp (DECL_SIZE (exp), 1))
	maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - bit_offset;
    }
  else if (seen_variable_array_ref
	   && maxsize != -1
	   && (!host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
	       || (bit_offset + maxsize
		   == (signed) TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))))
    maxsize = -1;

  /* ???  Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset;
  *psize = bitsize;
  *pmax_size = maxsize;

  return exp;
}
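
/* Worked example (illustrative only, assuming a 32-bit int and no extra
   padding): for the reference s.a[2] with

	struct S { int i; int a[4]; } s;

   the function returns the VAR_DECL for s with *POFFSET == 96 (one int plus
   two array elements, in bits), *PSIZE == 32 and *PMAX_SIZE == 32.  */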
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          return NULL_TREE;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
            {
              if (!integer_zerop (TREE_OPERAND (exp, 1)))
                {
                  double_int off = mem_ref_offset (exp);
                  gcc_assert (off.high == -1 || off.high == 0);
                  byte_offset += double_int_to_shwi (off);
                }
              exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
            }
          goto done;

        case TARGET_MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
            {
              if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                return NULL_TREE;
              if (!integer_zerop (TMR_OFFSET (exp)))
                {
                  double_int off = mem_ref_offset (exp);
                  gcc_assert (off.high == -1 || off.high == 0);
                  byte_offset += double_int_to_shwi (off);
                }
              exp = TREE_OPERAND (TMR_BASE (exp), 0);
            }
          goto done;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  *poffset = byte_offset;
  return exp;
}
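
/* For the same illustrative access s.a[2] used above (4-byte int assumed)
   this returns the VAR_DECL for s with *POFFSET == 12, while an access with
   a variable index such as s.a[i] makes it return NULL_TREE.  */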
/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}