/* Data flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "tree.h"
#include "basic-block.h"
#include "timevar.h"
#include "ggc.h"
#include "langhooks.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  size_t num_var_anns;
  size_t num_defs;
  size_t num_uses;
  size_t num_phis;
  size_t num_phi_args;
  size_t max_num_phi_args;
  size_t num_vdefs;
  size_t num_vuses;
};

/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);
static tree find_vars_r (tree *, int *, void *);
/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Find all the variables referenced in the function.  This function
   builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.

   Note that this function does not look for statement operands, it simply
   determines what variables are referenced in the program and detects
   various attributes for each variable used by alias analysis and the
   optimizer.  */

static unsigned int
find_referenced_vars (void)
{
  basic_block bb;
  gimple_stmt_iterator si;

  FOR_EACH_BB (bb)
    {
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  if (is_gimple_debug (stmt))
	    continue;
	  find_referenced_vars_in (gsi_stmt (si));
	}

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	find_referenced_vars_in (gsi_stmt (si));
    }

  return 0;
}
struct gimple_opt_pass pass_referenced_vars =
{
 {
  GIMPLE_PASS,
  "*referenced_vars",			/* name */
  NULL,					/* gate */
  find_referenced_vars,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_FIND_REFERENCED_VARS,		/* tv_id */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_referenced_vars,			/* properties_provided */
  0,					/* properties_destroyed */
  TODO_dump_func,			/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/*---------------------------------------------------------------------------
			    Manage annotations
---------------------------------------------------------------------------*/

/* Create a new annotation for a _DECL node T.  */

var_ann_t
create_var_ann (tree t)
{
  var_ann_t ann;

  gcc_assert (t);
  gcc_assert (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == PARM_DECL
	      || TREE_CODE (t) == RESULT_DECL);

  ann = ggc_alloc_cleared_var_ann_d ();
  *DECL_VAR_ANN_PTR (t) = ann;

  return ann;
}
/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (void)
{
  basic_block bb;

  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}
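
/* Illustrative note (not part of the original file): after a full
   renumbering, statement UIDs give a cheap way to compare the order of
   two statements that were visited by the walk, e.g.

     renumber_gimple_stmt_uids ();
     ...
     if (gimple_uid (stmt1) < gimple_uid (stmt2))
       ...

   Passes that only touch a few blocks can use the _in_blocks variant
   below instead of renumbering the whole function.  */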
/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;

      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}
/* Build a temporary.  Make sure and register it to be renamed.  */

tree
make_rename_temp (tree type, const char *prefix)
{
  tree t = create_tmp_reg (type, prefix);

  if (gimple_referenced_vars (cfun))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  return t;
}
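
/* Illustrative usage sketch (an assumption, not code from this file): a
   pass that needs a fresh register-like temporary that the next SSA update
   will rename might do

     tree tmp = make_rename_temp (TREE_TYPE (val), "tmp");
     gimple copy = gimple_build_assign (tmp, val);
     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);

   where VAL and GSI are the pass's own value and insertion point, and the
   "tmp" prefix only affects dump readability.  */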
/*---------------------------------------------------------------------------
			      Debugging functions
---------------------------------------------------------------------------*/

/* Dump the list of all the referenced variables in the current function to
   FILE.  */

void
dump_referenced_vars (FILE *file)
{
  tree var;
  referenced_var_iterator rvi;

  fprintf (file, "\nReferenced variables in %s: %u\n\n",
	   get_name (current_function_decl), (unsigned) num_referenced_vars);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      fprintf (file, "Variable: ");
      dump_variable (file, var);
    }

  fprintf (file, "\n");
}
/* Dump the list of all the referenced variables to stderr.  */

DEBUG_FUNCTION void
debug_referenced_vars (void)
{
  dump_referenced_vars (stderr);
}
/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && gimple_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = num_referenced_vars * sizeof (tree);
  total += size;
  fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
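
/* Note (assumption, not from the original file): debug_dfa_stats and
   debug_referenced_vars are intended as debugger entry points, e.g.
   from gdb while stopped inside a pass:

     (gdb) call debug_dfa_stats ()
     (gdb) call debug_referenced_vars ()

   Both simply forward to the corresponding dump_* routine on stderr.  */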
/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;
  referenced_var_iterator vi;
  tree var;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Count all the variable annotations.  */
  FOR_EACH_REFERENCED_VAR (var, vi)
    if (var_ann (var))
      dfa_stats_p->num_var_anns++;

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple phi = gsi_stmt (si);
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}
/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Callback for walk_tree.  Used to collect variables referenced in
   the function.  */

static tree
find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  /* If we are reading the lto info back in, we need to rescan the
     referenced vars.  */
  if (TREE_CODE (*tp) == SSA_NAME)
    add_referenced_var (SSA_NAME_VAR (*tp));

  /* If T is a regular variable that the optimizers are interested
     in, add it to the list of variables.  */
  else if (SSA_VAR_P (*tp))
    add_referenced_var (*tp);

  /* Type, _DECL and constant nodes have no interesting children.
     Ignore them.  */
  else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Find referenced variables in STMT.  In contrast with
   find_new_referenced_vars, this function will not mark newly found
   variables for renaming.  */

void
find_referenced_vars_in (gimple stmt)
{
  size_t i;

  if (gimple_code (stmt) != GIMPLE_PHI)
    {
      for (i = 0; i < gimple_num_ops (stmt); i++)
	walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
    }
  else
    {
      walk_tree (gimple_phi_result_ptr (stmt), find_vars_r, NULL, NULL);

      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	{
	  tree arg = gimple_phi_arg_def (stmt, i);
	  walk_tree (&arg, find_vars_r, NULL, NULL);
	}
    }
}
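
/* Illustrative sketch (not from the original file): a pass that builds a
   statement by hand, outside of the gimplifier, can register the symbols
   it references along the lines of

     gimple new_stmt = gimple_build_assign (lhs, rhs);
     gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
     find_referenced_vars_in (new_stmt);

   or call find_new_referenced_vars below when the new symbols should also
   be marked for renaming.  */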
/* Lookup UID in the referenced_vars hashtable and return the associated
   variable.  */

tree
referenced_var_lookup (unsigned int uid)
{
  tree h;
  struct tree_decl_minimal in;

  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  gcc_assert (h || uid == 0);
  return h;
}
/* Check if TO is in the referenced_vars hash table and insert it if not.
   Return true if it required insertion.  */

bool
referenced_var_check_and_insert (tree to)
{
  tree h, *loc;
  struct tree_decl_minimal in;
  unsigned int uid = DECL_UID (to);

  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  if (h)
    {
      /* DECL_UID has already been entered in the table.  Verify that it is
	 the same entry as TO.  See PR 27793.  */
      gcc_assert (h == to);
      return false;
    }

  loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
					   &in, uid, INSERT);
  *loc = to;
  return true;
}
/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  */

tree
gimple_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
}
/* Insert the pair VAR's UID, DEF into the default_defs hashtable.  */

void
set_default_def (tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  void **loc;

  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
				      DECL_UID (var), INSERT);
      gcc_assert (*(tree *)loc);
      htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
				  DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
  *(tree *) loc = def;

  /* Mark DEF as the default definition for VAR.  */
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
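
/* Illustrative sketch (an assumption, not code from this file): creating
   the default definition for a parameter P when a function is brought into
   SSA form looks roughly like

     tree ddef = make_ssa_name (p, gimple_build_nop ());
     set_default_def (p, ddef);
     gcc_assert (gimple_default_def (cfun, p) == ddef);

   after which SSA_NAME_IS_DEFAULT_DEF (ddef) is true.  Passing a NULL def
   removes the entry again.  */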
/* Add VAR to the list of referenced variables if it isn't already there.  */

bool
add_referenced_var (tree var)
{
  gcc_assert (DECL_P (var));

  /* Insert VAR into the referenced_vars hash table if it isn't present.  */
  if (referenced_var_check_and_insert (var))
    {
      /* Scan DECL_INITIAL for pointer variables as they may contain
	 address arithmetic referencing the address of other
	 variables.  As we are only interested in directly referenced
	 globals or referenced locals restrict this to initializers
	 that can refer to local variables.  */
      if (DECL_INITIAL (var)
	  && DECL_CONTEXT (var) == current_function_decl)
	walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);

      return true;
    }

  return false;
}
/* Remove VAR from the list.  */

void
remove_referenced_var (tree var)
{
  var_ann_t v_ann;
  struct tree_decl_minimal in;
  void **loc;
  unsigned int uid = DECL_UID (var);

  /* Preserve var_anns of globals.  */
  if (!is_global_var (var)
      && (v_ann = var_ann (var)))
    {
      ggc_free (v_ann);
      *DECL_VAR_ANN_PTR (var) = NULL;
    }

  gcc_assert (DECL_P (var));
  in.uid = uid;
  loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
				  NO_INSERT);
  htab_clear_slot (gimple_referenced_vars (cfun), loc);
}
/* Return the virtual variable associated to the non-scalar variable VAR.  */

tree
get_virtual_var (tree var)
{
  STRIP_NOPS (var);

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR
	 || handled_component_p (var))
    var = TREE_OPERAND (var, 0);

  /* Treating GIMPLE registers as virtual variables makes no sense.
     Also complain if we couldn't extract a _DECL out of the original
     expression.  */
  gcc_assert (SSA_VAR_P (var));
  gcc_assert (!is_gimple_reg (var));

  return var;
}
/* Mark all the naked symbols in STMT for SSA renaming.  */

void
mark_symbols_for_renaming (gimple stmt)
{
  tree op;
  ssa_op_iter iter;

  update_stmt (stmt);

  /* Mark all the operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
    if (DECL_P (op))
      mark_sym_for_renaming (op);
}
/* Find all variables within the gimplified statement that were not
   previously visible to the function and add them to the referenced
   variables list.  */

static tree
find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
			    void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Find any new referenced variables in STMT.  */

void
find_new_referenced_vars (gimple stmt)
{
  walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
}
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size)
{
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT maxsize = -1;
  tree size_tree = NULL_TREE;
  HOST_WIDE_INT bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size from just the outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (! host_integerp (size_tree, 1))
	bitsize = -1;
      else
	bitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST
		&& host_integerp (this_offset, 0))
	      {
		HOST_WIDE_INT hthis_offset = TREE_INT_CST_LOW (this_offset);
		hthis_offset *= BITS_PER_UNIT;
		hthis_offset
		  += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += hthis_offset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref
		    && maxsize != -1)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (host_integerp (fsize, 0)
			    && host_integerp (ssize, 0))
			  maxsize += ((TREE_INT_CST_LOW (ssize)
				       - TREE_INT_CST_LOW (fsize))
				      * BITS_PER_UNIT - hthis_offset);
			else
			  maxsize = -1;
		      }
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (maxsize != -1 && csize && host_integerp (csize, 1))
		  maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& host_integerp (index, 0)
		&& (low_bound = array_ref_low_bound (exp),
		    host_integerp (low_bound, 0))
		&& (unit_size = array_ref_element_size (exp),
		    host_integerp (unit_size, 1)))
	      {
		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

		hindex -= TREE_INT_CST_LOW (low_bound);
		hindex *= TREE_INT_CST_LOW (unit_size);
		hindex *= BITS_PER_UNIT;
		bit_offset += hindex;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (maxsize != -1 && asize && host_integerp (asize, 1))
		  maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  double_int off = mem_ref_offset (exp);
		  off = double_int_lshift (off,
					   BITS_PER_UNIT == 8
					   ? 3 : exact_log2 (BITS_PER_UNIT),
					   HOST_BITS_PER_DOUBLE_INT, true);
		  off = double_int_add (off, shwi_to_double_int (bit_offset));
		  if (double_int_fits_in_shwi_p (off))
		    {
		      bit_offset = double_int_to_shwi (off);
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	case TARGET_MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
	    {
	      /* Via the variable index or index2 we can reach the
		 whole object.  */
	      if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		{
		  exp = TREE_OPERAND (TMR_BASE (exp), 0);
		  bit_offset = 0;
		  maxsize = -1;
		  goto done;
		}
	      if (integer_zerop (TMR_OFFSET (exp)))
		exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      else
		{
		  double_int off = mem_ref_offset (exp);
		  off = double_int_lshift (off,
					   BITS_PER_UNIT == 8
					   ? 3 : exact_log2 (BITS_PER_UNIT),
					   HOST_BITS_PER_DOUBLE_INT, true);
		  off = double_int_add (off, shwi_to_double_int (bit_offset));
		  if (double_int_fits_in_shwi_p (off))
		    {
		      bit_offset = double_int_to_shwi (off);
		      exp = TREE_OPERAND (TMR_BASE (exp), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* We need to deal with variable arrays ending structures such as
       struct { int length; int a[1]; } x;           x.a[d]
       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
     where we do not know maxsize for variable index accesses to
     the array.  The simplest way to conservatively deal with this
     is to punt in the case that offset + maxsize reaches the
     base type boundary.  This needs to include possible trailing padding
     that is there for alignment purposes.

     That is of course only true if the base object is not a decl.  */

  if (DECL_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
	 base decl.  */
      if (maxsize == -1
	  && host_integerp (DECL_SIZE (exp), 1))
	maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - bit_offset;
    }
  else if (seen_variable_array_ref
	   && maxsize != -1
	   && (!host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
	       || (bit_offset + maxsize
		   == (signed) TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))))
    maxsize = -1;

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset;
  *psize = bitsize;
  *pmax_size = maxsize;

  return exp;
}
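
/* Illustrative usage sketch (not part of the original file): callers such
   as the alias machinery typically use the result like

     HOST_WIDE_INT offset, size, max_size;
     tree base = get_ref_base_and_extent (ref, &offset, &size, &max_size);
     if (size != -1 && size == max_size)
       ...  the access covers exactly SIZE bits at BASE + OFFSET  ...

   i.e. a non-variable access is recognized by *PSIZE == *PMAX_SIZE, as
   documented in the function comment above.  */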
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  return NULL_TREE;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    HOST_WIDE_INT hthis_offset;

	    if (!this_offset
		|| TREE_CODE (this_offset) != INTEGER_CST
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset = TREE_INT_CST_LOW (this_offset);
	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

		hindex -= TREE_INT_CST_LOW (low_bound);
		hindex *= TREE_INT_CST_LOW (unit_size);
		byte_offset += hindex;
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (!integer_zerop (TREE_OPERAND (exp, 1)))
		{
		  double_int off = mem_ref_offset (exp);
		  gcc_assert (off.high == -1 || off.high == 0);
		  byte_offset += double_int_to_shwi (off);
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  else
	    return NULL_TREE;
	  break;

	case TARGET_MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
	    {
	      if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		return NULL_TREE;
	      if (!integer_zerop (TMR_OFFSET (exp)))
		{
		  double_int off = mem_ref_offset (exp);
		  gcc_assert (off.high == -1 || off.high == 0);
		  byte_offset += double_int_to_shwi (off);
		}
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	    }
	  else
	    return NULL_TREE;
	  break;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
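
/* Illustrative note (assumption, not from the original file): unlike
   get_ref_base_and_extent, which falls back to conservative bit ranges,
   this routine either succeeds with an exact byte offset or fails:

     HOST_WIDE_INT byte_off;
     tree base = get_addr_base_and_unit_offset (ref, &byte_off);
     if (base)
       ...  the address of REF is &BASE plus BYTE_OFF bytes  ...
     else
       ...  offset not constant or not byte-aligned  ...

   as described in the comment before the function.  */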
/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}