1 /* Data flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "pointer-set.h"
30 #include "basic-block.h"
33 #include "langhooks.h"
36 #include "tree-pretty-print.h"
37 #include "tree-dump.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
46 /* Build and maintain data flow information for trees. */
48 /* Counters used to display DFA and SSA statistics. */
/* NOTE(review): this chunk appears truncated by extraction -- sibling stat
   counters and the storage-class of max_num_phi_args are not visible here;
   code kept byte-identical.  */
56 size_t max_num_phi_args
;
62 /* Local functions. */
/* Forward declaration; definition appears further down in this file.  */
63 static void collect_dfa_stats (struct dfa_stats_d
*);
66 /*---------------------------------------------------------------------------
67 Dataflow analysis (DFA) routines
68 ---------------------------------------------------------------------------*/
69 /* Find all the variables referenced in the function. This function
70 builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.
72 Note that this function does not look for statement operands, it simply
73 determines what variables are referenced in the program and detects
74 various attributes for each variable used by alias analysis and the
/* NOTE(review): the function signature, the enclosing FOR_EACH_BB loop,
   braces and the return statement appear to have been dropped by
   extraction; code kept byte-identical.  */
78 find_referenced_vars (void)
81 gimple_stmt_iterator si
;
/* Scan each non-PHI statement of the block for referenced variables.  */
85 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
87 gimple stmt
= gsi_stmt (si
);
/* Debug statements are presumably skipped here -- the branch body is not
   visible in this view; confirm against the original file.  */
88 if (is_gimple_debug (stmt
))
90 find_referenced_vars_in (gsi_stmt (si
));
/* PHI nodes are scanned separately from ordinary statements.  */
93 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
94 find_referenced_vars_in (gsi_stmt (si
));
/* Pass descriptor registering find_referenced_vars with the pass manager.
   NOTE(review): several initializer fields (gate, sub, next, properties)
   appear to be missing from this extracted view; code kept byte-identical.  */
100 struct gimple_opt_pass pass_referenced_vars
=
104 "*referenced_vars", /* name */
106 find_referenced_vars
, /* execute */
109 0, /* static_pass_number */
110 TV_FIND_REFERENCED_VARS
, /* tv_id */
111 PROP_gimple_leh
| PROP_cfg
, /* properties_required */
112 PROP_referenced_vars
, /* properties_provided */
113 0, /* properties_destroyed */
114 0, /* todo_flags_start */
115 0 /* todo_flags_finish */
120 /* Renumber all of the gimple stmt uids. */
/* NOTE(review): return type, FOR_EACH_BB loop header and braces are missing
   from this extracted view; code kept byte-identical.  */
123 renumber_gimple_stmt_uids (void)
/* Restart uid numbering from zero for the whole function.  */
127 set_gimple_stmt_max_uid (cfun
, 0);
130 gimple_stmt_iterator bsi
;
/* Assign fresh uids to the PHI nodes of the block first...  */
131 for (bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
133 gimple stmt
= gsi_stmt (bsi
);
134 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
/* ...then to the ordinary statements of the block.  */
136 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
138 gimple stmt
= gsi_stmt (bsi
);
139 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
144 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
145 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
/* NOTE(review): return type, declaration of I, and braces are missing from
   this extracted view; code kept byte-identical.  */
148 renumber_gimple_stmt_uids_in_blocks (basic_block
*blocks
, int n_blocks
)
/* Restart uid numbering from zero for the whole function.  */
152 set_gimple_stmt_max_uid (cfun
, 0);
153 for (i
= 0; i
< n_blocks
; i
++)
155 basic_block bb
= blocks
[i
];
156 gimple_stmt_iterator bsi
;
/* PHI nodes of the block get uids first...  */
157 for (bsi
= gsi_start_phis (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
159 gimple stmt
= gsi_stmt (bsi
);
160 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
/* ...followed by the block's ordinary statements.  */
162 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
164 gimple stmt
= gsi_stmt (bsi
);
165 gimple_set_uid (stmt
, inc_gimple_stmt_max_uid (cfun
));
170 /* Build a temporary. Make sure and register it to be renamed. */
/* NOTE(review): the return type and the trailing `return t;' appear to be
   missing from this extracted view; code kept byte-identical.  */
173 make_rename_temp (tree type
, const char *prefix
)
/* Create a fresh temporary register of TYPE, named using PREFIX.  */
175 tree t
= create_tmp_reg (type
, prefix
);
/* Register the temporary with the referenced-vars table if it exists.  */
177 if (gimple_referenced_vars (cfun
))
178 add_referenced_var (t
);
/* When already in SSA form, queue the new symbol for SSA renaming.  */
179 if (gimple_in_ssa_p (cfun
))
180 mark_sym_for_renaming (t
);
187 /*---------------------------------------------------------------------------
189 ---------------------------------------------------------------------------*/
190 /* Dump the list of all the referenced variables in the current function to
/* NOTE(review): function return type, declaration of VAR, and braces are
   missing from this extracted view; code kept byte-identical.  */
194 dump_referenced_vars (FILE *file
)
197 referenced_var_iterator rvi
;
199 fprintf (file
, "\nReferenced variables in %s: %u\n\n",
200 get_name (current_function_decl
), (unsigned) num_referenced_vars
);
/* Print one entry per referenced variable via dump_variable.  */
202 FOR_EACH_REFERENCED_VAR (cfun
, var
, rvi
)
204 fprintf (file
, "Variable: ");
205 dump_variable (file
, var
);
208 fprintf (file
, "\n");
212 /* Dump the list of all the referenced variables to stderr. */
215 debug_referenced_vars (void)
217 dump_referenced_vars (stderr
);
221 /* Dump variable VAR and its may-aliases to FILE. */
/* NOTE(review): the return type and braces are missing from this extracted
   view; code kept byte-identical.  */
224 dump_variable (FILE *file
, tree var
)
/* For SSA names, dump points-to info for pointers and then fall back to
   the underlying symbol.  */
226 if (TREE_CODE (var
) == SSA_NAME
)
228 if (POINTER_TYPE_P (TREE_TYPE (var
)))
229 dump_points_to_info_for (file
, var
);
230 var
= SSA_NAME_VAR (var
);
233 if (var
== NULL_TREE
)
235 fprintf (file
, "<nil>");
239 print_generic_expr (file
, var
, dump_flags
);
241 fprintf (file
, ", UID D.%u", (unsigned) DECL_UID (var
));
/* Only mention the points-to UID when it differs from the decl UID.  */
242 if (DECL_PT_UID (var
) != DECL_UID (var
))
243 fprintf (file
, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var
));
245 fprintf (file
, ", ");
246 print_generic_expr (file
, TREE_TYPE (var
), dump_flags
);
/* Emit the variable's interesting attribute flags.  */
248 if (TREE_ADDRESSABLE (var
))
249 fprintf (file
, ", is addressable");
251 if (is_global_var (var
))
252 fprintf (file
, ", is global");
254 if (TREE_THIS_VOLATILE (var
))
255 fprintf (file
, ", is volatile");
257 if (cfun
&& gimple_default_def (cfun
, var
))
259 fprintf (file
, ", default def: ");
260 print_generic_expr (file
, gimple_default_def (cfun
, var
), dump_flags
);
263 if (DECL_INITIAL (var
))
265 fprintf (file
, ", initial: ");
266 print_generic_expr (file
, DECL_INITIAL (var
), dump_flags
);
269 fprintf (file
, "\n");
273 /* Dump variable VAR and its may-aliases to stderr. */
276 debug_variable (tree var
)
278 dump_variable (stderr
, var
);
282 /* Dump various DFA statistics to FILE. */
/* NOTE(review): the return type, braces, the declaration of FUNCNAME, the
   running `total' accumulation lines and a LABEL(total) argument appear to
   be missing from this extracted view; code kept byte-identical.  */
285 dump_dfa_stats (FILE *file
)
287 struct dfa_stats_d dfa_stats
;
289 unsigned long size
, total
= 0;
/* Shared printf formats for the table header and rows.  */
290 const char * const fmt_str
= "%-30s%-13s%12s\n";
291 const char * const fmt_str_1
= "%-30s%13lu%11lu%c\n";
292 const char * const fmt_str_3
= "%-43s%11lu%c\n";
294 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
/* Gather the statistics before printing anything.  */
296 collect_dfa_stats (&dfa_stats
);
298 fprintf (file
, "\nDFA Statistics for %s\n\n", funcname
);
300 fprintf (file
, "---------------------------------------------------------\n");
301 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
302 fprintf (file
, fmt_str
, "", " instances ", "used ");
303 fprintf (file
, "---------------------------------------------------------\n");
/* One row per kind of DFA/SSA object: count and estimated memory.  */
305 size
= num_referenced_vars
* sizeof (tree
);
307 fprintf (file
, fmt_str_1
, "Referenced variables", (unsigned long)num_referenced_vars
,
308 SCALE (size
), LABEL (size
));
310 size
= dfa_stats
.num_var_anns
* sizeof (struct var_ann_d
);
312 fprintf (file
, fmt_str_1
, "Variables annotated", dfa_stats
.num_var_anns
,
313 SCALE (size
), LABEL (size
));
315 size
= dfa_stats
.num_uses
* sizeof (tree
*);
317 fprintf (file
, fmt_str_1
, "USE operands", dfa_stats
.num_uses
,
318 SCALE (size
), LABEL (size
));
320 size
= dfa_stats
.num_defs
* sizeof (tree
*);
322 fprintf (file
, fmt_str_1
, "DEF operands", dfa_stats
.num_defs
,
323 SCALE (size
), LABEL (size
));
325 size
= dfa_stats
.num_vuses
* sizeof (tree
*);
327 fprintf (file
, fmt_str_1
, "VUSE operands", dfa_stats
.num_vuses
,
328 SCALE (size
), LABEL (size
));
330 size
= dfa_stats
.num_vdefs
* sizeof (tree
*);
332 fprintf (file
, fmt_str_1
, "VDEF operands", dfa_stats
.num_vdefs
,
333 SCALE (size
), LABEL (size
));
335 size
= dfa_stats
.num_phis
* sizeof (struct gimple_statement_phi
);
337 fprintf (file
, fmt_str_1
, "PHI nodes", dfa_stats
.num_phis
,
338 SCALE (size
), LABEL (size
));
340 size
= dfa_stats
.num_phi_args
* sizeof (struct phi_arg_d
);
342 fprintf (file
, fmt_str_1
, "PHI arguments", dfa_stats
.num_phi_args
,
343 SCALE (size
), LABEL (size
));
345 fprintf (file
, "---------------------------------------------------------\n");
346 fprintf (file
, fmt_str_3
, "Total memory used by DFA/SSA data", SCALE (total
),
348 fprintf (file
, "---------------------------------------------------------\n");
349 fprintf (file
, "\n");
/* Average and maximum PHI arity, printed only when PHIs exist (avoids a
   division by zero).  */
351 if (dfa_stats
.num_phis
)
352 fprintf (file
, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
353 (float) dfa_stats
.num_phi_args
/ (float) dfa_stats
.num_phis
,
354 (long) dfa_stats
.max_num_phi_args
);
356 fprintf (file
, "\n");
360 /* Dump DFA statistics on stderr. */
363 debug_dfa_stats (void)
365 dump_dfa_stats (stderr
);
369 /* Collect DFA statistics and store them in the structure pointed to by
/* NOTE(review): the `static void' return line, declarations of VAR and the
   FOR_EACH_BB loop, and braces are missing from this extracted view; code
   kept byte-identical.  */
373 collect_dfa_stats (struct dfa_stats_d
*dfa_stats_p ATTRIBUTE_UNUSED
)
376 referenced_var_iterator vi
;
379 gcc_assert (dfa_stats_p
);
/* Start from a zeroed stats record.  */
381 memset ((void *)dfa_stats_p
, 0, sizeof (struct dfa_stats_d
));
383 /* Count all the variable annotations. */
384 FOR_EACH_REFERENCED_VAR (cfun
, var
, vi
)
386 dfa_stats_p
->num_var_anns
++;
388 /* Walk all the statements in the function counting references. */
391 gimple_stmt_iterator si
;
/* PHI nodes: count them, their arguments, and track the maximum arity.  */
393 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
395 gimple phi
= gsi_stmt (si
);
396 dfa_stats_p
->num_phis
++;
397 dfa_stats_p
->num_phi_args
+= gimple_phi_num_args (phi
);
398 if (gimple_phi_num_args (phi
) > dfa_stats_p
->max_num_phi_args
)
399 dfa_stats_p
->max_num_phi_args
= gimple_phi_num_args (phi
);
/* Ordinary statements: count real and virtual operands.  */
402 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
404 gimple stmt
= gsi_stmt (si
);
405 dfa_stats_p
->num_defs
+= NUM_SSA_OPERANDS (stmt
, SSA_OP_DEF
);
406 dfa_stats_p
->num_uses
+= NUM_SSA_OPERANDS (stmt
, SSA_OP_USE
);
407 dfa_stats_p
->num_vdefs
+= gimple_vdef (stmt
) ? 1 : 0;
408 dfa_stats_p
->num_vuses
+= gimple_vuse (stmt
) ? 1 : 0;
414 /*---------------------------------------------------------------------------
415 Miscellaneous helpers
416 ---------------------------------------------------------------------------*/
417 /* Callback for walk_tree. Used to collect variables referenced in
/* NOTE(review): the `static tree' return line, braces, the *walk_subtrees
   assignment and the trailing `return NULL_TREE;' appear to be missing from
   this extracted view; code kept byte-identical.  */
421 find_vars_r (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA carries the function whose referenced-vars table we populate.  */
423 struct function
*fn
= (struct function
*) data
;
425 /* If we are reading the lto info back in, we need to rescan the
427 if (TREE_CODE (*tp
) == SSA_NAME
)
428 add_referenced_var_1 (SSA_NAME_VAR (*tp
), fn
);
430 /* If T is a regular variable that the optimizers are interested
431 in, add it to the list of variables. */
432 else if ((TREE_CODE (*tp
) == VAR_DECL
433 && !is_global_var (*tp
))
434 || TREE_CODE (*tp
) == PARM_DECL
435 || TREE_CODE (*tp
) == RESULT_DECL
)
436 add_referenced_var_1 (*tp
, fn
);
438 /* Type, _DECL and constant nodes have no interesting children.
440 else if (IS_TYPE_OR_DECL_P (*tp
) || CONSTANT_CLASS_P (*tp
))
446 /* Find referenced variables in STMT. */
/* NOTE(review): the return type, declaration of I, braces and the else
   branch structure are missing from this extracted view; code kept
   byte-identical.  */
449 find_referenced_vars_in (gimple stmt
)
/* Non-PHI statements: walk every operand with find_vars_r.  */
453 if (gimple_code (stmt
) != GIMPLE_PHI
)
455 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
456 walk_tree (gimple_op_ptr (stmt
, i
), find_vars_r
, cfun
, NULL
);
/* PHI nodes: walk the result and each argument definition.  */
460 walk_tree (gimple_phi_result_ptr (stmt
), find_vars_r
, cfun
, NULL
);
462 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
464 tree arg
= gimple_phi_arg_def (stmt
, i
);
465 walk_tree (&arg
, find_vars_r
, cfun
, NULL
);
471 /* Lookup UID in the referenced_vars hashtable and return the associated
/* NOTE(review): the return types, declarations of H and LOC, assignments to
   IN's uid field, the INSERT arguments, braces and return statements appear
   to be missing from this extracted view; code kept byte-identical.  */
475 referenced_var_lookup (struct function
*fn
, unsigned int uid
)
/* IN serves as a stack-allocated hash key carrying the uid.  */
478 struct tree_decl_minimal in
;
480 h
= (tree
) htab_find_with_hash (gimple_referenced_vars (fn
), &in
, uid
);
484 /* Check if TO is in the referenced_vars hash table and insert it if not.
485 Return true if it required insertion. */
488 referenced_var_check_and_insert (tree to
, struct function
*fn
)
491 struct tree_decl_minimal in
;
492 unsigned int uid
= DECL_UID (to
);
495 loc
= (tree
*) htab_find_slot_with_hash (gimple_referenced_vars (fn
),
499 /* DECL_UID has already been entered in the table. Verify that it is
500 the same entry as TO. See PR 27793. */
501 gcc_assert (*loc
== to
);
509 /* Lookup VAR UID in the default_defs hashtable and return the associated
/* NOTE(review): the return types, the `in.var = (tree)&ind;' linkage lines,
   declarations of LOC, braces and some branch bodies appear to be missing
   from this extracted view; code kept byte-identical.  */
513 gimple_default_def (struct function
*fn
, tree var
)
/* IND/IN form a stack-allocated ssa_name-shaped hash key for VAR's uid.  */
515 struct tree_decl_minimal ind
;
516 struct tree_ssa_name in
;
517 gcc_assert (SSA_VAR_P (var
));
519 ind
.uid
= DECL_UID (var
);
520 return (tree
) htab_find_with_hash (DEFAULT_DEFS (fn
), &in
, DECL_UID (var
));
523 /* Insert the pair VAR's UID, DEF into the default_defs hashtable. */
526 set_default_def (tree var
, tree def
)
528 struct tree_decl_minimal ind
;
529 struct tree_ssa_name in
;
532 gcc_assert (SSA_VAR_P (var
));
534 ind
.uid
= DECL_UID (var
);
/* Locate the slot for VAR's uid; a NULL DEF presumably removes the entry
   via the htab_remove_elt below -- the guarding condition is not visible
   in this view.  */
537 loc
= htab_find_slot_with_hash (DEFAULT_DEFS (cfun
), &in
,
538 DECL_UID (var
), INSERT
);
540 htab_remove_elt (DEFAULT_DEFS (cfun
), *loc
);
/* DEF must be an SSA name whose underlying symbol is VAR.  */
543 gcc_assert (TREE_CODE (def
) == SSA_NAME
&& SSA_NAME_VAR (def
) == var
);
544 loc
= htab_find_slot_with_hash (DEFAULT_DEFS (cfun
), &in
,
545 DECL_UID (var
), INSERT
);
547 /* Default definition might be changed by tail call optimization. */
549 SSA_NAME_IS_DEFAULT_DEF (*(tree
*) loc
) = false;
552 /* Mark DEF as the default definition for VAR. */
553 SSA_NAME_IS_DEFAULT_DEF (def
) = true;
556 /* Add VAR to the list of referenced variables if it isn't already there. */
/* NOTE(review): the return types, braces, declarations of V_ANN and LOC,
   and the NO_INSERT argument of the final hash lookup are missing from this
   extracted view; code kept byte-identical.  */
559 add_referenced_var_1 (tree var
, struct function
*fn
)
/* Only real user-level symbols may enter the referenced-vars table.  */
561 gcc_checking_assert (TREE_CODE (var
) == VAR_DECL
562 || TREE_CODE (var
) == PARM_DECL
563 || TREE_CODE (var
) == RESULT_DECL
);
/* Globals are excluded, except for the special virtual-operand decl.  */
565 gcc_checking_assert ((TREE_CODE (var
) == VAR_DECL
566 && VAR_DECL_IS_VIRTUAL_OPERAND (var
))
567 || !is_global_var (var
));
569 /* Insert VAR into the referenced_vars hash table if it isn't present
570 and allocate its var-annotation. */
571 if (referenced_var_check_and_insert (var
, fn
))
573 gcc_checking_assert (!*DECL_VAR_ANN_PTR (var
));
574 *DECL_VAR_ANN_PTR (var
) = ggc_alloc_cleared_var_ann_d ();
581 /* Remove VAR from the list of referenced variables and clear its
585 remove_referenced_var (tree var
)
588 struct tree_decl_minimal in
;
590 unsigned int uid
= DECL_UID (var
);
592 gcc_checking_assert (TREE_CODE (var
) == VAR_DECL
593 || TREE_CODE (var
) == PARM_DECL
594 || TREE_CODE (var
) == RESULT_DECL
);
596 gcc_checking_assert (!is_global_var (var
));
/* Drop the variable's annotation before removing the table entry.  */
598 v_ann
= var_ann (var
);
600 *DECL_VAR_ANN_PTR (var
) = NULL
;
603 loc
= htab_find_slot_with_hash (gimple_referenced_vars (cfun
), &in
, uid
,
605 htab_clear_slot (gimple_referenced_vars (cfun
), loc
);
609 /* If EXP is a handled component reference for a structure, return the
610 base variable. The access range is delimited by bit positions *POFFSET and
611 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
612 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
613 and *PMAX_SIZE are equal, the access is non-variable. */
/* NOTE(review): this function is heavily truncated in this extracted view --
   the return type, the `while (1)' walk loop, many case labels, `break's,
   `goto done' targets, braces and several guarding conditions are missing.
   Code kept byte-identical; do not attempt to rebuild the control flow from
   this view alone.  */
616 get_ref_base_and_extent (tree exp
, HOST_WIDE_INT
*poffset
,
617 HOST_WIDE_INT
*psize
,
618 HOST_WIDE_INT
*pmax_size
)
/* -1 encodes "unknown" for both the access size and the maximum size.  */
620 HOST_WIDE_INT bitsize
= -1;
621 HOST_WIDE_INT maxsize
= -1;
622 tree size_tree
= NULL_TREE
;
/* Cumulative bit offset of EXP from the base object, in double_int to
   avoid overflow while summing component offsets.  */
623 double_int bit_offset
= double_int_zero
;
624 HOST_WIDE_INT hbit_offset
;
625 bool seen_variable_array_ref
= false;
628 /* First get the final access size from just the outermost expression. */
629 if (TREE_CODE (exp
) == COMPONENT_REF
)
630 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
631 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
632 size_tree
= TREE_OPERAND (exp
, 1);
633 else if (!VOID_TYPE_P (TREE_TYPE (exp
)))
635 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
637 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
639 bitsize
= GET_MODE_BITSIZE (mode
);
641 if (size_tree
!= NULL_TREE
)
643 if (! host_integerp (size_tree
, 1))
646 bitsize
= TREE_INT_CST_LOW (size_tree
);
649 /* Initially, maxsize is the same as the accessed element size.
650 In the following it will only grow (or become -1). */
653 /* Compute cumulative bit-offset for nested component-refs and array-refs,
654 and find the ultimate containing object. */
657 base_type
= TREE_TYPE (exp
);
659 switch (TREE_CODE (exp
))
/* BIT_FIELD_REF: operand 2 is the constant bit position.  */
663 = double_int_add (bit_offset
,
664 tree_to_double_int (TREE_OPERAND (exp
, 2)));
/* COMPONENT_REF: accumulate the field's byte offset (scaled to bits)
   plus its residual bit offset.  */
669 tree field
= TREE_OPERAND (exp
, 1);
670 tree this_offset
= component_ref_field_offset (exp
);
672 if (this_offset
&& TREE_CODE (this_offset
) == INTEGER_CST
)
674 double_int doffset
= tree_to_double_int (this_offset
);
675 doffset
= double_int_lshift (doffset
,
677 ? 3 : exact_log2 (BITS_PER_UNIT
),
678 HOST_BITS_PER_DOUBLE_INT
, true);
679 doffset
= double_int_add (doffset
,
681 (DECL_FIELD_BIT_OFFSET (field
)));
682 bit_offset
= double_int_add (bit_offset
, doffset
);
684 /* If we had seen a variable array ref already and we just
685 referenced the last field of a struct or a union member
686 then we have to adjust maxsize by the padding at the end
688 if (seen_variable_array_ref
&& maxsize
!= -1)
690 tree stype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
691 tree next
= DECL_CHAIN (field
);
692 while (next
&& TREE_CODE (next
) != FIELD_DECL
)
693 next
= DECL_CHAIN (next
);
695 || TREE_CODE (stype
) != RECORD_TYPE
)
697 tree fsize
= DECL_SIZE_UNIT (field
);
698 tree ssize
= TYPE_SIZE_UNIT (stype
);
699 if (host_integerp (fsize
, 0)
700 && host_integerp (ssize
, 0)
701 && double_int_fits_in_shwi_p (doffset
))
702 maxsize
+= ((TREE_INT_CST_LOW (ssize
)
703 - TREE_INT_CST_LOW (fsize
))
705 - double_int_to_shwi (doffset
));
/* Variable field offset: widen maxsize to the containing structure.  */
713 tree csize
= TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
714 /* We need to adjust maxsize to the whole structure bitsize.
715 But we can subtract any constant offset seen so far,
716 because that would get us out of the structure otherwise. */
719 && host_integerp (csize
, 1)
720 && double_int_fits_in_shwi_p (bit_offset
))
721 maxsize
= TREE_INT_CST_LOW (csize
)
722 - double_int_to_shwi (bit_offset
);
730 case ARRAY_RANGE_REF
:
732 tree index
= TREE_OPERAND (exp
, 1);
733 tree low_bound
, unit_size
;
735 /* If the resulting bit-offset is constant, track it. */
736 if (TREE_CODE (index
) == INTEGER_CST
737 && (low_bound
= array_ref_low_bound (exp
),
738 TREE_CODE (low_bound
) == INTEGER_CST
)
739 && (unit_size
= array_ref_element_size (exp
),
740 TREE_CODE (unit_size
) == INTEGER_CST
))
/* (index - low_bound) * unit_size, scaled from units to bits.  */
744 (double_int_sub (TREE_INT_CST (index
),
745 TREE_INT_CST (low_bound
)),
746 TYPE_PRECISION (TREE_TYPE (index
)));
747 doffset
= double_int_mul (doffset
,
748 tree_to_double_int (unit_size
));
749 doffset
= double_int_lshift (doffset
,
751 ? 3 : exact_log2 (BITS_PER_UNIT
),
752 HOST_BITS_PER_DOUBLE_INT
, true);
753 bit_offset
= double_int_add (bit_offset
, doffset
);
755 /* An array ref with a constant index up in the structure
756 hierarchy will constrain the size of any variable array ref
757 lower in the access hierarchy. */
758 seen_variable_array_ref
= false;
/* Variable index: widen maxsize to the containing array.  */
762 tree asize
= TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
763 /* We need to adjust maxsize to the whole array bitsize.
764 But we can subtract any constant offset seen so far,
765 because that would get us outside of the array otherwise. */
768 && host_integerp (asize
, 1)
769 && double_int_fits_in_shwi_p (bit_offset
))
770 maxsize
= TREE_INT_CST_LOW (asize
)
771 - double_int_to_shwi (bit_offset
);
775 /* Remember that we have seen an array ref with a variable
777 seen_variable_array_ref
= true;
/* Presumably the IMAGPART_EXPR case: skip past the real part.  */
787 = double_int_add (bit_offset
, uhwi_to_double_int (bitsize
));
790 case VIEW_CONVERT_EXPR
:
794 /* Hand back the decl for MEM[&decl, off]. */
795 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
797 if (integer_zerop (TREE_OPERAND (exp
, 1)))
798 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
/* Non-zero MEM_REF offset: fold it into bit_offset when it fits.  */
801 double_int off
= mem_ref_offset (exp
);
802 off
= double_int_lshift (off
,
804 ? 3 : exact_log2 (BITS_PER_UNIT
),
805 HOST_BITS_PER_DOUBLE_INT
, true);
806 off
= double_int_add (off
, bit_offset
);
807 if (double_int_fits_in_shwi_p (off
))
810 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
817 /* Hand back the decl for MEM[&decl, off]. */
818 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
)
820 /* Via the variable index or index2 we can reach the
822 if (TMR_INDEX (exp
) || TMR_INDEX2 (exp
))
824 exp
= TREE_OPERAND (TMR_BASE (exp
), 0);
825 bit_offset
= double_int_zero
;
829 if (integer_zerop (TMR_OFFSET (exp
)))
830 exp
= TREE_OPERAND (TMR_BASE (exp
), 0);
833 double_int off
= mem_ref_offset (exp
);
834 off
= double_int_lshift (off
,
836 ? 3 : exact_log2 (BITS_PER_UNIT
),
837 HOST_BITS_PER_DOUBLE_INT
, true);
838 off
= double_int_add (off
, bit_offset
);
839 if (double_int_fits_in_shwi_p (off
))
842 exp
= TREE_OPERAND (TMR_BASE (exp
), 0);
/* Default walk step: descend into the innermost operand.  */
852 exp
= TREE_OPERAND (exp
, 0);
/* After the walk: punt if the accumulated offset overflows a HWI.  */
856 if (!double_int_fits_in_shwi_p (bit_offset
))
865 hbit_offset
= double_int_to_shwi (bit_offset
);
867 /* We need to deal with variable arrays ending structures such as
868 struct { int length; int a[1]; } x; x.a[d]
869 struct { struct { int a; int b; } a[1]; } x; x.a[d].a
870 struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0]
871 struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
872 where we do not know maxsize for variable index accesses to
873 the array. The simplest way to conservatively deal with this
874 is to punt in the case that offset + maxsize reaches the
875 base type boundary. This needs to include possible trailing padding
876 that is there for alignment purposes. */
878 if (seen_variable_array_ref
880 && (!host_integerp (TYPE_SIZE (base_type
), 1)
881 || (hbit_offset
+ maxsize
882 == (signed) TREE_INT_CST_LOW (TYPE_SIZE (base_type
)))))
885 /* In case of a decl or constant base object we can do better. */
889 /* If maxsize is unknown adjust it according to the size of the
892 && host_integerp (DECL_SIZE (exp
), 1))
893 maxsize
= TREE_INT_CST_LOW (DECL_SIZE (exp
)) - hbit_offset
;
895 else if (CONSTANT_CLASS_P (exp
))
897 /* If maxsize is unknown adjust it according to the size of the
898 base type constant. */
900 && host_integerp (TYPE_SIZE (TREE_TYPE (exp
)), 1))
901 maxsize
= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp
))) - hbit_offset
;
904 /* ??? Due to negative offsets in ARRAY_REF we can end up with
905 negative bit_offset here. We might want to store a zero offset
907 *poffset
= hbit_offset
;
909 *pmax_size
= maxsize
;
914 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
915 denotes the starting address of the memory access EXP.
916 Returns NULL_TREE if the offset is not constant or any component
917 is not BITS_PER_UNIT-aligned. */
/* NOTE(review): the return type and braces are missing from this extracted
   view; code kept byte-identical.  Thin wrapper that delegates with a NULL
   valueize callback.  */
920 get_addr_base_and_unit_offset (tree exp
, HOST_WIDE_INT
*poffset
)
922 return get_addr_base_and_unit_offset_1 (exp
, poffset
, NULL
);
925 /* Returns true if STMT references an SSA_NAME that has
926 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
929 stmt_references_abnormal_ssa_name (gimple stmt
)
934 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
936 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p
)))