/* Data flow functions for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hashtab.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "vec.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "langhooks.h"
#include "flags.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "params.h"
#include "wide-int.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};

/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);

/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (void)
{
  basic_block bb;

  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}

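/* Illustrative note (an editorial sketch, not part of the original source):
   after the renumbering above, UIDs increase in visit order, so within a
   single basic block a comparison such as

     gimple_uid (a) < gimple_uid (b)

   tells whether statement A was visited before statement B (the block's
   PHIs come first).  UIDs are only meaningful until the next pass that
   renumbers them.  */
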
/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}

/*---------------------------------------------------------------------------
			    Debugging functions
---------------------------------------------------------------------------*/

/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}

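/* Illustrative sample of the output produced above (editorial sketch; the
   identifier, UID and SSA version are made up):

     i, UID D.2741, int, is addressable, default def: i_3(D)

   Each attribute is only appended when the corresponding predicate holds
   for VAR.  */
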
/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}

/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}

/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}

/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}

/*---------------------------------------------------------------------------
			    Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Look up VAR's UID in the default_defs hashtable of FN and return the
   associated default-definition SSA_NAME, or NULL_TREE if there is none.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (TREE_CODE (var) == VAR_DECL
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}

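/* A minimal usage sketch (editorial, illustrative only): to find the
   SSA_NAME holding the value of the first parameter on function entry, a
   pass could do something like

     tree parm = DECL_ARGUMENTS (current_function_decl);
     tree def = parm ? ssa_default_def (cfun, parm) : NULL_TREE;

   where DEF is NULL_TREE when PARM has no default definition (yet).  */
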
/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (TREE_CODE (var) == VAR_DECL
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}

/* Retrieve or create a default definition for VAR.  */

tree
get_or_create_ssa_default_def (struct function *fn, tree var)
{
  tree ddef = ssa_default_def (fn, var);
  if (ddef == NULL_TREE)
    {
      ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
      set_ssa_default_def (fn, var, ddef);
    }
  return ddef;
}

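/* A minimal usage sketch (editorial, illustrative only): unlike
   ssa_default_def, this never returns NULL_TREE for a suitable decl, so a
   pass that needs the incoming value of PARM in SSA form can simply do

     tree name = get_or_create_ssa_default_def (cfun, parm);

   creating the default definition on first use.  */
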
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size)
{
  offset_int bitsize = -1;
  offset_int maxsize;
  tree size_tree = NULL_TREE;
  offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size from just the outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = int (GET_MODE_PRECISION (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    bitsize = wi::to_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
	      {
		offset_int woffset = wi::lshift (wi::to_offset (this_offset),
						 LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref && maxsize != -1)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || TREE_CODE (fsize) != INTEGER_CST
			    || ssize == NULL
			    || TREE_CODE (ssize) != INTEGER_CST)
			  maxsize = -1;
			else
			  {
			    offset_int tem = (wi::to_offset (ssize)
					      - wi::to_offset (fsize));
			    tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (maxsize != -1
		    && csize
		    && TREE_CODE (csize) == INTEGER_CST)
		  maxsize = wi::to_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (maxsize != -1
		    && asize
		    && TREE_CODE (asize) == INTEGER_CST)
		  maxsize = wi::to_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	       struct { int length; int a[1]; } x;           x.a[d]
	       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && maxsize != -1
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
		  || (bit_offset + maxsize
		      == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  offset_int off = mem_ref_offset (exp);
		  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
		  off += bit_offset;
		  if (wi::fits_shwi_p (off))
		    {
		      bit_offset = off;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

  /* We need to deal with variable arrays ending structures.  */
  if (seen_variable_array_ref
      && maxsize != -1
      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  || (bit_offset + maxsize
	      == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
    maxsize = -1;

 done:
  if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  *psize = bitsize.to_shwi ();

  if (!wi::fits_shwi_p (bit_offset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      if (maxsize == -1
	  && DECL_SIZE (exp)
	  && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
	maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (maxsize == -1
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
	maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset.to_shwi ();
  if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
    *pmax_size = -1;
  else
    *pmax_size = maxsize.to_shwi ();

  return exp;
}

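/* A worked example of the interface above (editorial sketch, assuming a
   32-bit `int' and no extra padding; the struct and values are
   illustrative only):

     struct S { int i; int a[4]; } s;

   For the reference s.a[2], get_ref_base_and_extent hands back the
   VAR_DECL for `s' with *POFFSET == 96, *PSIZE == 32 and *PMAX_SIZE == 32
   (all in bits).  For s.a[d] with a variable index d, *PSIZE is still 32,
   but because offset + maxsize reaches the end of `s' the trailing-array
   punt above applies and *PMAX_SIZE becomes -1.  */
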
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
				 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
	    if (this_off % BITS_PER_UNIT)
	      return NULL_TREE;
	    byte_offset += this_off / BITS_PER_UNIT;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    HOST_WIDE_INT hthis_offset;

	    if (!this_offset
		|| TREE_CODE (this_offset) != INTEGER_CST
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset = TREE_INT_CST_LOW (this_offset);
	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		byte_offset += woffset.to_shwi ();
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

done:
  *poffset = byte_offset;
  return exp;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}

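/* A minimal usage sketch (editorial, illustrative only): for a byte-aligned
   reference such as s.a[2] from the example after get_ref_base_and_extent,
   where `ref' is a placeholder for the tree of that reference, one would
   expect

     HOST_WIDE_INT off;
     tree base = get_addr_base_and_unit_offset (ref, &off);

   to return the VAR_DECL for `s' with off == 12 (bytes), and NULL_TREE
   for any reference whose offset is not a compile-time constant.  */
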
/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}

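/* A minimal usage sketch (editorial, illustrative only): transformation
   passes typically use this as an early bail-out, e.g.

     if (stmt_references_abnormal_ssa_name (stmt))
       return false;

   since SSA names occurring in abnormal PHIs must not be freely propagated
   or have their defining statements moved.  */
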
/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree_d
{
  tree t;
  int num;
};
typedef struct numbered_tree_d numbered_tree;

/* Compare two declaration references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  nt.num = list->length ();
  list->safe_push (nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file to which the list is output and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, int flags)
{
  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}