1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass.  It computes where
21 variables are located (which registers or where in memory) at each position
22 in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments,
32 separately for each basic block, and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 clobber < set < post-modifying stack adjustment
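As an illustration (hypothetical, for exposition only): a call insn that
uses register R1, clobbers register R2 and pops its arguments would be
recorded as roughly the micro operation sequence
MO_USE (R1) -> MO_CALL -> MO_CLOBBER (R2) -> MO_ADJUST (+N)
which respects the ordering above.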
38 Then, a forward dataflow analysis is performed to find out how the locations
39 of variables change through the code and to propagate the variable locations
40 along the control flow graph.
41 The IN set for basic block BB is computed as a union of the OUT sets of BB's
42 predecessors; the OUT set for BB is copied from the IN set for BB and
43 is changed according to the micro operations in BB.
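In pseudo-code, this is the usual forward-dataflow pair (a sketch of the
scheme, not the implementation below):
IN[BB]  = union over each predecessor P of BB of OUT[P]
OUT[BB] = transfer (IN[BB], micro operations of BB)
iterated over a worklist until no OUT set changes.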
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting the offsets of variables addressed using the stack
47 pointer), the table of structures describing the locations of parts of a
48 variable, and a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
52 efficiently deleting the appropriate variable parts when we set or clobber
53 the register.
55 There may be more than one variable part in a register.  The linked lists
56 should be pretty short, so they are a good data structure here.
57 For example, in the following code the register allocator may assign the
58 same register to variables A and B, and both of them will then be stored
59 in that register in CODE:
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted at appropriate positions in the RTL code.  Each such note
73 describes the location of one variable at the point in the instruction
74 stream where the note is.  There is no need to emit a note for each variable
75 before each instruction; we only emit these notes where the location of a
76 variable changes (this means that we also emit notes for changes between the
77 OUT set of the previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).  */
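/* For illustration only (the exact shape is produced by
   emit_note_insn_var_location below): an int variable "i" living in
   register 0 might get a note carrying
   (var_location i (reg:SI 0))
   while a long long variable split across two registers might get a
   parallel of its pieces, each with its offset, along the lines of
   (var_location ll (parallel [(expr_list (reg:SI 0) (const_int 0))
   (expr_list (reg:SI 1) (const_int 4))])).  */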
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "alias.h"
94 #include "symtab.h"
95 #include "tree.h"
96 #include "varasm.h"
97 #include "stor-layout.h"
98 #include "predict.h"
99 #include "hard-reg-set.h"
100 #include "function.h"
101 #include "dominance.h"
102 #include "cfg.h"
103 #include "cfgrtl.h"
104 #include "cfganal.h"
105 #include "basic-block.h"
106 #include "tm_p.h"
107 #include "flags.h"
108 #include "insn-config.h"
109 #include "reload.h"
110 #include "sbitmap.h"
111 #include "alloc-pool.h"
112 #include "regs.h"
113 #include "expmed.h"
114 #include "dojump.h"
115 #include "explow.h"
116 #include "calls.h"
117 #include "emit-rtl.h"
118 #include "stmt.h"
119 #include "expr.h"
120 #include "tree-pass.h"
121 #include "bitmap.h"
122 #include "tree-dfa.h"
123 #include "tree-ssa.h"
124 #include "cselib.h"
125 #include "target.h"
126 #include "params.h"
127 #include "diagnostic.h"
128 #include "tree-pretty-print.h"
129 #include "recog.h"
130 #include "rtl-iter.h"
131 #include "fibonacci_heap.h"
133 typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
134 typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;
136 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
137 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
138 Currently the value is the same as IDENTIFIER_NODE, which has such
139 a property. If this compile time assertion ever fails, make sure that
140 the new tree code that equals (int) VALUE has the same property. */
141 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
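/* The declaration above is a poor man's compile-time assertion: if the
   condition is false, the array size becomes -1, which no compiler
   accepts.  A minimal stand-alone sketch of the same idiom (the name is
   hypothetical):

   extern char assert_host_int_wide_enough[sizeof (int) >= 4 ? 1 : -1];

   A C11 host could spell this _Static_assert instead.  */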
143 /* Type of micro operation. */
144 enum micro_operation_type
146 MO_USE, /* Use location (REG or MEM). */
147 MO_USE_NO_VAR, /* Use location which is not associated with a variable
148 or the variable is not trackable. */
149 MO_VAL_USE, /* Use location which is associated with a value. */
150 MO_VAL_LOC, /* Use location which appears in a debug insn. */
151 MO_VAL_SET, /* Set location associated with a value. */
152 MO_SET, /* Set location. */
153 MO_COPY, /* Copy the same portion of a variable from one
154 location to another. */
155 MO_CLOBBER, /* Clobber location. */
156 MO_CALL, /* Call insn. */
157 MO_ADJUST /* Adjust stack pointer. */
161 static const char * const ATTRIBUTE_UNUSED
162 micro_operation_type_name[] = {
163 "MO_USE",
164 "MO_USE_NO_VAR",
165 "MO_VAL_USE",
166 "MO_VAL_LOC",
167 "MO_VAL_SET",
168 "MO_SET",
169 "MO_COPY",
170 "MO_CLOBBER",
171 "MO_CALL",
172 "MO_ADJUST"
175 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
176 Notes emitted as AFTER_CALL are to take effect during the call,
177 rather than after the call. */
178 enum emit_note_where
180 EMIT_NOTE_BEFORE_INSN,
181 EMIT_NOTE_AFTER_INSN,
182 EMIT_NOTE_AFTER_CALL_INSN
185 /* Structure holding information about micro operation. */
186 typedef struct micro_operation_def
188 /* Type of micro operation. */
189 enum micro_operation_type type;
191 /* The instruction which the micro operation is in, for MO_USE,
192 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
193 instruction or note in the original flow (before any var-tracking
194 notes are inserted, to simplify emission of notes), for MO_SET
195 and MO_CLOBBER. */
196 rtx_insn *insn;
198 union {
199 /* Location. For MO_SET and MO_COPY, this is the SET that
200 performs the assignment, if known, otherwise it is the target
201 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
202 CONCAT of the VALUE and the LOC associated with it. For
203 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
204 associated with it. */
205 rtx loc;
207 /* Stack adjustment. */
208 HOST_WIDE_INT adjust;
209 } u;
210 } micro_operation;
213 /* A declaration of a variable, or an RTL value being handled like a
214 declaration. */
215 typedef void *decl_or_value;
217 /* Return true if a decl_or_value DV is a DECL or NULL. */
218 static inline bool
219 dv_is_decl_p (decl_or_value dv)
221 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
224 /* Return true if a decl_or_value is a VALUE rtl. */
225 static inline bool
226 dv_is_value_p (decl_or_value dv)
228 return dv && !dv_is_decl_p (dv);
231 /* Return the decl in the decl_or_value. */
232 static inline tree
233 dv_as_decl (decl_or_value dv)
235 gcc_checking_assert (dv_is_decl_p (dv));
236 return (tree) dv;
239 /* Return the value in the decl_or_value. */
240 static inline rtx
241 dv_as_value (decl_or_value dv)
243 gcc_checking_assert (dv_is_value_p (dv));
244 return (rtx)dv;
247 /* Return the opaque pointer in the decl_or_value. */
248 static inline void *
249 dv_as_opaque (decl_or_value dv)
251 return dv;
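/* Usage sketch (illustrative only): decl_or_value is an untagged
   pointer, and the accessors above discriminate by tree code, relying
   on the check_value_val assertion near the top of the file:

   decl_or_value dv = dv_from_rtx (x);  // x: a VALUE or DEBUG_EXPR rtx
   if (dv_is_value_p (dv))
     handle_value (dv_as_value (dv));   // handle_value: hypothetical
   else
     handle_decl (dv_as_decl (dv));     // handle_decl: hypothetical
*/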
255 /* Description of the location of a part of a variable.  The content of a
256 physical register is described by a chain of these structures.
257 The chains are pretty short (usually 1 or 2 elements) and thus
258 a chain is a good data structure here.  */
259 typedef struct attrs_def
261 /* Pointer to next member of the list. */
262 struct attrs_def *next;
264 /* The rtx of register. */
265 rtx loc;
267 /* The declaration corresponding to LOC. */
268 decl_or_value dv;
270 /* Offset from start of DECL. */
271 HOST_WIDE_INT offset;
273 /* Pool allocation new operator. */
274 inline void *operator new (size_t)
276 return pool.allocate ();
279 /* Delete operator utilizing pool allocation. */
280 inline void operator delete (void *ptr)
282 pool.remove ((attrs_def *) ptr);
285 /* Memory allocation pool. */
286 static pool_allocator<attrs_def> pool;
287 } *attrs;
289 /* Structure for chaining the locations. */
290 typedef struct location_chain_def
292 /* Next element in the chain. */
293 struct location_chain_def *next;
295 /* The location (REG, MEM or VALUE). */
296 rtx loc;
298 /* The "value" stored in this location. */
299 rtx set_src;
301 /* Initialized? */
302 enum var_init_status init;
304 /* Pool allocation new operator. */
305 inline void *operator new (size_t)
307 return pool.allocate ();
310 /* Delete operator utilizing pool allocation. */
311 inline void operator delete (void *ptr)
313 pool.remove ((location_chain_def *) ptr);
316 /* Memory allocation pool. */
317 static pool_allocator<location_chain_def> pool;
318 } *location_chain;
320 /* A vector of loc_exp_dep holds the active dependencies of a one-part
321 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
322 location of DV.  Each entry is also part of VALUE's linked-list of
323 backlinks back to DV. */
324 typedef struct loc_exp_dep_s
326 /* The dependent DV. */
327 decl_or_value dv;
328 /* The dependency VALUE or DEBUG_EXPR rtx.  */
329 rtx value;
330 /* The next entry in VALUE's backlinks list. */
331 struct loc_exp_dep_s *next;
332 /* A pointer to the pointer to this entry (head or prev's next) in
333 the doubly-linked list. */
334 struct loc_exp_dep_s **pprev;
336 /* Pool allocation new operator. */
337 inline void *operator new (size_t)
339 return pool.allocate ();
342 /* Delete operator utilizing pool allocation. */
343 inline void operator delete (void *ptr)
345 pool.remove ((loc_exp_dep_s *) ptr);
348 /* Memory allocation pool. */
349 static pool_allocator<loc_exp_dep_s> pool;
350 } loc_exp_dep;
353 /* This data structure holds information about the depth of a variable
354 expansion. */
355 typedef struct expand_depth_struct
357 /* This measures the complexity of the expanded expression. It
358 grows by one for each level of expansion that adds more than one
359 operand. */
360 int complexity;
361 /* This counts the number of ENTRY_VALUE expressions in an
362 expansion. We want to minimize their use. */
363 int entryvals;
364 } expand_depth;
366 /* This data structure is allocated for one-part variables at the time
367 of emitting notes. */
368 struct onepart_aux
370 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
371 computation used the expansion of this variable, and that ought
372 to be notified should this variable change. If the DV's cur_loc
373 expanded to NULL, all components of the loc list are regarded as
374 active, so that any changes in them give us a chance to get a
375 location. Otherwise, only components of the loc that expanded to
376 non-NULL are regarded as active dependencies. */
377 loc_exp_dep *backlinks;
378 /* This holds the LOC that was expanded into cur_loc. We need only
379 mark a one-part variable as changed if the FROM loc is removed,
380 or if it has no known location and a loc is added, or if it gets
381 a change notification from any of its active dependencies. */
382 rtx from;
383 /* The depth of the cur_loc expression. */
384 expand_depth depth;
385 /* Dependencies actively used when expanding FROM into cur_loc.  */
386 vec<loc_exp_dep, va_heap, vl_embed> deps;
389 /* Structure describing one part of a variable.  */
390 typedef struct variable_part_def
392 /* Chain of locations of the part. */
393 location_chain loc_chain;
395 /* Location which was last emitted to location list. */
396 rtx cur_loc;
398 union variable_aux
400 /* The offset in the variable, if !var->onepart. */
401 HOST_WIDE_INT offset;
403 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
404 struct onepart_aux *onepaux;
405 } aux;
406 } variable_part;
408 /* Maximum number of location parts. */
409 #define MAX_VAR_PARTS 16
411 /* Enumeration type used to discriminate various types of one-part
412 variables. */
413 typedef enum onepart_enum
415 /* Not a one-part variable. */
416 NOT_ONEPART = 0,
417 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
418 ONEPART_VDECL = 1,
419 /* A DEBUG_EXPR_DECL. */
420 ONEPART_DEXPR = 2,
421 /* A VALUE. */
422 ONEPART_VALUE = 3
423 } onepart_enum_t;
425 /* Structure describing where the variable is located. */
426 typedef struct variable_def
428 /* The declaration of the variable, or an RTL value being handled
429 like a declaration. */
430 decl_or_value dv;
432 /* Reference count. */
433 int refcount;
435 /* Number of variable parts. */
436 char n_var_parts;
438 /* What type of DV this is, according to enum onepart_enum. */
439 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
441 /* True if this variable_def struct is currently in the
442 changed_variables hash table. */
443 bool in_changed_variables;
445 /* The variable parts. */
446 variable_part var_part[1];
447 } *variable;
448 typedef const struct variable_def *const_variable;
450 /* Pointer to the BB's information specific to variable tracking pass. */
451 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
453 /* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
454 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
456 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
458 /* Access VAR's Ith part's offset, checking that it's not a one-part
459 variable. */
460 #define VAR_PART_OFFSET(var, i) __extension__ \
461 (*({ variable const __v = (var); \
462 gcc_checking_assert (!__v->onepart); \
463 &__v->var_part[(i)].aux.offset; }))
465 /* Access VAR's one-part auxiliary data, checking that it is a
466 one-part variable. */
467 #define VAR_LOC_1PAUX(var) __extension__ \
468 (*({ variable const __v = (var); \
469 gcc_checking_assert (__v->onepart); \
470 &__v->var_part[0].aux.onepaux; }))
472 #else
473 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
474 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
475 #endif
477 /* These are accessor macros for the one-part auxiliary data. When
478 convenient for users, they're guarded by tests that the data was
479 allocated. */
480 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
481 ? VAR_LOC_1PAUX (var)->backlinks \
482 : NULL)
483 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
484 ? &VAR_LOC_1PAUX (var)->backlinks \
485 : NULL)
486 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
487 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
488 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
489 ? &VAR_LOC_1PAUX (var)->deps \
490 : NULL)
494 typedef unsigned int dvuid;
496 /* Return the uid of DV. */
498 static inline dvuid
499 dv_uid (decl_or_value dv)
501 if (dv_is_value_p (dv))
502 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
503 else
504 return DECL_UID (dv_as_decl (dv));
507 /* Compute the hash from the uid. */
509 static inline hashval_t
510 dv_uid2hash (dvuid uid)
512 return uid;
515 /* The hash function for a decl_or_value, used by the variable hash tables.  */
517 static inline hashval_t
518 dv_htab_hash (decl_or_value dv)
520 return dv_uid2hash (dv_uid (dv));
523 static void variable_htab_free (void *);
525 /* Variable hashtable helpers. */
527 struct variable_hasher
529 typedef variable_def *value_type;
530 typedef void *compare_type;
531 static inline hashval_t hash (const variable_def *);
532 static inline bool equal (const variable_def *, const void *);
533 static inline void remove (variable_def *);
536 /* The hash function for variable_htab, computes the hash value
537 from the declaration of variable V.  */
539 inline hashval_t
540 variable_hasher::hash (const variable_def *v)
542 return dv_htab_hash (v->dv);
545 /* Compare the declaration of variable V with declaration Y.  */
547 inline bool
548 variable_hasher::equal (const variable_def *v, const void *y)
550 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
552 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
555 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
557 inline void
558 variable_hasher::remove (variable_def *var)
560 variable_htab_free (var);
563 typedef hash_table<variable_hasher> variable_table_type;
564 typedef variable_table_type::iterator variable_iterator_type;
566 /* Structure for passing some other parameters to function
567 emit_note_insn_var_location. */
568 typedef struct emit_note_data_def
570 /* The instruction which the note will be emitted before/after. */
571 rtx_insn *insn;
573 /* Where the note will be emitted (before/after insn)? */
574 enum emit_note_where where;
576 /* The variables and values active at this point. */
577 variable_table_type *vars;
578 } emit_note_data;
580 /* Structure holding a refcounted hash table.  If refcount > 1,
581 it must first be unshared before being modified.  */
582 typedef struct shared_hash_def
584 /* Reference count. */
585 int refcount;
587 /* Actual hash table. */
588 variable_table_type *htab;
590 /* Pool allocation new operator. */
591 inline void *operator new (size_t)
593 return pool.allocate ();
596 /* Delete operator utilizing pool allocation. */
597 inline void operator delete (void *ptr)
599 pool.remove ((shared_hash_def *) ptr);
602 /* Memory allocation pool. */
603 static pool_allocator<shared_hash_def> pool;
604 } *shared_hash;
606 /* Structure holding the IN or OUT set for a basic block. */
607 typedef struct dataflow_set_def
609 /* Adjustment of stack offset. */
610 HOST_WIDE_INT stack_adjust;
612 /* Attributes for registers (lists of attrs). */
613 attrs regs[FIRST_PSEUDO_REGISTER];
615 /* Variable locations. */
616 shared_hash vars;
618 /* Vars that are currently being traversed.  */
619 shared_hash traversed_vars;
620 } dataflow_set;
622 /* The structure (one for each basic block) containing the information
623 needed for variable tracking. */
624 typedef struct variable_tracking_info_def
626 /* The vector of micro operations. */
627 vec<micro_operation> mos;
629 /* The IN and OUT set for dataflow analysis. */
630 dataflow_set in;
631 dataflow_set out;
633 /* The permanent-in dataflow set for this block. This is used to
634 hold values for which we had to compute entry values. ??? This
635 should probably be dynamically allocated, to avoid using more
636 memory in non-debug builds. */
637 dataflow_set *permp;
639 /* Has the block been visited in DFS? */
640 bool visited;
642 /* Has the block been flooded in VTA? */
643 bool flooded;
645 } *variable_tracking_info;
647 /* Alloc pool for struct attrs_def. */
648 pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);
650 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
652 static pool_allocator<variable_def> var_pool
653 ("variable_def pool", 64,
654 (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));
656 /* Alloc pool for struct variable_def with a single var_part entry. */
657 static pool_allocator<variable_def> valvar_pool
658 ("small variable_def pool", 256);
660 /* Alloc pool for struct location_chain_def. */
661 pool_allocator<location_chain_def> location_chain_def::pool
662 ("location_chain_def pool", 1024);
664 /* Alloc pool for struct shared_hash_def. */
665 pool_allocator<shared_hash_def> shared_hash_def::pool
666 ("shared_hash_def pool", 256);
668 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
669 pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);
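/* Usage sketch for the pools above (illustrative): the overloaded
   operator new/delete on these node types route allocation through the
   pools, so nodes are created and released like

   attrs node = new attrs_def;   // draws from attrs_def::pool
   ...
   delete node;                  // returns the node to the pool

   which is how attrs_list_insert and attrs_list_clear below manage
   their nodes.  */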
671 /* Changed variables; notes will be emitted for them.  */
672 static variable_table_type *changed_variables;
674 /* Shall notes be emitted? */
675 static bool emit_notes;
677 /* Values whose dynamic location lists have gone empty, but whose
678 cselib location lists are still usable.  Use this to hold the
679 current location, the backlinks, etc., during emit_notes.  */
680 static variable_table_type *dropped_values;
682 /* Empty shared hashtable. */
683 static shared_hash empty_shared_hash;
685 /* Scratch register bitmap used by cselib_expand_value_rtx. */
686 static bitmap scratch_regs = NULL;
688 #ifdef HAVE_window_save
689 typedef struct GTY(()) parm_reg {
690 rtx outgoing;
691 rtx incoming;
692 } parm_reg_t;
695 /* Vector of windowed parameter registers, if any. */
696 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
697 #endif
699 /* Variable used to tell whether cselib_process_insn called our hook. */
700 static bool cselib_hook_called;
702 /* Local function prototypes. */
703 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
704 HOST_WIDE_INT *);
705 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
706 HOST_WIDE_INT *);
707 static bool vt_stack_adjustments (void);
709 static void init_attrs_list_set (attrs *);
710 static void attrs_list_clear (attrs *);
711 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
712 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
713 static void attrs_list_copy (attrs *, attrs);
714 static void attrs_list_union (attrs *, attrs);
716 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
717 variable var, enum var_init_status);
718 static void vars_copy (variable_table_type *, variable_table_type *);
719 static tree var_debug_decl (tree);
720 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
721 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
722 enum var_init_status, rtx);
723 static void var_reg_delete (dataflow_set *, rtx, bool);
724 static void var_regno_delete (dataflow_set *, int);
725 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
726 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
727 enum var_init_status, rtx);
728 static void var_mem_delete (dataflow_set *, rtx, bool);
730 static void dataflow_set_init (dataflow_set *);
731 static void dataflow_set_clear (dataflow_set *);
732 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
733 static int variable_union_info_cmp_pos (const void *, const void *);
734 static void dataflow_set_union (dataflow_set *, dataflow_set *);
735 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
736 static bool canon_value_cmp (rtx, rtx);
737 static int loc_cmp (rtx, rtx);
738 static bool variable_part_different_p (variable_part *, variable_part *);
739 static bool onepart_variable_different_p (variable, variable);
740 static bool variable_different_p (variable, variable);
741 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
742 static void dataflow_set_destroy (dataflow_set *);
744 static bool contains_symbol_ref (rtx);
745 static bool track_expr_p (tree, bool);
746 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
747 static void add_uses_1 (rtx *, void *);
748 static void add_stores (rtx, const_rtx, void *);
749 static bool compute_bb_dataflow (basic_block);
750 static bool vt_find_locations (void);
752 static void dump_attrs_list (attrs);
753 static void dump_var (variable);
754 static void dump_vars (variable_table_type *);
755 static void dump_dataflow_set (dataflow_set *);
756 static void dump_dataflow_sets (void);
758 static void set_dv_changed (decl_or_value, bool);
759 static void variable_was_changed (variable, dataflow_set *);
760 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
761 decl_or_value, HOST_WIDE_INT,
762 enum var_init_status, rtx);
763 static void set_variable_part (dataflow_set *, rtx,
764 decl_or_value, HOST_WIDE_INT,
765 enum var_init_status, rtx, enum insert_option);
766 static variable_def **clobber_slot_part (dataflow_set *, rtx,
767 variable_def **, HOST_WIDE_INT, rtx);
768 static void clobber_variable_part (dataflow_set *, rtx,
769 decl_or_value, HOST_WIDE_INT, rtx);
770 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
771 HOST_WIDE_INT);
772 static void delete_variable_part (dataflow_set *, rtx,
773 decl_or_value, HOST_WIDE_INT);
774 static void emit_notes_in_bb (basic_block, dataflow_set *);
775 static void vt_emit_notes (void);
777 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
778 static void vt_add_function_parameters (void);
779 static bool vt_initialize (void);
780 static void vt_finalize (void);
782 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
784 static int
785 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
786 void *arg)
788 if (dest != stack_pointer_rtx)
789 return 0;
791 switch (GET_CODE (op))
793 case PRE_INC:
794 case PRE_DEC:
795 ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
796 return 0;
797 case POST_INC:
798 case POST_DEC:
799 ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
800 return 0;
801 case PRE_MODIFY:
802 case POST_MODIFY:
803 /* We handle only adjustments by constant amount. */
804 gcc_assert (GET_CODE (src) == PLUS
805 && CONST_INT_P (XEXP (src, 1))
806 && XEXP (src, 0) == stack_pointer_rtx);
807 ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
808 -= INTVAL (XEXP (src, 1));
809 return 0;
810 default:
811 gcc_unreachable ();
815 /* Given a SET, calculate the amount of stack adjustment it contains,
816 PRE- and POST-modifying the stack pointer.
817 This function is similar to stack_adjust_offset.  */
819 static void
820 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
821 HOST_WIDE_INT *post)
823 rtx src = SET_SRC (pattern);
824 rtx dest = SET_DEST (pattern);
825 enum rtx_code code;
827 if (dest == stack_pointer_rtx)
829 /* (set (reg sp) (plus (reg sp) (const_int))) */
830 code = GET_CODE (src);
831 if (! (code == PLUS || code == MINUS)
832 || XEXP (src, 0) != stack_pointer_rtx
833 || !CONST_INT_P (XEXP (src, 1)))
834 return;
836 if (code == MINUS)
837 *post += INTVAL (XEXP (src, 1));
838 else
839 *post -= INTVAL (XEXP (src, 1));
840 return;
842 HOST_WIDE_INT res[2] = { 0, 0 };
843 for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
844 *pre += res[0];
845 *post += res[1];
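/* Worked example (a sketch): for (set (reg sp) (plus (reg sp)
   (const_int -16))) the code above adds 16 to *post, and for a pattern
   containing (set (mem:SI (pre_dec (reg sp))) (reg r)) the callback
   records a pre-adjustment of 4 (the size of the SImode access) in
   *pre.  Adjustments are positive when the stack grows downward.  */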
848 /* Given an INSN, calculate the amount of stack adjustment it contains,
849 PRE- and POST-modifying the stack pointer.  */
851 static void
852 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
853 HOST_WIDE_INT *post)
855 rtx pattern;
857 *pre = 0;
858 *post = 0;
860 pattern = PATTERN (insn);
861 if (RTX_FRAME_RELATED_P (insn))
863 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
864 if (expr)
865 pattern = XEXP (expr, 0);
868 if (GET_CODE (pattern) == SET)
869 stack_adjust_offset_pre_post (pattern, pre, post);
870 else if (GET_CODE (pattern) == PARALLEL
871 || GET_CODE (pattern) == SEQUENCE)
873 int i;
875 /* There may be stack adjustments inside compound insns. Search
876 for them. */
877 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
878 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
879 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
883 /* Compute stack adjustments for all blocks by traversing the DFS tree.
884 Return true when the adjustments on all incoming edges are consistent.
885 Heavily borrowed from pre_and_rev_post_order_compute. */
887 static bool
888 vt_stack_adjustments (void)
890 edge_iterator *stack;
891 int sp;
893 /* Initialize entry block. */
894 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
895 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
896 = INCOMING_FRAME_SP_OFFSET;
897 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
898 = INCOMING_FRAME_SP_OFFSET;
900 /* Allocate stack for back-tracking up CFG. */
901 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
902 sp = 0;
904 /* Push the first edge on to the stack. */
905 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
907 while (sp)
909 edge_iterator ei;
910 basic_block src;
911 basic_block dest;
913 /* Look at the edge on the top of the stack. */
914 ei = stack[sp - 1];
915 src = ei_edge (ei)->src;
916 dest = ei_edge (ei)->dest;
918 /* Check if the edge destination has been visited yet. */
919 if (!VTI (dest)->visited)
921 rtx_insn *insn;
922 HOST_WIDE_INT pre, post, offset;
923 VTI (dest)->visited = true;
924 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
926 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
927 for (insn = BB_HEAD (dest);
928 insn != NEXT_INSN (BB_END (dest));
929 insn = NEXT_INSN (insn))
930 if (INSN_P (insn))
932 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
933 offset += pre + post;
936 VTI (dest)->out.stack_adjust = offset;
938 if (EDGE_COUNT (dest->succs) > 0)
939 /* Since the DEST node has been visited for the first
940 time, check its successors. */
941 stack[sp++] = ei_start (dest->succs);
943 else
945 /* We can end up with different stack adjustments for the exit block
946 of a shrink-wrapped function if stack_adjust_offset_pre_post
947 doesn't understand the rtx pattern used to restore the stack
948 pointer in the epilogue. For example, on s390(x), the stack
949 pointer is often restored via a load-multiple instruction
950 and so no stack_adjust offset is recorded for it. This means
951 that the stack offset at the end of the epilogue block is the
952 same as the offset before the epilogue, whereas other paths
953 to the exit block will have the correct stack_adjust.
955 It is safe to ignore these differences because (a) we never
956 use the stack_adjust for the exit block in this pass and
957 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
958 function are correct.
960 We must check whether the adjustments on other edges are
961 the same though. */
962 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
963 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
965 free (stack);
966 return false;
969 if (! ei_one_before_end_p (ei))
970 /* Go to the next edge. */
971 ei_next (&stack[sp - 1]);
972 else
973 /* Return to previous level if there are no more edges. */
974 sp--;
978 free (stack);
979 return true;
982 /* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx
983 or hard_frame_pointer_rtx is being mapped to it, and the offset for it.  */
984 static rtx cfa_base_rtx;
985 static HOST_WIDE_INT cfa_base_offset;
987 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
988 or hard_frame_pointer_rtx. */
990 static inline rtx
991 compute_cfa_pointer (HOST_WIDE_INT adjustment)
993 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
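/* Worked example (illustrative): if cfa_base_rtx is the frame pointer
   and cfa_base_offset is -8, compute_cfa_pointer (16) yields
   (plus:P (reg fp) (const_int 8)).  */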
996 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
997 or -1 if the replacement shouldn't be done. */
998 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
1000 /* Data for adjust_mems callback. */
1002 struct adjust_mem_data
1004 bool store;
1005 machine_mode mem_mode;
1006 HOST_WIDE_INT stack_adjust;
1007 rtx_expr_list *side_effects;
1010 /* Helper for adjust_mems.  Return true if X is suitable for
1011 transformation of wider-mode arithmetic to a narrower mode.  */
1013 static bool
1014 use_narrower_mode_test (rtx x, const_rtx subreg)
1016 subrtx_var_iterator::array_type array;
1017 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
1019 rtx x = *iter;
1020 if (CONSTANT_P (x))
1021 iter.skip_subrtxes ();
1022 else
1023 switch (GET_CODE (x))
1025 case REG:
1026 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
1027 return false;
1028 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
1029 subreg_lowpart_offset (GET_MODE (subreg),
1030 GET_MODE (x))))
1031 return false;
1032 break;
1033 case PLUS:
1034 case MINUS:
1035 case MULT:
1036 break;
1037 case ASHIFT:
1038 iter.substitute (XEXP (x, 0));
1039 break;
1040 default:
1041 return false;
1044 return true;
1047 /* Transform X into narrower mode MODE from wider mode WMODE. */
1049 static rtx
1050 use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
1052 rtx op0, op1;
1053 if (CONSTANT_P (x))
1054 return lowpart_subreg (mode, x, wmode);
1055 switch (GET_CODE (x))
1057 case REG:
1058 return lowpart_subreg (mode, x, wmode);
1059 case PLUS:
1060 case MINUS:
1061 case MULT:
1062 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1063 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1064 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1065 case ASHIFT:
1066 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1067 op1 = XEXP (x, 1);
1068 /* Ensure shift amount is not wider than mode. */
1069 if (GET_MODE (op1) == VOIDmode)
1070 op1 = lowpart_subreg (mode, op1, wmode);
1071 else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
1072 op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
1073 return simplify_gen_binary (ASHIFT, mode, op0, op1);
1074 default:
1075 gcc_unreachable ();
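/* Worked example (a sketch): narrowing (plus:DI (reg:DI r) (const_int 1))
   from DImode to SImode yields (plus:SI (subreg:SI (reg:DI r) ...)
   (const_int 1)).  Redoing the arithmetic in the narrower mode is valid
   for PLUS, MINUS and MULT (and for ASHIFT's shifted operand) because
   the discarded high part cannot affect the retained low part.  */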
1079 /* Helper function for adjusting used MEMs. */
1081 static rtx
1082 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1084 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1085 rtx mem, addr = loc, tem;
1086 machine_mode mem_mode_save;
1087 bool store_save;
1088 switch (GET_CODE (loc))
1090 case REG:
1091 /* Don't do any sp or fp replacements outside of MEM addresses
1092 on the LHS. */
1093 if (amd->mem_mode == VOIDmode && amd->store)
1094 return loc;
1095 if (loc == stack_pointer_rtx
1096 && !frame_pointer_needed
1097 && cfa_base_rtx)
1098 return compute_cfa_pointer (amd->stack_adjust);
1099 else if (loc == hard_frame_pointer_rtx
1100 && frame_pointer_needed
1101 && hard_frame_pointer_adjustment != -1
1102 && cfa_base_rtx)
1103 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1104 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1105 return loc;
1106 case MEM:
1107 mem = loc;
1108 if (!amd->store)
1110 mem = targetm.delegitimize_address (mem);
1111 if (mem != loc && !MEM_P (mem))
1112 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1115 addr = XEXP (mem, 0);
1116 mem_mode_save = amd->mem_mode;
1117 amd->mem_mode = GET_MODE (mem);
1118 store_save = amd->store;
1119 amd->store = false;
1120 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1121 amd->store = store_save;
1122 amd->mem_mode = mem_mode_save;
1123 if (mem == loc)
1124 addr = targetm.delegitimize_address (addr);
1125 if (addr != XEXP (mem, 0))
1126 mem = replace_equiv_address_nv (mem, addr);
1127 if (!amd->store)
1128 mem = avoid_constant_pool_reference (mem);
1129 return mem;
1130 case PRE_INC:
1131 case PRE_DEC:
1132 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1133 gen_int_mode (GET_CODE (loc) == PRE_INC
1134 ? GET_MODE_SIZE (amd->mem_mode)
1135 : -GET_MODE_SIZE (amd->mem_mode),
1136 GET_MODE (loc)));
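/* Fall through: the pre-modified address was computed above; the
   side-effect set is built like for the post-modifying codes below.  */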
1137 case POST_INC:
1138 case POST_DEC:
1139 if (addr == loc)
1140 addr = XEXP (loc, 0);
1141 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1142 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1143 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1144 gen_int_mode ((GET_CODE (loc) == PRE_INC
1145 || GET_CODE (loc) == POST_INC)
1146 ? GET_MODE_SIZE (amd->mem_mode)
1147 : -GET_MODE_SIZE (amd->mem_mode),
1148 GET_MODE (loc)));
1149 store_save = amd->store;
1150 amd->store = false;
1151 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1152 amd->store = store_save;
1153 amd->side_effects = alloc_EXPR_LIST (0,
1154 gen_rtx_SET (XEXP (loc, 0), tem),
1155 amd->side_effects);
1156 return addr;
1157 case PRE_MODIFY:
1158 addr = XEXP (loc, 1);
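/* Fall through.  */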
1159 case POST_MODIFY:
1160 if (addr == loc)
1161 addr = XEXP (loc, 0);
1162 gcc_assert (amd->mem_mode != VOIDmode);
1163 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1164 store_save = amd->store;
1165 amd->store = false;
1166 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1167 adjust_mems, data);
1168 amd->store = store_save;
1169 amd->side_effects = alloc_EXPR_LIST (0,
1170 gen_rtx_SET (XEXP (loc, 0), tem),
1171 amd->side_effects);
1172 return addr;
1173 case SUBREG:
1174 /* First try without delegitimization of whole MEMs and
1175 avoid_constant_pool_reference, which is more likely to succeed. */
1176 store_save = amd->store;
1177 amd->store = true;
1178 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1179 data);
1180 amd->store = store_save;
1181 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1182 if (mem == SUBREG_REG (loc))
1184 tem = loc;
1185 goto finish_subreg;
1187 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1188 GET_MODE (SUBREG_REG (loc)),
1189 SUBREG_BYTE (loc));
1190 if (tem)
1191 goto finish_subreg;
1192 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1193 GET_MODE (SUBREG_REG (loc)),
1194 SUBREG_BYTE (loc));
1195 if (tem == NULL_RTX)
1196 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1197 finish_subreg:
1198 if (MAY_HAVE_DEBUG_INSNS
1199 && GET_CODE (tem) == SUBREG
1200 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1201 || GET_CODE (SUBREG_REG (tem)) == MINUS
1202 || GET_CODE (SUBREG_REG (tem)) == MULT
1203 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1204 && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1205 || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
1206 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1207 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
1208 && GET_MODE_PRECISION (GET_MODE (tem))
1209 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
1210 && subreg_lowpart_p (tem)
1211 && use_narrower_mode_test (SUBREG_REG (tem), tem))
1212 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1213 GET_MODE (SUBREG_REG (tem)));
1214 return tem;
1215 case ASM_OPERANDS:
1216 /* Don't do any replacements in second and following
1217 ASM_OPERANDS of inline-asm with multiple sets.
1218 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1219 and ASM_OPERANDS_LABEL_VEC need to be equal between
1220 all the ASM_OPERANDs in the insn and adjust_insn will
1221 fix this up. */
1222 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1223 return loc;
1224 break;
1225 default:
1226 break;
1228 return NULL_RTX;
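/* Worked example (illustrative): for a use (mem:SI (post_inc (reg sp))),
   the POST_INC case above returns the plain address (reg sp) and queues
   (set (reg sp) (plus (reg sp) (const_int 4))) on amd->side_effects;
   adjust_insn below then attaches the queued sets to the insn as extra
   sets of a PARALLEL.  */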
1231 /* Helper function for replacement of uses. */
1233 static void
1234 adjust_mem_uses (rtx *x, void *data)
1236 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1237 if (new_x != *x)
1238 validate_change (NULL_RTX, x, new_x, true);
1241 /* Helper function for replacement of stores. */
1243 static void
1244 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1246 if (MEM_P (loc))
1248 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1249 adjust_mems, data);
1250 if (new_dest != SET_DEST (expr))
1252 rtx xexpr = CONST_CAST_RTX (expr);
1253 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1258 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1259 replace them with their value in the insn and add the side-effects
1260 as other sets to the insn. */
1262 static void
1263 adjust_insn (basic_block bb, rtx_insn *insn)
1265 struct adjust_mem_data amd;
1266 rtx set;
1268 #ifdef HAVE_window_save
1269 /* If the target machine has an explicit window save instruction, the
1270 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1271 if (RTX_FRAME_RELATED_P (insn)
1272 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1274 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1275 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1276 parm_reg_t *p;
1278 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1280 XVECEXP (rtl, 0, i * 2)
1281 = gen_rtx_SET (p->incoming, p->outgoing);
1282 /* Do not clobber the attached DECL, but only the REG. */
1283 XVECEXP (rtl, 0, i * 2 + 1)
1284 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1285 gen_raw_REG (GET_MODE (p->outgoing),
1286 REGNO (p->outgoing)));
1289 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1290 return;
1292 #endif
1294 amd.mem_mode = VOIDmode;
1295 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1296 amd.side_effects = NULL;
1298 amd.store = true;
1299 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1301 amd.store = false;
1302 if (GET_CODE (PATTERN (insn)) == PARALLEL
1303 && asm_noperands (PATTERN (insn)) > 0
1304 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1306 rtx body, set0;
1307 int i;
1309 /* inline-asm with multiple sets is a tiny bit more complicated,
1310 because the 3 vectors in ASM_OPERANDS need to be shared between
1311 all ASM_OPERANDS in the instruction.  adjust_mems will not touch
1312 ASM_OPERANDS other than the first one, so the asm_noperands
1313 test above needs to be done before that (otherwise it would fail),
1314 and afterwards this code fixes things up.  */
1315 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1316 body = PATTERN (insn);
1317 set0 = XVECEXP (body, 0, 0);
1318 gcc_checking_assert (GET_CODE (set0) == SET
1319 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1320 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1321 for (i = 1; i < XVECLEN (body, 0); i++)
1322 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1323 break;
1324 else
1326 set = XVECEXP (body, 0, i);
1327 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1328 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1329 == i);
1330 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1331 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1332 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1333 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1334 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1335 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1337 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1338 ASM_OPERANDS_INPUT_VEC (newsrc)
1339 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1340 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1341 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1342 ASM_OPERANDS_LABEL_VEC (newsrc)
1343 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1344 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1348 else
1349 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1351 /* For read-only MEMs containing some constant, prefer those
1352 constants. */
1353 set = single_set (insn);
1354 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1356 rtx note = find_reg_equal_equiv_note (insn);
1358 if (note && CONSTANT_P (XEXP (note, 0)))
1359 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1362 if (amd.side_effects)
1364 rtx *pat, new_pat, s;
1365 int i, oldn, newn;
1367 pat = &PATTERN (insn);
1368 if (GET_CODE (*pat) == COND_EXEC)
1369 pat = &COND_EXEC_CODE (*pat);
1370 if (GET_CODE (*pat) == PARALLEL)
1371 oldn = XVECLEN (*pat, 0);
1372 else
1373 oldn = 1;
1374 for (s = amd.side_effects, newn = 0; s; newn++)
1375 s = XEXP (s, 1);
1376 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1377 if (GET_CODE (*pat) == PARALLEL)
1378 for (i = 0; i < oldn; i++)
1379 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1380 else
1381 XVECEXP (new_pat, 0, 0) = *pat;
1382 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1383 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1384 free_EXPR_LIST_list (&amd.side_effects);
1385 validate_change (NULL_RTX, pat, new_pat, true);
1389 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1390 static inline rtx
1391 dv_as_rtx (decl_or_value dv)
1393 tree decl;
1395 if (dv_is_value_p (dv))
1396 return dv_as_value (dv);
1398 decl = dv_as_decl (dv);
1400 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1401 return DECL_RTL_KNOWN_SET (decl);
1404 /* Return nonzero if a decl_or_value must not have more than one
1405 variable part.  The returned value discriminates among various
1406 kinds of one-part DVs according to enum onepart_enum.  */
1407 static inline onepart_enum_t
1408 dv_onepart_p (decl_or_value dv)
1410 tree decl;
1412 if (!MAY_HAVE_DEBUG_INSNS)
1413 return NOT_ONEPART;
1415 if (dv_is_value_p (dv))
1416 return ONEPART_VALUE;
1418 decl = dv_as_decl (dv);
1420 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1421 return ONEPART_DEXPR;
1423 if (target_for_debug_bind (decl) != NULL_TREE)
1424 return ONEPART_VDECL;
1426 return NOT_ONEPART;
1429 /* Return the variable pool to be used for a dv of type ONEPART. */
1430 static inline pool_allocator <variable_def> &
1431 onepart_pool (onepart_enum_t onepart)
1433 return onepart ? valvar_pool : var_pool;
1436 /* Build a decl_or_value out of a decl. */
1437 static inline decl_or_value
1438 dv_from_decl (tree decl)
1440 decl_or_value dv;
1441 dv = decl;
1442 gcc_checking_assert (dv_is_decl_p (dv));
1443 return dv;
1446 /* Build a decl_or_value out of a value. */
1447 static inline decl_or_value
1448 dv_from_value (rtx value)
1450 decl_or_value dv;
1451 dv = value;
1452 gcc_checking_assert (dv_is_value_p (dv));
1453 return dv;
1456 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1457 static inline decl_or_value
1458 dv_from_rtx (rtx x)
1460 decl_or_value dv;
1462 switch (GET_CODE (x))
1464 case DEBUG_EXPR:
1465 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1466 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1467 break;
1469 case VALUE:
1470 dv = dv_from_value (x);
1471 break;
1473 default:
1474 gcc_unreachable ();
1477 return dv;
1480 extern void debug_dv (decl_or_value dv);
1482 DEBUG_FUNCTION void
1483 debug_dv (decl_or_value dv)
1485 if (dv_is_value_p (dv))
1486 debug_rtx (dv_as_value (dv));
1487 else
1488 debug_generic_stmt (dv_as_decl (dv));
1491 static void loc_exp_dep_clear (variable var);
1493 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1495 static void
1496 variable_htab_free (void *elem)
1498 int i;
1499 variable var = (variable) elem;
1500 location_chain node, next;
1502 gcc_checking_assert (var->refcount > 0);
1504 var->refcount--;
1505 if (var->refcount > 0)
1506 return;
1508 for (i = 0; i < var->n_var_parts; i++)
1510 for (node = var->var_part[i].loc_chain; node; node = next)
1512 next = node->next;
1513 delete node;
1515 var->var_part[i].loc_chain = NULL;
1517 if (var->onepart && VAR_LOC_1PAUX (var))
1519 loc_exp_dep_clear (var);
1520 if (VAR_LOC_DEP_LST (var))
1521 VAR_LOC_DEP_LST (var)->pprev = NULL;
1522 XDELETE (VAR_LOC_1PAUX (var));
1523 /* These may be reused across functions, so reset
1524 e.g. NO_LOC_P. */
1525 if (var->onepart == ONEPART_DEXPR)
1526 set_dv_changed (var->dv, true);
1528 onepart_pool (var->onepart).remove (var);
1531 /* Initialize the set (array) SET of attrs to empty lists. */
1533 static void
1534 init_attrs_list_set (attrs *set)
1536 int i;
1538 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1539 set[i] = NULL;
1542 /* Make the list *LISTP empty. */
1544 static void
1545 attrs_list_clear (attrs *listp)
1547 attrs list, next;
1549 for (list = *listp; list; list = next)
1551 next = list->next;
1552 delete list;
1554 *listp = NULL;
1557 /* Return the node of LIST whose DV and OFFSET match, or NULL if none does.  */
1559 static attrs
1560 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1562 for (; list; list = list->next)
1563 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1564 return list;
1565 return NULL;
1568 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1570 static void
1571 attrs_list_insert (attrs *listp, decl_or_value dv,
1572 HOST_WIDE_INT offset, rtx loc)
1574 attrs list = new attrs_def;
1575 list->loc = loc;
1576 list->dv = dv;
1577 list->offset = offset;
1578 list->next = *listp;
1579 *listp = list;
1582 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1584 static void
1585 attrs_list_copy (attrs *dstp, attrs src)
1587 attrs_list_clear (dstp);
1588 for (; src; src = src->next)
1590 attrs n = new attrs_def;
1591 n->loc = src->loc;
1592 n->dv = src->dv;
1593 n->offset = src->offset;
1594 n->next = *dstp;
1595 *dstp = n;
1599 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1601 static void
1602 attrs_list_union (attrs *dstp, attrs src)
1604 for (; src; src = src->next)
1606 if (!attrs_list_member (*dstp, src->dv, src->offset))
1607 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1611 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1612 *DSTP. */
1614 static void
1615 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1617 gcc_assert (!*dstp);
1618 for (; src; src = src->next)
1620 if (!dv_onepart_p (src->dv))
1621 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1623 for (src = src2; src; src = src->next)
1625 if (!dv_onepart_p (src->dv)
1626 && !attrs_list_member (*dstp, src->dv, src->offset))
1627 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1631 /* Shared hashtable support. */
1633 /* Return true if VARS is shared. */
1635 static inline bool
1636 shared_hash_shared (shared_hash vars)
1638 return vars->refcount > 1;
1641 /* Return the hash table for VARS. */
1643 static inline variable_table_type *
1644 shared_hash_htab (shared_hash vars)
1646 return vars->htab;
1649 /* Return true if VAR is shared, possibly because VARS itself is shared.  */
1651 static inline bool
1652 shared_var_p (variable var, shared_hash vars)
1654 /* Don't count an entry in the changed_variables table as a duplicate. */
1655 return ((var->refcount > 1 + (int) var->in_changed_variables)
1656 || shared_hash_shared (vars));
1659 /* Copy variables into a new hash table. */
1661 static shared_hash
1662 shared_hash_unshare (shared_hash vars)
1664 shared_hash new_vars = new shared_hash_def;
1665 gcc_assert (vars->refcount > 1);
1666 new_vars->refcount = 1;
1667 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1668 vars_copy (new_vars->htab, vars->htab);
1669 vars->refcount--;
1670 return new_vars;
1673 /* Increment reference counter on VARS and return it. */
1675 static inline shared_hash
1676 shared_hash_copy (shared_hash vars)
1678 vars->refcount++;
1679 return vars;
1682 /* Decrement reference counter and destroy hash table if not shared
1683 anymore. */
1685 static void
1686 shared_hash_destroy (shared_hash vars)
1688 gcc_checking_assert (vars->refcount > 0);
1689 if (--vars->refcount == 0)
1691 delete vars->htab;
1692 delete vars;
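/* Usage sketch of the copy-on-write discipline (illustrative):

   shared_hash vars2 = shared_hash_copy (vars1);  // refcount becomes 2
   // A writer unshares before mutating:
   slot = shared_hash_find_slot_unshare (&vars2, dv, INSERT);
   ...
   shared_hash_destroy (vars2);                   // drops one reference

   so several dataflow sets can alias one hash table until one of them
   needs to diverge.  */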
1696 /* Unshare *PVARS if shared and return slot for DV. If INS is
1697 INSERT, insert it if not already present. */
1699 static inline variable_def **
1700 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1701 hashval_t dvhash, enum insert_option ins)
1703 if (shared_hash_shared (*pvars))
1704 *pvars = shared_hash_unshare (*pvars);
1705 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1708 static inline variable_def **
1709 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1710 enum insert_option ins)
1712 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1715 /* Return slot for DV, if it is already present in the hash table.
1716 If it is not present, insert it only if VARS is not shared; otherwise
1717 return NULL.  */
1719 static inline variable_def **
1720 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1722 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1723 shared_hash_shared (vars)
1724 ? NO_INSERT : INSERT);
1727 static inline variable_def **
1728 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1730 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1733 /* Return slot for DV only if it is already present in the hash table. */
1735 static inline variable_def **
1736 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1737 hashval_t dvhash)
1739 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1742 static inline variable_def **
1743 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1745 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1748 /* Return variable for DV or NULL if not already present in the hash
1749 table. */
1751 static inline variable
1752 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1754 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1757 static inline variable
1758 shared_hash_find (shared_hash vars, decl_or_value dv)
1760 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1763 /* Return true if TVAL is better than CVAL as a canonical value.  We
1764 choose lowest-numbered VALUEs, using the RTX address as a
1765 tie-breaker. The idea is to arrange them into a star topology,
1766 such that all of them are at most one step away from the canonical
1767 value, and the canonical value has backlinks to all of them, in
1768 addition to all the actual locations. We don't enforce this
1769 topology throughout the entire dataflow analysis, though.  */
1772 static inline bool
1773 canon_value_cmp (rtx tval, rtx cval)
1775 return !cval
1776 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
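/* For example (a sketch): among equivalent VALUEs with uids 7, 12 and
   40, canon_value_cmp makes uid 7 canonical; 12 and 40 each keep a
   location entry pointing at 7, and 7 keeps backlinks to both, forming
   the star topology described above.  */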
1779 static bool dst_can_be_shared;
1781 /* Return a copy of a variable VAR and insert it into dataflow set SET.  */
1783 static variable_def **
1784 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1785 enum var_init_status initialized)
1787 variable new_var;
1788 int i;
1790 new_var = onepart_pool (var->onepart).allocate ();
1791 new_var->dv = var->dv;
1792 new_var->refcount = 1;
1793 var->refcount--;
1794 new_var->n_var_parts = var->n_var_parts;
1795 new_var->onepart = var->onepart;
1796 new_var->in_changed_variables = false;
1798 if (! flag_var_tracking_uninit)
1799 initialized = VAR_INIT_STATUS_INITIALIZED;
1801 for (i = 0; i < var->n_var_parts; i++)
1803 location_chain node;
1804 location_chain *nextp;
1806 if (i == 0 && var->onepart)
1808 /* One-part auxiliary data is only used while emitting
1809 notes, so propagate it to the new variable in the active
1810 dataflow set. If we're not emitting notes, this will be
1811 a no-op. */
1812 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1813 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1814 VAR_LOC_1PAUX (var) = NULL;
1816 else
1817 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1818 nextp = &new_var->var_part[i].loc_chain;
1819 for (node = var->var_part[i].loc_chain; node; node = node->next)
1821 location_chain new_lc;
1823 new_lc = new location_chain_def;
1824 new_lc->next = NULL;
1825 if (node->init > initialized)
1826 new_lc->init = node->init;
1827 else
1828 new_lc->init = initialized;
1829 if (node->set_src && !(MEM_P (node->set_src)))
1830 new_lc->set_src = node->set_src;
1831 else
1832 new_lc->set_src = NULL;
1833 new_lc->loc = node->loc;
1835 *nextp = new_lc;
1836 nextp = &new_lc->next;
1839 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1842 dst_can_be_shared = false;
1843 if (shared_hash_shared (set->vars))
1844 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1845 else if (set->traversed_vars && set->vars != set->traversed_vars)
1846 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1847 *slot = new_var;
1848 if (var->in_changed_variables)
1850 variable_def **cslot
1851 = changed_variables->find_slot_with_hash (var->dv,
1852 dv_htab_hash (var->dv),
1853 NO_INSERT);
1854 gcc_assert (*cslot == (void *) var);
1855 var->in_changed_variables = false;
1856 variable_htab_free (var);
1857 *cslot = new_var;
1858 new_var->in_changed_variables = true;
1860 return slot;
1863 /* Copy all variables from hash table SRC to hash table DST. */
1865 static void
1866 vars_copy (variable_table_type *dst, variable_table_type *src)
1868 variable_iterator_type hi;
1869 variable var;
1871 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1873 variable_def **dstp;
1874 var->refcount++;
1875 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1876 INSERT);
1877 *dstp = var;
1881 /* Map a decl to its main debug decl. */
1883 static inline tree
1884 var_debug_decl (tree decl)
1886 if (decl && TREE_CODE (decl) == VAR_DECL
1887 && DECL_HAS_DEBUG_EXPR_P (decl))
1889 tree debugdecl = DECL_DEBUG_EXPR (decl);
1890 if (DECL_P (debugdecl))
1891 decl = debugdecl;
1894 return decl;
1897 /* Set the register LOC to contain DV, OFFSET. */
1899 static void
1900 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1901 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1902 enum insert_option iopt)
1904 attrs node;
1905 bool decl_p = dv_is_decl_p (dv);
1907 if (decl_p)
1908 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1910 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1911 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1912 && node->offset == offset)
1913 break;
1914 if (!node)
1915 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1916 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1919 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1921 static void
1922 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1923 rtx set_src)
1925 tree decl = REG_EXPR (loc);
1926 HOST_WIDE_INT offset = REG_OFFSET (loc);
1928 var_reg_decl_set (set, loc, initialized,
1929 dv_from_decl (decl), offset, set_src, INSERT);
1932 static enum var_init_status
1933 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1935 variable var;
1936 int i;
1937 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1939 if (! flag_var_tracking_uninit)
1940 return VAR_INIT_STATUS_INITIALIZED;
1942 var = shared_hash_find (set->vars, dv);
1943 if (var)
1945 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1947 location_chain nextp;
1948 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1949 if (rtx_equal_p (nextp->loc, loc))
1951 ret_val = nextp->init;
1952 break;
1957 return ret_val;
1960 /* Delete current content of register LOC in dataflow set SET and set
1961 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1962 MODIFY is true, any other live copies of the same variable part are
1963 also deleted from the dataflow set, otherwise the variable part is
1964 assumed to be copied from another location holding the same
1965 part. */
1967 static void
1968 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1969 enum var_init_status initialized, rtx set_src)
1971 tree decl = REG_EXPR (loc);
1972 HOST_WIDE_INT offset = REG_OFFSET (loc);
1973 attrs node, next;
1974 attrs *nextp;
1976 decl = var_debug_decl (decl);
1978 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1979 initialized = get_init_value (set, loc, dv_from_decl (decl));
1981 nextp = &set->regs[REGNO (loc)];
1982 for (node = *nextp; node; node = next)
1984 next = node->next;
1985 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1987 delete_variable_part (set, node->loc, node->dv, node->offset);
1988 delete node;
1989 *nextp = next;
1991 else
1993 node->loc = loc;
1994 nextp = &node->next;
1997 if (modify)
1998 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1999 var_reg_set (set, loc, initialized, set_src);
2002 /* Delete the association of register LOC in dataflow set SET with any
2003 variables that aren't onepart. If CLOBBER is true, also delete any
2004 other live copies of the same variable part, and delete the
2005 association with onepart dvs too. */
2007 static void
2008 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
2010 attrs *nextp = &set->regs[REGNO (loc)];
2011 attrs node, next;
2013 if (clobber)
2015 tree decl = REG_EXPR (loc);
2016 HOST_WIDE_INT offset = REG_OFFSET (loc);
2018 decl = var_debug_decl (decl);
2020 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2023 for (node = *nextp; node; node = next)
2025 next = node->next;
2026 if (clobber || !dv_onepart_p (node->dv))
2028 delete_variable_part (set, node->loc, node->dv, node->offset);
2029 delete node;
2030 *nextp = next;
2032 else
2033 nextp = &node->next;
2037 /* Delete content of register with number REGNO in dataflow set SET. */
2039 static void
2040 var_regno_delete (dataflow_set *set, int regno)
2042 attrs *reg = &set->regs[regno];
2043 attrs node, next;
2045 for (node = *reg; node; node = next)
2047 next = node->next;
2048 delete_variable_part (set, node->loc, node->dv, node->offset);
2049 delete node;
2051 *reg = NULL;
2054 /* Return true if I is the negated value of a power of two. */
2055 static bool
2056 negative_power_of_two_p (HOST_WIDE_INT i)
2058 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2059 return x == (x & -x);
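/* Worked example: for I == -8, X becomes 8 and 8 == (8 & -8), so the
   result is true; for I == -6, X becomes 6 but (6 & -6) == 2, so the
   result is false.  X & -X isolates the lowest set bit of X, so the
   test checks that X has exactly that one bit set.  */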
2062 /* Strip constant offsets and alignments off of LOC. Return the base
2063 expression. */
2065 static rtx
2066 vt_get_canonicalize_base (rtx loc)
2068 while ((GET_CODE (loc) == PLUS
2069 || GET_CODE (loc) == AND)
2070 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2071 && (GET_CODE (loc) != AND
2072 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2073 loc = XEXP (loc, 0);
2075 return loc;
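/* For example, an aligned stack address such as

     (and:P (plus:P (reg:P sp) (const_int 12)) (const_int -16))

   strips down to (reg:P sp): each iteration peels either a constant
   offset (the PLUS) or an alignment mask (the AND with a negated
   power of two).  */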
2078 /* This caches canonicalized addresses for VALUEs, computed using
2079 information in the global cselib table. */
2080 static hash_map<rtx, rtx> *global_get_addr_cache;
2082 /* This caches canonicalized addresses for VALUEs, computed using
2083 information from the global cache and information pertaining to a
2084 basic block being analyzed. */
2085 static hash_map<rtx, rtx> *local_get_addr_cache;
2087 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2089 /* Return the canonical address for LOC, that must be a VALUE, using a
2090 cached global equivalence or computing it and storing it in the
2091 global cache. */
2093 static rtx
2094 get_addr_from_global_cache (rtx const loc)
2096 rtx x;
2098 gcc_checking_assert (GET_CODE (loc) == VALUE);
2100 bool existed;
2101 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2102 if (existed)
2103 return *slot;
2105 x = canon_rtx (get_addr (loc));
2107 /* Tentative, avoiding infinite recursion. */
2108 *slot = x;
2110 if (x != loc)
2112 rtx nx = vt_canonicalize_addr (NULL, x);
2113 if (nx != x)
2115 /* The table may have moved during recursion, recompute
2116 SLOT. */
2117 *global_get_addr_cache->get (loc) = x = nx;
2121 return x;
2124 /* Return the canonical address for LOC, that must be a VALUE, using a
2125 cached local equivalence or computing it and storing it in the
2126 local cache. */
2128 static rtx
2129 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2131 rtx x;
2132 decl_or_value dv;
2133 variable var;
2134 location_chain l;
2136 gcc_checking_assert (GET_CODE (loc) == VALUE);
2138 bool existed;
2139 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2140 if (existed)
2141 return *slot;
2143 x = get_addr_from_global_cache (loc);
2145 /* Tentative, avoiding infinite recursion. */
2146 *slot = x;
2148 /* Recurse to cache the local expansion of X, or to search for a
2149 VALUE in the expansion. */
2150 if (x != loc)
2152 rtx nx = vt_canonicalize_addr (set, x);
2153 if (nx != x)
2155 slot = local_get_addr_cache->get (loc);
2156 *slot = x = nx;
2158 return x;
2161 dv = dv_from_rtx (x);
2162 var = shared_hash_find (set->vars, dv);
2163 if (!var)
2164 return x;
2166 /* Look for an improved equivalent expression. */
2167 for (l = var->var_part[0].loc_chain; l; l = l->next)
2169 rtx base = vt_get_canonicalize_base (l->loc);
2170 if (GET_CODE (base) == VALUE
2171 && canon_value_cmp (base, loc))
2173 rtx nx = vt_canonicalize_addr (set, l->loc);
2174 if (x != nx)
2176 slot = local_get_addr_cache->get (loc);
2177 *slot = x = nx;
2179 break;
2183 return x;
2186 /* Canonicalize LOC using equivalences from SET in addition to those
2187 in the cselib static table. It expects a VALUE-based expression,
2188 and it will only substitute VALUEs with other VALUEs or
2189 function-global equivalences, so that, if two addresses have base
2190 VALUEs that are locally or globally related in ways that
2191 memrefs_conflict_p cares about, they will both canonicalize to
2192 expressions that have the same base VALUE.
2194 The use of VALUEs as canonical base addresses enables the canonical
2195 RTXs to remain unchanged globally, if they resolve to a constant,
2196 or throughout a basic block otherwise, so that they can be cached
2197 and the cache need not be invalidated when REGs, MEMs or such
2198 change. */
2200 static rtx
2201 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2203 HOST_WIDE_INT ofst = 0;
2204 machine_mode mode = GET_MODE (oloc);
2205 rtx loc = oloc;
2206 rtx x;
2207 bool retry = true;
2209 while (retry)
2211 while (GET_CODE (loc) == PLUS
2212 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2214 ofst += INTVAL (XEXP (loc, 1));
2215 loc = XEXP (loc, 0);
2218 /* Alignment operations can't normally be combined, so just
2219 canonicalize the base and we're done. We'll normally have
2220 only one stack alignment anyway. */
2221 if (GET_CODE (loc) == AND
2222 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2223 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2225 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2226 if (x != XEXP (loc, 0))
2227 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2228 retry = false;
2231 if (GET_CODE (loc) == VALUE)
2233 if (set)
2234 loc = get_addr_from_local_cache (set, loc);
2235 else
2236 loc = get_addr_from_global_cache (loc);
2238 /* Consolidate plus_constants. */
2239 while (ofst && GET_CODE (loc) == PLUS
2240 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2242 ofst += INTVAL (XEXP (loc, 1));
2243 loc = XEXP (loc, 0);
2246 retry = false;
2248 else
2250 x = canon_rtx (loc);
2251 if (retry)
2252 retry = (x != loc);
2253 loc = x;
2257 /* Add OFST back in. */
2258 if (ofst)
2260 /* Don't build new RTL if we can help it. */
2261 if (GET_CODE (oloc) == PLUS
2262 && XEXP (oloc, 0) == loc
2263 && INTVAL (XEXP (oloc, 1)) == ofst)
2264 return oloc;
2266 loc = plus_constant (mode, loc, ofst);
2269 return loc;
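/* A made-up example of the net effect: if the cache resolves
   (value V2) to (plus (value V1) (const_int 8)), then

     vt_canonicalize_addr (set, (plus (value V2) (const_int 4)))

   accumulates the constant offsets and returns
   (plus (value V1) (const_int 12)), so MEMs based on V2 and on V1
   canonicalize to addresses with the same base VALUE.  */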
2272 /* Return true iff there's a true dependence between MLOC and LOC.
2273 MADDR must be a canonicalized version of MLOC's address. */
2275 static inline bool
2276 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2278 if (GET_CODE (loc) != MEM)
2279 return false;
2281 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2282 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2283 return false;
2285 return true;
2288 /* Hold parameters for the hashtab traversal function
2289 drop_overlapping_mem_locs, see below. */
2291 struct overlapping_mems
2293 dataflow_set *set;
2294 rtx loc, addr;
2297 /* Remove all MEMs that overlap with COMS->LOC from the location list
2298 of a hash table entry for a value. COMS->ADDR must be a
2299 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2300 canonicalized itself. */
2303 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2305 dataflow_set *set = coms->set;
2306 rtx mloc = coms->loc, addr = coms->addr;
2307 variable var = *slot;
2309 if (var->onepart == ONEPART_VALUE)
2311 location_chain loc, *locp;
2312 bool changed = false;
2313 rtx cur_loc;
2315 gcc_assert (var->n_var_parts == 1);
2317 if (shared_var_p (var, set->vars))
2319 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2320 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2321 break;
2323 if (!loc)
2324 return 1;
2326 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2327 var = *slot;
2328 gcc_assert (var->n_var_parts == 1);
2331 if (VAR_LOC_1PAUX (var))
2332 cur_loc = VAR_LOC_FROM (var);
2333 else
2334 cur_loc = var->var_part[0].cur_loc;
2336 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2337 loc; loc = *locp)
2339 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2341 locp = &loc->next;
2342 continue;
2345 *locp = loc->next;
2346 /* If we have deleted the location that was last emitted,
2347 we have to emit a new location, so add the variable to the
2348 set of changed variables. */
2349 if (cur_loc == loc->loc)
2351 changed = true;
2352 var->var_part[0].cur_loc = NULL;
2353 if (VAR_LOC_1PAUX (var))
2354 VAR_LOC_FROM (var) = NULL;
2356 delete loc;
2359 if (!var->var_part[0].loc_chain)
2361 var->n_var_parts--;
2362 changed = true;
2364 if (changed)
2365 variable_was_changed (var, set);
2368 return 1;
2371 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2373 static void
2374 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2376 struct overlapping_mems coms;
2378 gcc_checking_assert (GET_CODE (loc) == MEM);
2380 coms.set = set;
2381 coms.loc = canon_rtx (loc);
2382 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2384 set->traversed_vars = set->vars;
2385 shared_hash_htab (set->vars)
2386 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2387 set->traversed_vars = NULL;
2390 /* Set the location of DV, OFFSET as the MEM LOC. */
2392 static void
2393 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2394 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2395 enum insert_option iopt)
2397 if (dv_is_decl_p (dv))
2398 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2400 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2403 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2404 SET to LOC.
2405 Adjust the address first if it is stack pointer based. */
2407 static void
2408 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2409 rtx set_src)
2411 tree decl = MEM_EXPR (loc);
2412 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2414 var_mem_decl_set (set, loc, initialized,
2415 dv_from_decl (decl), offset, set_src, INSERT);
2418 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2419 dataflow set SET to LOC. If MODIFY is true, any other live copies
2420 of the same variable part are also deleted from the dataflow set,
2421 otherwise the variable part is assumed to be copied from another
2422 location holding the same part.
2423 Adjust the address first if it is stack pointer based. */
2425 static void
2426 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2427 enum var_init_status initialized, rtx set_src)
2429 tree decl = MEM_EXPR (loc);
2430 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2432 clobber_overlapping_mems (set, loc);
2433 decl = var_debug_decl (decl);
2435 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2436 initialized = get_init_value (set, loc, dv_from_decl (decl));
2438 if (modify)
2439 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2440 var_mem_set (set, loc, initialized, set_src);
2443 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2444 true, also delete any other live copies of the same variable part.
2445 Adjust the address first if it is stack pointer based. */
2447 static void
2448 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2450 tree decl = MEM_EXPR (loc);
2451 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2453 clobber_overlapping_mems (set, loc);
2454 decl = var_debug_decl (decl);
2455 if (clobber)
2456 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2457 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2460 /* Return true if LOC should not be expanded for location expressions,
2461 or used in them. */
2463 static inline bool
2464 unsuitable_loc (rtx loc)
2466 switch (GET_CODE (loc))
2468 case PC:
2469 case SCRATCH:
2470 case CC0:
2471 case ASM_INPUT:
2472 case ASM_OPERANDS:
2473 return true;
2475 default:
2476 return false;
2480 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2481 bound to it. */
2483 static inline void
2484 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2486 if (REG_P (loc))
2488 if (modified)
2489 var_regno_delete (set, REGNO (loc));
2490 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2491 dv_from_value (val), 0, NULL_RTX, INSERT);
2493 else if (MEM_P (loc))
2495 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2497 if (modified)
2498 clobber_overlapping_mems (set, loc);
2500 if (l && GET_CODE (l->loc) == VALUE)
2501 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2503 /* If this MEM is a global constant, we don't need it in the
2504 dynamic tables. ??? We should test this before emitting the
2505 micro-op in the first place. */
2506 while (l)
2507 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2508 break;
2509 else
2510 l = l->next;
2512 if (!l)
2513 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2514 dv_from_value (val), 0, NULL_RTX, INSERT);
2516 else
2518 /* Other kinds of equivalences are necessarily static, at least
2519 so long as we do not perform substitutions while merging
2520 expressions. */
2521 gcc_unreachable ();
2522 set_variable_part (set, loc, dv_from_value (val), 0,
2523 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2527 /* Bind a value to a location it was just stored in. If MODIFIED
2528 holds, assume the location was modified, detaching it from any
2529 values bound to it. */
2531 static void
2532 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2533 bool modified)
2535 cselib_val *v = CSELIB_VAL_PTR (val);
2537 gcc_assert (cselib_preserved_value_p (v));
2539 if (dump_file)
2541 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2542 print_inline_rtx (dump_file, loc, 0);
2543 fprintf (dump_file, " evaluates to ");
2544 print_inline_rtx (dump_file, val, 0);
2545 if (v->locs)
2547 struct elt_loc_list *l;
2548 for (l = v->locs; l; l = l->next)
2550 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2551 print_inline_rtx (dump_file, l->loc, 0);
2554 fprintf (dump_file, "\n");
2557 gcc_checking_assert (!unsuitable_loc (loc));
2559 val_bind (set, val, loc, modified);
2562 /* Clear (canonical address) slots that reference X. */
2564 bool
2565 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2567 if (vt_get_canonicalize_base (*slot) == x)
2568 *slot = NULL;
2569 return true;
2572 /* Reset this node, detaching all its equivalences. Return the slot
2573 in the variable hash table that holds dv, if there is one. */
2575 static void
2576 val_reset (dataflow_set *set, decl_or_value dv)
2578 variable var = shared_hash_find (set->vars, dv) ;
2579 location_chain node;
2580 rtx cval;
2582 if (!var || !var->n_var_parts)
2583 return;
2585 gcc_assert (var->n_var_parts == 1);
2587 if (var->onepart == ONEPART_VALUE)
2589 rtx x = dv_as_value (dv);
2591 /* Relationships in the global cache don't change, so reset the
2592 local cache entry only. */
2593 rtx *slot = local_get_addr_cache->get (x);
2594 if (slot)
2596 /* If the value resolved back to itself, odds are that other
2597 values may have cached it too. These entries now refer
2598 to the old X, so detach them too. Entries that used the
2599 old X but resolved to something else remain ok as long as
2600 that something else isn't also reset. */
2601 if (*slot == x)
2602 local_get_addr_cache
2603 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2604 *slot = NULL;
2608 cval = NULL;
2609 for (node = var->var_part[0].loc_chain; node; node = node->next)
2610 if (GET_CODE (node->loc) == VALUE
2611 && canon_value_cmp (node->loc, cval))
2612 cval = node->loc;
2614 for (node = var->var_part[0].loc_chain; node; node = node->next)
2615 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2617 /* Redirect the equivalence link to the new canonical
2618 value, or simply remove it if it would point at
2619 itself. */
2620 if (cval)
2621 set_variable_part (set, cval, dv_from_value (node->loc),
2622 0, node->init, node->set_src, NO_INSERT);
2623 delete_variable_part (set, dv_as_value (dv),
2624 dv_from_value (node->loc), 0);
2627 if (cval)
2629 decl_or_value cdv = dv_from_value (cval);
2631 /* Keep the remaining values connected, accumulating links
2632 in the canonical value. */
2633 for (node = var->var_part[0].loc_chain; node; node = node->next)
2635 if (node->loc == cval)
2636 continue;
2637 else if (GET_CODE (node->loc) == REG)
2638 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2639 node->set_src, NO_INSERT);
2640 else if (GET_CODE (node->loc) == MEM)
2641 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2642 node->set_src, NO_INSERT);
2643 else
2644 set_variable_part (set, node->loc, cdv, 0,
2645 node->init, node->set_src, NO_INSERT);
2649 /* We remove this last, to make sure that the canonical value is not
2650 removed to the point of requiring reinsertion. */
2651 if (cval)
2652 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2654 clobber_variable_part (set, NULL, dv, 0, NULL);
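/* For example (made-up uids), resetting a value whose chain holds the
   equivalent values V3, V5 and V7 picks V3, the most canonical of
   them, as the new canonical value; V5 and V7 are then relinked to
   V3, REG and MEM locations are rebound to V3, and only afterwards is
   the old value's own variable part clobbered, so the canonical value
   is never removed to the point of requiring reinsertion.  */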
2657 /* Find the values in a given location and map VAL to another
2658 value, if it is unique, or add the location as one holding
2659 the value. */
2661 static void
2662 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2664 decl_or_value dv = dv_from_value (val);
2666 if (dump_file && (dump_flags & TDF_DETAILS))
2668 if (insn)
2669 fprintf (dump_file, "%i: ", INSN_UID (insn));
2670 else
2671 fprintf (dump_file, "head: ");
2672 print_inline_rtx (dump_file, val, 0);
2673 fputs (" is at ", dump_file);
2674 print_inline_rtx (dump_file, loc, 0);
2675 fputc ('\n', dump_file);
2678 val_reset (set, dv);
2680 gcc_checking_assert (!unsuitable_loc (loc));
2682 if (REG_P (loc))
2684 attrs node, found = NULL;
2686 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2687 if (dv_is_value_p (node->dv)
2688 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2690 found = node;
2692 /* Map incoming equivalences. ??? Wouldn't it be nice if
2693 we just started sharing the location lists? Maybe a
2694 circular list ending at the value itself or some
2695 such. */
2696 set_variable_part (set, dv_as_value (node->dv),
2697 dv_from_value (val), node->offset,
2698 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2699 set_variable_part (set, val, node->dv, node->offset,
2700 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2703 /* If we didn't find any equivalence, we need to remember that
2704 this value is held in the named register. */
2705 if (found)
2706 return;
2708 /* ??? Attempt to find and merge equivalent MEMs or other
2709 expressions too. */
2711 val_bind (set, val, loc, false);
2714 /* Initialize dataflow set SET to be empty.
2715 VARS_SIZE is the initial size of hash table VARS. */
2717 static void
2718 dataflow_set_init (dataflow_set *set)
2720 init_attrs_list_set (set->regs);
2721 set->vars = shared_hash_copy (empty_shared_hash);
2722 set->stack_adjust = 0;
2723 set->traversed_vars = NULL;
2726 /* Delete the contents of dataflow set SET. */
2728 static void
2729 dataflow_set_clear (dataflow_set *set)
2731 int i;
2733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2734 attrs_list_clear (&set->regs[i]);
2736 shared_hash_destroy (set->vars);
2737 set->vars = shared_hash_copy (empty_shared_hash);
2740 /* Copy the contents of dataflow set SRC to DST. */
2742 static void
2743 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2745 int i;
2747 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2748 attrs_list_copy (&dst->regs[i], src->regs[i]);
2750 shared_hash_destroy (dst->vars);
2751 dst->vars = shared_hash_copy (src->vars);
2752 dst->stack_adjust = src->stack_adjust;
2755 Information for merging lists of locations for a given offset of a variable.
2757 struct variable_union_info
2759 /* Node of the location chain. */
2760 location_chain lc;
2762 /* The sum of positions in the input chains. */
2763 int pos;
2765 /* The position in the chain of DST dataflow set. */
2766 int pos_dst;
2769 /* Buffer for location list sorting and its allocated size. */
2770 static struct variable_union_info *vui_vec;
2771 static int vui_allocated;
2773 /* Compare function for qsort, order the structures by POS element. */
2775 static int
2776 variable_union_info_cmp_pos (const void *n1, const void *n2)
2778 const struct variable_union_info *const i1 =
2779 (const struct variable_union_info *) n1;
2780 const struct variable_union_info *const i2 =
2781 (const struct variable_union_info *) n2;
2783 if (i1->pos != i2->pos)
2784 return i1->pos - i2->pos;
2786 return (i1->pos_dst - i2->pos_dst);
2789 /* Compute union of location parts of variable *SLOT and the same variable
2790 from hash table DATA. Compute "sorted" union of the location chains
2791 for common offsets, i.e. the locations of a variable part are sorted by
2792 a priority, where the priority is the sum of the positions in the two chains
2793 (if a location is only in one list, its position in the other list is
2794 defined to be larger than the length of the chains).
2795 When we are updating the location parts, the newest location is at the
2796 beginning of the chain, so when we do the described "sorted" union
2797 we keep the newest locations at the beginning. */
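/* A worked example of the priority scheme: for SRC = [A, B] and
   DST = [B, C] (src_l == dst_l == 2),

     B, in both chains, gets pos = 0 + 1 = 1 (position in DST + in SRC),
     A, only in SRC, gets pos = 0 + 4 = 4 (padded by src_l + dst_l),
     C, only in DST, gets pos = 1 + 4 = 5 (likewise padded),

   so the union comes out as [B, A, C]: shared locations first, then
   the rest ordered by their padded positions.  */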
2799 static int
2800 variable_union (variable src, dataflow_set *set)
2802 variable dst;
2803 variable_def **dstp;
2804 int i, j, k;
2806 dstp = shared_hash_find_slot (set->vars, src->dv);
2807 if (!dstp || !*dstp)
2809 src->refcount++;
2811 dst_can_be_shared = false;
2812 if (!dstp)
2813 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2815 *dstp = src;
2817 /* Continue traversing the hash table. */
2818 return 1;
2820 else
2821 dst = *dstp;
2823 gcc_assert (src->n_var_parts);
2824 gcc_checking_assert (src->onepart == dst->onepart);
2826 /* We can combine one-part variables very efficiently, because their
2827 entries are in canonical order. */
2828 if (src->onepart)
2830 location_chain *nodep, dnode, snode;
2832 gcc_assert (src->n_var_parts == 1
2833 && dst->n_var_parts == 1);
2835 snode = src->var_part[0].loc_chain;
2836 gcc_assert (snode);
2838 restart_onepart_unshared:
2839 nodep = &dst->var_part[0].loc_chain;
2840 dnode = *nodep;
2841 gcc_assert (dnode);
2843 while (snode)
2845 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2847 if (r > 0)
2849 location_chain nnode;
2851 if (shared_var_p (dst, set->vars))
2853 dstp = unshare_variable (set, dstp, dst,
2854 VAR_INIT_STATUS_INITIALIZED);
2855 dst = *dstp;
2856 goto restart_onepart_unshared;
2859 *nodep = nnode = new location_chain_def;
2860 nnode->loc = snode->loc;
2861 nnode->init = snode->init;
2862 if (!snode->set_src || MEM_P (snode->set_src))
2863 nnode->set_src = NULL;
2864 else
2865 nnode->set_src = snode->set_src;
2866 nnode->next = dnode;
2867 dnode = nnode;
2869 else if (r == 0)
2870 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2872 if (r >= 0)
2873 snode = snode->next;
2875 nodep = &dnode->next;
2876 dnode = *nodep;
2879 return 1;
2882 gcc_checking_assert (!src->onepart);
2884 /* Count the number of location parts, result is K. */
2885 for (i = 0, j = 0, k = 0;
2886 i < src->n_var_parts && j < dst->n_var_parts; k++)
2888 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2890 i++;
2891 j++;
2893 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2894 i++;
2895 else
2896 j++;
2898 k += src->n_var_parts - i;
2899 k += dst->n_var_parts - j;
2901 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2902 thus there are at most MAX_VAR_PARTS different offsets. */
2903 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2905 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2907 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2908 dst = *dstp;
2911 i = src->n_var_parts - 1;
2912 j = dst->n_var_parts - 1;
2913 dst->n_var_parts = k;
2915 for (k--; k >= 0; k--)
2917 location_chain node, node2;
2919 if (i >= 0 && j >= 0
2920 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2922 /* Compute the "sorted" union of the chains, i.e. the locations which
2923 are in both chains go first; they are sorted by the sum of
2924 positions in the chains. */
2925 int dst_l, src_l;
2926 int ii, jj, n;
2927 struct variable_union_info *vui;
2929 /* If DST is shared compare the location chains.
2930 If they are different we will modify the chain in DST with
2931 high probability so make a copy of DST. */
2932 if (shared_var_p (dst, set->vars))
2934 for (node = src->var_part[i].loc_chain,
2935 node2 = dst->var_part[j].loc_chain; node && node2;
2936 node = node->next, node2 = node2->next)
2938 if (!((REG_P (node2->loc)
2939 && REG_P (node->loc)
2940 && REGNO (node2->loc) == REGNO (node->loc))
2941 || rtx_equal_p (node2->loc, node->loc)))
2943 if (node2->init < node->init)
2944 node2->init = node->init;
2945 break;
2948 if (node || node2)
2950 dstp = unshare_variable (set, dstp, dst,
2951 VAR_INIT_STATUS_UNKNOWN);
2952 dst = (variable)*dstp;
2956 src_l = 0;
2957 for (node = src->var_part[i].loc_chain; node; node = node->next)
2958 src_l++;
2959 dst_l = 0;
2960 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2961 dst_l++;
2963 if (dst_l == 1)
2965 /* The most common case, much simpler, no qsort is needed. */
2966 location_chain dstnode = dst->var_part[j].loc_chain;
2967 dst->var_part[k].loc_chain = dstnode;
2968 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2969 node2 = dstnode;
2970 for (node = src->var_part[i].loc_chain; node; node = node->next)
2971 if (!((REG_P (dstnode->loc)
2972 && REG_P (node->loc)
2973 && REGNO (dstnode->loc) == REGNO (node->loc))
2974 || rtx_equal_p (dstnode->loc, node->loc)))
2976 location_chain new_node;
2978 /* Copy the location from SRC. */
2979 new_node = new location_chain_def;
2980 new_node->loc = node->loc;
2981 new_node->init = node->init;
2982 if (!node->set_src || MEM_P (node->set_src))
2983 new_node->set_src = NULL;
2984 else
2985 new_node->set_src = node->set_src;
2986 node2->next = new_node;
2987 node2 = new_node;
2989 node2->next = NULL;
2991 else
2993 if (src_l + dst_l > vui_allocated)
2995 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2996 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2997 vui_allocated);
2999 vui = vui_vec;
3001 /* Fill in the locations from DST. */
3002 for (node = dst->var_part[j].loc_chain, jj = 0; node;
3003 node = node->next, jj++)
3005 vui[jj].lc = node;
3006 vui[jj].pos_dst = jj;
3008 /* A position larger than any sum of two valid positions. */
3009 vui[jj].pos = jj + src_l + dst_l;
3012 /* Fill in the locations from SRC. */
3013 n = dst_l;
3014 for (node = src->var_part[i].loc_chain, ii = 0; node;
3015 node = node->next, ii++)
3017 /* Find location from NODE. */
3018 for (jj = 0; jj < dst_l; jj++)
3020 if ((REG_P (vui[jj].lc->loc)
3021 && REG_P (node->loc)
3022 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3023 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3025 vui[jj].pos = jj + ii;
3026 break;
3029 if (jj >= dst_l) /* The location has not been found. */
3031 location_chain new_node;
3033 /* Copy the location from SRC. */
3034 new_node = new location_chain_def;
3035 new_node->loc = node->loc;
3036 new_node->init = node->init;
3037 if (!node->set_src || MEM_P (node->set_src))
3038 new_node->set_src = NULL;
3039 else
3040 new_node->set_src = node->set_src;
3041 vui[n].lc = new_node;
3042 vui[n].pos_dst = src_l + dst_l;
3043 vui[n].pos = ii + src_l + dst_l;
3044 n++;
3048 if (dst_l == 2)
3050 /* Special-case the still very common case of dst_l == 2: all
3051 entries dst_l ... n-1 are already sorted, since for i >= dst_l
3052 vui[i].pos == i + src_l + dst_l. */
3053 if (vui[0].pos > vui[1].pos)
3055 /* Order should be 1, 0, 2... */
3056 dst->var_part[k].loc_chain = vui[1].lc;
3057 vui[1].lc->next = vui[0].lc;
3058 if (n >= 3)
3060 vui[0].lc->next = vui[2].lc;
3061 vui[n - 1].lc->next = NULL;
3063 else
3064 vui[0].lc->next = NULL;
3065 ii = 3;
3067 else
3069 dst->var_part[k].loc_chain = vui[0].lc;
3070 if (n >= 3 && vui[2].pos < vui[1].pos)
3072 /* Order should be 0, 2, 1, 3... */
3073 vui[0].lc->next = vui[2].lc;
3074 vui[2].lc->next = vui[1].lc;
3075 if (n >= 4)
3077 vui[1].lc->next = vui[3].lc;
3078 vui[n - 1].lc->next = NULL;
3080 else
3081 vui[1].lc->next = NULL;
3082 ii = 4;
3084 else
3086 /* Order should be 0, 1, 2... */
3087 ii = 1;
3088 vui[n - 1].lc->next = NULL;
3091 for (; ii < n; ii++)
3092 vui[ii - 1].lc->next = vui[ii].lc;
3094 else
3096 qsort (vui, n, sizeof (struct variable_union_info),
3097 variable_union_info_cmp_pos);
3099 /* Reconnect the nodes in sorted order. */
3100 for (ii = 1; ii < n; ii++)
3101 vui[ii - 1].lc->next = vui[ii].lc;
3102 vui[n - 1].lc->next = NULL;
3103 dst->var_part[k].loc_chain = vui[0].lc;
3106 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3108 i--;
3109 j--;
3111 else if ((i >= 0 && j >= 0
3112 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3113 || i < 0)
3115 dst->var_part[k] = dst->var_part[j];
3116 j--;
3118 else if ((i >= 0 && j >= 0
3119 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3120 || j < 0)
3122 location_chain *nextp;
3124 /* Copy the chain from SRC. */
3125 nextp = &dst->var_part[k].loc_chain;
3126 for (node = src->var_part[i].loc_chain; node; node = node->next)
3128 location_chain new_lc;
3130 new_lc = new location_chain_def;
3131 new_lc->next = NULL;
3132 new_lc->init = node->init;
3133 if (!node->set_src || MEM_P (node->set_src))
3134 new_lc->set_src = NULL;
3135 else
3136 new_lc->set_src = node->set_src;
3137 new_lc->loc = node->loc;
3139 *nextp = new_lc;
3140 nextp = &new_lc->next;
3143 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3144 i--;
3146 dst->var_part[k].cur_loc = NULL;
3149 if (flag_var_tracking_uninit)
3150 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3152 location_chain node, node2;
3153 for (node = src->var_part[i].loc_chain; node; node = node->next)
3154 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3155 if (rtx_equal_p (node->loc, node2->loc))
3157 if (node->init > node2->init)
3158 node2->init = node->init;
3162 /* Continue traversing the hash table. */
3163 return 1;
3166 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3168 static void
3169 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3171 int i;
3173 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3174 attrs_list_union (&dst->regs[i], src->regs[i]);
3176 if (dst->vars == empty_shared_hash)
3178 shared_hash_destroy (dst->vars);
3179 dst->vars = shared_hash_copy (src->vars);
3181 else
3183 variable_iterator_type hi;
3184 variable var;
3186 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3187 var, variable, hi)
3188 variable_union (var, dst);
3192 /* Whether the value is currently being expanded. */
3193 #define VALUE_RECURSED_INTO(x) \
3194 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3196 /* Whether no expansion was found, saving useless lookups.
3197 It must only be set when VALUE_CHANGED is clear. */
3198 #define NO_LOC_P(x) \
3199 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3201 /* Whether cur_loc in the value needs to be (re)computed. */
3202 #define VALUE_CHANGED(x) \
3203 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3204 /* Whether cur_loc in the decl needs to be (re)computed. */
3205 #define DECL_CHANGED(x) TREE_VISITED (x)
3207 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3208 user DECLs, this means they're in changed_variables. Values and
3209 debug exprs may be left with this flag set if no user variable
3210 requires them to be evaluated. */
3212 static inline void
3213 set_dv_changed (decl_or_value dv, bool newv)
3215 switch (dv_onepart_p (dv))
3217 case ONEPART_VALUE:
3218 if (newv)
3219 NO_LOC_P (dv_as_value (dv)) = false;
3220 VALUE_CHANGED (dv_as_value (dv)) = newv;
3221 break;
3223 case ONEPART_DEXPR:
3224 if (newv)
3225 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3226 /* Fall through... */
3228 default:
3229 DECL_CHANGED (dv_as_decl (dv)) = newv;
3230 break;
3234 /* Return true if DV needs to have its cur_loc recomputed. */
3236 static inline bool
3237 dv_changed_p (decl_or_value dv)
3239 return (dv_is_value_p (dv)
3240 ? VALUE_CHANGED (dv_as_value (dv))
3241 : DECL_CHANGED (dv_as_decl (dv)));
3244 /* Return a location list node whose loc is rtx_equal to LOC, in the
3245 location list of a one-part variable or value VAR, or in that of
3246 any values recursively mentioned in the location lists. VARS must
3247 be in star-canonical form. */
3249 static location_chain
3250 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3252 location_chain node;
3253 enum rtx_code loc_code;
3255 if (!var)
3256 return NULL;
3258 gcc_checking_assert (var->onepart);
3260 if (!var->n_var_parts)
3261 return NULL;
3263 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3265 loc_code = GET_CODE (loc);
3266 for (node = var->var_part[0].loc_chain; node; node = node->next)
3268 decl_or_value dv;
3269 variable rvar;
3271 if (GET_CODE (node->loc) != loc_code)
3273 if (GET_CODE (node->loc) != VALUE)
3274 continue;
3276 else if (loc == node->loc)
3277 return node;
3278 else if (loc_code != VALUE)
3280 if (rtx_equal_p (loc, node->loc))
3281 return node;
3282 continue;
3285 /* Since we're in star-canonical form, we don't need to visit
3286 non-canonical nodes: one-part variables and non-canonical
3287 values would only point back to the canonical node. */
3288 if (dv_is_value_p (var->dv)
3289 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3291 /* Skip all subsequent VALUEs. */
3292 while (node->next && GET_CODE (node->next->loc) == VALUE)
3294 node = node->next;
3295 gcc_checking_assert (!canon_value_cmp (node->loc,
3296 dv_as_value (var->dv)));
3297 if (loc == node->loc)
3298 return node;
3300 continue;
3303 gcc_checking_assert (node == var->var_part[0].loc_chain);
3304 gcc_checking_assert (!node->next);
3306 dv = dv_from_value (node->loc);
3307 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3308 return find_loc_in_1pdv (loc, rvar, vars);
3311 /* ??? Gotta look in cselib_val locations too. */
3313 return NULL;
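/* E.g. (made-up values) looking up (reg:SI bx) in the chain of a
   non-canonical value V5, whose only node is a link to the canonical
   V3: the direct scan finds no match, so the function recurses once
   into V3's chain, where all the actual locations of the equivalence
   set live.  Star-canonical form bounds the search to that single
   hop.  */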
3316 /* Hash table iteration argument passed to variable_merge. */
3317 struct dfset_merge
3319 /* The set in which the merge is to be inserted. */
3320 dataflow_set *dst;
3321 /* The set that we're iterating in. */
3322 dataflow_set *cur;
3323 /* The set that may contain the other dv we are to merge with. */
3324 dataflow_set *src;
3325 /* Number of onepart dvs in src. */
3326 int src_onepart_cnt;
3329 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3330 loc_cmp order, and it is maintained as such. */
3332 static void
3333 insert_into_intersection (location_chain *nodep, rtx loc,
3334 enum var_init_status status)
3336 location_chain node;
3337 int r;
3339 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3340 if ((r = loc_cmp (node->loc, loc)) == 0)
3342 node->init = MIN (node->init, status);
3343 return;
3345 else if (r > 0)
3346 break;
3348 node = new location_chain_def;
3350 node->loc = loc;
3351 node->set_src = NULL;
3352 node->init = status;
3353 node->next = *nodep;
3354 *nodep = node;
3357 /* Insert in DEST the intersection of the locations present in both
3358 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3359 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3360 DSM->dst. */
3362 static void
3363 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3364 location_chain s1node, variable s2var)
3366 dataflow_set *s1set = dsm->cur;
3367 dataflow_set *s2set = dsm->src;
3368 location_chain found;
3370 if (s2var)
3372 location_chain s2node;
3374 gcc_checking_assert (s2var->onepart);
3376 if (s2var->n_var_parts)
3378 s2node = s2var->var_part[0].loc_chain;
3380 for (; s1node && s2node;
3381 s1node = s1node->next, s2node = s2node->next)
3382 if (s1node->loc != s2node->loc)
3383 break;
3384 else if (s1node->loc == val)
3385 continue;
3386 else
3387 insert_into_intersection (dest, s1node->loc,
3388 MIN (s1node->init, s2node->init));
3392 for (; s1node; s1node = s1node->next)
3394 if (s1node->loc == val)
3395 continue;
3397 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3398 shared_hash_htab (s2set->vars))))
3400 insert_into_intersection (dest, s1node->loc,
3401 MIN (s1node->init, found->init));
3402 continue;
3405 if (GET_CODE (s1node->loc) == VALUE
3406 && !VALUE_RECURSED_INTO (s1node->loc))
3408 decl_or_value dv = dv_from_value (s1node->loc);
3409 variable svar = shared_hash_find (s1set->vars, dv);
3410 if (svar)
3412 if (svar->n_var_parts == 1)
3414 VALUE_RECURSED_INTO (s1node->loc) = true;
3415 intersect_loc_chains (val, dest, dsm,
3416 svar->var_part[0].loc_chain,
3417 s2var);
3418 VALUE_RECURSED_INTO (s1node->loc) = false;
3423 /* ??? Gotta look in cselib_val locations too. */
3425 /* ??? if the location is equivalent to any location in src,
3426 searched recursively
3428 add to dst the values needed to represent the equivalence
3430 telling whether location S is equivalent to another dv's
3431 location list:
3433 for each location D in the list
3435 if S and D satisfy rtx_equal_p, then it is present
3437 else if D is a value, recurse without cycles
3439 else if S and D have the same CODE and MODE
3441 for each operand oS and the corresponding oD
3443 if oS and oD are not equivalent, then S and D are not equivalent
3445 else if they are RTX vectors
3447 if any vector oS element is not equivalent to its respective oD,
3448 then S and D are not equivalent
3456 /* Return -1 if X should be before Y in a location list for a 1-part
3457 variable, 1 if Y should be before X, and 0 if they're equivalent
3458 and should not appear in the list. */
3460 static int
3461 loc_cmp (rtx x, rtx y)
3463 int i, j, r;
3464 RTX_CODE code = GET_CODE (x);
3465 const char *fmt;
3467 if (x == y)
3468 return 0;
3470 if (REG_P (x))
3472 if (!REG_P (y))
3473 return -1;
3474 gcc_assert (GET_MODE (x) == GET_MODE (y));
3475 if (REGNO (x) == REGNO (y))
3476 return 0;
3477 else if (REGNO (x) < REGNO (y))
3478 return -1;
3479 else
3480 return 1;
3483 if (REG_P (y))
3484 return 1;
3486 if (MEM_P (x))
3488 if (!MEM_P (y))
3489 return -1;
3490 gcc_assert (GET_MODE (x) == GET_MODE (y));
3491 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3494 if (MEM_P (y))
3495 return 1;
3497 if (GET_CODE (x) == VALUE)
3499 if (GET_CODE (y) != VALUE)
3500 return -1;
3501 /* Don't assert the modes are the same, that is true only
3502 when not recursing. (subreg:QI (value:SI 1:1) 0)
3503 and (subreg:QI (value:DI 2:2) 0) can be compared,
3504 even when the modes are different. */
3505 if (canon_value_cmp (x, y))
3506 return -1;
3507 else
3508 return 1;
3511 if (GET_CODE (y) == VALUE)
3512 return 1;
3514 /* Entry value is the least preferable kind of expression. */
3515 if (GET_CODE (x) == ENTRY_VALUE)
3517 if (GET_CODE (y) != ENTRY_VALUE)
3518 return 1;
3519 gcc_assert (GET_MODE (x) == GET_MODE (y));
3520 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3523 if (GET_CODE (y) == ENTRY_VALUE)
3524 return -1;
3526 if (GET_CODE (x) == GET_CODE (y))
3527 /* Compare operands below. */;
3528 else if (GET_CODE (x) < GET_CODE (y))
3529 return -1;
3530 else
3531 return 1;
3533 gcc_assert (GET_MODE (x) == GET_MODE (y));
3535 if (GET_CODE (x) == DEBUG_EXPR)
3537 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3538 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3539 return -1;
3540 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3541 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3542 return 1;
3545 fmt = GET_RTX_FORMAT (code);
3546 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3547 switch (fmt[i])
3549 case 'w':
3550 if (XWINT (x, i) == XWINT (y, i))
3551 break;
3552 else if (XWINT (x, i) < XWINT (y, i))
3553 return -1;
3554 else
3555 return 1;
3557 case 'n':
3558 case 'i':
3559 if (XINT (x, i) == XINT (y, i))
3560 break;
3561 else if (XINT (x, i) < XINT (y, i))
3562 return -1;
3563 else
3564 return 1;
3566 case 'V':
3567 case 'E':
3568 /* Compare the vector length first. */
3569 if (XVECLEN (x, i) == XVECLEN (y, i))
3570 /* Compare the vectors' elements. */;
3571 else if (XVECLEN (x, i) < XVECLEN (y, i))
3572 return -1;
3573 else
3574 return 1;
3576 for (j = 0; j < XVECLEN (x, i); j++)
3577 if ((r = loc_cmp (XVECEXP (x, i, j),
3578 XVECEXP (y, i, j))))
3579 return r;
3580 break;
3582 case 'e':
3583 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3584 return r;
3585 break;
3587 case 'S':
3588 case 's':
3589 if (XSTR (x, i) == XSTR (y, i))
3590 break;
3591 if (!XSTR (x, i))
3592 return -1;
3593 if (!XSTR (y, i))
3594 return 1;
3595 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3596 break;
3597 else if (r < 0)
3598 return -1;
3599 else
3600 return 1;
3602 case 'u':
3603 /* These are just backpointers, so they don't matter. */
3604 break;
3606 case '0':
3607 case 't':
3608 break;
3610 /* It is believed that rtx's at this level will never
3611 contain anything but integers and other rtx's,
3612 except for within LABEL_REFs and SYMBOL_REFs. */
3613 default:
3614 gcc_unreachable ();
3616 if (CONST_WIDE_INT_P (x))
3618 /* Compare the vector length first. */
3619 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3620 return 1;
3621 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3622 return -1;
3624 /* Compare the vectors' elements. */;
3625 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3627 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3628 return -1;
3629 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3630 return 1;
3634 return 0;
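/* The resulting total order, on a few made-up locations:

     (reg:SI ax) < (reg:SI bx)            REGs first, ordered by REGNO,
     < (mem:SI (reg:SI bx))               then MEMs, compared by address,
     < (value:SI 1:1) < (value:SI 2:2)    then VALUEs, canonical first,
     < remaining codes by GET_CODE and operands, with ENTRY_VALUEs
       sorting after everything else.

   Keeping REGs at the front is what lets variable_merge_over_cur stop
   scanning a chain at the first non-REG node.  */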
3637 #if ENABLE_CHECKING
3638 /* Check the order of entries in one-part variables. */
3641 canonicalize_loc_order_check (variable_def **slot,
3642 dataflow_set *data ATTRIBUTE_UNUSED)
3644 variable var = *slot;
3645 location_chain node, next;
3647 #ifdef ENABLE_RTL_CHECKING
3648 int i;
3649 for (i = 0; i < var->n_var_parts; i++)
3650 gcc_assert (var->var_part[i].cur_loc == NULL);
3651 gcc_assert (!var->in_changed_variables);
3652 #endif
3654 if (!var->onepart)
3655 return 1;
3657 gcc_assert (var->n_var_parts == 1);
3658 node = var->var_part[0].loc_chain;
3659 gcc_assert (node);
3661 while ((next = node->next))
3663 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3664 node = next;
3667 return 1;
3669 #endif
3671 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3672 more likely to be chosen as canonical for an equivalence set.
3673 Ensure less likely values can reach more likely neighbors, making
3674 the connections bidirectional. */
3677 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3679 variable var = *slot;
3680 decl_or_value dv = var->dv;
3681 rtx val;
3682 location_chain node;
3684 if (!dv_is_value_p (dv))
3685 return 1;
3687 gcc_checking_assert (var->n_var_parts == 1);
3689 val = dv_as_value (dv);
3691 for (node = var->var_part[0].loc_chain; node; node = node->next)
3692 if (GET_CODE (node->loc) == VALUE)
3694 if (canon_value_cmp (node->loc, val))
3695 VALUE_RECURSED_INTO (val) = true;
3696 else
3698 decl_or_value odv = dv_from_value (node->loc);
3699 variable_def **oslot;
3700 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3702 set_slot_part (set, val, oslot, odv, 0,
3703 node->init, NULL_RTX);
3705 VALUE_RECURSED_INTO (node->loc) = true;
3709 return 1;
3712 /* Remove redundant entries from equivalence lists in onepart
3713 variables, canonicalizing equivalence sets into star shapes. */
3716 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3718 variable var = *slot;
3719 decl_or_value dv = var->dv;
3720 location_chain node;
3721 decl_or_value cdv;
3722 rtx val, cval;
3723 variable_def **cslot;
3724 bool has_value;
3725 bool has_marks;
3727 if (!var->onepart)
3728 return 1;
3730 gcc_checking_assert (var->n_var_parts == 1);
3732 if (dv_is_value_p (dv))
3734 cval = dv_as_value (dv);
3735 if (!VALUE_RECURSED_INTO (cval))
3736 return 1;
3737 VALUE_RECURSED_INTO (cval) = false;
3739 else
3740 cval = NULL_RTX;
3742 restart:
3743 val = cval;
3744 has_value = false;
3745 has_marks = false;
3747 gcc_assert (var->n_var_parts == 1);
3749 for (node = var->var_part[0].loc_chain; node; node = node->next)
3750 if (GET_CODE (node->loc) == VALUE)
3752 has_value = true;
3753 if (VALUE_RECURSED_INTO (node->loc))
3754 has_marks = true;
3755 if (canon_value_cmp (node->loc, cval))
3756 cval = node->loc;
3759 if (!has_value)
3760 return 1;
3762 if (cval == val)
3764 if (!has_marks || dv_is_decl_p (dv))
3765 return 1;
3767 /* Keep it marked so that we revisit it, either after visiting a
3768 child node, or after visiting a new parent that might be
3769 found out. */
3770 VALUE_RECURSED_INTO (val) = true;
3772 for (node = var->var_part[0].loc_chain; node; node = node->next)
3773 if (GET_CODE (node->loc) == VALUE
3774 && VALUE_RECURSED_INTO (node->loc))
3776 cval = node->loc;
3777 restart_with_cval:
3778 VALUE_RECURSED_INTO (cval) = false;
3779 dv = dv_from_value (cval);
3780 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3781 if (!slot)
3783 gcc_assert (dv_is_decl_p (var->dv));
3784 /* The canonical value was reset and dropped.
3785 Remove it. */
3786 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3787 return 1;
3789 var = *slot;
3790 gcc_assert (dv_is_value_p (var->dv));
3791 if (var->n_var_parts == 0)
3792 return 1;
3793 gcc_assert (var->n_var_parts == 1);
3794 goto restart;
3797 VALUE_RECURSED_INTO (val) = false;
3799 return 1;
3802 /* Push values to the canonical one. */
3803 cdv = dv_from_value (cval);
3804 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3806 for (node = var->var_part[0].loc_chain; node; node = node->next)
3807 if (node->loc != cval)
3809 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3810 node->init, NULL_RTX);
3811 if (GET_CODE (node->loc) == VALUE)
3813 decl_or_value ndv = dv_from_value (node->loc);
3815 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3816 NO_INSERT);
3818 if (canon_value_cmp (node->loc, val))
3820 /* If it could have been a local minimum, it's not any more,
3821 since it's now neighbor to cval, so it may have to push
3822 to it. Conversely, if it wouldn't have prevailed over
3823 val, then whatever mark it has is fine: if it was to
3824 push, it will now push to a more canonical node, but if
3825 it wasn't, then it has already pushed any values it might
3826 have to. */
3827 VALUE_RECURSED_INTO (node->loc) = true;
3828 /* Make sure we visit node->loc by ensuring that cval is
3829 visited too. */
3830 VALUE_RECURSED_INTO (cval) = true;
3832 else if (!VALUE_RECURSED_INTO (node->loc))
3833 /* If we have no need to "recurse" into this node, it's
3834 already "canonicalized", so drop the link to the old
3835 parent. */
3836 clobber_variable_part (set, cval, ndv, 0, NULL);
3838 else if (GET_CODE (node->loc) == REG)
3840 attrs list = set->regs[REGNO (node->loc)], *listp;
3842 /* Change an existing attribute referring to dv so that it
3843 refers to cdv, removing any duplicate this might
3844 introduce, and checking that no previous duplicates
3845 existed, all in a single pass. */
3847 while (list)
3849 if (list->offset == 0
3850 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3851 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3852 break;
3854 list = list->next;
3857 gcc_assert (list);
3858 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3860 list->dv = cdv;
3861 for (listp = &list->next; (list = *listp); listp = &list->next)
3863 if (list->offset)
3864 continue;
3866 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3868 *listp = list->next;
3869 delete list;
3870 list = *listp;
3871 break;
3874 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3877 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3879 for (listp = &list->next; (list = *listp); listp = &list->next)
3881 if (list->offset)
3882 continue;
3884 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3886 *listp = list->next;
3887 delete list;
3888 list = *listp;
3889 break;
3892 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3895 else
3896 gcc_unreachable ();
3898 #if ENABLE_CHECKING
3899 while (list)
3901 if (list->offset == 0
3902 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3903 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3904 gcc_unreachable ();
3906 list = list->next;
3908 #endif
3912 if (val)
3913 set_slot_part (set, val, cslot, cdv, 0,
3914 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3916 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3918 /* Variable may have been unshared. */
3919 var = *slot;
3920 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3921 && var->var_part[0].loc_chain->next == NULL);
3923 if (VALUE_RECURSED_INTO (cval))
3924 goto restart_with_cval;
3926 return 1;
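/* Sketch of the effect (made-up uids): starting from the tangle

     V5: [(reg cx), V3]    V7: [V5]    V3: [(mem ...)]

   the traversal pushes every location and equivalence up to the most
   canonical value, V3, and drops the indirect links, ending in a star:

     V3: [(reg cx), (mem ...), V5, V7]    V5: [V3]    V7: [V3]  */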
3929 /* Bind one-part variables to the canonical value in an equivalence
3930 set. Not doing this causes dataflow convergence failure in rare
3931 circumstances, see PR42873. Unfortunately we can't do this
3932 efficiently as part of canonicalize_values_star, since we may not
3933 have determined or even seen the canonical value of a set when we
3934 get to a variable that references another member of the set. */
3937 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3939 variable var = *slot;
3940 decl_or_value dv = var->dv;
3941 location_chain node;
3942 rtx cval;
3943 decl_or_value cdv;
3944 variable_def **cslot;
3945 variable cvar;
3946 location_chain cnode;
3948 if (!var->onepart || var->onepart == ONEPART_VALUE)
3949 return 1;
3951 gcc_assert (var->n_var_parts == 1);
3953 node = var->var_part[0].loc_chain;
3955 if (GET_CODE (node->loc) != VALUE)
3956 return 1;
3958 gcc_assert (!node->next);
3959 cval = node->loc;
3961 /* Push values to the canonical one. */
3962 cdv = dv_from_value (cval);
3963 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3964 if (!cslot)
3965 return 1;
3966 cvar = *cslot;
3967 gcc_assert (cvar->n_var_parts == 1);
3969 cnode = cvar->var_part[0].loc_chain;
3971 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3972 that are not "more canonical" than it. */
3973 if (GET_CODE (cnode->loc) != VALUE
3974 || !canon_value_cmp (cnode->loc, cval))
3975 return 1;
3977 /* CVAL was found to be non-canonical. Change the variable to point
3978 to the canonical VALUE. */
3979 gcc_assert (!cnode->next);
3980 cval = cnode->loc;
3982 slot = set_slot_part (set, cval, slot, dv, 0,
3983 node->init, node->set_src);
3984 clobber_slot_part (set, cval, slot, 0, node->set_src);
3986 return 1;
3989 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3990 corresponding entry in DSM->src. Multi-part variables are combined
3991 with variable_union, whereas onepart dvs are combined with
3992 intersection. */
3994 static int
3995 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3997 dataflow_set *dst = dsm->dst;
3998 variable_def **dstslot;
3999 variable s2var, dvar = NULL;
4000 decl_or_value dv = s1var->dv;
4001 onepart_enum_t onepart = s1var->onepart;
4002 rtx val;
4003 hashval_t dvhash;
4004 location_chain node, *nodep;
4006 /* If the incoming onepart variable has an empty location list, then
4007 the intersection will be just as empty. For other variables,
4008 it's always union. */
4009 gcc_checking_assert (s1var->n_var_parts
4010 && s1var->var_part[0].loc_chain);
4012 if (!onepart)
4013 return variable_union (s1var, dst);
4015 gcc_checking_assert (s1var->n_var_parts == 1);
4017 dvhash = dv_htab_hash (dv);
4018 if (dv_is_value_p (dv))
4019 val = dv_as_value (dv);
4020 else
4021 val = NULL;
4023 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4024 if (!s2var)
4026 dst_can_be_shared = false;
4027 return 1;
4030 dsm->src_onepart_cnt--;
4031 gcc_assert (s2var->var_part[0].loc_chain
4032 && s2var->onepart == onepart
4033 && s2var->n_var_parts == 1);
4035 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4036 if (dstslot)
4038 dvar = *dstslot;
4039 gcc_assert (dvar->refcount == 1
4040 && dvar->onepart == onepart
4041 && dvar->n_var_parts == 1);
4042 nodep = &dvar->var_part[0].loc_chain;
4044 else
4046 nodep = &node;
4047 node = NULL;
4050 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4052 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4053 dvhash, INSERT);
4054 *dstslot = dvar = s2var;
4055 dvar->refcount++;
4057 else
4059 dst_can_be_shared = false;
4061 intersect_loc_chains (val, nodep, dsm,
4062 s1var->var_part[0].loc_chain, s2var);
4064 if (!dstslot)
4066 if (node)
4068 dvar = onepart_pool (onepart).allocate ();
4069 dvar->dv = dv;
4070 dvar->refcount = 1;
4071 dvar->n_var_parts = 1;
4072 dvar->onepart = onepart;
4073 dvar->in_changed_variables = false;
4074 dvar->var_part[0].loc_chain = node;
4075 dvar->var_part[0].cur_loc = NULL;
4076 if (onepart)
4077 VAR_LOC_1PAUX (dvar) = NULL;
4078 else
4079 VAR_PART_OFFSET (dvar, 0) = 0;
4081 dstslot
4082 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4083 INSERT);
4084 gcc_assert (!*dstslot);
4085 *dstslot = dvar;
4087 else
4088 return 1;
4092 nodep = &dvar->var_part[0].loc_chain;
4093 while ((node = *nodep))
4095 location_chain *nextp = &node->next;
4097 if (GET_CODE (node->loc) == REG)
4099 attrs list;
4101 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4102 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4103 && dv_is_value_p (list->dv))
4104 break;
4106 if (!list)
4107 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4108 dv, 0, node->loc);
4109 /* If this value became canonical for another value that had
4110 this register, we want to leave it alone. */
4111 else if (dv_as_value (list->dv) != val)
4113 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4114 dstslot, dv, 0,
4115 node->init, NULL_RTX);
4116 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4118 /* Since nextp points into the removed node, we can't
4119 use it. The pointer to the next node moved to nodep.
4120 However, if the variable we're walking is unshared
4121 during our walk, we'll keep walking the location list
4122 of the previously-shared variable, in which case the
4123 node won't have been removed, and we'll want to skip
4124 it. That's why we test *nodep here. */
4125 if (*nodep != node)
4126 nextp = nodep;
4129 else
4130 /* Canonicalization puts registers first, so we don't have to
4131 walk it all. */
4132 break;
4133 nodep = nextp;
4136 if (dvar != *dstslot)
4137 dvar = *dstslot;
4138 nodep = &dvar->var_part[0].loc_chain;
4140 if (val)
4142 /* Mark all referenced nodes for canonicalization, and make sure
4143 we have mutual equivalence links. */
4144 VALUE_RECURSED_INTO (val) = true;
4145 for (node = *nodep; node; node = node->next)
4146 if (GET_CODE (node->loc) == VALUE)
4148 VALUE_RECURSED_INTO (node->loc) = true;
4149 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4150 node->init, NULL, INSERT);
4153 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4154 gcc_assert (*dstslot == dvar);
4155 canonicalize_values_star (dstslot, dst);
4156 gcc_checking_assert (dstslot
4157 == shared_hash_find_slot_noinsert_1 (dst->vars,
4158 dv, dvhash));
4159 dvar = *dstslot;
4161 else
4163 bool has_value = false, has_other = false;
4165 /* If we have one value and anything else, we're going to
4166 canonicalize this, so make sure all values have an entry in
4167 the table and are marked for canonicalization. */
4168 for (node = *nodep; node; node = node->next)
4170 if (GET_CODE (node->loc) == VALUE)
4172 /* If this was marked during register canonicalization,
4173 we know we have to canonicalize values. */
4174 if (has_value)
4175 has_other = true;
4176 has_value = true;
4177 if (has_other)
4178 break;
4180 else
4182 has_other = true;
4183 if (has_value)
4184 break;
4188 if (has_value && has_other)
4190 for (node = *nodep; node; node = node->next)
4192 if (GET_CODE (node->loc) == VALUE)
4194 decl_or_value dv = dv_from_value (node->loc);
4195 variable_def **slot = NULL;
4197 if (shared_hash_shared (dst->vars))
4198 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4199 if (!slot)
4200 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4201 INSERT);
4202 if (!*slot)
4204 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4205 var->dv = dv;
4206 var->refcount = 1;
4207 var->n_var_parts = 1;
4208 var->onepart = ONEPART_VALUE;
4209 var->in_changed_variables = false;
4210 var->var_part[0].loc_chain = NULL;
4211 var->var_part[0].cur_loc = NULL;
4212 VAR_LOC_1PAUX (var) = NULL;
4213 *slot = var;
4216 VALUE_RECURSED_INTO (node->loc) = true;
4220 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4221 gcc_assert (*dstslot == dvar);
4222 canonicalize_values_star (dstslot, dst);
4223 gcc_checking_assert (dstslot
4224 == shared_hash_find_slot_noinsert_1 (dst->vars,
4225 dv, dvhash));
4226 dvar = *dstslot;
4230 if (!onepart_variable_different_p (dvar, s2var))
4232 variable_htab_free (dvar);
4233 *dstslot = dvar = s2var;
4234 dvar->refcount++;
4236 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4238 variable_htab_free (dvar);
4239 *dstslot = dvar = s1var;
4240 dvar->refcount++;
4241 dst_can_be_shared = false;
4243 else
4244 dst_can_be_shared = false;
4246 return 1;
4249 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4250 multi-part variable. Unions of multi-part variables and
4251 intersections of one-part ones will be handled in
4252 variable_merge_over_cur(). */
4254 static int
4255 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4257 dataflow_set *dst = dsm->dst;
4258 decl_or_value dv = s2var->dv;
4260 if (!s2var->onepart)
4262 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4263 *dstp = s2var;
4264 s2var->refcount++;
4265 return 1;
4268 dsm->src_onepart_cnt++;
4269 return 1;
4272 /* Combine dataflow set information from SRC2 into DST, merging it
4273 with the previous contents of DST. */
4275 static void
4276 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4278 dataflow_set cur = *dst;
4279 dataflow_set *src1 = &cur;
4280 struct dfset_merge dsm;
4281 int i;
4282 size_t src1_elems, src2_elems;
4283 variable_iterator_type hi;
4284 variable var;
4286 src1_elems = shared_hash_htab (src1->vars)->elements ();
4287 src2_elems = shared_hash_htab (src2->vars)->elements ();
4288 dataflow_set_init (dst);
4289 dst->stack_adjust = cur.stack_adjust;
4290 shared_hash_destroy (dst->vars);
4291 dst->vars = new shared_hash_def;
4292 dst->vars->refcount = 1;
4293 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4295 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4296 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4298 dsm.dst = dst;
4299 dsm.src = src2;
4300 dsm.cur = src1;
4301 dsm.src_onepart_cnt = 0;
4303 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4304 var, variable, hi)
4305 variable_merge_over_src (var, &dsm);
4306 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4307 var, variable, hi)
4308 variable_merge_over_cur (var, &dsm);
4310 if (dsm.src_onepart_cnt)
4311 dst_can_be_shared = false;
4313 dataflow_set_destroy (src1);
4316 /* Mark equivalences among the VALUEs bound to each register of SET. */
4318 static void
4319 dataflow_set_equiv_regs (dataflow_set *set)
4321 int i;
4322 attrs list, *listp;
4324 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4326 rtx canon[NUM_MACHINE_MODES];
4328 /* If the list is empty or has a single entry, there is nothing
4329 to canonicalize. */
4330 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4331 continue;
4333 memset (canon, 0, sizeof (canon));
4335 for (list = set->regs[i]; list; list = list->next)
4336 if (list->offset == 0 && dv_is_value_p (list->dv))
4338 rtx val = dv_as_value (list->dv);
4339 rtx *cvalp = &canon[(int)GET_MODE (val)];
4340 rtx cval = *cvalp;
4342 if (canon_value_cmp (val, cval))
4343 *cvalp = val;
4346 for (list = set->regs[i]; list; list = list->next)
4347 if (list->offset == 0 && dv_onepart_p (list->dv))
4349 rtx cval = canon[(int)GET_MODE (list->loc)];
4351 if (!cval)
4352 continue;
4354 if (dv_is_value_p (list->dv))
4356 rtx val = dv_as_value (list->dv);
4358 if (val == cval)
4359 continue;
4361 VALUE_RECURSED_INTO (val) = true;
4362 set_variable_part (set, val, dv_from_value (cval), 0,
4363 VAR_INIT_STATUS_INITIALIZED,
4364 NULL, NO_INSERT);
4367 VALUE_RECURSED_INTO (cval) = true;
4368 set_variable_part (set, cval, list->dv, 0,
4369 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
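/* Finally, star-canonicalize the values marked above; entries may be
   removed from the register list as we go, hence the *listp check in
   the loop below.  */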
4372 for (listp = &set->regs[i]; (list = *listp);
4373 listp = list ? &list->next : listp)
4374 if (list->offset == 0 && dv_onepart_p (list->dv))
4376 rtx cval = canon[(int)GET_MODE (list->loc)];
4377 variable_def **slot;
4379 if (!cval)
4380 continue;
4382 if (dv_is_value_p (list->dv))
4384 rtx val = dv_as_value (list->dv);
4385 if (!VALUE_RECURSED_INTO (val))
4386 continue;
4389 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4390 canonicalize_values_star (slot, set);
4391 if (*listp != list)
4392 list = NULL;
4397 /* Remove any redundant values in the location list of VAR, which must
4398 be unshared and 1-part. */
4400 static void
4401 remove_duplicate_values (variable var)
4403 location_chain node, *nodep;
4405 gcc_assert (var->onepart);
4406 gcc_assert (var->n_var_parts == 1);
4407 gcc_assert (var->refcount == 1);
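/* First pass: mark each VALUE at its first occurrence and delete any
   later duplicates; the loop after this one clears the marks.  */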
4409 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4411 if (GET_CODE (node->loc) == VALUE)
4413 if (VALUE_RECURSED_INTO (node->loc))
4415 /* Remove duplicate value node. */
4416 *nodep = node->next;
4417 delete node;
4418 continue;
4420 else
4421 VALUE_RECURSED_INTO (node->loc) = true;
4423 nodep = &node->next;
4426 for (node = var->var_part[0].loc_chain; node; node = node->next)
4427 if (GET_CODE (node->loc) == VALUE)
4429 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4430 VALUE_RECURSED_INTO (node->loc) = false;
4435 /* Hash table iteration argument passed to the variable_post_merge_* callbacks. */
4436 struct dfset_post_merge
4438 /* The new input set for the current block. */
4439 dataflow_set *set;
4440 /* Pointer to the permanent input set for the current block, or
4441 NULL. */
4442 dataflow_set **permp;
4445 /* Create values for incoming expressions associated with one-part
4446 variables that don't have value numbers for them. */
4448 int
4449 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4451 dataflow_set *set = dfpm->set;
4452 variable var = *slot;
4453 location_chain node;
4455 if (!var->onepart || !var->n_var_parts)
4456 return 1;
4458 gcc_assert (var->n_var_parts == 1);
4460 if (dv_is_decl_p (var->dv))
4462 bool check_dupes = false;
4464 restart:
4465 for (node = var->var_part[0].loc_chain; node; node = node->next)
4467 if (GET_CODE (node->loc) == VALUE)
4468 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4469 else if (GET_CODE (node->loc) == REG)
4471 attrs att, *attp, *curp = NULL;
4473 if (var->refcount != 1)
4475 slot = unshare_variable (set, slot, var,
4476 VAR_INIT_STATUS_INITIALIZED);
4477 var = *slot;
4478 goto restart;
4481 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4482 attp = &att->next)
4483 if (att->offset == 0
4484 && GET_MODE (att->loc) == GET_MODE (node->loc))
4486 if (dv_is_value_p (att->dv))
4488 rtx cval = dv_as_value (att->dv);
4489 node->loc = cval;
4490 check_dupes = true;
4491 break;
4493 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4494 curp = attp;
4497 if (!curp)
4499 curp = attp;
4500 while (*curp)
4501 if ((*curp)->offset == 0
4502 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4503 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4504 break;
4505 else
4506 curp = &(*curp)->next;
4507 gcc_assert (*curp);
4510 if (!att)
4512 decl_or_value cdv;
4513 rtx cval;
4515 if (!*dfpm->permp)
4517 *dfpm->permp = XNEW (dataflow_set);
4518 dataflow_set_init (*dfpm->permp);
4521 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4522 att; att = att->next)
4523 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4525 gcc_assert (att->offset == 0
4526 && dv_is_value_p (att->dv));
4527 val_reset (set, att->dv);
4528 break;
4531 if (att)
4533 cdv = att->dv;
4534 cval = dv_as_value (cdv);
4536 else
4538 /* Create a unique value to hold this register,
4539 that ought to be found and reused in
4540 subsequent rounds. */
4541 cselib_val *v;
4542 gcc_assert (!cselib_lookup (node->loc,
4543 GET_MODE (node->loc), 0,
4544 VOIDmode));
4545 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4546 VOIDmode);
4547 cselib_preserve_value (v);
4548 cselib_invalidate_rtx (node->loc);
4549 cval = v->val_rtx;
4550 cdv = dv_from_value (cval);
4551 if (dump_file)
4552 fprintf (dump_file,
4553 "Created new value %u:%u for reg %i\n",
4554 v->uid, v->hash, REGNO (node->loc));
4557 var_reg_decl_set (*dfpm->permp, node->loc,
4558 VAR_INIT_STATUS_INITIALIZED,
4559 cdv, 0, NULL, INSERT);
4561 node->loc = cval;
4562 check_dupes = true;
4565 /* Remove the attribute referring to the decl, which now
4566 uses the value for the register, either already existing or
4567 to be added when we bring perm in. */
4568 att = *curp;
4569 *curp = att->next;
4570 delete att;
4574 if (check_dupes)
4575 remove_duplicate_values (var);
4578 return 1;
4581 /* Reset values in the permanent set that are not associated with the
4582 chosen expression. */
4584 int
4585 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4587 dataflow_set *set = dfpm->set;
4588 variable pvar = *pslot, var;
4589 location_chain pnode;
4590 decl_or_value dv;
4591 attrs att;
4593 gcc_assert (dv_is_value_p (pvar->dv)
4594 && pvar->n_var_parts == 1);
4595 pnode = pvar->var_part[0].loc_chain;
4596 gcc_assert (pnode
4597 && !pnode->next
4598 && REG_P (pnode->loc));
4600 dv = pvar->dv;
4602 var = shared_hash_find (set->vars, dv);
4603 if (var)
4605 /* Although variable_post_merge_new_vals may have made decls
4606 non-star-canonical, values that pre-existed in canonical form
4607 remain canonical, and newly-created values reference a single
4608 REG, so they are canonical as well. Since VAR has the
4609 location list for a VALUE, using find_loc_in_1pdv for it is
4610 fine, since VALUEs don't map back to DECLs. */
4611 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4612 return 1;
4613 val_reset (set, dv);
4616 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4617 if (att->offset == 0
4618 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4619 && dv_is_value_p (att->dv))
4620 break;
4622 /* If there is a value associated with this register already, create
4623 an equivalence. */
4624 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4626 rtx cval = dv_as_value (att->dv);
4627 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4628 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4629 NULL, INSERT);
4631 else if (!att)
4633 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4634 dv, 0, pnode->loc);
4635 variable_union (pvar, set);
4638 return 1;
4641 /* Adjust SET after merging: create values for new one-part expressions,
4642 reset stale values in the permanent set, and re-canonicalize. */
4644 static void
4645 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4647 struct dfset_post_merge dfpm;
4649 dfpm.set = set;
4650 dfpm.permp = permp;
4652 shared_hash_htab (set->vars)
4653 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4654 if (*permp)
4655 shared_hash_htab ((*permp)->vars)
4656 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4657 shared_hash_htab (set->vars)
4658 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4659 shared_hash_htab (set->vars)
4660 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4663 /* Return a node whose loc is a MEM that refers to EXPR in the
4664 location list of a one-part variable or value VAL, or in that of
4665 any values recursively mentioned in the location lists. */
4667 static location_chain
4668 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4670 location_chain node;
4671 decl_or_value dv;
4672 variable var;
4673 location_chain where = NULL;
4675 if (!val)
4676 return NULL;
4678 gcc_assert (GET_CODE (val) == VALUE
4679 && !VALUE_RECURSED_INTO (val));
4681 dv = dv_from_value (val);
4682 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4684 if (!var)
4685 return NULL;
4687 gcc_assert (var->onepart);
4689 if (!var->n_var_parts)
4690 return NULL;
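/* Mark VAL while its location list is being scanned, so that mutually
   referencing VALUEs do not make the recursion below endless.  */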
4692 VALUE_RECURSED_INTO (val) = true;
4694 for (node = var->var_part[0].loc_chain; node; node = node->next)
4695 if (MEM_P (node->loc)
4696 && MEM_EXPR (node->loc) == expr
4697 && INT_MEM_OFFSET (node->loc) == 0)
4699 where = node;
4700 break;
4702 else if (GET_CODE (node->loc) == VALUE
4703 && !VALUE_RECURSED_INTO (node->loc)
4704 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4705 break;
4707 VALUE_RECURSED_INTO (val) = false;
4709 return where;
4712 /* Return TRUE if the value of MEM may vary across a call. */
4714 static bool
4715 mem_dies_at_call (rtx mem)
4717 tree expr = MEM_EXPR (mem);
4718 tree decl;
4720 if (!expr)
4721 return true;
4723 decl = get_base_address (expr);
4725 if (!decl)
4726 return true;
4728 if (!DECL_P (decl))
4729 return true;
4731 return (may_be_aliased (decl)
4732 || (!TREE_READONLY (decl) && is_global_var (decl)));
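/* For instance (a sketch): a MEM whose MEM_EXPR is a non-readonly
   global, or an aliased local whose address escapes, dies at a call,
   since the callee may store to it; a MEM for a non-aliased,
   non-global local survives.  */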
4735 /* Remove all MEMs from the location list of a hash table entry for a
4736 one-part variable, except those whose MEM attributes map back to
4737 the variable itself, directly or within a VALUE. */
4739 int
4740 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4742 variable var = *slot;
4744 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4746 tree decl = dv_as_decl (var->dv);
4747 location_chain loc, *locp;
4748 bool changed = false;
4750 if (!var->n_var_parts)
4751 return 1;
4753 gcc_assert (var->n_var_parts == 1);
4755 if (shared_var_p (var, set->vars))
4757 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4759 /* We want to remove dying MEMs that don't refer to DECL. */
4760 if (GET_CODE (loc->loc) == MEM
4761 && (MEM_EXPR (loc->loc) != decl
4762 || INT_MEM_OFFSET (loc->loc) != 0)
4763 && !mem_dies_at_call (loc->loc))
4764 break;
4765 /* We want to move MEMs that do refer to DECL here. */
4766 else if (GET_CODE (loc->loc) == VALUE
4767 && find_mem_expr_in_1pdv (decl, loc->loc,
4768 shared_hash_htab (set->vars)))
4769 break;
4772 if (!loc)
4773 return 1;
4775 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4776 var = *slot;
4777 gcc_assert (var->n_var_parts == 1);
4780 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4781 loc; loc = *locp)
4783 rtx old_loc = loc->loc;
4784 if (GET_CODE (old_loc) == VALUE)
4786 location_chain mem_node
4787 = find_mem_expr_in_1pdv (decl, loc->loc,
4788 shared_hash_htab (set->vars));
4790 /* ??? This picks up only one out of multiple MEMs that
4791 refer to the same variable. Do we ever need to be
4792 concerned about dealing with more than one, or, given
4793 that they should all map to the same variable
4794 location, their addresses will have been merged and
4795 they will be regarded as equivalent? */
4796 if (mem_node)
4798 loc->loc = mem_node->loc;
4799 loc->set_src = mem_node->set_src;
4800 loc->init = MIN (loc->init, mem_node->init);
4804 if (GET_CODE (loc->loc) != MEM
4805 || (MEM_EXPR (loc->loc) == decl
4806 && INT_MEM_OFFSET (loc->loc) == 0)
4807 || !mem_dies_at_call (loc->loc))
4809 if (old_loc != loc->loc && emit_notes)
4811 if (old_loc == var->var_part[0].cur_loc)
4813 changed = true;
4814 var->var_part[0].cur_loc = NULL;
4817 locp = &loc->next;
4818 continue;
4821 if (emit_notes)
4823 if (old_loc == var->var_part[0].cur_loc)
4825 changed = true;
4826 var->var_part[0].cur_loc = NULL;
4829 *locp = loc->next;
4830 delete loc;
4833 if (!var->var_part[0].loc_chain)
4835 var->n_var_parts--;
4836 changed = true;
4838 if (changed)
4839 variable_was_changed (var, set);
4842 return 1;
4845 /* Remove all MEMs from the location list of a hash table entry for a
4846 value. */
4848 int
4849 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4851 variable var = *slot;
4853 if (var->onepart == ONEPART_VALUE)
4855 location_chain loc, *locp;
4856 bool changed = false;
4857 rtx cur_loc;
4859 gcc_assert (var->n_var_parts == 1);
4861 if (shared_var_p (var, set->vars))
4863 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4864 if (GET_CODE (loc->loc) == MEM
4865 && mem_dies_at_call (loc->loc))
4866 break;
4868 if (!loc)
4869 return 1;
4871 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4872 var = *slot;
4873 gcc_assert (var->n_var_parts == 1);
4876 if (VAR_LOC_1PAUX (var))
4877 cur_loc = VAR_LOC_FROM (var);
4878 else
4879 cur_loc = var->var_part[0].cur_loc;
4881 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4882 loc; loc = *locp)
4884 if (GET_CODE (loc->loc) != MEM
4885 || !mem_dies_at_call (loc->loc))
4887 locp = &loc->next;
4888 continue;
4891 *locp = loc->next;
4892 /* If we have deleted the location that was last emitted,
4893 we have to emit a new location, so add the variable to the
4894 set of changed variables. */
4895 if (cur_loc == loc->loc)
4897 changed = true;
4898 var->var_part[0].cur_loc = NULL;
4899 if (VAR_LOC_1PAUX (var))
4900 VAR_LOC_FROM (var) = NULL;
4902 delete loc;
4905 if (!var->var_part[0].loc_chain)
4907 var->n_var_parts--;
4908 changed = true;
4910 if (changed)
4911 variable_was_changed (var, set);
4914 return 1;
4917 /* Remove all variable-location information about call-clobbered
4918 registers, as well as associations between MEMs and VALUEs. */
4920 static void
4921 dataflow_set_clear_at_call (dataflow_set *set)
4923 unsigned int r;
4924 hard_reg_set_iterator hrsi;
4926 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4927 var_regno_delete (set, r);
4929 if (MAY_HAVE_DEBUG_INSNS)
4931 set->traversed_vars = set->vars;
4932 shared_hash_htab (set->vars)
4933 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4934 set->traversed_vars = set->vars;
4935 shared_hash_htab (set->vars)
4936 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4937 set->traversed_vars = NULL;
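/* Return true if some location in VP1's chain has no counterpart in
   VP2's chain (same REGNO for registers, rtx_equal_p otherwise).  */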
4941 static bool
4942 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4944 location_chain lc1, lc2;
4946 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4948 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4950 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4952 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4953 break;
4955 if (rtx_equal_p (lc1->loc, lc2->loc))
4956 break;
4958 if (!lc2)
4959 return true;
4961 return false;
4964 /* Return true if one-part variables VAR1 and VAR2 are different.
4965 They must be in canonical order. */
4967 static bool
4968 onepart_variable_different_p (variable var1, variable var2)
4970 location_chain lc1, lc2;
4972 if (var1 == var2)
4973 return false;
4975 gcc_assert (var1->n_var_parts == 1
4976 && var2->n_var_parts == 1);
4978 lc1 = var1->var_part[0].loc_chain;
4979 lc2 = var2->var_part[0].loc_chain;
4981 gcc_assert (lc1 && lc2);
4983 while (lc1 && lc2)
4985 if (loc_cmp (lc1->loc, lc2->loc))
4986 return true;
4987 lc1 = lc1->next;
4988 lc2 = lc2->next;
4991 return lc1 != lc2;
4994 /* Return true if variables VAR1 and VAR2 are different. */
4996 static bool
4997 variable_different_p (variable var1, variable var2)
4999 int i;
5001 if (var1 == var2)
5002 return false;
5004 if (var1->onepart != var2->onepart)
5005 return true;
5007 if (var1->n_var_parts != var2->n_var_parts)
5008 return true;
5010 if (var1->onepart && var1->n_var_parts)
5012 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5013 && var1->n_var_parts == 1);
5014 /* One-part values have locations in a canonical order. */
5015 return onepart_variable_different_p (var1, var2);
5018 for (i = 0; i < var1->n_var_parts; i++)
5020 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5021 return true;
5022 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5023 return true;
5024 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5025 return true;
5027 return false;
5030 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5032 static bool
5033 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5035 variable_iterator_type hi;
5036 variable var1;
5038 if (old_set->vars == new_set->vars)
5039 return false;
5041 if (shared_hash_htab (old_set->vars)->elements ()
5042 != shared_hash_htab (new_set->vars)->elements ())
5043 return true;
5045 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5046 var1, variable, hi)
5048 variable_table_type *htab = shared_hash_htab (new_set->vars);
5049 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5050 if (!var2)
5052 if (dump_file && (dump_flags & TDF_DETAILS))
5054 fprintf (dump_file, "dataflow difference found: removal of:\n");
5055 dump_var (var1);
5057 return true;
5060 if (variable_different_p (var1, var2))
5062 if (dump_file && (dump_flags & TDF_DETAILS))
5064 fprintf (dump_file, "dataflow difference found: "
5065 "old and new follow:\n");
5066 dump_var (var1);
5067 dump_var (var2);
5069 return true;
5073 /* No need to traverse the second hashtab: if both have the same number
5074 of elements and every entry of the first one was found in the second,
5075 then the second can't have any extra entries. */
5076 return false;
5079 /* Free the contents of dataflow set SET. */
5081 static void
5082 dataflow_set_destroy (dataflow_set *set)
5084 int i;
5086 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5087 attrs_list_clear (&set->regs[i]);
5089 shared_hash_destroy (set->vars);
5090 set->vars = NULL;
5093 /* Return true if RTL X contains a SYMBOL_REF. */
5095 static bool
5096 contains_symbol_ref (rtx x)
5098 const char *fmt;
5099 RTX_CODE code;
5100 int i;
5102 if (!x)
5103 return false;
5105 code = GET_CODE (x);
5106 if (code == SYMBOL_REF)
5107 return true;
5109 fmt = GET_RTX_FORMAT (code);
5110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5112 if (fmt[i] == 'e')
5114 if (contains_symbol_ref (XEXP (x, i)))
5115 return true;
5117 else if (fmt[i] == 'E')
5119 int j;
5120 for (j = 0; j < XVECLEN (x, i); j++)
5121 if (contains_symbol_ref (XVECEXP (x, i, j)))
5122 return true;
5126 return false;
5129 /* Shall EXPR be tracked? */
5131 static bool
5132 track_expr_p (tree expr, bool need_rtl)
5134 rtx decl_rtl;
5135 tree realdecl;
5137 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5138 return DECL_RTL_SET_P (expr);
5140 /* If EXPR is not a parameter or a variable do not track it. */
5141 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5142 return 0;
5144 /* It also must have a name... */
5145 if (!DECL_NAME (expr) && need_rtl)
5146 return 0;
5148 /* ... and an RTL assigned to it. */
5149 decl_rtl = DECL_RTL_IF_SET (expr);
5150 if (!decl_rtl && need_rtl)
5151 return 0;
5153 /* If this expression is really a debug alias of some other declaration, we
5154 don't need to track this expression if the ultimate declaration is
5155 ignored. */
5156 realdecl = expr;
5157 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5159 realdecl = DECL_DEBUG_EXPR (realdecl);
5160 if (!DECL_P (realdecl))
5162 if (handled_component_p (realdecl)
5163 || (TREE_CODE (realdecl) == MEM_REF
5164 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5166 HOST_WIDE_INT bitsize, bitpos, maxsize;
5167 tree innerdecl
5168 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5169 &maxsize);
5170 if (!DECL_P (innerdecl)
5171 || DECL_IGNORED_P (innerdecl)
5172 /* Do not track declarations for parts of tracked parameters
5173 since we want to track them as a whole instead. */
5174 || (TREE_CODE (innerdecl) == PARM_DECL
5175 && DECL_MODE (innerdecl) != BLKmode
5176 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5177 || TREE_STATIC (innerdecl)
5178 || bitsize <= 0
5179 || bitpos + bitsize > 256
5180 || bitsize != maxsize)
5181 return 0;
5182 else
5183 realdecl = expr;
5185 else
5186 return 0;
5190 /* Do not track EXPR if REALDECL should be ignored for debugging
5191 purposes. */
5192 if (DECL_IGNORED_P (realdecl))
5193 return 0;
5195 /* Do not track global variables until we are able to emit a correct
5196 location list for them. */
5197 if (TREE_STATIC (realdecl))
5198 return 0;
5200 /* When EXPR is a DECL for an alias of some variable (see example),
5201 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
5202 DECL_RTL contains a SYMBOL_REF.
5204 Example:
5205 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5206 char **_dl_argv;
5208 if (decl_rtl && MEM_P (decl_rtl)
5209 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5210 return 0;
5212 /* If RTX is a memory it should not be very large (because it would be
5213 an array or struct). */
5214 if (decl_rtl && MEM_P (decl_rtl))
5216 /* Do not track structures and arrays. */
5217 if (GET_MODE (decl_rtl) == BLKmode
5218 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5219 return 0;
5220 if (MEM_SIZE_KNOWN_P (decl_rtl)
5221 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5222 return 0;
5225 DECL_CHANGED (expr) = 0;
5226 DECL_CHANGED (realdecl) = 0;
5227 return 1;
5230 /* Determine whether a given LOC refers to the same variable part as
5231 EXPR+OFFSET. */
5233 static bool
5234 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5236 tree expr2;
5237 HOST_WIDE_INT offset2;
5239 if (! DECL_P (expr))
5240 return false;
5242 if (REG_P (loc))
5244 expr2 = REG_EXPR (loc);
5245 offset2 = REG_OFFSET (loc);
5247 else if (MEM_P (loc))
5249 expr2 = MEM_EXPR (loc);
5250 offset2 = INT_MEM_OFFSET (loc);
5252 else
5253 return false;
5255 if (! expr2 || ! DECL_P (expr2))
5256 return false;
5258 expr = var_debug_decl (expr);
5259 expr2 = var_debug_decl (expr2);
5261 return (expr == expr2 && offset == offset2);
5264 /* LOC is a REG or MEM that we would like to track if possible.
5265 If EXPR is null, we don't know what expression LOC refers to,
5266 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5267 LOC is an lvalue register.
5269 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5270 is something we can track. When returning true, store the mode of
5271 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5272 from EXPR in *OFFSET_OUT (if nonnull). */
5274 static bool
5275 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5276 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5278 machine_mode mode;
5280 if (expr == NULL || !track_expr_p (expr, true))
5281 return false;
5283 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5284 whole subreg, but only the old inner part is really relevant. */
5285 mode = GET_MODE (loc);
5286 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5288 machine_mode pseudo_mode;
5290 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5291 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5293 offset += byte_lowpart_offset (pseudo_mode, mode);
5294 mode = pseudo_mode;
5298 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5299 Do the same if we are storing to a register and EXPR occupies
5300 the whole of register LOC; in that case, the whole of EXPR is
5301 being changed. We exclude complex modes from the second case
5302 because the real and imaginary parts are represented as separate
5303 pseudo registers, even if the whole complex value fits into one
5304 hard register. */
5305 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5306 || (store_reg_p
5307 && !COMPLEX_MODE_P (DECL_MODE (expr))
5308 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5309 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5311 mode = DECL_MODE (expr);
5312 offset = 0;
5315 if (offset < 0 || offset >= MAX_VAR_PARTS)
5316 return false;
5318 if (mode_out)
5319 *mode_out = mode;
5320 if (offset_out)
5321 *offset_out = offset;
5322 return true;
5325 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5326 want to track. When returning nonnull, make sure that the attributes
5327 on the returned value are updated. */
5329 static rtx
5330 var_lowpart (machine_mode mode, rtx loc)
5332 unsigned int offset, reg_offset, regno;
5334 if (GET_MODE (loc) == mode)
5335 return loc;
5337 if (!REG_P (loc) && !MEM_P (loc))
5338 return NULL;
5340 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5342 if (MEM_P (loc))
5343 return adjust_address_nv (loc, mode, offset);
5345 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5346 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5347 reg_offset, mode);
5348 return gen_rtx_REG_offset (loc, mode, regno, offset);
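/* E.g. (a sketch, target-dependent): var_lowpart (SImode, (reg:DI 1))
   would yield (reg:SI 1) with its REG_ATTRS offset updated, while for
   a MEM the address is offset by the lowpart bytes instead.  */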
5351 /* Carry information about uses and stores while walking rtx. */
5353 struct count_use_info
5355 /* The insn where the RTX is. */
5356 rtx_insn *insn;
5358 /* The basic block where insn is. */
5359 basic_block bb;
5361 /* The array of n_sets sets in the insn, as determined by cselib. */
5362 struct cselib_set *sets;
5363 int n_sets;
5365 /* True if we're counting stores, false otherwise. */
5366 bool store_p;
5369 /* Find a VALUE corresponding to X. */
5371 static inline cselib_val *
5372 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5374 int i;
5376 if (cui->sets)
5378 /* This is called after uses are set up and before stores are
5379 processed by cselib, so it's safe to look up srcs, but not
5380 dsts. So we look up expressions that appear in srcs or in
5381 dest expressions, but we search the sets array for dests of
5382 stores. */
5383 if (cui->store_p)
5385 /* Some targets represent memset and memcpy patterns
5386 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5387 (set (mem:BLK ...) (const_int ...)) or
5388 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5389 in that case, otherwise we end up with mode mismatches. */
5390 if (mode == BLKmode && MEM_P (x))
5391 return NULL;
5392 for (i = 0; i < cui->n_sets; i++)
5393 if (cui->sets[i].dest == x)
5394 return cui->sets[i].src_elt;
5396 else
5397 return cselib_lookup (x, mode, 0, VOIDmode);
5400 return NULL;
5403 /* Replace all registers and addresses in an expression with VALUE
5404 expressions that map back to them, unless the expression is a
5405 register. If no mapping is or can be performed, returns NULL. */
5407 static rtx
5408 replace_expr_with_values (rtx loc)
5410 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5411 return NULL;
5412 else if (MEM_P (loc))
5414 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5415 get_address_mode (loc), 0,
5416 GET_MODE (loc));
5417 if (addr)
5418 return replace_equiv_address_nv (loc, addr->val_rtx);
5419 else
5420 return NULL;
5422 else
5423 return cselib_subst_to_values (loc, VOIDmode);
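/* For example (with a hypothetical VALUE number): (mem:SI (reg/f:SI 6))
   may become (mem:SI (value:SI 7:7)) once cselib knows a VALUE for the
   address, while a bare REG or ENTRY_VALUE yields NULL.  */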
5426 /* Return true if X contains a DEBUG_EXPR. */
5428 static bool
5429 rtx_debug_expr_p (const_rtx x)
5431 subrtx_iterator::array_type array;
5432 FOR_EACH_SUBRTX (iter, array, x, ALL)
5433 if (GET_CODE (*iter) == DEBUG_EXPR)
5434 return true;
5435 return false;
5438 /* Determine what kind of micro operation to choose for a USE. Return
5439 MO_CLOBBER if no micro operation is to be generated. */
5441 static enum micro_operation_type
5442 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5444 tree expr;
5446 if (cui && cui->sets)
5448 if (GET_CODE (loc) == VAR_LOCATION)
5450 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5452 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5453 if (! VAR_LOC_UNKNOWN_P (ploc))
5455 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5456 VOIDmode);
5458 /* ??? flag_float_store and volatile mems are never
5459 given values, but we could in theory use them for
5460 locations. */
5461 gcc_assert (val || 1);
5463 return MO_VAL_LOC;
5465 else
5466 return MO_CLOBBER;
5469 if (REG_P (loc) || MEM_P (loc))
5471 if (modep)
5472 *modep = GET_MODE (loc);
5473 if (cui->store_p)
5475 if (REG_P (loc)
5476 || (find_use_val (loc, GET_MODE (loc), cui)
5477 && cselib_lookup (XEXP (loc, 0),
5478 get_address_mode (loc), 0,
5479 GET_MODE (loc))))
5480 return MO_VAL_SET;
5482 else
5484 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5486 if (val && !cselib_preserved_value_p (val))
5487 return MO_VAL_USE;
5492 if (REG_P (loc))
5494 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5496 if (loc == cfa_base_rtx)
5497 return MO_CLOBBER;
5498 expr = REG_EXPR (loc);
5500 if (!expr)
5501 return MO_USE_NO_VAR;
5502 else if (target_for_debug_bind (var_debug_decl (expr)))
5503 return MO_CLOBBER;
5504 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5505 false, modep, NULL))
5506 return MO_USE;
5507 else
5508 return MO_USE_NO_VAR;
5510 else if (MEM_P (loc))
5512 expr = MEM_EXPR (loc);
5514 if (!expr)
5515 return MO_CLOBBER;
5516 else if (target_for_debug_bind (var_debug_decl (expr)))
5517 return MO_CLOBBER;
5518 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5519 false, modep, NULL)
5520 /* Multi-part variables shouldn't refer to one-part
5521 variable names such as VALUEs (never happens) or
5522 DEBUG_EXPRs (only happens in the presence of debug
5523 insns). */
5524 && (!MAY_HAVE_DEBUG_INSNS
5525 || !rtx_debug_expr_p (XEXP (loc, 0))))
5526 return MO_USE;
5527 else
5528 return MO_CLOBBER;
5531 return MO_CLOBBER;
5534 /* Log to OUT information about micro-operation MOPT involving X in
5535 INSN of BB. */
5537 static inline void
5538 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5539 enum micro_operation_type mopt, FILE *out)
5541 fprintf (out, "bb %i op %i insn %i %s ",
5542 bb->index, VTI (bb)->mos.length (),
5543 INSN_UID (insn), micro_operation_type_name[mopt]);
5544 print_inline_rtx (out, x, 2);
5545 fputc ('\n', out);
5548 /* Tell whether the CONCAT used to hold a VALUE and its location
5549 needs value resolution, i.e., an attempt at mapping the location
5550 back to other incoming values. */
5551 #define VAL_NEEDS_RESOLUTION(x) \
5552 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5553 /* Whether the location in the CONCAT is a tracked expression, that
5554 should also be handled like a MO_USE. */
5555 #define VAL_HOLDS_TRACK_EXPR(x) \
5556 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5557 /* Whether the location in the CONCAT should be handled like a MO_COPY
5558 as well. */
5559 #define VAL_EXPR_IS_COPIED(x) \
5560 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5561 /* Whether the location in the CONCAT should be handled like a
5562 MO_CLOBBER as well. */
5563 #define VAL_EXPR_IS_CLOBBERED(x) \
5564 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
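/* These four flags reuse existing rtx flag bits on the CONCATs built
   by add_uses and add_stores below; they are meaningful only on such
   micro-operation CONCATs.  */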
5566 /* All preserved VALUEs. */
5567 static vec<rtx> preserved_values;
5569 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5571 static void
5572 preserve_value (cselib_val *val)
5574 cselib_preserve_value (val);
5575 preserved_values.safe_push (val->val_rtx);
5578 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5579 any rtxes that are unsuitable for CONST use and have not been
5580 replaced by VALUEs are discovered. */
5582 static bool
5583 non_suitable_const (const_rtx x)
5585 subrtx_iterator::array_type array;
5586 FOR_EACH_SUBRTX (iter, array, x, ALL)
5588 const_rtx x = *iter;
5589 switch (GET_CODE (x))
5591 case REG:
5592 case DEBUG_EXPR:
5593 case PC:
5594 case SCRATCH:
5595 case CC0:
5596 case ASM_INPUT:
5597 case ASM_OPERANDS:
5598 return true;
5599 case MEM:
5600 if (!MEM_READONLY_P (x))
5601 return true;
5602 break;
5603 default:
5604 break;
5607 return false;
5610 /* Add the use (register or memory reference) LOC, which will be
5611 tracked, to VTI (bb)->mos. */
5613 static void
5614 add_uses (rtx loc, struct count_use_info *cui)
5616 machine_mode mode = VOIDmode;
5617 enum micro_operation_type type = use_type (loc, cui, &mode);
5619 if (type != MO_CLOBBER)
5621 basic_block bb = cui->bb;
5622 micro_operation mo;
5624 mo.type = type;
5625 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5626 mo.insn = cui->insn;
5628 if (type == MO_VAL_LOC)
5630 rtx oloc = loc;
5631 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5632 cselib_val *val;
5634 gcc_assert (cui->sets);
5636 if (MEM_P (vloc)
5637 && !REG_P (XEXP (vloc, 0))
5638 && !MEM_P (XEXP (vloc, 0)))
5640 rtx mloc = vloc;
5641 machine_mode address_mode = get_address_mode (mloc);
5642 cselib_val *val
5643 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5644 GET_MODE (mloc));
5646 if (val && !cselib_preserved_value_p (val))
5647 preserve_value (val);
5650 if (CONSTANT_P (vloc)
5651 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5652 /* For constants don't look up any value. */;
5653 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5654 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5656 machine_mode mode2;
5657 enum micro_operation_type type2;
5658 rtx nloc = NULL;
5659 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5661 if (resolvable)
5662 nloc = replace_expr_with_values (vloc);
5664 if (nloc)
5666 oloc = shallow_copy_rtx (oloc);
5667 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5670 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5672 type2 = use_type (vloc, 0, &mode2);
5674 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5675 || type2 == MO_CLOBBER);
5677 if (type2 == MO_CLOBBER
5678 && !cselib_preserved_value_p (val))
5680 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5681 preserve_value (val);
5684 else if (!VAR_LOC_UNKNOWN_P (vloc))
5686 oloc = shallow_copy_rtx (oloc);
5687 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5690 mo.u.loc = oloc;
5692 else if (type == MO_VAL_USE)
5694 machine_mode mode2 = VOIDmode;
5695 enum micro_operation_type type2;
5696 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5697 rtx vloc, oloc = loc, nloc;
5699 gcc_assert (cui->sets);
5701 if (MEM_P (oloc)
5702 && !REG_P (XEXP (oloc, 0))
5703 && !MEM_P (XEXP (oloc, 0)))
5705 rtx mloc = oloc;
5706 machine_mode address_mode = get_address_mode (mloc);
5707 cselib_val *val
5708 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5709 GET_MODE (mloc));
5711 if (val && !cselib_preserved_value_p (val))
5712 preserve_value (val);
5715 type2 = use_type (loc, 0, &mode2);
5717 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5718 || type2 == MO_CLOBBER);
5720 if (type2 == MO_USE)
5721 vloc = var_lowpart (mode2, loc);
5722 else
5723 vloc = oloc;
5725 /* The loc of a MO_VAL_USE may have two forms:
5727 (concat val src): val is at src, a value-based
5728 representation.
5730 (concat (concat val use) src): same as above, with use as
5731 the MO_USE tracked value, if it differs from src.
5735 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5736 nloc = replace_expr_with_values (loc);
5737 if (!nloc)
5738 nloc = oloc;
5740 if (vloc != nloc)
5741 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5742 else
5743 oloc = val->val_rtx;
5745 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5747 if (type2 == MO_USE)
5748 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5749 if (!cselib_preserved_value_p (val))
5751 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5752 preserve_value (val);
5755 else
5756 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5758 if (dump_file && (dump_flags & TDF_DETAILS))
5759 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5760 VTI (bb)->mos.safe_push (mo);
5764 /* Helper function for finding all uses of REG/MEM in X; CUI carries the enclosing insn. */
5766 static void
5767 add_uses_1 (rtx *x, void *cui)
5769 subrtx_var_iterator::array_type array;
5770 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5771 add_uses (*iter, (struct count_use_info *) cui);
5774 /* This is the value used during expansion of locations. We want it
5775 to be unbounded, so that variables expanded deep in a recursion
5776 nest are fully evaluated, so that their values are cached
5777 correctly. We avoid recursion cycles through other means, and we
5778 don't unshare RTL, so excess complexity is not a problem. */
5779 #define EXPR_DEPTH (INT_MAX)
5780 /* We use this to keep too-complex expressions from being emitted as
5781 location notes, and from there into debug information. Users can trade
5782 compile time for ridiculously complex expressions, although they're
5783 seldom useful, and they may often have to be discarded as not
5784 representable anyway. */
5785 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
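/* The depth can be tuned with --param max-vartrack-expr-depth=N on the
   command line (the knob behind PARAM_MAX_VARTRACK_EXPR_DEPTH).  */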
5787 /* Attempt to reverse the EXPR operation in the debug info and record
5788 it in the cselib table. E.g. for reg1 = reg2 + 6, even when reg2 is
5789 no longer live we can express its value as VAL - 6. */
5791 static void
5792 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5794 rtx src, arg, ret;
5795 cselib_val *v;
5796 struct elt_loc_list *l;
5797 enum rtx_code code;
5798 int count;
5800 if (GET_CODE (expr) != SET)
5801 return;
5803 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5804 return;
5806 src = SET_SRC (expr);
5807 switch (GET_CODE (src))
5809 case PLUS:
5810 case MINUS:
5811 case XOR:
5812 case NOT:
5813 case NEG:
5814 if (!REG_P (XEXP (src, 0)))
5815 return;
5816 break;
5817 case SIGN_EXTEND:
5818 case ZERO_EXTEND:
5819 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5820 return;
5821 break;
5822 default:
5823 return;
5826 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5827 return;
5829 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5830 if (!v || !cselib_preserved_value_p (v))
5831 return;
5833 /* Use canonical V to avoid creating multiple redundant expressions
5834 for different VALUES equivalent to V. */
5835 v = canonical_cselib_val (v);
5837 /* Adding a reverse op isn't useful if V already has an always valid
5838 location. Ignore ENTRY_VALUE: while it is always constant, we should
5839 prefer non-ENTRY_VALUE locations whenever possible. */
5840 for (l = v->locs, count = 0; l; l = l->next, count++)
5841 if (CONSTANT_P (l->loc)
5842 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5843 return;
5844 /* Avoid creating too large locs lists. */
5845 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5846 return;
5848 switch (GET_CODE (src))
5850 case NOT:
5851 case NEG:
5852 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5853 return;
5854 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5855 break;
5856 case SIGN_EXTEND:
5857 case ZERO_EXTEND:
5858 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5859 break;
5860 case XOR:
5861 code = XOR;
5862 goto binary;
5863 case PLUS:
5864 code = MINUS;
5865 goto binary;
5866 case MINUS:
5867 code = PLUS;
5868 goto binary;
5869 binary:
5870 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5871 return;
5872 arg = XEXP (src, 1);
5873 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5875 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5876 if (arg == NULL_RTX)
5877 return;
5878 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5879 return;
5881 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5882 if (ret == val)
5883 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5884 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5885 breaks a lot of routines during var-tracking. */
5886 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5887 break;
5888 default:
5889 gcc_unreachable ();
5892 cselib_add_permanent_equiv (v, ret, insn);
5895 /* Add the store (register or memory reference) LOC, which will be
5896 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing the
5897 store. CUIP->insn is the instruction which LOC is part of. */
5899 static void
5900 add_stores (rtx loc, const_rtx expr, void *cuip)
5902 machine_mode mode = VOIDmode, mode2;
5903 struct count_use_info *cui = (struct count_use_info *)cuip;
5904 basic_block bb = cui->bb;
5905 micro_operation mo;
5906 rtx oloc = loc, nloc, src = NULL;
5907 enum micro_operation_type type = use_type (loc, cui, &mode);
5908 bool track_p = false;
5909 cselib_val *v;
5910 bool resolve, preserve;
5912 if (type == MO_CLOBBER)
5913 return;
5915 mode2 = mode;
5917 if (REG_P (loc))
5919 gcc_assert (loc != cfa_base_rtx);
5920 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5921 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5922 || GET_CODE (expr) == CLOBBER)
5924 mo.type = MO_CLOBBER;
5925 mo.u.loc = loc;
5926 if (GET_CODE (expr) == SET
5927 && SET_DEST (expr) == loc
5928 && !unsuitable_loc (SET_SRC (expr))
5929 && find_use_val (loc, mode, cui))
5931 gcc_checking_assert (type == MO_VAL_SET);
5932 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5935 else
5937 if (GET_CODE (expr) == SET
5938 && SET_DEST (expr) == loc
5939 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5940 src = var_lowpart (mode2, SET_SRC (expr));
5941 loc = var_lowpart (mode2, loc);
5943 if (src == NULL)
5945 mo.type = MO_SET;
5946 mo.u.loc = loc;
5948 else
5950 rtx xexpr = gen_rtx_SET (loc, src);
5951 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5953 /* If this is an instruction copying (part of) a parameter
5954 passed by invisible reference to its register location,
5955 pretend it's a SET so that the initial memory location
5956 is discarded, as the parameter register can be reused
5957 for other purposes and we do not track locations based
5958 on generic registers. */
5959 if (MEM_P (src)
5960 && REG_EXPR (loc)
5961 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5962 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5963 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5964 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5965 != arg_pointer_rtx)
5966 mo.type = MO_SET;
5967 else
5968 mo.type = MO_COPY;
5970 else
5971 mo.type = MO_SET;
5972 mo.u.loc = xexpr;
5975 mo.insn = cui->insn;
5977 else if (MEM_P (loc)
5978 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5979 || cui->sets))
5981 if (MEM_P (loc) && type == MO_VAL_SET
5982 && !REG_P (XEXP (loc, 0))
5983 && !MEM_P (XEXP (loc, 0)))
5985 rtx mloc = loc;
5986 machine_mode address_mode = get_address_mode (mloc);
5987 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5988 address_mode, 0,
5989 GET_MODE (mloc));
5991 if (val && !cselib_preserved_value_p (val))
5992 preserve_value (val);
5995 if (GET_CODE (expr) == CLOBBER || !track_p)
5997 mo.type = MO_CLOBBER;
5998 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6000 else
6002 if (GET_CODE (expr) == SET
6003 && SET_DEST (expr) == loc
6004 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6005 src = var_lowpart (mode2, SET_SRC (expr));
6006 loc = var_lowpart (mode2, loc);
6008 if (src == NULL)
6010 mo.type = MO_SET;
6011 mo.u.loc = loc;
6013 else
6015 rtx xexpr = gen_rtx_SET (loc, src);
6016 if (same_variable_part_p (SET_SRC (xexpr),
6017 MEM_EXPR (loc),
6018 INT_MEM_OFFSET (loc)))
6019 mo.type = MO_COPY;
6020 else
6021 mo.type = MO_SET;
6022 mo.u.loc = xexpr;
6025 mo.insn = cui->insn;
6027 else
6028 return;
6030 if (type != MO_VAL_SET)
6031 goto log_and_return;
6033 v = find_use_val (oloc, mode, cui);
6035 if (!v)
6036 goto log_and_return;
6038 resolve = preserve = !cselib_preserved_value_p (v);
6040 /* We cannot track values for multiple-part variables, so we track only
6041 locations for tracked parameters passed either by invisible reference
6042 or directly in multiple locations. */
6043 if (track_p
6044 && REG_P (loc)
6045 && REG_EXPR (loc)
6046 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6047 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6048 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6049 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6050 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6051 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6052 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6054 /* Although we don't use the value here, it could be used later by the
6055 mere virtue of its existence as the operand of the reverse operation
6056 that gave rise to it (typically extension/truncation). Make sure it
6057 is preserved as required by vt_expand_var_loc_chain. */
6058 if (preserve)
6059 preserve_value (v);
6060 goto log_and_return;
6063 if (loc == stack_pointer_rtx
6064 && hard_frame_pointer_adjustment != -1
6065 && preserve)
6066 cselib_set_value_sp_based (v);
6068 nloc = replace_expr_with_values (oloc);
6069 if (nloc)
6070 oloc = nloc;
6072 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6074 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6076 if (oval == v)
6077 return;
6078 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6080 if (oval && !cselib_preserved_value_p (oval))
6082 micro_operation moa;
6084 preserve_value (oval);
6086 moa.type = MO_VAL_USE;
6087 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6088 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6089 moa.insn = cui->insn;
6091 if (dump_file && (dump_flags & TDF_DETAILS))
6092 log_op_type (moa.u.loc, cui->bb, cui->insn,
6093 moa.type, dump_file);
6094 VTI (bb)->mos.safe_push (moa);
6097 resolve = false;
6099 else if (resolve && GET_CODE (mo.u.loc) == SET)
6101 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6102 nloc = replace_expr_with_values (SET_SRC (expr));
6103 else
6104 nloc = NULL_RTX;
6106 /* Avoid the mode mismatch between oexpr and expr. */
6107 if (!nloc && mode != mode2)
6109 nloc = SET_SRC (expr);
6110 gcc_assert (oloc == SET_DEST (expr));
6113 if (nloc && nloc != SET_SRC (mo.u.loc))
6114 oloc = gen_rtx_SET (oloc, nloc);
6115 else
6117 if (oloc == SET_DEST (mo.u.loc))
6118 /* No point in duplicating. */
6119 oloc = mo.u.loc;
6120 if (!REG_P (SET_SRC (mo.u.loc)))
6121 resolve = false;
6124 else if (!resolve)
6126 if (GET_CODE (mo.u.loc) == SET
6127 && oloc == SET_DEST (mo.u.loc))
6128 /* No point in duplicating. */
6129 oloc = mo.u.loc;
6131 else
6132 resolve = false;
6134 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6136 if (mo.u.loc != oloc)
6137 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6139 /* The loc of a MO_VAL_SET may have various forms:
6141 (concat val dst): dst now holds val
6143 (concat val (set dst src)): dst now holds val, copied from src
6145 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6146 after replacing mems and non-top-level regs with values.
6148 (concat (concat val dstv) (set dst src)): dst now holds val,
6149 copied from src. dstv is a value-based representation of dst, if
6150 it differs from dst. If resolution is needed, src is a REG, and
6151 its mode is the same as that of val.
6153 (concat (concat val (set dstv srcv)) (set dst src)): src
6154 copied to dst, holding val. dstv and srcv are value-based
6155 representations of dst and src, respectively.
6159 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6160 reverse_op (v->val_rtx, expr, cui->insn);
6162 mo.u.loc = loc;
6164 if (track_p)
6165 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6166 if (preserve)
6168 VAL_NEEDS_RESOLUTION (loc) = resolve;
6169 preserve_value (v);
6171 if (mo.type == MO_CLOBBER)
6172 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6173 if (mo.type == MO_COPY)
6174 VAL_EXPR_IS_COPIED (loc) = 1;
6176 mo.type = MO_VAL_SET;
6178 log_and_return:
6179 if (dump_file && (dump_flags & TDF_DETAILS))
6180 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6181 VTI (bb)->mos.safe_push (mo);
6184 /* Arguments to the call. */
6185 static rtx call_arguments;
6187 /* Compute call_arguments for the call INSN in basic block BB. */
6189 static void
6190 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6192 rtx link, x, call;
6193 rtx prev, cur, next;
6194 rtx this_arg = NULL_RTX;
6195 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6196 tree obj_type_ref = NULL_TREE;
6197 CUMULATIVE_ARGS args_so_far_v;
6198 cumulative_args_t args_so_far;
6200 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6201 args_so_far = pack_cumulative_args (&args_so_far_v);
6202 call = get_call_rtx_from (insn);
6203 if (call)
6205 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6207 rtx symbol = XEXP (XEXP (call, 0), 0);
6208 if (SYMBOL_REF_DECL (symbol))
6209 fndecl = SYMBOL_REF_DECL (symbol);
6211 if (fndecl == NULL_TREE)
6212 fndecl = MEM_EXPR (XEXP (call, 0));
6213 if (fndecl
6214 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6215 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6216 fndecl = NULL_TREE;
6217 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6218 type = TREE_TYPE (fndecl);
6219 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6221 if (TREE_CODE (fndecl) == INDIRECT_REF
6222 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6223 obj_type_ref = TREE_OPERAND (fndecl, 0);
6224 fndecl = NULL_TREE;
6226 if (type)
6228 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6229 t = TREE_CHAIN (t))
6230 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6231 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6232 break;
6233 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6234 type = NULL;
6235 else
6237 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6238 link = CALL_INSN_FUNCTION_USAGE (insn);
6239 #ifndef PCC_STATIC_STRUCT_RETURN
6240 if (aggregate_value_p (TREE_TYPE (type), type)
6241 && targetm.calls.struct_value_rtx (type, 0) == 0)
6243 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6244 machine_mode mode = TYPE_MODE (struct_addr);
6245 rtx reg;
6246 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6247 nargs + 1);
6248 reg = targetm.calls.function_arg (args_so_far, mode,
6249 struct_addr, true);
6250 targetm.calls.function_arg_advance (args_so_far, mode,
6251 struct_addr, true);
6252 if (reg == NULL_RTX)
6254 for (; link; link = XEXP (link, 1))
6255 if (GET_CODE (XEXP (link, 0)) == USE
6256 && MEM_P (XEXP (XEXP (link, 0), 0)))
6258 link = XEXP (link, 1);
6259 break;
6263 else
6264 #endif
6265 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6266 nargs);
6267 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6269 machine_mode mode;
6270 t = TYPE_ARG_TYPES (type);
6271 mode = TYPE_MODE (TREE_VALUE (t));
6272 this_arg = targetm.calls.function_arg (args_so_far, mode,
6273 TREE_VALUE (t), true);
6274 if (this_arg && !REG_P (this_arg))
6275 this_arg = NULL_RTX;
6276 else if (this_arg == NULL_RTX)
6278 for (; link; link = XEXP (link, 1))
6279 if (GET_CODE (XEXP (link, 0)) == USE
6280 && MEM_P (XEXP (XEXP (link, 0), 0)))
6282 this_arg = XEXP (XEXP (link, 0), 0);
6283 break;
6290 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6292 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6293 if (GET_CODE (XEXP (link, 0)) == USE)
6295 rtx item = NULL_RTX;
6296 x = XEXP (XEXP (link, 0), 0);
6297 if (GET_MODE (link) == VOIDmode
6298 || GET_MODE (link) == BLKmode
6299 || (GET_MODE (link) != GET_MODE (x)
6300 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6301 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6302 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6303 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6304 /* Can't do anything for these if the original type mode
6305 isn't known or can't be converted. */;
6306 else if (REG_P (x))
6308 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6309 if (val && cselib_preserved_value_p (val))
6310 item = val->val_rtx;
6311 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6312 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6314 machine_mode mode = GET_MODE (x);
6316 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6317 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6319 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6321 if (reg == NULL_RTX || !REG_P (reg))
6322 continue;
6323 val = cselib_lookup (reg, mode, 0, VOIDmode);
6324 if (val && cselib_preserved_value_p (val))
6326 item = val->val_rtx;
6327 break;
6332 else if (MEM_P (x))
6334 rtx mem = x;
6335 cselib_val *val;
6337 if (!frame_pointer_needed)
6339 struct adjust_mem_data amd;
6340 amd.mem_mode = VOIDmode;
6341 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6342 amd.side_effects = NULL;
6343 amd.store = true;
6344 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6345 &amd);
6346 gcc_assert (amd.side_effects == NULL_RTX);
6348 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6349 if (val && cselib_preserved_value_p (val))
6350 item = val->val_rtx;
6351 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6352 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6354 /* For non-integer stack arguments, also check whether they
6355 were initialized with integers. */
6356 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6357 if (imode != GET_MODE (mem) && imode != BLKmode)
6359 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6360 imode, 0, VOIDmode);
6361 if (val && cselib_preserved_value_p (val))
6362 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6363 imode);
6367 if (item)
6369 rtx x2 = x;
6370 if (GET_MODE (item) != GET_MODE (link))
6371 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6372 if (GET_MODE (x2) != GET_MODE (link))
6373 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6374 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6375 call_arguments
6376 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6378 if (t && t != void_list_node)
6380 tree argtype = TREE_VALUE (t);
6381 machine_mode mode = TYPE_MODE (argtype);
6382 rtx reg;
6383 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6385 argtype = build_pointer_type (argtype);
6386 mode = TYPE_MODE (argtype);
6388 reg = targetm.calls.function_arg (args_so_far, mode,
6389 argtype, true);
6390 if (TREE_CODE (argtype) == REFERENCE_TYPE
6391 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6392 && reg
6393 && REG_P (reg)
6394 && GET_MODE (reg) == mode
6395 && (GET_MODE_CLASS (mode) == MODE_INT
6396 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6397 && REG_P (x)
6398 && REGNO (x) == REGNO (reg)
6399 && GET_MODE (x) == mode
6400 && item)
6402 machine_mode indmode
6403 = TYPE_MODE (TREE_TYPE (argtype));
6404 rtx mem = gen_rtx_MEM (indmode, x);
6405 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6406 if (val && cselib_preserved_value_p (val))
6408 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6409 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6410 call_arguments);
6412 else
6414 struct elt_loc_list *l;
6415 tree initial;
6417 /* Try harder: when passing the address of a constant
6418 pool integer, it can easily be read back. */
6419 item = XEXP (item, 1);
6420 if (GET_CODE (item) == SUBREG)
6421 item = SUBREG_REG (item);
6422 gcc_assert (GET_CODE (item) == VALUE);
6423 val = CSELIB_VAL_PTR (item);
6424 for (l = val->locs; l; l = l->next)
6425 if (GET_CODE (l->loc) == SYMBOL_REF
6426 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6427 && SYMBOL_REF_DECL (l->loc)
6428 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6430 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6431 if (tree_fits_shwi_p (initial))
6433 item = GEN_INT (tree_to_shwi (initial));
6434 item = gen_rtx_CONCAT (indmode, mem, item);
6435 call_arguments
6436 = gen_rtx_EXPR_LIST (VOIDmode, item,
6437 call_arguments);
6439 break;
6443 targetm.calls.function_arg_advance (args_so_far, mode,
6444 argtype, true);
6445 t = TREE_CHAIN (t);
6449 /* Add debug arguments. */
6450 if (fndecl
6451 && TREE_CODE (fndecl) == FUNCTION_DECL
6452 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6454 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6455 if (debug_args)
6457 unsigned int ix;
6458 tree param;
6459 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6461 rtx item;
6462 tree dtemp = (**debug_args)[ix + 1];
6463 machine_mode mode = DECL_MODE (dtemp);
6464 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6465 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6466 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6467 call_arguments);
6472 /* Reverse call_arguments chain. */
6473 prev = NULL_RTX;
6474 for (cur = call_arguments; cur; cur = next)
6476 next = XEXP (cur, 1);
6477 XEXP (cur, 1) = prev;
6478 prev = cur;
6480 call_arguments = prev;
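/* The loop above is the standard in-place reversal of a singly
   linked list; as a standalone sketch over a minimal node type:

     struct node { struct node *next; };

     static struct node *
     reverse (struct node *head)
     {
       struct node *prev = NULL, *next;
       for (; head; head = next)
         {
           next = head->next;
           head->next = prev;
           prev = head;
         }
       return prev;
     }

   It is needed because each argument was prepended to CALL_ARGUMENTS
   as it was processed, leaving the chain in reverse order.  */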
6482 x = get_call_rtx_from (insn);
6483 if (x)
6485 x = XEXP (XEXP (x, 0), 0);
6486 if (GET_CODE (x) == SYMBOL_REF)
6487 /* Don't record anything. */;
6488 else if (CONSTANT_P (x))
6490 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6491 pc_rtx, x);
6492 call_arguments
6493 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6495 else
6497 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6498 if (val && cselib_preserved_value_p (val))
6500 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6501 call_arguments
6502 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6506 if (this_arg)
6508 machine_mode mode
6509 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6510 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6511 HOST_WIDE_INT token
6512 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6513 if (token)
6514 clobbered = plus_constant (mode, clobbered,
6515 token * GET_MODE_SIZE (mode));
6516 clobbered = gen_rtx_MEM (mode, clobbered);
6517 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6518 call_arguments
6519 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
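/* To make the shape of CALL_ARGUMENTS concrete: each recorded entry
   is a CONCAT pairing a passing location with what it holds, chained
   into an EXPR_LIST.  A hypothetical x86_64 call foo (p, 0) might
   therefore be summarized as

     (expr_list (concat:DI (reg:DI 5 di) (value ...))
        (expr_list (concat:SI (reg:SI 4 si) (const_int 0))
           (nil)))

   (modes and register numbers are illustrative only).  The list is
   attached to the MO_CALL micro operation by add_with_sets below.  */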
6523 /* Callback for cselib_record_sets_hook, recording the uses and
6524 stores in an insn as micro operations after cselib_record_sets
6525 has analyzed the sets in the insn, but before it modifies the
6526 stored values in its internal tables.  It can also be called
6527 directly, bypassing cselib (when we're not doing cselib in the
6528 first place), in which case SETS and N_SETS will be 0. */
6530 static void
6531 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6533 basic_block bb = BLOCK_FOR_INSN (insn);
6534 int n1, n2;
6535 struct count_use_info cui;
6536 micro_operation *mos;
6538 cselib_hook_called = true;
6540 cui.insn = insn;
6541 cui.bb = bb;
6542 cui.sets = sets;
6543 cui.n_sets = n_sets;
6545 n1 = VTI (bb)->mos.length ();
6546 cui.store_p = false;
6547 note_uses (&PATTERN (insn), add_uses_1, &cui);
6548 n2 = VTI (bb)->mos.length () - 1;
6549 mos = VTI (bb)->mos.address ();
6551 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6552 MO_VAL_LOC last. */
6553 while (n1 < n2)
6555 while (n1 < n2 && mos[n1].type == MO_USE)
6556 n1++;
6557 while (n1 < n2 && mos[n2].type != MO_USE)
6558 n2--;
6559 if (n1 < n2)
6560 std::swap (mos[n1], mos[n2]);
6563 n2 = VTI (bb)->mos.length () - 1;
6564 while (n1 < n2)
6566 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6567 n1++;
6568 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6569 n2--;
6570 if (n1 < n2)
6571 std::swap (mos[n1], mos[n2]);
6574 if (CALL_P (insn))
6576 micro_operation mo;
6578 mo.type = MO_CALL;
6579 mo.insn = insn;
6580 mo.u.loc = call_arguments;
6581 call_arguments = NULL_RTX;
6583 if (dump_file && (dump_flags & TDF_DETAILS))
6584 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6585 VTI (bb)->mos.safe_push (mo);
6588 n1 = VTI (bb)->mos.length ();
6589 /* This will record NEXT_INSN (insn), such that we can
6590 insert notes before it without worrying about any
6591 notes that MO_USEs might emit after the insn. */
6592 cui.store_p = true;
6593 note_stores (PATTERN (insn), add_stores, &cui);
6594 n2 = VTI (bb)->mos.length () - 1;
6595 mos = VTI (bb)->mos.address ();
6597 /* Order the MO_VAL_USEs first (note_stores does nothing
6598 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6599 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6600 while (n1 < n2)
6602 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6603 n1++;
6604 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6605 n2--;
6606 if (n1 < n2)
6607 std::swap (mos[n1], mos[n2]);
6610 n2 = VTI (bb)->mos.length () - 1;
6611 while (n1 < n2)
6613 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6614 n1++;
6615 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6616 n2--;
6617 if (n1 < n2)
6618 std::swap (mos[n1], mos[n2]);
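/* All four reordering passes in this function are instances of the
   same two-index partition; in isolation, with PRED standing for
   "belongs in the front group":

     while (n1 < n2)
       {
         while (n1 < n2 && PRED (mos[n1]))
           n1++;
         while (n1 < n2 && !PRED (mos[n2]))
           n2--;
         if (n1 < n2)
           std::swap (mos[n1], mos[n2]);
       }

   Only misplaced pairs are swapped, so all elements satisfying PRED
   end up first.  The partition is not stable, which is acceptable
   here: the dataflow below relies only on the ordering between micro
   operation types, not on the order within a type.  */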
6622 static enum var_init_status
6623 find_src_status (dataflow_set *in, rtx src)
6625 tree decl = NULL_TREE;
6626 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6628 if (! flag_var_tracking_uninit)
6629 status = VAR_INIT_STATUS_INITIALIZED;
6631 if (src && REG_P (src))
6632 decl = var_debug_decl (REG_EXPR (src));
6633 else if (src && MEM_P (src))
6634 decl = var_debug_decl (MEM_EXPR (src));
6636 if (src && decl)
6637 status = get_init_value (in, src, dv_from_decl (decl));
6639 return status;
6642 /* SRC is the source of an assignment.  Use SET to try to find what
6643 was ultimately assigned to the variable living in SRC.  Return
6644 that value if known, otherwise return NULL_RTX. */
6646 static rtx
6647 find_src_set_src (dataflow_set *set, rtx src)
6649 tree decl = NULL_TREE; /* The variable being copied around. */
6650 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6651 variable var;
6652 location_chain nextp;
6653 int i;
6654 bool found;
6656 if (src && REG_P (src))
6657 decl = var_debug_decl (REG_EXPR (src));
6658 else if (src && MEM_P (src))
6659 decl = var_debug_decl (MEM_EXPR (src));
6661 if (src && decl)
6663 decl_or_value dv = dv_from_decl (decl);
6665 var = shared_hash_find (set->vars, dv);
6666 if (var)
6668 found = false;
6669 for (i = 0; i < var->n_var_parts && !found; i++)
6670 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6671 nextp = nextp->next)
6672 if (rtx_equal_p (nextp->loc, src))
6674 set_src = nextp->set_src;
6675 found = true;
6681 return set_src;
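/* As an illustration: after

     x = 42;    MO_SET records (const_int 42) as the set_src of the
                location now holding x
     y = x;     MO_COPY whose SRC is x's register

   calling find_src_set_src on x's register finds the chain node whose
   loc matches SRC and returns its recorded set_src, (const_int 42),
   so the copy is known to ultimately hold that constant.  */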
6684 /* Compute the changes of variable locations in the basic block BB. */
6686 static bool
6687 compute_bb_dataflow (basic_block bb)
6689 unsigned int i;
6690 micro_operation *mo;
6691 bool changed;
6692 dataflow_set old_out;
6693 dataflow_set *in = &VTI (bb)->in;
6694 dataflow_set *out = &VTI (bb)->out;
6696 dataflow_set_init (&old_out);
6697 dataflow_set_copy (&old_out, out);
6698 dataflow_set_copy (out, in);
6700 if (MAY_HAVE_DEBUG_INSNS)
6701 local_get_addr_cache = new hash_map<rtx, rtx>;
6703 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6705 rtx_insn *insn = mo->insn;
6707 switch (mo->type)
6709 case MO_CALL:
6710 dataflow_set_clear_at_call (out);
6711 break;
6713 case MO_USE:
6715 rtx loc = mo->u.loc;
6717 if (REG_P (loc))
6718 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6719 else if (MEM_P (loc))
6720 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6722 break;
6724 case MO_VAL_LOC:
6726 rtx loc = mo->u.loc;
6727 rtx val, vloc;
6728 tree var;
6730 if (GET_CODE (loc) == CONCAT)
6732 val = XEXP (loc, 0);
6733 vloc = XEXP (loc, 1);
6735 else
6737 val = NULL_RTX;
6738 vloc = loc;
6741 var = PAT_VAR_LOCATION_DECL (vloc);
6743 clobber_variable_part (out, NULL_RTX,
6744 dv_from_decl (var), 0, NULL_RTX);
6745 if (val)
6747 if (VAL_NEEDS_RESOLUTION (loc))
6748 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6749 set_variable_part (out, val, dv_from_decl (var), 0,
6750 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6751 INSERT);
6753 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6754 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6755 dv_from_decl (var), 0,
6756 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6757 INSERT);
6759 break;
6761 case MO_VAL_USE:
6763 rtx loc = mo->u.loc;
6764 rtx val, vloc, uloc;
6766 vloc = uloc = XEXP (loc, 1);
6767 val = XEXP (loc, 0);
6769 if (GET_CODE (val) == CONCAT)
6771 uloc = XEXP (val, 1);
6772 val = XEXP (val, 0);
6775 if (VAL_NEEDS_RESOLUTION (loc))
6776 val_resolve (out, val, vloc, insn);
6777 else
6778 val_store (out, val, uloc, insn, false);
6780 if (VAL_HOLDS_TRACK_EXPR (loc))
6782 if (GET_CODE (uloc) == REG)
6783 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6784 NULL);
6785 else if (GET_CODE (uloc) == MEM)
6786 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6787 NULL);
6790 break;
6792 case MO_VAL_SET:
6794 rtx loc = mo->u.loc;
6795 rtx val, vloc, uloc;
6796 rtx dstv, srcv;
6798 vloc = loc;
6799 uloc = XEXP (vloc, 1);
6800 val = XEXP (vloc, 0);
6801 vloc = uloc;
6803 if (GET_CODE (uloc) == SET)
6805 dstv = SET_DEST (uloc);
6806 srcv = SET_SRC (uloc);
6808 else
6810 dstv = uloc;
6811 srcv = NULL;
6814 if (GET_CODE (val) == CONCAT)
6816 dstv = vloc = XEXP (val, 1);
6817 val = XEXP (val, 0);
6820 if (GET_CODE (vloc) == SET)
6822 srcv = SET_SRC (vloc);
6824 gcc_assert (val != srcv);
6825 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6827 dstv = vloc = SET_DEST (vloc);
6829 if (VAL_NEEDS_RESOLUTION (loc))
6830 val_resolve (out, val, srcv, insn);
6832 else if (VAL_NEEDS_RESOLUTION (loc))
6834 gcc_assert (GET_CODE (uloc) == SET
6835 && GET_CODE (SET_SRC (uloc)) == REG);
6836 val_resolve (out, val, SET_SRC (uloc), insn);
6839 if (VAL_HOLDS_TRACK_EXPR (loc))
6841 if (VAL_EXPR_IS_CLOBBERED (loc))
6843 if (REG_P (uloc))
6844 var_reg_delete (out, uloc, true);
6845 else if (MEM_P (uloc))
6847 gcc_assert (MEM_P (dstv));
6848 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6849 var_mem_delete (out, dstv, true);
6852 else
6854 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6855 rtx src = NULL, dst = uloc;
6856 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6858 if (GET_CODE (uloc) == SET)
6860 src = SET_SRC (uloc);
6861 dst = SET_DEST (uloc);
6864 if (copied_p)
6866 if (flag_var_tracking_uninit)
6868 status = find_src_status (in, src);
6870 if (status == VAR_INIT_STATUS_UNKNOWN)
6871 status = find_src_status (out, src);
6874 src = find_src_set_src (in, src);
6877 if (REG_P (dst))
6878 var_reg_delete_and_set (out, dst, !copied_p,
6879 status, srcv);
6880 else if (MEM_P (dst))
6882 gcc_assert (MEM_P (dstv));
6883 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6884 var_mem_delete_and_set (out, dstv, !copied_p,
6885 status, srcv);
6889 else if (REG_P (uloc))
6890 var_regno_delete (out, REGNO (uloc));
6891 else if (MEM_P (uloc))
6893 gcc_checking_assert (GET_CODE (vloc) == MEM);
6894 gcc_checking_assert (dstv == vloc);
6895 if (dstv != vloc)
6896 clobber_overlapping_mems (out, vloc);
6899 val_store (out, val, dstv, insn, true);
6901 break;
6903 case MO_SET:
6905 rtx loc = mo->u.loc;
6906 rtx set_src = NULL;
6908 if (GET_CODE (loc) == SET)
6910 set_src = SET_SRC (loc);
6911 loc = SET_DEST (loc);
6914 if (REG_P (loc))
6915 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6916 set_src);
6917 else if (MEM_P (loc))
6918 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6919 set_src);
6921 break;
6923 case MO_COPY:
6925 rtx loc = mo->u.loc;
6926 enum var_init_status src_status;
6927 rtx set_src = NULL;
6929 if (GET_CODE (loc) == SET)
6931 set_src = SET_SRC (loc);
6932 loc = SET_DEST (loc);
6935 if (! flag_var_tracking_uninit)
6936 src_status = VAR_INIT_STATUS_INITIALIZED;
6937 else
6939 src_status = find_src_status (in, set_src);
6941 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6942 src_status = find_src_status (out, set_src);
6945 set_src = find_src_set_src (in, set_src);
6947 if (REG_P (loc))
6948 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6949 else if (MEM_P (loc))
6950 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6952 break;
6954 case MO_USE_NO_VAR:
6956 rtx loc = mo->u.loc;
6958 if (REG_P (loc))
6959 var_reg_delete (out, loc, false);
6960 else if (MEM_P (loc))
6961 var_mem_delete (out, loc, false);
6963 break;
6965 case MO_CLOBBER:
6967 rtx loc = mo->u.loc;
6969 if (REG_P (loc))
6970 var_reg_delete (out, loc, true);
6971 else if (MEM_P (loc))
6972 var_mem_delete (out, loc, true);
6974 break;
6976 case MO_ADJUST:
6977 out->stack_adjust += mo->u.adjust;
6978 break;
6982 if (MAY_HAVE_DEBUG_INSNS)
6984 delete local_get_addr_cache;
6985 local_get_addr_cache = NULL;
6987 dataflow_set_equiv_regs (out);
6988 shared_hash_htab (out->vars)
6989 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6990 shared_hash_htab (out->vars)
6991 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6992 #if ENABLE_CHECKING
6993 shared_hash_htab (out->vars)
6994 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6995 #endif
6997 changed = dataflow_set_different (&old_out, out);
6998 dataflow_set_destroy (&old_out);
6999 return changed;
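/* In classic dataflow terms, compute_bb_dataflow is the transfer
   function OUT(BB) = f_BB (IN(BB)): OUT starts as a copy of IN and is
   updated by each micro operation in turn, and the return value tells
   the solver in vt_find_locations below whether OUT changed, i.e.
   whether BB's successors need to be revisited.  */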
7002 /* Find the locations of variables in the whole function. */
7004 static bool
7005 vt_find_locations (void)
7007 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7008 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7009 sbitmap visited, in_worklist, in_pending;
7010 basic_block bb;
7011 edge e;
7012 int *bb_order;
7013 int *rc_order;
7014 int i;
7015 int htabsz = 0;
7016 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7017 bool success = true;
7019 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7020 /* Compute the reverse completion order of a depth-first search of
7021 the CFG so that the dataflow iteration converges faster. */
7022 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7023 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7024 pre_and_rev_post_order_compute (NULL, rc_order, false);
7025 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7026 bb_order[rc_order[i]] = i;
7027 free (rc_order);
7029 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7030 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7031 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7032 bitmap_clear (in_worklist);
7034 FOR_EACH_BB_FN (bb, cfun)
7035 pending->insert (bb_order[bb->index], bb);
7036 bitmap_ones (in_pending);
7038 while (success && !pending->empty ())
7040 std::swap (worklist, pending);
7041 std::swap (in_worklist, in_pending);
7043 bitmap_clear (visited);
7045 while (!worklist->empty ())
7047 bb = worklist->extract_min ();
7048 bitmap_clear_bit (in_worklist, bb->index);
7049 gcc_assert (!bitmap_bit_p (visited, bb->index));
7050 if (!bitmap_bit_p (visited, bb->index))
7052 bool changed;
7053 edge_iterator ei;
7054 int oldinsz, oldoutsz;
7056 bitmap_set_bit (visited, bb->index);
7058 if (VTI (bb)->in.vars)
7060 htabsz
7061 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7062 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7063 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7064 oldoutsz
7065 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7067 else
7068 oldinsz = oldoutsz = 0;
7070 if (MAY_HAVE_DEBUG_INSNS)
7072 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7073 bool first = true, adjust = false;
7075 /* Calculate the IN set as the intersection of
7076 predecessor OUT sets. */
7078 dataflow_set_clear (in);
7079 dst_can_be_shared = true;
7081 FOR_EACH_EDGE (e, ei, bb->preds)
7082 if (!VTI (e->src)->flooded)
7083 gcc_assert (bb_order[bb->index]
7084 <= bb_order[e->src->index]);
7085 else if (first)
7087 dataflow_set_copy (in, &VTI (e->src)->out);
7088 first_out = &VTI (e->src)->out;
7089 first = false;
7091 else
7093 dataflow_set_merge (in, &VTI (e->src)->out);
7094 adjust = true;
7097 if (adjust)
7099 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7100 #if ENABLE_CHECKING
7101 /* Merge and merge_adjust should keep entries in
7102 canonical order. */
7103 shared_hash_htab (in->vars)
7104 ->traverse <dataflow_set *,
7105 canonicalize_loc_order_check> (in);
7106 #endif
7107 if (dst_can_be_shared)
7109 shared_hash_destroy (in->vars);
7110 in->vars = shared_hash_copy (first_out->vars);
7114 VTI (bb)->flooded = true;
7116 else
7118 /* Calculate the IN set as union of predecessor OUT sets. */
7119 dataflow_set_clear (&VTI (bb)->in);
7120 FOR_EACH_EDGE (e, ei, bb->preds)
7121 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7124 changed = compute_bb_dataflow (bb);
7125 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7126 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7128 if (htabmax && htabsz > htabmax)
7130 if (MAY_HAVE_DEBUG_INSNS)
7131 inform (DECL_SOURCE_LOCATION (cfun->decl),
7132 "variable tracking size limit exceeded with "
7133 "-fvar-tracking-assignments, retrying without");
7134 else
7135 inform (DECL_SOURCE_LOCATION (cfun->decl),
7136 "variable tracking size limit exceeded");
7137 success = false;
7138 break;
7141 if (changed)
7143 FOR_EACH_EDGE (e, ei, bb->succs)
7145 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7146 continue;
7148 if (bitmap_bit_p (visited, e->dest->index))
7150 if (!bitmap_bit_p (in_pending, e->dest->index))
7152 /* Send E->DEST to next round. */
7153 bitmap_set_bit (in_pending, e->dest->index);
7154 pending->insert (bb_order[e->dest->index],
7155 e->dest);
7158 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7160 /* Add E->DEST to current round. */
7161 bitmap_set_bit (in_worklist, e->dest->index);
7162 worklist->insert (bb_order[e->dest->index],
7163 e->dest);
7168 if (dump_file)
7169 fprintf (dump_file,
7170 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7171 bb->index,
7172 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7173 oldinsz,
7174 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7175 oldoutsz,
7176 (int)worklist->nodes (), (int)pending->nodes (),
7177 htabsz);
7179 if (dump_file && (dump_flags & TDF_DETAILS))
7181 fprintf (dump_file, "BB %i IN:\n", bb->index);
7182 dump_dataflow_set (&VTI (bb)->in);
7183 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7184 dump_dataflow_set (&VTI (bb)->out);
7190 if (success && MAY_HAVE_DEBUG_INSNS)
7191 FOR_EACH_BB_FN (bb, cfun)
7192 gcc_assert (VTI (bb)->flooded);
7194 free (bb_order);
7195 delete worklist;
7196 delete pending;
7197 sbitmap_free (visited);
7198 sbitmap_free (in_worklist);
7199 sbitmap_free (in_pending);
7201 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7202 return success;
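/* The iteration above is a two-list worklist solver; stripped of the
   bookkeeping, it amounts to this sketch:

     pending = all blocks, keyed by reverse completion order;
     while (!pending->empty ())
       {
         swap (worklist, pending);        start a new round
         while (!worklist->empty ())
           {
             bb = worklist->extract_min ();
             recompute IN (bb) from predecessors, then OUT (bb);
             if (OUT (bb) changed)
               for each successor S:
                 queue S on worklist if not yet visited this round,
                 otherwise on pending for the next round;
           }
       }

   Processing blocks in reverse completion order within each round
   means most edges are seen source-first, so the fixed point is
   usually reached in few rounds.  */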
7205 /* Print the content of the LIST to dump file. */
7207 static void
7208 dump_attrs_list (attrs list)
7210 for (; list; list = list->next)
7212 if (dv_is_decl_p (list->dv))
7213 print_mem_expr (dump_file, dv_as_decl (list->dv));
7214 else
7215 print_rtl_single (dump_file, dv_as_value (list->dv));
7216 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7218 fprintf (dump_file, "\n");
7221 /* Print the information about variable *SLOT to dump file. */
7224 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7226 variable var = *slot;
7228 dump_var (var);
7230 /* Continue traversing the hash table. */
7231 return 1;
7234 /* Print the information about variable VAR to dump file. */
7236 static void
7237 dump_var (variable var)
7239 int i;
7240 location_chain node;
7242 if (dv_is_decl_p (var->dv))
7244 const_tree decl = dv_as_decl (var->dv);
7246 if (DECL_NAME (decl))
7248 fprintf (dump_file, " name: %s",
7249 IDENTIFIER_POINTER (DECL_NAME (decl)));
7250 if (dump_flags & TDF_UID)
7251 fprintf (dump_file, "D.%u", DECL_UID (decl));
7253 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7254 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7255 else
7256 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7257 fprintf (dump_file, "\n");
7259 else
7261 fputc (' ', dump_file);
7262 print_rtl_single (dump_file, dv_as_value (var->dv));
7265 for (i = 0; i < var->n_var_parts; i++)
7267 fprintf (dump_file, " offset %ld\n",
7268 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7269 for (node = var->var_part[i].loc_chain; node; node = node->next)
7271 fprintf (dump_file, " ");
7272 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7273 fprintf (dump_file, "[uninit]");
7274 print_rtl_single (dump_file, node->loc);
7279 /* Print the information about variables from hash table VARS to dump file. */
7281 static void
7282 dump_vars (variable_table_type *vars)
7284 if (vars->elements () > 0)
7286 fprintf (dump_file, "Variables:\n");
7287 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7291 /* Print the dataflow set SET to dump file. */
7293 static void
7294 dump_dataflow_set (dataflow_set *set)
7296 int i;
7298 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7299 set->stack_adjust);
7300 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7302 if (set->regs[i])
7304 fprintf (dump_file, "Reg %d:", i);
7305 dump_attrs_list (set->regs[i]);
7308 dump_vars (shared_hash_htab (set->vars));
7309 fprintf (dump_file, "\n");
7312 /* Print the IN and OUT sets for each basic block to dump file. */
7314 static void
7315 dump_dataflow_sets (void)
7317 basic_block bb;
7319 FOR_EACH_BB_FN (bb, cfun)
7321 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7322 fprintf (dump_file, "IN:\n");
7323 dump_dataflow_set (&VTI (bb)->in);
7324 fprintf (dump_file, "OUT:\n");
7325 dump_dataflow_set (&VTI (bb)->out);
7329 /* Return the variable for DV in dropped_values, inserting one if
7330 requested with INSERT. */
7332 static inline variable
7333 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7335 variable_def **slot;
7336 variable empty_var;
7337 onepart_enum_t onepart;
7339 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7341 if (!slot)
7342 return NULL;
7344 if (*slot)
7345 return *slot;
7347 gcc_checking_assert (insert == INSERT);
7349 onepart = dv_onepart_p (dv);
7351 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7353 empty_var = onepart_pool (onepart).allocate ();
7354 empty_var->dv = dv;
7355 empty_var->refcount = 1;
7356 empty_var->n_var_parts = 0;
7357 empty_var->onepart = onepart;
7358 empty_var->in_changed_variables = false;
7359 empty_var->var_part[0].loc_chain = NULL;
7360 empty_var->var_part[0].cur_loc = NULL;
7361 VAR_LOC_1PAUX (empty_var) = NULL;
7362 set_dv_changed (dv, true);
7364 *slot = empty_var;
7366 return empty_var;
7369 /* Recover the one-part aux from dropped_values. */
7371 static struct onepart_aux *
7372 recover_dropped_1paux (variable var)
7374 variable dvar;
7376 gcc_checking_assert (var->onepart);
7378 if (VAR_LOC_1PAUX (var))
7379 return VAR_LOC_1PAUX (var);
7381 if (var->onepart == ONEPART_VDECL)
7382 return NULL;
7384 dvar = variable_from_dropped (var->dv, NO_INSERT);
7386 if (!dvar)
7387 return NULL;
7389 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7390 VAR_LOC_1PAUX (dvar) = NULL;
7392 return VAR_LOC_1PAUX (var);
7395 /* Add variable VAR to the hash table of changed variables and
7396 if it has no locations delete it from SET's hash table. */
7398 static void
7399 variable_was_changed (variable var, dataflow_set *set)
7401 hashval_t hash = dv_htab_hash (var->dv);
7403 if (emit_notes)
7405 variable_def **slot;
7407 /* Remember this decl or VALUE has been added to changed_variables. */
7408 set_dv_changed (var->dv, true);
7410 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7412 if (*slot)
7414 variable old_var = *slot;
7415 gcc_assert (old_var->in_changed_variables);
7416 old_var->in_changed_variables = false;
7417 if (var != old_var && var->onepart)
7419 /* Restore the auxiliary info from an empty variable
7420 previously created for changed_variables, so it is
7421 not lost. */
7422 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7423 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7424 VAR_LOC_1PAUX (old_var) = NULL;
7426 variable_htab_free (*slot);
7429 if (set && var->n_var_parts == 0)
7431 onepart_enum_t onepart = var->onepart;
7432 variable empty_var = NULL;
7433 variable_def **dslot = NULL;
7435 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7437 dslot = dropped_values->find_slot_with_hash (var->dv,
7438 dv_htab_hash (var->dv),
7439 INSERT);
7440 empty_var = *dslot;
7442 if (empty_var)
7444 gcc_checking_assert (!empty_var->in_changed_variables);
7445 if (!VAR_LOC_1PAUX (var))
7447 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7448 VAR_LOC_1PAUX (empty_var) = NULL;
7450 else
7451 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7455 if (!empty_var)
7457 empty_var = onepart_pool (onepart).allocate ();
7458 empty_var->dv = var->dv;
7459 empty_var->refcount = 1;
7460 empty_var->n_var_parts = 0;
7461 empty_var->onepart = onepart;
7462 if (dslot)
7464 empty_var->refcount++;
7465 *dslot = empty_var;
7468 else
7469 empty_var->refcount++;
7470 empty_var->in_changed_variables = true;
7471 *slot = empty_var;
7472 if (onepart)
7474 empty_var->var_part[0].loc_chain = NULL;
7475 empty_var->var_part[0].cur_loc = NULL;
7476 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7477 VAR_LOC_1PAUX (var) = NULL;
7479 goto drop_var;
7481 else
7483 if (var->onepart && !VAR_LOC_1PAUX (var))
7484 recover_dropped_1paux (var);
7485 var->refcount++;
7486 var->in_changed_variables = true;
7487 *slot = var;
7490 else
7492 gcc_assert (set);
7493 if (var->n_var_parts == 0)
7495 variable_def **slot;
7497 drop_var:
7498 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7499 if (slot)
7501 if (shared_hash_shared (set->vars))
7502 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7503 NO_INSERT);
7504 shared_hash_htab (set->vars)->clear_slot (slot);
7510 /* Look for the index in VAR->var_part corresponding to OFFSET.
7511 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7512 referenced int will be set to the index that the part has or should
7513 have, if it should be inserted. */
7515 static inline int
7516 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7517 int *insertion_point)
7519 int pos, low, high;
7521 if (var->onepart)
7523 if (offset != 0)
7524 return -1;
7526 if (insertion_point)
7527 *insertion_point = 0;
7529 return var->n_var_parts - 1;
7532 /* Find the location part. */
7533 low = 0;
7534 high = var->n_var_parts;
7535 while (low != high)
7537 pos = (low + high) / 2;
7538 if (VAR_PART_OFFSET (var, pos) < offset)
7539 low = pos + 1;
7540 else
7541 high = pos;
7543 pos = low;
7545 if (insertion_point)
7546 *insertion_point = pos;
7548 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7549 return pos;
7551 return -1;
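/* For example, for a variable with parts at offsets {0, 4, 8},
   find_variable_location_part (var, 4, NULL) returns 1, while
   find_variable_location_part (var, 6, &ins) returns -1 and sets
   ins to 2, the index where a part for offset 6 would have to be
   inserted to keep the array sorted.  */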
7554 static variable_def **
7555 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7556 decl_or_value dv, HOST_WIDE_INT offset,
7557 enum var_init_status initialized, rtx set_src)
7559 int pos;
7560 location_chain node, next;
7561 location_chain *nextp;
7562 variable var;
7563 onepart_enum_t onepart;
7565 var = *slot;
7567 if (var)
7568 onepart = var->onepart;
7569 else
7570 onepart = dv_onepart_p (dv);
7572 gcc_checking_assert (offset == 0 || !onepart);
7573 gcc_checking_assert (loc != dv_as_opaque (dv));
7575 if (! flag_var_tracking_uninit)
7576 initialized = VAR_INIT_STATUS_INITIALIZED;
7578 if (!var)
7580 /* Create new variable information. */
7581 var = onepart_pool (onepart).allocate ();
7582 var->dv = dv;
7583 var->refcount = 1;
7584 var->n_var_parts = 1;
7585 var->onepart = onepart;
7586 var->in_changed_variables = false;
7587 if (var->onepart)
7588 VAR_LOC_1PAUX (var) = NULL;
7589 else
7590 VAR_PART_OFFSET (var, 0) = offset;
7591 var->var_part[0].loc_chain = NULL;
7592 var->var_part[0].cur_loc = NULL;
7593 *slot = var;
7594 pos = 0;
7595 nextp = &var->var_part[0].loc_chain;
7597 else if (onepart)
7599 int r = -1, c = 0;
7601 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7603 pos = 0;
7605 if (GET_CODE (loc) == VALUE)
7607 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7608 nextp = &node->next)
7609 if (GET_CODE (node->loc) == VALUE)
7611 if (node->loc == loc)
7613 r = 0;
7614 break;
7616 if (canon_value_cmp (node->loc, loc))
7617 c++;
7618 else
7620 r = 1;
7621 break;
7624 else if (REG_P (node->loc) || MEM_P (node->loc))
7625 c++;
7626 else
7628 r = 1;
7629 break;
7632 else if (REG_P (loc))
7634 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7635 nextp = &node->next)
7636 if (REG_P (node->loc))
7638 if (REGNO (node->loc) < REGNO (loc))
7639 c++;
7640 else
7642 if (REGNO (node->loc) == REGNO (loc))
7643 r = 0;
7644 else
7645 r = 1;
7646 break;
7649 else
7651 r = 1;
7652 break;
7655 else if (MEM_P (loc))
7657 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7658 nextp = &node->next)
7659 if (REG_P (node->loc))
7660 c++;
7661 else if (MEM_P (node->loc))
7663 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7664 break;
7665 else
7666 c++;
7668 else
7670 r = 1;
7671 break;
7674 else
7675 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7676 nextp = &node->next)
7677 if ((r = loc_cmp (node->loc, loc)) >= 0)
7678 break;
7679 else
7680 c++;
7682 if (r == 0)
7683 return slot;
7685 if (shared_var_p (var, set->vars))
7687 slot = unshare_variable (set, slot, var, initialized);
7688 var = *slot;
7689 for (nextp = &var->var_part[0].loc_chain; c;
7690 nextp = &(*nextp)->next)
7691 c--;
7692 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7695 else
7697 int inspos = 0;
7699 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7701 pos = find_variable_location_part (var, offset, &inspos);
7703 if (pos >= 0)
7705 node = var->var_part[pos].loc_chain;
7707 if (node
7708 && ((REG_P (node->loc) && REG_P (loc)
7709 && REGNO (node->loc) == REGNO (loc))
7710 || rtx_equal_p (node->loc, loc)))
7712 /* LOC is at the beginning of the chain, so we have nothing
7713 to do. */
7714 if (node->init < initialized)
7715 node->init = initialized;
7716 if (set_src != NULL)
7717 node->set_src = set_src;
7719 return slot;
7721 else
7723 /* We have to make a copy of a shared variable. */
7724 if (shared_var_p (var, set->vars))
7726 slot = unshare_variable (set, slot, var, initialized);
7727 var = *slot;
7731 else
7733 /* We have not found the location part, so a new one will be created. */
7735 /* We have to make a copy of the shared variable. */
7736 if (shared_var_p (var, set->vars))
7738 slot = unshare_variable (set, slot, var, initialized);
7739 var = *slot;
7742 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7743 thus there are at most MAX_VAR_PARTS different offsets. */
7744 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7745 && (!var->n_var_parts || !onepart));
7747 /* We have to move the elements of the array starting at index
7748 inspos to the next position. */
7749 for (pos = var->n_var_parts; pos > inspos; pos--)
7750 var->var_part[pos] = var->var_part[pos - 1];
7752 var->n_var_parts++;
7753 gcc_checking_assert (!onepart);
7754 VAR_PART_OFFSET (var, pos) = offset;
7755 var->var_part[pos].loc_chain = NULL;
7756 var->var_part[pos].cur_loc = NULL;
7759 /* Delete the location from the list. */
7760 nextp = &var->var_part[pos].loc_chain;
7761 for (node = var->var_part[pos].loc_chain; node; node = next)
7763 next = node->next;
7764 if ((REG_P (node->loc) && REG_P (loc)
7765 && REGNO (node->loc) == REGNO (loc))
7766 || rtx_equal_p (node->loc, loc))
7768 /* Save these values, to assign to the new node, before
7769 deleting this one. */
7770 if (node->init > initialized)
7771 initialized = node->init;
7772 if (node->set_src != NULL && set_src == NULL)
7773 set_src = node->set_src;
7774 if (var->var_part[pos].cur_loc == node->loc)
7775 var->var_part[pos].cur_loc = NULL;
7776 delete node;
7777 *nextp = next;
7778 break;
7780 else
7781 nextp = &node->next;
7784 nextp = &var->var_part[pos].loc_chain;
7787 /* Add the location to the beginning. */
7788 node = new location_chain_def;
7789 node->loc = loc;
7790 node->init = initialized;
7791 node->set_src = set_src;
7792 node->next = *nextp;
7793 *nextp = node;
7795 /* If this part has no current location, mark the variable as changed so a note gets emitted. */
7796 if (var->var_part[pos].cur_loc == NULL)
7797 variable_was_changed (var, set);
7799 return slot;
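/* To illustrate the canonical ordering maintained above for one-part
   variables: the location chain keeps registers first (ordered by
   register number), then MEMs (ordered by address, per loc_cmp), then
   VALUEs (per canon_value_cmp), then any remaining expressions.
   C counts the nodes that sort before LOC, so inserting, say, a VALUE
   into the chain

     (reg 1) -> (value V1) -> (plus ...)

   skips the REG, compares against V1, and links the new node either
   before V1 or between V1 and the (plus ...) node (the example chain
   is illustrative only).  */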
7802 /* Set the part of variable's location in the dataflow set SET. The
7803 variable part is specified by variable's declaration in DV and
7804 offset OFFSET and the part's location by LOC. IOPT should be
7805 NO_INSERT if the variable is known to be in SET already and the
7806 variable hash table must not be resized, and INSERT otherwise. */
7808 static void
7809 set_variable_part (dataflow_set *set, rtx loc,
7810 decl_or_value dv, HOST_WIDE_INT offset,
7811 enum var_init_status initialized, rtx set_src,
7812 enum insert_option iopt)
7814 variable_def **slot;
7816 if (iopt == NO_INSERT)
7817 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7818 else
7820 slot = shared_hash_find_slot (set->vars, dv);
7821 if (!slot)
7822 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7824 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7827 /* Remove all recorded register locations for the given variable part
7828 from dataflow set SET, except for those that are identical to LOC.
7829 The variable part is specified by its SET->vars slot SLOT and
7830 offset OFFSET. */
7832 static variable_def **
7833 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7834 HOST_WIDE_INT offset, rtx set_src)
7836 variable var = *slot;
7837 int pos = find_variable_location_part (var, offset, NULL);
7839 if (pos >= 0)
7841 location_chain node, next;
7843 /* Remove the register locations from the dataflow set. */
7844 next = var->var_part[pos].loc_chain;
7845 for (node = next; node; node = next)
7847 next = node->next;
7848 if (node->loc != loc
7849 && (!flag_var_tracking_uninit
7850 || !set_src
7851 || MEM_P (set_src)
7852 || !rtx_equal_p (set_src, node->set_src)))
7854 if (REG_P (node->loc))
7856 attrs anode, anext;
7857 attrs *anextp;
7859 /* Remove the variable part from the register's
7860 list, but preserve any other variable parts
7861 that might be regarded as live in that same
7862 register. */
7863 anextp = &set->regs[REGNO (node->loc)];
7864 for (anode = *anextp; anode; anode = anext)
7866 anext = anode->next;
7867 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7868 && anode->offset == offset)
7870 delete anode;
7871 *anextp = anext;
7873 else
7874 anextp = &anode->next;
7878 slot = delete_slot_part (set, node->loc, slot, offset);
7883 return slot;
7886 /* Remove all recorded register locations for the given variable part
7887 from dataflow set SET, except for those that are identical to LOC.
7888 The variable part is specified by the variable's declaration or
7889 value DV and offset OFFSET. */
7891 static void
7892 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7893 HOST_WIDE_INT offset, rtx set_src)
7895 variable_def **slot;
7897 if (!dv_as_opaque (dv)
7898 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7899 return;
7901 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7902 if (!slot)
7903 return;
7905 clobber_slot_part (set, loc, slot, offset, set_src);
7908 /* Delete the part of variable's location from dataflow set SET. The
7909 variable part is specified by its SET->vars slot SLOT and offset
7910 OFFSET and the part's location by LOC. */
7912 static variable_def **
7913 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7914 HOST_WIDE_INT offset)
7916 variable var = *slot;
7917 int pos = find_variable_location_part (var, offset, NULL);
7919 if (pos >= 0)
7921 location_chain node, next;
7922 location_chain *nextp;
7923 bool changed;
7924 rtx cur_loc;
7926 if (shared_var_p (var, set->vars))
7928 /* If the variable's chain contains the location being deleted,
7929 we have to make a copy of the variable first. */
7930 for (node = var->var_part[pos].loc_chain; node;
7931 node = node->next)
7933 if ((REG_P (node->loc) && REG_P (loc)
7934 && REGNO (node->loc) == REGNO (loc))
7935 || rtx_equal_p (node->loc, loc))
7937 slot = unshare_variable (set, slot, var,
7938 VAR_INIT_STATUS_UNKNOWN);
7939 var = *slot;
7940 break;
7945 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7946 cur_loc = VAR_LOC_FROM (var);
7947 else
7948 cur_loc = var->var_part[pos].cur_loc;
7950 /* Delete the location part. */
7951 changed = false;
7952 nextp = &var->var_part[pos].loc_chain;
7953 for (node = *nextp; node; node = next)
7955 next = node->next;
7956 if ((REG_P (node->loc) && REG_P (loc)
7957 && REGNO (node->loc) == REGNO (loc))
7958 || rtx_equal_p (node->loc, loc))
7960 /* If we have deleted the location which was last emitted,
7961 we have to emit a new location, so add the variable to the
7962 set of changed variables. */
7963 if (cur_loc == node->loc)
7965 changed = true;
7966 var->var_part[pos].cur_loc = NULL;
7967 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7968 VAR_LOC_FROM (var) = NULL;
7970 delete node;
7971 *nextp = next;
7972 break;
7974 else
7975 nextp = &node->next;
7978 if (var->var_part[pos].loc_chain == NULL)
7980 changed = true;
7981 var->n_var_parts--;
7982 while (pos < var->n_var_parts)
7984 var->var_part[pos] = var->var_part[pos + 1];
7985 pos++;
7988 if (changed)
7989 variable_was_changed (var, set);
7992 return slot;
7995 /* Delete the part of variable's location from dataflow set SET. The
7996 variable part is specified by variable's declaration or value DV
7997 and offset OFFSET and the part's location by LOC. */
7999 static void
8000 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8001 HOST_WIDE_INT offset)
8003 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8004 if (!slot)
8005 return;
8007 delete_slot_part (set, loc, slot, offset);
8011 /* Structure for passing some other parameters to function
8012 vt_expand_loc_callback. */
8013 struct expand_loc_callback_data
8015 /* The variables and values active at this point. */
8016 variable_table_type *vars;
8018 /* Stack of values and debug_exprs under expansion, and their
8019 children. */
8020 auto_vec<rtx, 4> expanding;
8022 /* Stack of values and debug_exprs whose expansion hit recursion
8023 cycles. They will have VALUE_RECURSED_INTO marked when added to
8024 this list. This flag will be cleared if any of its dependencies
8025 resolves to a valid location. So, if the flag remains set at the
8026 end of the search, we know no valid location for this one can
8027 possibly exist. */
8028 auto_vec<rtx, 4> pending;
8030 /* The maximum depth among the sub-expressions under expansion.
8031 Zero indicates no expansion so far. */
8032 expand_depth depth;
8035 /* Allocate the one-part auxiliary data structure for VAR, with enough
8036 room for COUNT dependencies. */
8038 static void
8039 loc_exp_dep_alloc (variable var, int count)
8041 size_t allocsize;
8043 gcc_checking_assert (var->onepart);
8045 /* We can be called with COUNT == 0 to allocate the data structure
8046 without any dependencies, e.g. for the backlinks only. However,
8047 if we are specifying a COUNT, then the dependency list must have
8048 been emptied before. It would be possible to adjust pointers or
8049 force it empty here, but this is better done at an earlier point
8050 in the algorithm, so we instead leave an assertion to catch
8051 errors. */
8052 gcc_checking_assert (!count
8053 || VAR_LOC_DEP_VEC (var) == NULL
8054 || VAR_LOC_DEP_VEC (var)->is_empty ());
8056 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8057 return;
8059 allocsize = offsetof (struct onepart_aux, deps)
8060 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8062 if (VAR_LOC_1PAUX (var))
8064 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8065 VAR_LOC_1PAUX (var), allocsize);
8066 /* If the reallocation moves the onepaux structure, the
8067 back-pointer to BACKLINKS in the first list member will still
8068 point to its old location. Adjust it. */
8069 if (VAR_LOC_DEP_LST (var))
8070 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8072 else
8074 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8075 *VAR_LOC_DEP_LSTP (var) = NULL;
8076 VAR_LOC_FROM (var) = NULL;
8077 VAR_LOC_DEPTH (var).complexity = 0;
8078 VAR_LOC_DEPTH (var).entryvals = 0;
8080 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8083 /* Remove all entries from the vector of active dependencies of VAR,
8084 removing them from the back-links lists too. */
8086 static void
8087 loc_exp_dep_clear (variable var)
8089 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8091 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8092 if (led->next)
8093 led->next->pprev = led->pprev;
8094 if (led->pprev)
8095 *led->pprev = led->next;
8096 VAR_LOC_DEP_VEC (var)->pop ();
8100 /* Insert an active dependency from VAR on X to the vector of
8101 dependencies, and add the corresponding back-link to X's list of
8102 back-links in VARS. */
8104 static void
8105 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8107 decl_or_value dv;
8108 variable xvar;
8109 loc_exp_dep *led;
8111 dv = dv_from_rtx (x);
8113 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8114 an additional look up? */
8115 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8117 if (!xvar)
8119 xvar = variable_from_dropped (dv, NO_INSERT);
8120 gcc_checking_assert (xvar);
8123 /* No point in adding the same backlink more than once. This may
8124 arise if say the same value appears in two complex expressions in
8125 the same loc_list, or even more than once in a single
8126 expression. */
8127 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8128 return;
8130 if (var->onepart == NOT_ONEPART)
8131 led = new loc_exp_dep;
8132 else
8134 loc_exp_dep empty;
8135 memset (&empty, 0, sizeof (empty));
8136 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8137 led = &VAR_LOC_DEP_VEC (var)->last ();
8139 led->dv = var->dv;
8140 led->value = x;
8142 loc_exp_dep_alloc (xvar, 0);
8143 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8144 led->next = *led->pprev;
8145 if (led->next)
8146 led->next->pprev = &led->next;
8147 *led->pprev = led;
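/* The back-links above form the classic "pointer to the previous
   next-pointer" intrusive list, which allows O(1) unlinking without
   knowing the list head.  A minimal standalone sketch:

     struct node { struct node *next, **pprev; };

     static void
     unlink (struct node *n)
     {
       if (n->next)
         n->next->pprev = n->pprev;
       *n->pprev = n->next;
     }

   loc_exp_dep_clear and notify_dependents_of_resolved_value unlink
   loc_exp_dep entries in exactly this way.  */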
8150 /* Create active dependencies of VAR on COUNT values starting at
8151 VALUE, and corresponding back-links to the entries in VARS. Return
8152 true if we found any pending-recursion results. */
8154 static bool
8155 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8156 variable_table_type *vars)
8158 bool pending_recursion = false;
8160 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8161 || VAR_LOC_DEP_VEC (var)->is_empty ());
8163 /* Set up dependencies on the COUNT values starting at VALUE, as
8164 collected by the caller's expansion loop. */
8165 loc_exp_dep_alloc (var, count);
8167 while (count--)
8169 rtx x = *value++;
8171 if (!pending_recursion)
8172 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8174 loc_exp_insert_dep (var, x, vars);
8177 return pending_recursion;
8180 /* Notify the back-links of IVAR that are pending recursion that we
8181 have found a non-NIL value for it, so they are cleared for another
8182 attempt to compute a current location. */
8184 static void
8185 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8187 loc_exp_dep *led, *next;
8189 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8191 decl_or_value dv = led->dv;
8192 variable var;
8194 next = led->next;
8196 if (dv_is_value_p (dv))
8198 rtx value = dv_as_value (dv);
8200 /* If we have already resolved it, leave it alone. */
8201 if (!VALUE_RECURSED_INTO (value))
8202 continue;
8204 /* Check that VALUE_RECURSED_INTO, true from the test above,
8205 implies NO_LOC_P. */
8206 gcc_checking_assert (NO_LOC_P (value));
8208 /* We won't notify variables that are being expanded,
8209 because their dependency list is cleared before
8210 recursing. */
8211 NO_LOC_P (value) = false;
8212 VALUE_RECURSED_INTO (value) = false;
8214 gcc_checking_assert (dv_changed_p (dv));
8216 else
8218 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8219 if (!dv_changed_p (dv))
8220 continue;
8223 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8225 if (!var)
8226 var = variable_from_dropped (dv, NO_INSERT);
8228 if (var)
8229 notify_dependents_of_resolved_value (var, vars);
8231 if (next)
8232 next->pprev = led->pprev;
8233 if (led->pprev)
8234 *led->pprev = next;
8235 led->next = NULL;
8236 led->pprev = NULL;
8240 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8241 int max_depth, void *data);
8243 /* Return the combined depth, when one sub-expression evaluated to
8244 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8246 static inline expand_depth
8247 update_depth (expand_depth saved_depth, expand_depth best_depth)
8249 /* If we didn't find anything, stick with what we had. */
8250 if (!best_depth.complexity)
8251 return saved_depth;
8253 /* If we hadn't found anything before, use the depth of the current
8254 expression. Do NOT add one extra level, we want to compute the
8255 maximum depth among sub-expressions. We'll increment it later,
8256 if appropriate. */
8257 if (!saved_depth.complexity)
8258 return best_depth;
8260 /* Combine the entryval count so that regardless of which one we
8261 return, the entryval count is accurate. */
8262 best_depth.entryvals = saved_depth.entryvals
8263 = best_depth.entryvals + saved_depth.entryvals;
8265 if (saved_depth.complexity < best_depth.complexity)
8266 return best_depth;
8267 else
8268 return saved_depth;
8271 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8272 DATA for cselib expand callback. If PENDRECP is given, indicate in
8273 it whether any sub-expression couldn't be fully evaluated because
8274 it is pending recursion resolution. */
8276 static inline rtx
8277 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8279 struct expand_loc_callback_data *elcd
8280 = (struct expand_loc_callback_data *) data;
8281 location_chain loc, next;
8282 rtx result = NULL;
8283 int first_child, result_first_child, last_child;
8284 bool pending_recursion;
8285 rtx loc_from = NULL;
8286 struct elt_loc_list *cloc = NULL;
8287 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8288 int wanted_entryvals, found_entryvals = 0;
8290 /* Clear all backlinks pointing at this, so that we're not notified
8291 while we're active. */
8292 loc_exp_dep_clear (var);
8294 retry:
8295 if (var->onepart == ONEPART_VALUE)
8297 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8299 gcc_checking_assert (cselib_preserved_value_p (val));
8301 cloc = val->locs;
8304 first_child = result_first_child = last_child
8305 = elcd->expanding.length ();
8307 wanted_entryvals = found_entryvals;
8309 /* Attempt to expand each available location in turn. */
8310 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8311 loc || cloc; loc = next)
8313 result_first_child = last_child;
8315 if (!loc)
8317 loc_from = cloc->loc;
8318 next = loc;
8319 cloc = cloc->next;
8320 if (unsuitable_loc (loc_from))
8321 continue;
8323 else
8325 loc_from = loc->loc;
8326 next = loc->next;
8329 gcc_checking_assert (!unsuitable_loc (loc_from));
8331 elcd->depth.complexity = elcd->depth.entryvals = 0;
8332 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8333 vt_expand_loc_callback, data);
8334 last_child = elcd->expanding.length ();
8336 if (result)
8338 depth = elcd->depth;
8340 gcc_checking_assert (depth.complexity
8341 || result_first_child == last_child);
8343 if (last_child - result_first_child != 1)
8345 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8346 depth.entryvals++;
8347 depth.complexity++;
8350 if (depth.complexity <= EXPR_USE_DEPTH)
8352 if (depth.entryvals <= wanted_entryvals)
8353 break;
8354 else if (!found_entryvals || depth.entryvals < found_entryvals)
8355 found_entryvals = depth.entryvals;
8358 result = NULL;
8361 /* Set it up in case we leave the loop. */
8362 depth.complexity = depth.entryvals = 0;
8363 loc_from = NULL;
8364 result_first_child = first_child;
8367 if (!loc_from && wanted_entryvals < found_entryvals)
8369 /* We found entries with ENTRY_VALUEs and skipped them. Since
8370 we could not find any expansions without ENTRY_VALUEs, but we
8371 found at least one with them, go back and get an entry with
8372 the minimum ENTRY_VALUE count that we found.  We could
8373 avoid looping, but since each sub-loc is already resolved,
8374 the re-expansion should be trivial. ??? Should we record all
8375 attempted locs as dependencies, so that we retry the
8376 expansion should any of them change, in the hope it can give
8377 us a new entry without an ENTRY_VALUE? */
8378 elcd->expanding.truncate (first_child);
8379 goto retry;
8382 /* Register all encountered dependencies as active. */
8383 pending_recursion = loc_exp_dep_set
8384 (var, result, elcd->expanding.address () + result_first_child,
8385 last_child - result_first_child, elcd->vars);
8387 elcd->expanding.truncate (first_child);
8389 /* Record where the expansion came from. */
8390 gcc_checking_assert (!result || !pending_recursion);
8391 VAR_LOC_FROM (var) = loc_from;
8392 VAR_LOC_DEPTH (var) = depth;
8394 gcc_checking_assert (!depth.complexity == !result);
8396 elcd->depth = update_depth (saved_depth, depth);
8398 /* Indicate whether any of the dependencies are pending recursion
8399 resolution. */
8400 if (pendrecp)
8401 *pendrecp = pending_recursion;
8403 if (!pendrecp || !pending_recursion)
8404 var->var_part[0].cur_loc = result;
8406 return result;
8409 /* Callback for cselib_expand_value, that looks for expressions
8410 holding the value in the var-tracking hash tables. Return X for
8411 standard processing, anything else is to be used as-is. */
8413 static rtx
8414 vt_expand_loc_callback (rtx x, bitmap regs,
8415 int max_depth ATTRIBUTE_UNUSED,
8416 void *data)
8418 struct expand_loc_callback_data *elcd
8419 = (struct expand_loc_callback_data *) data;
8420 decl_or_value dv;
8421 variable var;
8422 rtx result, subreg;
8423 bool pending_recursion = false;
8424 bool from_empty = false;
8426 switch (GET_CODE (x))
8428 case SUBREG:
8429 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8430 EXPR_DEPTH,
8431 vt_expand_loc_callback, data);
8433 if (!subreg)
8434 return NULL;
8436 result = simplify_gen_subreg (GET_MODE (x), subreg,
8437 GET_MODE (SUBREG_REG (x)),
8438 SUBREG_BYTE (x));
8440 /* Invalid SUBREGs are ok in debug info. ??? We could try
8441 alternate expansions for the VALUE as well. */
8442 if (!result)
8443 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8445 return result;
8447 case DEBUG_EXPR:
8448 case VALUE:
8449 dv = dv_from_rtx (x);
8450 break;
8452 default:
8453 return x;
8456 elcd->expanding.safe_push (x);
8458 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8459 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8461 if (NO_LOC_P (x))
8463 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8464 return NULL;
8467 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8469 if (!var)
8471 from_empty = true;
8472 var = variable_from_dropped (dv, INSERT);
8475 gcc_checking_assert (var);
8477 if (!dv_changed_p (dv))
8479 gcc_checking_assert (!NO_LOC_P (x));
8480 gcc_checking_assert (var->var_part[0].cur_loc);
8481 gcc_checking_assert (VAR_LOC_1PAUX (var));
8482 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8484 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8486 return var->var_part[0].cur_loc;
8489 VALUE_RECURSED_INTO (x) = true;
8490 /* This is tentative, but it makes some tests simpler. */
8491 NO_LOC_P (x) = true;
8493 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8495 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8497 if (pending_recursion)
8499 gcc_checking_assert (!result);
8500 elcd->pending.safe_push (x);
8502 else
8504 NO_LOC_P (x) = !result;
8505 VALUE_RECURSED_INTO (x) = false;
8506 set_dv_changed (dv, false);
8508 if (result)
8509 notify_dependents_of_resolved_value (var, elcd->vars);
8512 return result;
8515 /* While expanding variables, we may encounter recursion cycles
8516 because of mutual (possibly indirect) dependencies between two
8517 particular variables (or values), say A and B. If we're trying to
8518 expand A when we get to B, which in turn attempts to expand A, if
8519 we can't find any other expansion for B, we'll add B to this
8520 pending-recursion stack, and tentatively return NULL for its
8521 location. This tentative value will be used for any other
8522 occurrences of B, unless A gets some other location, in which case
8523 it will notify B that it is worth another try at computing a
8524 location for it, and it will use the location computed for A then.
8525 At the end of the expansion, the tentative NULL locations become
8526 final for all members of PENDING that didn't get a notification.
8527 This function performs this finalization of NULL locations. */
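/* A concrete, made-up trace of the above: suppose VALUE A's only
location is (plus B 4) and VALUE B's only location is (minus A 4).
Expanding A recurses into B; B recurses back into A, finds it marked
as recursed-into, and fails, so B is pushed onto PENDING with a
tentative NULL. A's expansion then fails as well and nothing ever
notifies B, so the loop below turns both tentative NULLs into final
"no location" results. */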
8529 static void
8530 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8532 while (!pending->is_empty ())
8534 rtx x = pending->pop ();
8535 decl_or_value dv;
8537 if (!VALUE_RECURSED_INTO (x))
8538 continue;
8540 gcc_checking_assert (NO_LOC_P (x));
8541 VALUE_RECURSED_INTO (x) = false;
8542 dv = dv_from_rtx (x);
8543 gcc_checking_assert (dv_changed_p (dv));
8544 set_dv_changed (dv, false);
8548 /* Initialize expand_loc_callback_data D with variable hash table V.
8549 It must be a macro because of alloca (vec stack). */
8550 #define INIT_ELCD(d, v) \
8551 do \
8553 (d).vars = (v); \
8554 (d).depth.complexity = (d).depth.entryvals = 0; \
8556 while (0)
8557 /* Finalize expand_loc_callback_data D, resolved to location L. */
8558 #define FINI_ELCD(d, l) \
8559 do \
8561 resolve_expansions_pending_recursion (&(d).pending); \
8562 (d).pending.release (); \
8563 (d).expanding.release (); \
8565 if ((l) && MEM_P (l)) \
8566 (l) = targetm.delegitimize_address (l); \
8568 while (0)
8570 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8571 equivalences in VARS, updating their CUR_LOCs in the process. */
8573 static rtx
8574 vt_expand_loc (rtx loc, variable_table_type *vars)
8576 struct expand_loc_callback_data data;
8577 rtx result;
8579 if (!MAY_HAVE_DEBUG_INSNS)
8580 return loc;
8582 INIT_ELCD (data, vars);
8584 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8585 vt_expand_loc_callback, &data);
8587 FINI_ELCD (data, result);
8589 return result;
8592 /* Expand the one-part VARiable to a location, using the equivalences
8593 in VARS, updating their CUR_LOCs in the process. */
8595 static rtx
8596 vt_expand_1pvar (variable var, variable_table_type *vars)
8598 struct expand_loc_callback_data data;
8599 rtx loc;
8601 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8603 if (!dv_changed_p (var->dv))
8604 return var->var_part[0].cur_loc;
8606 INIT_ELCD (data, vars);
8608 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8610 gcc_checking_assert (data.expanding.is_empty ());
8612 FINI_ELCD (data, loc);
8614 return loc;
8617 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8618 additional parameters: WHERE specifies whether the note shall be emitted
8619 before or after instruction INSN. */
8621 static int
8622 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8624 variable var = *varp;
8625 rtx_insn *insn = data->insn;
8626 enum emit_note_where where = data->where;
8627 variable_table_type *vars = data->vars;
8628 rtx_note *note;
8629 rtx note_vl;
8630 int i, j, n_var_parts;
8631 bool complete;
8632 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8633 HOST_WIDE_INT last_limit;
8634 tree type_size_unit;
8635 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8636 rtx loc[MAX_VAR_PARTS];
8637 tree decl;
8638 location_chain lc;
8640 gcc_checking_assert (var->onepart == NOT_ONEPART
8641 || var->onepart == ONEPART_VDECL);
8643 decl = dv_as_decl (var->dv);
8645 complete = true;
8646 last_limit = 0;
8647 n_var_parts = 0;
8648 if (!var->onepart)
8649 for (i = 0; i < var->n_var_parts; i++)
8650 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8651 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8652 for (i = 0; i < var->n_var_parts; i++)
8654 machine_mode mode, wider_mode;
8655 rtx loc2;
8656 HOST_WIDE_INT offset;
8658 if (i == 0 && var->onepart)
8660 gcc_checking_assert (var->n_var_parts == 1);
8661 offset = 0;
8662 initialized = VAR_INIT_STATUS_INITIALIZED;
8663 loc2 = vt_expand_1pvar (var, vars);
8665 else
8667 if (last_limit < VAR_PART_OFFSET (var, i))
8669 complete = false;
8670 break;
8672 else if (last_limit > VAR_PART_OFFSET (var, i))
8673 continue;
8674 offset = VAR_PART_OFFSET (var, i);
8675 loc2 = var->var_part[i].cur_loc;
8676 if (loc2 && GET_CODE (loc2) == MEM
8677 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8679 rtx depval = XEXP (loc2, 0);
8681 loc2 = vt_expand_loc (loc2, vars);
8683 if (loc2)
8684 loc_exp_insert_dep (var, depval, vars);
8686 if (!loc2)
8688 complete = false;
8689 continue;
8691 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8692 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8693 if (var->var_part[i].cur_loc == lc->loc)
8695 initialized = lc->init;
8696 break;
8698 gcc_assert (lc);
8701 offsets[n_var_parts] = offset;
8702 if (!loc2)
8704 complete = false;
8705 continue;
8707 loc[n_var_parts] = loc2;
8708 mode = GET_MODE (var->var_part[i].cur_loc);
8709 if (mode == VOIDmode && var->onepart)
8710 mode = DECL_MODE (decl);
8711 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8713 /* Attempt to merge adjacent registers or memory. */
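/* E.g. (made-up RTL) two adjacent SImode parts held in
(mem:SI (reg sp)) and (mem:SI (plus (reg sp) (const_int 4))), or in
a pair of consecutive hard registers, can be described by a single
DImode location instead of two entries. */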
8714 wider_mode = GET_MODE_WIDER_MODE (mode);
8715 for (j = i + 1; j < var->n_var_parts; j++)
8716 if (last_limit <= VAR_PART_OFFSET (var, j))
8717 break;
8718 if (j < var->n_var_parts
8719 && wider_mode != VOIDmode
8720 && var->var_part[j].cur_loc
8721 && mode == GET_MODE (var->var_part[j].cur_loc)
8722 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8723 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8724 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8725 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8727 rtx new_loc = NULL;
8729 if (REG_P (loc[n_var_parts])
8730 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8731 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8732 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8733 == REGNO (loc2))
8735 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8736 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8737 mode, 0);
8738 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8739 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8740 if (new_loc)
8742 if (!REG_P (new_loc)
8743 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8744 new_loc = NULL;
8745 else
8746 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8749 else if (MEM_P (loc[n_var_parts])
8750 && GET_CODE (XEXP (loc2, 0)) == PLUS
8751 && REG_P (XEXP (XEXP (loc2, 0), 0))
8752 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8754 if ((REG_P (XEXP (loc[n_var_parts], 0))
8755 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8756 XEXP (XEXP (loc2, 0), 0))
8757 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8758 == GET_MODE_SIZE (mode))
8759 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8760 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8761 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8762 XEXP (XEXP (loc2, 0), 0))
8763 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8764 + GET_MODE_SIZE (mode)
8765 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8766 new_loc = adjust_address_nv (loc[n_var_parts],
8767 wider_mode, 0);
8770 if (new_loc)
8772 loc[n_var_parts] = new_loc;
8773 mode = wider_mode;
8774 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8775 i = j;
8778 ++n_var_parts;
8780 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8781 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8782 complete = false;
8784 if (! flag_var_tracking_uninit)
8785 initialized = VAR_INIT_STATUS_INITIALIZED;
8787 note_vl = NULL_RTX;
8788 if (!complete)
8789 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8790 else if (n_var_parts == 1)
8792 rtx expr_list;
8794 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8795 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8796 else
8797 expr_list = loc[0];
8799 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8801 else if (n_var_parts)
8803 rtx parallel;
8805 for (i = 0; i < n_var_parts; i++)
8806 loc[i]
8807 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8809 parallel = gen_rtx_PARALLEL (VOIDmode,
8810 gen_rtvec_v (n_var_parts, loc));
8811 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8812 parallel, initialized);
8815 if (where != EMIT_NOTE_BEFORE_INSN)
8817 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8818 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8819 NOTE_DURING_CALL_P (note) = true;
8821 else
8823 /* Make sure that the call-related notes come first. */
8824 while (NEXT_INSN (insn)
8825 && NOTE_P (insn)
8826 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8827 && NOTE_DURING_CALL_P (insn))
8828 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8829 insn = NEXT_INSN (insn);
8830 if (NOTE_P (insn)
8831 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8832 && NOTE_DURING_CALL_P (insn))
8833 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8834 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8835 else
8836 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8838 NOTE_VAR_LOCATION (note) = note_vl;
8840 set_dv_changed (var->dv, false);
8841 gcc_assert (var->in_changed_variables);
8842 var->in_changed_variables = false;
8843 changed_variables->clear_slot (varp);
8845 /* Continue traversing the hash table. */
8846 return 1;
8849 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK (a
8850 stack of RTX values) the entries that aren't user variables. */
8852 static int
8853 var_track_values_to_stack (variable_def **slot,
8854 vec<rtx, va_heap> *changed_values_stack)
8856 variable var = *slot;
8858 if (var->onepart == ONEPART_VALUE)
8859 changed_values_stack->safe_push (dv_as_value (var->dv));
8860 else if (var->onepart == ONEPART_DEXPR)
8861 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8863 return 1;
8866 /* Remove from changed_variables the entry whose DV corresponds to
8867 value or debug_expr VAL. */
8868 static void
8869 remove_value_from_changed_variables (rtx val)
8871 decl_or_value dv = dv_from_rtx (val);
8872 variable_def **slot;
8873 variable var;
8875 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8876 NO_INSERT);
8877 var = *slot;
8878 var->in_changed_variables = false;
8879 changed_variables->clear_slot (slot);
8882 /* If VAL (a value or debug_expr) has backlinks to variables actively
8883 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8884 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8885 have dependencies of their own to notify. */
8887 static void
8888 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8889 vec<rtx, va_heap> *changed_values_stack)
8891 variable_def **slot;
8892 variable var;
8893 loc_exp_dep *led;
8894 decl_or_value dv = dv_from_rtx (val);
8896 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8897 NO_INSERT);
8898 if (!slot)
8899 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8900 if (!slot)
8901 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8902 NO_INSERT);
8903 var = *slot;
8905 while ((led = VAR_LOC_DEP_LST (var)))
8907 decl_or_value ldv = led->dv;
8908 variable ivar;
8910 /* Deactivate and remove the backlink, as it was "used up". It
8911 makes no sense to attempt to notify the same entity again:
8912 either it will be recomputed and re-register an active
8913 dependency, or it will still have the changed mark. */
8914 if (led->next)
8915 led->next->pprev = led->pprev;
8916 if (led->pprev)
8917 *led->pprev = led->next;
8918 led->next = NULL;
8919 led->pprev = NULL;
8921 if (dv_changed_p (ldv))
8922 continue;
8924 switch (dv_onepart_p (ldv))
8926 case ONEPART_VALUE:
8927 case ONEPART_DEXPR:
8928 set_dv_changed (ldv, true);
8929 changed_values_stack->safe_push (dv_as_rtx (ldv));
8930 break;
8932 case ONEPART_VDECL:
8933 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8934 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8935 variable_was_changed (ivar, NULL);
8936 break;
8938 case NOT_ONEPART:
8939 delete led;
8940 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8941 if (ivar)
8943 int i = ivar->n_var_parts;
8944 while (i--)
8946 rtx loc = ivar->var_part[i].cur_loc;
8948 if (loc && GET_CODE (loc) == MEM
8949 && XEXP (loc, 0) == val)
8951 variable_was_changed (ivar, NULL);
8952 break;
8956 break;
8958 default:
8959 gcc_unreachable ();
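/* Detached illustrative sketch, not part of the pass: the backlink
removal above uses the "pprev" idiom, in which each node stores the
address of whichever pointer currently points at it, so unlinking
needs neither the list head nor a search. Toy types, hypothetical
names. */
struct toy_dep
{
struct toy_dep *next;
struct toy_dep **pprev; /* &list-head or &predecessor->next */
};
static void
toy_unlink (struct toy_dep *d)
{
if (d->next)
d->next->pprev = d->pprev; /* successor now points back past D */
if (d->pprev)
*d->pprev = d->next; /* whatever pointed at D now skips it */
d->next = NULL; /* mark D detached, as the loop above */
d->pprev = NULL; /* expects of a used-up backlink. */
}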
8964 /* Take out of changed_variables any entries that don't refer to user
8965 variables. Back-propagate change notifications from values and
8966 debug_exprs to their active dependencies in HTAB or in
8967 CHANGED_VARIABLES. */
8969 static void
8970 process_changed_values (variable_table_type *htab)
8972 int i, n;
8973 rtx val;
8974 auto_vec<rtx, 20> changed_values_stack;
8976 /* Move values from changed_variables to changed_values_stack. */
8977 changed_variables
8978 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8979 (&changed_values_stack);
8981 /* Back-propagate change notifications in values while popping
8982 them from the stack. */
8983 for (n = i = changed_values_stack.length ();
8984 i > 0; i = changed_values_stack.length ())
8986 val = changed_values_stack.pop ();
8987 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8989 /* This condition will hold when visiting each of the entries
8990 originally in changed_variables. We can't remove them
8991 earlier because this could drop the backlinks before we got a
8992 chance to use them. */
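/* Made-up trace: with entries V1 V2 V3 initially on the stack
(n == 3), popping V3 sees i == n, so V3 is removed and n drops to 2;
if V3 pushed a new dependency V4, the next iteration pops V4 with
i == 3 > n and leaves changed_variables alone, since V4 was never
in it. */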
8993 if (i == n)
8995 remove_value_from_changed_variables (val);
8996 n--;
9001 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
9002 CHANGED_VARIABLES and delete the chain. WHERE specifies whether
9003 the notes shall be emitted before or after instruction INSN. */
9005 static void
9006 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9007 shared_hash vars)
9009 emit_note_data data;
9010 variable_table_type *htab = shared_hash_htab (vars);
9012 if (!changed_variables->elements ())
9013 return;
9015 if (MAY_HAVE_DEBUG_INSNS)
9016 process_changed_values (htab);
9018 data.insn = insn;
9019 data.where = where;
9020 data.vars = htab;
9022 changed_variables
9023 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9026 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9027 same variable in hash table NEW_VARS or is not there at all. */
9029 static int
9030 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9032 variable old_var, new_var;
9034 old_var = *slot;
9035 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9037 if (!new_var)
9039 /* Variable has disappeared. */
9040 variable empty_var = NULL;
9042 if (old_var->onepart == ONEPART_VALUE
9043 || old_var->onepart == ONEPART_DEXPR)
9045 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9046 if (empty_var)
9048 gcc_checking_assert (!empty_var->in_changed_variables);
9049 if (!VAR_LOC_1PAUX (old_var))
9051 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9052 VAR_LOC_1PAUX (empty_var) = NULL;
9054 else
9055 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9059 if (!empty_var)
9061 empty_var = onepart_pool (old_var->onepart).allocate ();
9062 empty_var->dv = old_var->dv;
9063 empty_var->refcount = 0;
9064 empty_var->n_var_parts = 0;
9065 empty_var->onepart = old_var->onepart;
9066 empty_var->in_changed_variables = false;
9069 if (empty_var->onepart)
9071 /* Propagate the auxiliary data to (ultimately)
9072 changed_variables. */
9073 empty_var->var_part[0].loc_chain = NULL;
9074 empty_var->var_part[0].cur_loc = NULL;
9075 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9076 VAR_LOC_1PAUX (old_var) = NULL;
9078 variable_was_changed (empty_var, NULL);
9079 /* Continue traversing the hash table. */
9080 return 1;
9082 /* Update cur_loc and one-part auxiliary data, before new_var goes
9083 through variable_was_changed. */
9084 if (old_var != new_var && new_var->onepart)
9086 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9087 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9088 VAR_LOC_1PAUX (old_var) = NULL;
9089 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9091 if (variable_different_p (old_var, new_var))
9092 variable_was_changed (new_var, NULL);
9094 /* Continue traversing the hash table. */
9095 return 1;
9098 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9099 table OLD_VARS. */
9101 static int
9102 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9104 variable old_var, new_var;
9106 new_var = *slot;
9107 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9108 if (!old_var)
9110 int i;
9111 for (i = 0; i < new_var->n_var_parts; i++)
9112 new_var->var_part[i].cur_loc = NULL;
9113 variable_was_changed (new_var, NULL);
9116 /* Continue traversing the hash table. */
9117 return 1;
9120 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9121 NEW_SET. */
9123 static void
9124 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9125 dataflow_set *new_set)
9127 shared_hash_htab (old_set->vars)
9128 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9129 (shared_hash_htab (new_set->vars));
9130 shared_hash_htab (new_set->vars)
9131 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9132 (shared_hash_htab (old_set->vars));
9133 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9136 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9138 static rtx_insn *
9139 next_non_note_insn_var_location (rtx_insn *insn)
9141 while (insn)
9143 insn = NEXT_INSN (insn);
9144 if (insn == 0
9145 || !NOTE_P (insn)
9146 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9147 break;
9150 return insn;
9153 /* Emit the notes for changes of location parts in the basic block BB. */
9155 static void
9156 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9158 unsigned int i;
9159 micro_operation *mo;
9161 dataflow_set_clear (set);
9162 dataflow_set_copy (set, &VTI (bb)->in);
9164 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9166 rtx_insn *insn = mo->insn;
9167 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9169 switch (mo->type)
9171 case MO_CALL:
9172 dataflow_set_clear_at_call (set);
9173 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9175 rtx arguments = mo->u.loc, *p = &arguments;
9176 rtx_note *note;
9177 while (*p)
9179 XEXP (XEXP (*p, 0), 1)
9180 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9181 shared_hash_htab (set->vars));
9182 /* If expansion is successful, keep it in the list. */
9183 if (XEXP (XEXP (*p, 0), 1))
9184 p = &XEXP (*p, 1);
9185 /* Otherwise, if the following item is the data_value for it,
9186 drop it too. */
9187 else if (XEXP (*p, 1)
9188 && REG_P (XEXP (XEXP (*p, 0), 0))
9189 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9190 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9191 0))
9192 && REGNO (XEXP (XEXP (*p, 0), 0))
9193 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9194 0), 0)))
9195 *p = XEXP (XEXP (*p, 1), 1);
9196 /* Just drop this item. */
9197 else
9198 *p = XEXP (*p, 1);
9200 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9201 NOTE_VAR_LOCATION (note) = arguments;
9203 break;
9205 case MO_USE:
9207 rtx loc = mo->u.loc;
9209 if (REG_P (loc))
9210 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9211 else
9212 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9214 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9216 break;
9218 case MO_VAL_LOC:
9220 rtx loc = mo->u.loc;
9221 rtx val, vloc;
9222 tree var;
9224 if (GET_CODE (loc) == CONCAT)
9226 val = XEXP (loc, 0);
9227 vloc = XEXP (loc, 1);
9229 else
9231 val = NULL_RTX;
9232 vloc = loc;
9235 var = PAT_VAR_LOCATION_DECL (vloc);
9237 clobber_variable_part (set, NULL_RTX,
9238 dv_from_decl (var), 0, NULL_RTX);
9239 if (val)
9241 if (VAL_NEEDS_RESOLUTION (loc))
9242 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9243 set_variable_part (set, val, dv_from_decl (var), 0,
9244 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9245 INSERT);
9247 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9248 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9249 dv_from_decl (var), 0,
9250 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9251 INSERT);
9253 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9255 break;
9257 case MO_VAL_USE:
9259 rtx loc = mo->u.loc;
9260 rtx val, vloc, uloc;
9262 vloc = uloc = XEXP (loc, 1);
9263 val = XEXP (loc, 0);
9265 if (GET_CODE (val) == CONCAT)
9267 uloc = XEXP (val, 1);
9268 val = XEXP (val, 0);
9271 if (VAL_NEEDS_RESOLUTION (loc))
9272 val_resolve (set, val, vloc, insn);
9273 else
9274 val_store (set, val, uloc, insn, false);
9276 if (VAL_HOLDS_TRACK_EXPR (loc))
9278 if (GET_CODE (uloc) == REG)
9279 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9280 NULL);
9281 else if (GET_CODE (uloc) == MEM)
9282 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9283 NULL);
9286 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9288 break;
9290 case MO_VAL_SET:
9292 rtx loc = mo->u.loc;
9293 rtx val, vloc, uloc;
9294 rtx dstv, srcv;
9296 vloc = loc;
9297 uloc = XEXP (vloc, 1);
9298 val = XEXP (vloc, 0);
9299 vloc = uloc;
9301 if (GET_CODE (uloc) == SET)
9303 dstv = SET_DEST (uloc);
9304 srcv = SET_SRC (uloc);
9306 else
9308 dstv = uloc;
9309 srcv = NULL;
9312 if (GET_CODE (val) == CONCAT)
9314 dstv = vloc = XEXP (val, 1);
9315 val = XEXP (val, 0);
9318 if (GET_CODE (vloc) == SET)
9320 srcv = SET_SRC (vloc);
9322 gcc_assert (val != srcv);
9323 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9325 dstv = vloc = SET_DEST (vloc);
9327 if (VAL_NEEDS_RESOLUTION (loc))
9328 val_resolve (set, val, srcv, insn);
9330 else if (VAL_NEEDS_RESOLUTION (loc))
9332 gcc_assert (GET_CODE (uloc) == SET
9333 && GET_CODE (SET_SRC (uloc)) == REG);
9334 val_resolve (set, val, SET_SRC (uloc), insn);
9337 if (VAL_HOLDS_TRACK_EXPR (loc))
9339 if (VAL_EXPR_IS_CLOBBERED (loc))
9341 if (REG_P (uloc))
9342 var_reg_delete (set, uloc, true);
9343 else if (MEM_P (uloc))
9345 gcc_assert (MEM_P (dstv));
9346 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9347 var_mem_delete (set, dstv, true);
9350 else
9352 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9353 rtx src = NULL, dst = uloc;
9354 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9356 if (GET_CODE (uloc) == SET)
9358 src = SET_SRC (uloc);
9359 dst = SET_DEST (uloc);
9362 if (copied_p)
9364 status = find_src_status (set, src);
9366 src = find_src_set_src (set, src);
9369 if (REG_P (dst))
9370 var_reg_delete_and_set (set, dst, !copied_p,
9371 status, srcv);
9372 else if (MEM_P (dst))
9374 gcc_assert (MEM_P (dstv));
9375 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9376 var_mem_delete_and_set (set, dstv, !copied_p,
9377 status, srcv);
9381 else if (REG_P (uloc))
9382 var_regno_delete (set, REGNO (uloc));
9383 else if (MEM_P (uloc))
9385 gcc_checking_assert (GET_CODE (vloc) == MEM);
9386 gcc_checking_assert (vloc == dstv);
9387 if (vloc != dstv)
9388 clobber_overlapping_mems (set, vloc);
9391 val_store (set, val, dstv, insn, true);
9393 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9394 set->vars);
9396 break;
9398 case MO_SET:
9400 rtx loc = mo->u.loc;
9401 rtx set_src = NULL;
9403 if (GET_CODE (loc) == SET)
9405 set_src = SET_SRC (loc);
9406 loc = SET_DEST (loc);
9409 if (REG_P (loc))
9410 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9411 set_src);
9412 else
9413 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9414 set_src);
9416 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9417 set->vars);
9419 break;
9421 case MO_COPY:
9423 rtx loc = mo->u.loc;
9424 enum var_init_status src_status;
9425 rtx set_src = NULL;
9427 if (GET_CODE (loc) == SET)
9429 set_src = SET_SRC (loc);
9430 loc = SET_DEST (loc);
9433 src_status = find_src_status (set, set_src);
9434 set_src = find_src_set_src (set, set_src);
9436 if (REG_P (loc))
9437 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9438 else
9439 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9441 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9442 set->vars);
9444 break;
9446 case MO_USE_NO_VAR:
9448 rtx loc = mo->u.loc;
9450 if (REG_P (loc))
9451 var_reg_delete (set, loc, false);
9452 else
9453 var_mem_delete (set, loc, false);
9455 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9457 break;
9459 case MO_CLOBBER:
9461 rtx loc = mo->u.loc;
9463 if (REG_P (loc))
9464 var_reg_delete (set, loc, true);
9465 else
9466 var_mem_delete (set, loc, true);
9468 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9469 set->vars);
9471 break;
9473 case MO_ADJUST:
9474 set->stack_adjust += mo->u.adjust;
9475 break;
9480 /* Emit notes for the whole function. */
9482 static void
9483 vt_emit_notes (void)
9485 basic_block bb;
9486 dataflow_set cur;
9488 gcc_assert (!changed_variables->elements ());
9490 /* Free memory occupied by the out hash tables, as they aren't used
9491 anymore. */
9492 FOR_EACH_BB_FN (bb, cfun)
9493 dataflow_set_clear (&VTI (bb)->out);
9495 /* Enable emitting notes by functions (mainly by set_variable_part and
9496 delete_variable_part). */
9497 emit_notes = true;
9499 if (MAY_HAVE_DEBUG_INSNS)
9501 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9504 dataflow_set_init (&cur);
9506 FOR_EACH_BB_FN (bb, cfun)
9508 /* Emit the notes for changes of variable locations between two
9509 subsequent basic blocks. */
9510 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9512 if (MAY_HAVE_DEBUG_INSNS)
9513 local_get_addr_cache = new hash_map<rtx, rtx>;
9515 /* Emit the notes for the changes in the basic block itself. */
9516 emit_notes_in_bb (bb, &cur);
9518 if (MAY_HAVE_DEBUG_INSNS)
9519 delete local_get_addr_cache;
9520 local_get_addr_cache = NULL;
9522 /* Free memory occupied by the in hash table; we won't need it
9523 again. */
9524 dataflow_set_clear (&VTI (bb)->in);
9526 #ifdef ENABLE_CHECKING
9527 shared_hash_htab (cur.vars)
9528 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9529 (shared_hash_htab (empty_shared_hash));
9530 #endif
9531 dataflow_set_destroy (&cur);
9533 if (MAY_HAVE_DEBUG_INSNS)
9534 delete dropped_values;
9535 dropped_values = NULL;
9537 emit_notes = false;
9540 /* If there is a declaration and offset associated with register/memory RTL,
9541 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
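/* E.g. (made-up) a PARALLEL such as
(parallel [(expr_list (reg:DI 3) (const_int 0))
(expr_list (reg:DI 4) (const_int 8))])
where both registers have REG_EXPR equal to the same PARM_DECL P
yields *DECLP == P and *OFFSETP == 0, the smallest REG_OFFSET. */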
9543 static bool
9544 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9546 if (REG_P (rtl))
9548 if (REG_ATTRS (rtl))
9550 *declp = REG_EXPR (rtl);
9551 *offsetp = REG_OFFSET (rtl);
9552 return true;
9555 else if (GET_CODE (rtl) == PARALLEL)
9557 tree decl = NULL_TREE;
9558 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9559 int len = XVECLEN (rtl, 0), i;
9561 for (i = 0; i < len; i++)
9563 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9564 if (!REG_P (reg) || !REG_ATTRS (reg))
9565 break;
9566 if (!decl)
9567 decl = REG_EXPR (reg);
9568 if (REG_EXPR (reg) != decl)
9569 break;
9570 if (REG_OFFSET (reg) < offset)
9571 offset = REG_OFFSET (reg);
9574 if (i == len)
9576 *declp = decl;
9577 *offsetp = offset;
9578 return true;
9581 else if (MEM_P (rtl))
9583 if (MEM_ATTRS (rtl))
9585 *declp = MEM_EXPR (rtl);
9586 *offsetp = INT_MEM_OFFSET (rtl);
9587 return true;
9590 return false;
9593 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9594 of VAL. */
9596 static void
9597 record_entry_value (cselib_val *val, rtx rtl)
9599 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9601 ENTRY_VALUE_EXP (ev) = rtl;
9603 cselib_add_permanent_equiv (val, ev, get_insns ());
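/* E.g. for a parameter arriving in a register R, this records
(entry_value:M (reg:M R)) as a permanent equivalence of the
parameter's VALUE, which is what later lets DWARF describe the
location with an entry-value expression such as
DW_OP_GNU_entry_value. (Illustrative reading of the call above.) */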
9606 /* Insert function parameter PARM into the IN and OUT sets of ENTRY_BLOCK. */
9608 static void
9609 vt_add_function_parameter (tree parm)
9611 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9612 rtx incoming = DECL_INCOMING_RTL (parm);
9613 tree decl;
9614 machine_mode mode;
9615 HOST_WIDE_INT offset;
9616 dataflow_set *out;
9617 decl_or_value dv;
9619 if (TREE_CODE (parm) != PARM_DECL)
9620 return;
9622 if (!decl_rtl || !incoming)
9623 return;
9625 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9626 return;
9628 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9629 rewrite the incoming location of parameters passed on the stack
9630 into MEMs based on the argument pointer, so that incoming doesn't
9631 depend on a pseudo. */
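/* E.g. (made-up) with a pseudo in internal_arg_pointer, a parameter
at (mem (plus (reg pseudo) (const_int 16))) is rewritten below to
(mem (plus (reg argp) (const_int N))) with
N == 16 - FIRST_PARM_OFFSET (current_function_decl), so its address
no longer mentions the pseudo. */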
9632 if (MEM_P (incoming)
9633 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9634 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9635 && XEXP (XEXP (incoming, 0), 0)
9636 == crtl->args.internal_arg_pointer
9637 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9639 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9640 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9641 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9642 incoming
9643 = replace_equiv_address_nv (incoming,
9644 plus_constant (Pmode,
9645 arg_pointer_rtx, off));
9648 #ifdef HAVE_window_save
9649 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9650 If the target machine has an explicit window save instruction, the
9651 actual entry value is the corresponding OUTGOING_REGNO instead. */
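/* E.g. on SPARC, an argument the callee sees in %i0 was %o0 in the
caller's register window, so its value at function entry must be
described in terms of %o0. (Illustrative; SPARC is the usual
HAVE_window_save target.) */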
9652 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9654 if (REG_P (incoming)
9655 && HARD_REGISTER_P (incoming)
9656 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9658 parm_reg_t p;
9659 p.incoming = incoming;
9660 incoming
9661 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9662 OUTGOING_REGNO (REGNO (incoming)), 0);
9663 p.outgoing = incoming;
9664 vec_safe_push (windowed_parm_regs, p);
9666 else if (GET_CODE (incoming) == PARALLEL)
9668 rtx outgoing
9669 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9670 int i;
9672 for (i = 0; i < XVECLEN (incoming, 0); i++)
9674 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9675 parm_reg_t p;
9676 p.incoming = reg;
9677 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9678 OUTGOING_REGNO (REGNO (reg)), 0);
9679 p.outgoing = reg;
9680 XVECEXP (outgoing, 0, i)
9681 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9682 XEXP (XVECEXP (incoming, 0, i), 1));
9683 vec_safe_push (windowed_parm_regs, p);
9686 incoming = outgoing;
9688 else if (MEM_P (incoming)
9689 && REG_P (XEXP (incoming, 0))
9690 && HARD_REGISTER_P (XEXP (incoming, 0)))
9692 rtx reg = XEXP (incoming, 0);
9693 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9695 parm_reg_t p;
9696 p.incoming = reg;
9697 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9698 p.outgoing = reg;
9699 vec_safe_push (windowed_parm_regs, p);
9700 incoming = replace_equiv_address_nv (incoming, reg);
9704 #endif
9706 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9708 if (MEM_P (incoming))
9710 /* This means the argument is passed by invisible reference. */
9711 offset = 0;
9712 decl = parm;
9714 else
9716 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9717 return;
9718 offset += byte_lowpart_offset (GET_MODE (incoming),
9719 GET_MODE (decl_rtl));
9723 if (!decl)
9724 return;
9726 if (parm != decl)
9728 /* If that DECL_RTL wasn't a pseudo that got spilled to
9729 memory, bail out. Otherwise, the spill slot sharing code
9730 will force the memory to reference spill_slot_decl (%sfp),
9731 so we don't match above. That's ok, the pseudo must have
9732 referenced the entire parameter, so just reset OFFSET. */
9733 if (decl != get_spill_slot_decl (false))
9734 return;
9735 offset = 0;
9738 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9739 return;
9741 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9743 dv = dv_from_decl (parm);
9745 if (target_for_debug_bind (parm)
9746 /* We can't deal with these right now, because this kind of
9747 variable is single-part. ??? We could handle parallels
9748 that describe multiple locations for the same single
9749 value, but ATM we don't. */
9750 && GET_CODE (incoming) != PARALLEL)
9752 cselib_val *val;
9753 rtx lowpart;
9755 /* ??? We shouldn't ever hit this, but it may happen because
9756 arguments passed by invisible reference aren't dealt with
9757 above: incoming-rtl will have Pmode rather than the
9758 expected mode for the type. */
9759 if (offset)
9760 return;
9762 lowpart = var_lowpart (mode, incoming);
9763 if (!lowpart)
9764 return;
9766 val = cselib_lookup_from_insn (lowpart, mode, true,
9767 VOIDmode, get_insns ());
9769 /* ??? Float-typed values in memory are not handled by
9770 cselib. */
9771 if (val)
9773 preserve_value (val);
9774 set_variable_part (out, val->val_rtx, dv, offset,
9775 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9776 dv = dv_from_value (val->val_rtx);
9779 if (MEM_P (incoming))
9781 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9782 VOIDmode, get_insns ());
9783 if (val)
9785 preserve_value (val);
9786 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9791 if (REG_P (incoming))
9793 incoming = var_lowpart (mode, incoming);
9794 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9795 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9796 incoming);
9797 set_variable_part (out, incoming, dv, offset,
9798 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9799 if (dv_is_value_p (dv))
9801 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9802 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9803 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9805 machine_mode indmode
9806 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9807 rtx mem = gen_rtx_MEM (indmode, incoming);
9808 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9809 VOIDmode,
9810 get_insns ());
9811 if (val)
9813 preserve_value (val);
9814 record_entry_value (val, mem);
9815 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9816 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9821 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9823 int i;
9825 for (i = 0; i < XVECLEN (incoming, 0); i++)
9827 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9828 offset = REG_OFFSET (reg);
9829 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9830 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9831 set_variable_part (out, reg, dv, offset,
9832 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9835 else if (MEM_P (incoming))
9837 incoming = var_lowpart (mode, incoming);
9838 set_variable_part (out, incoming, dv, offset,
9839 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9843 /* Insert the function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9845 static void
9846 vt_add_function_parameters (void)
9848 tree parm;
9850 for (parm = DECL_ARGUMENTS (current_function_decl);
9851 parm; parm = DECL_CHAIN (parm))
9852 if (!POINTER_BOUNDS_P (parm))
9853 vt_add_function_parameter (parm);
9855 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9857 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9859 if (TREE_CODE (vexpr) == INDIRECT_REF)
9860 vexpr = TREE_OPERAND (vexpr, 0);
9862 if (TREE_CODE (vexpr) == PARM_DECL
9863 && DECL_ARTIFICIAL (vexpr)
9864 && !DECL_IGNORED_P (vexpr)
9865 && DECL_NAMELESS (vexpr))
9866 vt_add_function_parameter (vexpr);
9870 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9871 ensure it isn't flushed during cselib_reset_table.
9872 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9873 has been eliminated. */
9875 static void
9876 vt_init_cfa_base (void)
9878 cselib_val *val;
9880 #ifdef FRAME_POINTER_CFA_OFFSET
9881 cfa_base_rtx = frame_pointer_rtx;
9882 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9883 #else
9884 cfa_base_rtx = arg_pointer_rtx;
9885 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9886 #endif
9887 if (cfa_base_rtx == hard_frame_pointer_rtx
9888 || !fixed_regs[REGNO (cfa_base_rtx)])
9890 cfa_base_rtx = NULL_RTX;
9891 return;
9893 if (!MAY_HAVE_DEBUG_INSNS)
9894 return;
9896 /* Tell alias analysis that cfa_base_rtx should share
9897 find_base_term value with stack pointer or hard frame pointer. */
9898 if (!frame_pointer_needed)
9899 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9900 else if (!crtl->stack_realign_tried)
9901 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9903 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9904 VOIDmode, get_insns ());
9905 preserve_value (val);
9906 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9909 /* Allocate and initialize the data structures for variable tracking
9910 and parse the RTL to get the micro operations. */
9912 static bool
9913 vt_initialize (void)
9915 basic_block bb;
9916 HOST_WIDE_INT fp_cfa_offset = -1;
9918 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9920 empty_shared_hash = new shared_hash_def;
9921 empty_shared_hash->refcount = 1;
9922 empty_shared_hash->htab = new variable_table_type (1);
9923 changed_variables = new variable_table_type (10);
9925 /* Init the IN and OUT sets. */
9926 FOR_ALL_BB_FN (bb, cfun)
9928 VTI (bb)->visited = false;
9929 VTI (bb)->flooded = false;
9930 dataflow_set_init (&VTI (bb)->in);
9931 dataflow_set_init (&VTI (bb)->out);
9932 VTI (bb)->permp = NULL;
9935 if (MAY_HAVE_DEBUG_INSNS)
9937 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9938 scratch_regs = BITMAP_ALLOC (NULL);
9939 preserved_values.create (256);
9940 global_get_addr_cache = new hash_map<rtx, rtx>;
9942 else
9944 scratch_regs = NULL;
9945 global_get_addr_cache = NULL;
9948 if (MAY_HAVE_DEBUG_INSNS)
9950 rtx reg, expr;
9951 int ofst;
9952 cselib_val *val;
9954 #ifdef FRAME_POINTER_CFA_OFFSET
9955 reg = frame_pointer_rtx;
9956 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9957 #else
9958 reg = arg_pointer_rtx;
9959 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9960 #endif
9962 ofst -= INCOMING_FRAME_SP_OFFSET;
9964 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9965 VOIDmode, get_insns ());
9966 preserve_value (val);
9967 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9968 cselib_preserve_cfa_base_value (val, REGNO (reg));
9969 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9970 stack_pointer_rtx, -ofst);
9971 cselib_add_permanent_equiv (val, expr, get_insns ());
9973 if (ofst)
9975 val = cselib_lookup_from_insn (stack_pointer_rtx,
9976 GET_MODE (stack_pointer_rtx), 1,
9977 VOIDmode, get_insns ());
9978 preserve_value (val);
9979 expr = plus_constant (GET_MODE (reg), reg, ofst);
9980 cselib_add_permanent_equiv (val, expr, get_insns ());
9984 /* In order to factor out the adjustments made to the stack pointer or to
9985 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9986 instead of individual location lists, we're going to rewrite MEMs based
9987 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9988 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9989 resp. arg_pointer_rtx. We can do this either when there is no frame
9990 pointer in the function and stack adjustments are consistent for all
9991 basic blocks or when there is a frame pointer and no stack realignment.
9992 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9993 has been eliminated. */
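/* For instance (made-up numbers), a slot addressed as
(mem (plus (reg sp) (const_int 8))) before a 16-byte stack
adjustment and as (mem (plus (reg sp) (const_int 24))) after it is
one and the same; rewritten relative to the CFA, both uses get a
single stable offset, which is what lets one DW_OP_fbreg-based
expression cover the slot. */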
9994 if (!frame_pointer_needed)
9996 rtx reg, elim;
9998 if (!vt_stack_adjustments ())
9999 return false;
10001 #ifdef FRAME_POINTER_CFA_OFFSET
10002 reg = frame_pointer_rtx;
10003 #else
10004 reg = arg_pointer_rtx;
10005 #endif
10006 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10007 if (elim != reg)
10009 if (GET_CODE (elim) == PLUS)
10010 elim = XEXP (elim, 0);
10011 if (elim == stack_pointer_rtx)
10012 vt_init_cfa_base ();
10015 else if (!crtl->stack_realign_tried)
10017 rtx reg, elim;
10019 #ifdef FRAME_POINTER_CFA_OFFSET
10020 reg = frame_pointer_rtx;
10021 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10022 #else
10023 reg = arg_pointer_rtx;
10024 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10025 #endif
10026 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10027 if (elim != reg)
10029 if (GET_CODE (elim) == PLUS)
10031 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10032 elim = XEXP (elim, 0);
10034 if (elim != hard_frame_pointer_rtx)
10035 fp_cfa_offset = -1;
10037 else
10038 fp_cfa_offset = -1;
10041 /* If the stack is realigned and a DRAP register is used, we're going to
10042 rewrite MEMs based on it representing incoming locations of parameters
10043 passed on the stack into MEMs based on the argument pointer. Although
10044 we aren't going to rewrite other MEMs, we still need to initialize the
10045 virtual CFA pointer in order to ensure that the argument pointer will
10046 be seen as a constant throughout the function.
10048 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10049 else if (stack_realign_drap)
10051 rtx reg, elim;
10053 #ifdef FRAME_POINTER_CFA_OFFSET
10054 reg = frame_pointer_rtx;
10055 #else
10056 reg = arg_pointer_rtx;
10057 #endif
10058 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10059 if (elim != reg)
10061 if (GET_CODE (elim) == PLUS)
10062 elim = XEXP (elim, 0);
10063 if (elim == hard_frame_pointer_rtx)
10064 vt_init_cfa_base ();
10068 hard_frame_pointer_adjustment = -1;
10070 vt_add_function_parameters ();
10072 FOR_EACH_BB_FN (bb, cfun)
10074 rtx_insn *insn;
10075 HOST_WIDE_INT pre, post = 0;
10076 basic_block first_bb, last_bb;
10078 if (MAY_HAVE_DEBUG_INSNS)
10080 cselib_record_sets_hook = add_with_sets;
10081 if (dump_file && (dump_flags & TDF_DETAILS))
10082 fprintf (dump_file, "first value: %i\n",
10083 cselib_get_next_uid ());
10086 first_bb = bb;
10087 for (;;)
10089 edge e;
10090 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10091 || ! single_pred_p (bb->next_bb))
10092 break;
10093 e = find_edge (bb, bb->next_bb);
10094 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10095 break;
10096 bb = bb->next_bb;
10098 last_bb = bb;
10100 /* Add the micro-operations to the vector. */
10101 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10103 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10104 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10105 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10106 insn = NEXT_INSN (insn))
10108 if (INSN_P (insn))
10110 if (!frame_pointer_needed)
10112 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10113 if (pre)
10115 micro_operation mo;
10116 mo.type = MO_ADJUST;
10117 mo.u.adjust = pre;
10118 mo.insn = insn;
10119 if (dump_file && (dump_flags & TDF_DETAILS))
10120 log_op_type (PATTERN (insn), bb, insn,
10121 MO_ADJUST, dump_file);
10122 VTI (bb)->mos.safe_push (mo);
10123 VTI (bb)->out.stack_adjust += pre;
10127 cselib_hook_called = false;
10128 adjust_insn (bb, insn);
10129 if (MAY_HAVE_DEBUG_INSNS)
10131 if (CALL_P (insn))
10132 prepare_call_arguments (bb, insn);
10133 cselib_process_insn (insn);
10134 if (dump_file && (dump_flags & TDF_DETAILS))
10136 print_rtl_single (dump_file, insn);
10137 dump_cselib_table (dump_file);
10140 if (!cselib_hook_called)
10141 add_with_sets (insn, 0, 0);
10142 cancel_changes (0);
10144 if (!frame_pointer_needed && post)
10146 micro_operation mo;
10147 mo.type = MO_ADJUST;
10148 mo.u.adjust = post;
10149 mo.insn = insn;
10150 if (dump_file && (dump_flags & TDF_DETAILS))
10151 log_op_type (PATTERN (insn), bb, insn,
10152 MO_ADJUST, dump_file);
10153 VTI (bb)->mos.safe_push (mo);
10154 VTI (bb)->out.stack_adjust += post;
10157 if (fp_cfa_offset != -1
10158 && hard_frame_pointer_adjustment == -1
10159 && fp_setter_insn (insn))
10161 vt_init_cfa_base ();
10162 hard_frame_pointer_adjustment = fp_cfa_offset;
10163 /* Disassociate sp from fp now. */
10164 if (MAY_HAVE_DEBUG_INSNS)
10166 cselib_val *v;
10167 cselib_invalidate_rtx (stack_pointer_rtx);
10168 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10169 VOIDmode);
10170 if (v && !cselib_preserved_value_p (v))
10172 cselib_set_value_sp_based (v);
10173 preserve_value (v);
10179 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10182 bb = last_bb;
10184 if (MAY_HAVE_DEBUG_INSNS)
10186 cselib_preserve_only_values ();
10187 cselib_reset_table (cselib_get_next_uid ());
10188 cselib_record_sets_hook = NULL;
10192 hard_frame_pointer_adjustment = -1;
10193 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10194 cfa_base_rtx = NULL_RTX;
10195 return true;
10198 /* This is *not* reset after each function. It gives each
10199 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10200 a unique label number. */
10202 static int debug_label_num = 1;
10204 /* Get rid of all debug insns from the insn stream. */
10206 static void
10207 delete_debug_insns (void)
10209 basic_block bb;
10210 rtx_insn *insn, *next;
10212 if (!MAY_HAVE_DEBUG_INSNS)
10213 return;
10215 FOR_EACH_BB_FN (bb, cfun)
10217 FOR_BB_INSNS_SAFE (bb, insn, next)
10218 if (DEBUG_INSN_P (insn))
10220 tree decl = INSN_VAR_LOCATION_DECL (insn);
10221 if (TREE_CODE (decl) == LABEL_DECL
10222 && DECL_NAME (decl)
10223 && !DECL_RTL_SET_P (decl))
10225 PUT_CODE (insn, NOTE);
10226 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10227 NOTE_DELETED_LABEL_NAME (insn)
10228 = IDENTIFIER_POINTER (DECL_NAME (decl));
10229 SET_DECL_RTL (decl, insn);
10230 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10232 else
10233 delete_insn (insn);
10238 /* Run a fast, BB-local-only version of var tracking, to take care of
10239 information that we don't do global analysis on, so that not all
10240 information is lost. If SKIPPED holds, we're skipping the global
10241 pass entirely, so we should try to use information it would have
10242 handled as well. */
10244 static void
10245 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10247 /* ??? Just skip it all for now. */
10248 delete_debug_insns ();
10251 /* Free the data structures needed for variable tracking. */
10253 static void
10254 vt_finalize (void)
10256 basic_block bb;
10258 FOR_EACH_BB_FN (bb, cfun)
10260 VTI (bb)->mos.release ();
10263 FOR_ALL_BB_FN (bb, cfun)
10265 dataflow_set_destroy (&VTI (bb)->in);
10266 dataflow_set_destroy (&VTI (bb)->out);
10267 if (VTI (bb)->permp)
10269 dataflow_set_destroy (VTI (bb)->permp);
10270 XDELETE (VTI (bb)->permp);
10273 free_aux_for_blocks ();
10274 delete empty_shared_hash->htab;
10275 empty_shared_hash->htab = NULL;
10276 delete changed_variables;
10277 changed_variables = NULL;
10278 attrs_def::pool.release ();
10279 var_pool.release ();
10280 location_chain_def::pool.release ();
10281 shared_hash_def::pool.release ();
10283 if (MAY_HAVE_DEBUG_INSNS)
10285 if (global_get_addr_cache)
10286 delete global_get_addr_cache;
10287 global_get_addr_cache = NULL;
10288 loc_exp_dep::pool.release ();
10289 valvar_pool.release ();
10290 preserved_values.release ();
10291 cselib_finish ();
10292 BITMAP_FREE (scratch_regs);
10293 scratch_regs = NULL;
10296 #ifdef HAVE_window_save
10297 vec_free (windowed_parm_regs);
10298 #endif
10300 if (vui_vec)
10301 XDELETEVEC (vui_vec);
10302 vui_vec = NULL;
10303 vui_allocated = 0;
10306 /* The entry point to variable tracking pass. */
10308 static inline unsigned int
10309 variable_tracking_main_1 (void)
10311 bool success;
10313 if (flag_var_tracking_assignments < 0
10314 /* Var-tracking right now assumes the IR doesn't contain
10315 any pseudos at this point. */
10316 || targetm.no_register_allocation)
10318 delete_debug_insns ();
10319 return 0;
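/* Heuristic cutoff: for very large, dense CFGs the iterative
dataflow analysis is presumably too expensive to be worth it, so
fall back to vt_debug_insns_local. */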
10322 if (n_basic_blocks_for_fn (cfun) > 500
10323 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10325 vt_debug_insns_local (true);
10326 return 0;
10329 mark_dfs_back_edges ();
10330 if (!vt_initialize ())
10332 vt_finalize ();
10333 vt_debug_insns_local (true);
10334 return 0;
10337 success = vt_find_locations ();
10339 if (!success && flag_var_tracking_assignments > 0)
10341 vt_finalize ();
10343 delete_debug_insns ();
10345 /* This is later restored by our caller. */
10346 flag_var_tracking_assignments = 0;
10348 success = vt_initialize ();
10349 gcc_assert (success);
10351 success = vt_find_locations ();
10354 if (!success)
10356 vt_finalize ();
10357 vt_debug_insns_local (false);
10358 return 0;
10361 if (dump_file && (dump_flags & TDF_DETAILS))
10363 dump_dataflow_sets ();
10364 dump_reg_info (dump_file);
10365 dump_flow_info (dump_file, dump_flags);
10368 timevar_push (TV_VAR_TRACKING_EMIT);
10369 vt_emit_notes ();
10370 timevar_pop (TV_VAR_TRACKING_EMIT);
10372 vt_finalize ();
10373 vt_debug_insns_local (false);
10374 return 0;
10377 unsigned int
10378 variable_tracking_main (void)
10380 unsigned int ret;
10381 int save = flag_var_tracking_assignments;
10383 ret = variable_tracking_main_1 ();
10385 flag_var_tracking_assignments = save;
10387 return ret;
10390 namespace {
10392 const pass_data pass_data_variable_tracking =
10394 RTL_PASS, /* type */
10395 "vartrack", /* name */
10396 OPTGROUP_NONE, /* optinfo_flags */
10397 TV_VAR_TRACKING, /* tv_id */
10398 0, /* properties_required */
10399 0, /* properties_provided */
10400 0, /* properties_destroyed */
10401 0, /* todo_flags_start */
10402 0, /* todo_flags_finish */
10405 class pass_variable_tracking : public rtl_opt_pass
10407 public:
10408 pass_variable_tracking (gcc::context *ctxt)
10409 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10412 /* opt_pass methods: */
10413 virtual bool gate (function *)
10415 return (flag_var_tracking && !targetm.delay_vartrack);
10418 virtual unsigned int execute (function *)
10420 return variable_tracking_main ();
10423 }; // class pass_variable_tracking
10425 } // anon namespace
10427 rtl_opt_pass *
10428 make_pass_variable_tracking (gcc::context *ctxt)
10430 return new pass_variable_tracking (ctxt);