/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
     < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how the
   locations of variables change through the code and to propagate the
   variable locations along the control flow graph (a simplified sketch
   of this iteration follows this comment).
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack
   adjustment (used for adjusting offsets of variables addressed using the
   stack pointer), the table of structures describing the locations of
   parts of a variable, and, for each physical register, a linked list of
   the variable parts stored in that register.
   Each element of that list is a triplet (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for effectively deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked
   lists are usually pretty short, so this is a good data structure here.
   For example, in the following code the register allocator may assign
   the same register to variables A and B, and both of them are stored in
   the same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable
   locations are emitted to appropriate positions in the RTL code.  Each
   such note describes the location of one variable at the point in the
   instruction stream where the note is.  There is no need to emit a note
   for each variable before each instruction; we only emit these notes
   where the location of a variable changes (this means that we also emit
   notes for changes between the OUT set of the previous block and the IN
   set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
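/* Illustrative sketch (not part of the pass): the forward dataflow
   iteration described above, in simplified pseudocode.  The names
   WORKLIST, IN and OUT are informal:

     put all basic blocks on WORKLIST;
     while (WORKLIST is not empty)
       {
         BB = pick a block from WORKLIST;
         IN (BB) = union of OUT (P) over all predecessors P of BB;
         OUT (BB) = result of applying BB's micro operations to IN (BB);
         if (OUT (BB) changed)
           put all successors of BB on WORKLIST;
       }

   vt_find_locations below implements this iteration, keeping the
   pending blocks in a priority queue (see the bb_heap_t typedef
   following the #includes).  */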
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "rtl.h"
#include "alias.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "alloc-pool.h"
#include "regs.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pass.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
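/* Illustrative sketch (not part of the pass): decl_or_value is an
   untagged pointer; dv_is_decl_p tells the two cases apart by the
   code stored at the start of both tree and rtx objects, which is
   what the check_value_val assertion above guarantees to be
   unambiguous.  With hypothetical consumers handle_decl and
   handle_value, usage looks like:

     decl_or_value dv = dv_from_decl (decl);
     if (dv_is_decl_p (dv))
       handle_decl (dv_as_decl (dv));
     else
       handle_value (dv_as_value (dv));  */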
/* Description of location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((attrs_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<attrs_def> pool;
} *attrs;
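/* Illustrative example (not part of the pass): if variable A lives at
   offset 0 in hard register 3 of a dataflow set SET, the chain rooted
   at SET->regs[3] conceptually contains one node

     { next = NULL, loc = (reg:SI 3), dv = A, offset = 0 }

   and further nodes are prepended when other variable parts are
   stored in the same register.  */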
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized? */
  enum var_init_status init;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((location_chain_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<location_chain_def> pool;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((loc_exp_dep_s *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<loc_exp_dep_s> pool;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
			      ? VAR_LOC_1PAUX (var)->backlinks	  \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		  \
			      ? &VAR_LOC_1PAUX (var)->deps	  \
			      : NULL)
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable_def>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable_def *);
  static inline bool equal (const variable_def *, const void *);
  static inline void remove (variable_def *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const variable_def *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const variable_def *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (variable_def *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((shared_hash_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<shared_hash_def> pool;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */

static pool_allocator<variable_def> var_pool
  ("variable_def pool", 64,
   (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));

/* Alloc pool for struct variable_def with a single var_part entry.  */
static pool_allocator<variable_def> valvar_pool
  ("small variable_def pool", 256);

/* Alloc pool for struct location_chain_def.  */
pool_allocator<location_chain_def> location_chain_def::pool
  ("location_chain_def pool", 1024);

/* Alloc pool for struct shared_hash_def.  */
pool_allocator<shared_hash_def> shared_hash_def::pool
  ("shared_hash_def pool", 256);

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amounts of stack adjustment it contains,
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
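/* Illustrative example (not part of the pass): for the common stack
   allocation pattern

     (set (reg sp) (plus (reg sp) (const_int -16)))

   the code above records *POST += 16; adjustments are accumulated
   with the opposite sign of the sp change.  An embedded auto-inc such
   as (pre_dec (reg sp)) would instead be credited to *PRE by
   stack_adjust_offset_pre_post_cb.  */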
/* Given an INSN, calculate the amounts of stack adjustment it contains,
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it, and the offset to
   apply for that mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
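/* Illustrative example (not part of the pass): on a little-endian
   target, narrowing (plus:DI (reg:DI x) (const_int 8)) from DImode to
   SImode would yield

     (plus:SI (subreg:SI (reg:DI x) 0) (const_int 8))

   with simplify_gen_binary folding the narrowed operands where
   possible.  */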
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
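/* Illustrative example (not part of the pass): given a use
   (mem:SI (post_inc:SI (reg:SI r1))), the POST_INC case above rewrites
   the address to (reg:SI r1) and queues

     (set (reg:SI r1) (plus:SI (reg:SI r1) (const_int 4)))

   on AMD->side_effects; adjust_insn later appends such sets to the
   insn's PARALLEL so the increment stays visible to the tracker.  */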
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be done before that
	 (otherwise it would fail), and afterwards this code fixes
	 things up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator <variable_def> &
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}
/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}
/* Return the node of LIST whose DV and OFFSET match, or NULL if the
   pair is not a member of LIST.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list = new attrs_def;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs n = new attrs_def;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = new shared_hash_def;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}
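/* Illustrative sketch (not part of the pass): the copy-on-write
   protocol implemented by the functions above.  Copying a dataflow
   set's table is just a refcount increment; the actual hash table is
   duplicated lazily by the first writer:

     dst->vars = shared_hash_copy (src->vars);
     ...
     slot = shared_hash_find_slot_unshare (&dst->vars, dv, INSERT);

   where the second call unshares (copies) the table only if its
   refcount is still greater than 1.  This keeps the per-block IN/OUT
   sets cheap when most blocks modify few variables.  */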
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
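/* Illustrative example (not part of the pass): if VALUEs V1, V2 and V3
   (with uids 1 < 2 < 3) are found equivalent, canon_value_cmp makes V1
   the canonical one, so the equivalences form a star

     V2 -> V1, V3 -> V1

   with V1 holding backlinks to V2 and V3 plus the actual locations,
   rather than an arbitrary chain like V3 -> V2 -> V1.  */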
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = onepart_pool (var->onepart).allocate ();
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = new location_chain_def;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}
/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && TREE_CODE (decl) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}
1888 /* Set the register LOC to contain DV, OFFSET. */
1890 static void
1891 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1892 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1893 enum insert_option iopt)
1895 attrs node;
1896 bool decl_p = dv_is_decl_p (dv);
1898 if (decl_p)
1899 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1901 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1902 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1903 && node->offset == offset)
1904 break;
1905 if (!node)
1906 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1907 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1910 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1912 static void
1913 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1914 rtx set_src)
1916 tree decl = REG_EXPR (loc);
1917 HOST_WIDE_INT offset = REG_OFFSET (loc);
1919 var_reg_decl_set (set, loc, initialized,
1920 dv_from_decl (decl), offset, set_src, INSERT);
1923 static enum var_init_status
1924 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1926 variable var;
1927 int i;
1928 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1930 if (! flag_var_tracking_uninit)
1931 return VAR_INIT_STATUS_INITIALIZED;
1933 var = shared_hash_find (set->vars, dv);
1934 if (var)
1936 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1938 location_chain nextp;
1939 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1940 if (rtx_equal_p (nextp->loc, loc))
1942 ret_val = nextp->init;
1943 break;
1948 return ret_val;
1951 /* Delete current content of register LOC in dataflow set SET and set
1952 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1953 MODIFY is true, any other live copies of the same variable part are
1954 also deleted from the dataflow set, otherwise the variable part is
1955 assumed to be copied from another location holding the same
1956 part. */
1958 static void
1959 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1960 enum var_init_status initialized, rtx set_src)
1962 tree decl = REG_EXPR (loc);
1963 HOST_WIDE_INT offset = REG_OFFSET (loc);
1964 attrs node, next;
1965 attrs *nextp;
1967 decl = var_debug_decl (decl);
1969 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1970 initialized = get_init_value (set, loc, dv_from_decl (decl));
1972 nextp = &set->regs[REGNO (loc)];
1973 for (node = *nextp; node; node = next)
1975 next = node->next;
1976 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1978 delete_variable_part (set, node->loc, node->dv, node->offset);
1979 delete node;
1980 *nextp = next;
1982 else
1984 node->loc = loc;
1985 nextp = &node->next;
1988 if (modify)
1989 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1990 var_reg_set (set, loc, initialized, set_src);
1993 /* Delete the association of register LOC in dataflow set SET with any
1994 variables that aren't onepart. If CLOBBER is true, also delete any
1995 other live copies of the same variable part, and delete the
1996 association with onepart dvs too. */
1998 static void
1999 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
2001 attrs *nextp = &set->regs[REGNO (loc)];
2002 attrs node, next;
2004 if (clobber)
2006 tree decl = REG_EXPR (loc);
2007 HOST_WIDE_INT offset = REG_OFFSET (loc);
2009 decl = var_debug_decl (decl);
2011 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2014 for (node = *nextp; node; node = next)
2016 next = node->next;
2017 if (clobber || !dv_onepart_p (node->dv))
2019 delete_variable_part (set, node->loc, node->dv, node->offset);
2020 delete node;
2021 *nextp = next;
2023 else
2024 nextp = &node->next;
2028 /* Delete content of register with number REGNO in dataflow set SET. */
2030 static void
2031 var_regno_delete (dataflow_set *set, int regno)
2033 attrs *reg = &set->regs[regno];
2034 attrs node, next;
2036 for (node = *reg; node; node = next)
2038 next = node->next;
2039 delete_variable_part (set, node->loc, node->dv, node->offset);
2040 delete node;
2042 *reg = NULL;
2045 /* Return true if I is the negated value of a power of two. */
2046 static bool
2047 negative_power_of_two_p (HOST_WIDE_INT i)
2049 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2050 return x == (x & -x);
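/* Editorial worked example: for i == -8, x becomes 8 and (x & -x) is
   also 8, so the test succeeds; for i == -6, x becomes 6 but (x & -x)
   is 2, so it fails.  Note that i == 0 passes as well.  */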
2053 /* Strip constant offsets and alignments off of LOC. Return the base
2054 expression. */
2056 static rtx
2057 vt_get_canonicalize_base (rtx loc)
2059 while ((GET_CODE (loc) == PLUS
2060 || GET_CODE (loc) == AND)
2061 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2062 && (GET_CODE (loc) != AND
2063 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2064 loc = XEXP (loc, 0);
2066 return loc;
2069 /* This caches canonicalized addresses for VALUEs, computed using
2070 information in the global cselib table. */
2071 static hash_map<rtx, rtx> *global_get_addr_cache;
2073 /* This caches canonicalized addresses for VALUEs, computed using
2074 information from the global cache and information pertaining to a
2075 basic block being analyzed. */
2076 static hash_map<rtx, rtx> *local_get_addr_cache;
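/* Editorial sketch, not part of the pass, of the caching discipline
   the lookup functions below share: a tentative result is stored in
   the cache before recursing, so a cycle of equivalences finds the
   tentative entry instead of recursing forever, and the entry is
   then overwritten with the final answer.  All toy_* names are
   hypothetical; equivalences are modeled as a next-step map, and no
   bounds checking is done.  */

#define TOY_MAX 16

static int toy_next[TOY_MAX];	/* toy_next[v] == v when v is canonical.  */
static int toy_cache_key[TOY_MAX], toy_cache_val[TOY_MAX], toy_cache_n;

static int *
toy_cache_slot (int key, bool *existed)
{
  for (int i = 0; i < toy_cache_n; i++)
    if (toy_cache_key[i] == key)
      {
	*existed = true;
	return &toy_cache_val[i];
      }
  *existed = false;
  toy_cache_key[toy_cache_n] = key;
  return &toy_cache_val[toy_cache_n++];
}

static int
toy_canon (int key)
{
  bool existed;
  int *slot = toy_cache_slot (key, &existed);
  if (existed)
    return *slot;

  /* Tentative entry: lookups from within the recursion stop here.  */
  *slot = key;

  if (toy_next[key] != key)
    {
      int canon = toy_canon (toy_next[key]);
      /* Refetch the slot; the real hash tables may move during
	 recursion.  */
      *toy_cache_slot (key, &existed) = canon;
      return canon;
    }
  return key;
}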
2078 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2080 /* Return the canonical address for LOC, that must be a VALUE, using a
2081 cached global equivalence or computing it and storing it in the
2082 global cache. */
2084 static rtx
2085 get_addr_from_global_cache (rtx const loc)
2087 rtx x;
2089 gcc_checking_assert (GET_CODE (loc) == VALUE);
2091 bool existed;
2092 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2093 if (existed)
2094 return *slot;
2096 x = canon_rtx (get_addr (loc));
2098 /* Tentative, avoiding infinite recursion. */
2099 *slot = x;
2101 if (x != loc)
2103 rtx nx = vt_canonicalize_addr (NULL, x);
2104 if (nx != x)
2106 /* The table may have moved during recursion, recompute
2107 SLOT. */
2108 *global_get_addr_cache->get (loc) = x = nx;
2112 return x;
2115 /* Return the canonical address for LOC, that must be a VALUE, using a
2116 cached local equivalence or computing it and storing it in the
2117 local cache. */
2119 static rtx
2120 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2122 rtx x;
2123 decl_or_value dv;
2124 variable var;
2125 location_chain l;
2127 gcc_checking_assert (GET_CODE (loc) == VALUE);
2129 bool existed;
2130 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2131 if (existed)
2132 return *slot;
2134 x = get_addr_from_global_cache (loc);
2136 /* Tentative, avoiding infinite recursion. */
2137 *slot = x;
2139 /* Recurse to cache the local expansion of X, or to search
2140 for a VALUE in the expansion. */
2141 if (x != loc)
2143 rtx nx = vt_canonicalize_addr (set, x);
2144 if (nx != x)
2146 slot = local_get_addr_cache->get (loc);
2147 *slot = x = nx;
2149 return x;
2152 dv = dv_from_rtx (x);
2153 var = shared_hash_find (set->vars, dv);
2154 if (!var)
2155 return x;
2157 /* Look for an improved equivalent expression. */
2158 for (l = var->var_part[0].loc_chain; l; l = l->next)
2160 rtx base = vt_get_canonicalize_base (l->loc);
2161 if (GET_CODE (base) == VALUE
2162 && canon_value_cmp (base, loc))
2164 rtx nx = vt_canonicalize_addr (set, l->loc);
2165 if (x != nx)
2167 slot = local_get_addr_cache->get (loc);
2168 *slot = x = nx;
2170 break;
2174 return x;
2177 /* Canonicalize LOC using equivalences from SET in addition to those
2178 in the cselib static table. It expects a VALUE-based expression,
2179 and it will only substitute VALUEs with other VALUEs or
2180 function-global equivalences, so that, if two addresses have base
2181 VALUEs that are locally or globally related in ways that
2182 memrefs_conflict_p cares about, they will both canonicalize to
2183 expressions that have the same base VALUE.
2185 The use of VALUEs as canonical base addresses enables the canonical
2186 RTXs to remain unchanged globally, if they resolve to a constant,
2187 or throughout a basic block otherwise, so that they can be cached
2188 and the cache need not be invalidated when REGs, MEMs or such
2189 change. */
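/* Editorial example of the canonicalization described above: if VALUE
   v1 is locally equivalent to (plus (value v0) (const_int 8)), then
   both (plus (value v1) (const_int 4)) and
   (plus (value v0) (const_int 12)) canonicalize to
   (plus (value v0) (const_int 12)), so memrefs_conflict_p can tell
   their MEMs overlap.  v0 and v1 are hypothetical values.  */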
2191 static rtx
2192 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2194 HOST_WIDE_INT ofst = 0;
2195 machine_mode mode = GET_MODE (oloc);
2196 rtx loc = oloc;
2197 rtx x;
2198 bool retry = true;
2200 while (retry)
2202 while (GET_CODE (loc) == PLUS
2203 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2205 ofst += INTVAL (XEXP (loc, 1));
2206 loc = XEXP (loc, 0);
2209 /* Alignment operations can't normally be combined, so just
2210 canonicalize the base and we're done. We'll normally have
2211 only one stack alignment anyway. */
2212 if (GET_CODE (loc) == AND
2213 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2214 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2216 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2217 if (x != XEXP (loc, 0))
2218 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2219 retry = false;
2222 if (GET_CODE (loc) == VALUE)
2224 if (set)
2225 loc = get_addr_from_local_cache (set, loc);
2226 else
2227 loc = get_addr_from_global_cache (loc);
2229 /* Consolidate plus_constants. */
2230 while (ofst && GET_CODE (loc) == PLUS
2231 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2233 ofst += INTVAL (XEXP (loc, 1));
2234 loc = XEXP (loc, 0);
2237 retry = false;
2239 else
2241 x = canon_rtx (loc);
2242 if (retry)
2243 retry = (x != loc);
2244 loc = x;
2248 /* Add OFST back in. */
2249 if (ofst)
2251 /* Don't build new RTL if we can help it. */
2252 if (GET_CODE (oloc) == PLUS
2253 && XEXP (oloc, 0) == loc
2254 && INTVAL (XEXP (oloc, 1)) == ofst)
2255 return oloc;
2257 loc = plus_constant (mode, loc, ofst);
2260 return loc;
2263 /* Return true iff there's a true dependence between MLOC and LOC.
2264 MADDR must be a canonicalized version of MLOC's address. */
2266 static inline bool
2267 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2269 if (GET_CODE (loc) != MEM)
2270 return false;
2272 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2273 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2274 return false;
2276 return true;
2279 /* Hold parameters for the hashtab traversal function
2280 drop_overlapping_mem_locs, see below. */
2282 struct overlapping_mems
2284 dataflow_set *set;
2285 rtx loc, addr;
2288 /* Remove all MEMs that overlap with COMS->LOC from the location list
2289 of a hash table entry for a value. COMS->ADDR must be a
2290 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2291 canonicalized itself. */
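/* Editorial example: after a store through (mem:SI (value v0)), a
   value whose location list contains (mem:SI (plus (value v0)
   (const_int 2))) loses that MEM entry, since the two accesses
   overlap and the remembered contents may be stale.  v0 is a
   hypothetical value.  */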
2294 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2296 dataflow_set *set = coms->set;
2297 rtx mloc = coms->loc, addr = coms->addr;
2298 variable var = *slot;
2300 if (var->onepart == ONEPART_VALUE)
2302 location_chain loc, *locp;
2303 bool changed = false;
2304 rtx cur_loc;
2306 gcc_assert (var->n_var_parts == 1);
2308 if (shared_var_p (var, set->vars))
2310 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2311 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2312 break;
2314 if (!loc)
2315 return 1;
2317 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2318 var = *slot;
2319 gcc_assert (var->n_var_parts == 1);
2322 if (VAR_LOC_1PAUX (var))
2323 cur_loc = VAR_LOC_FROM (var);
2324 else
2325 cur_loc = var->var_part[0].cur_loc;
2327 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2328 loc; loc = *locp)
2330 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2332 locp = &loc->next;
2333 continue;
2336 *locp = loc->next;
2337 /* If we have deleted the location that was last emitted,
2338 we have to emit a new location, so add the variable to the
2339 set of changed variables. */
2340 if (cur_loc == loc->loc)
2342 changed = true;
2343 var->var_part[0].cur_loc = NULL;
2344 if (VAR_LOC_1PAUX (var))
2345 VAR_LOC_FROM (var) = NULL;
2347 delete loc;
2350 if (!var->var_part[0].loc_chain)
2352 var->n_var_parts--;
2353 changed = true;
2355 if (changed)
2356 variable_was_changed (var, set);
2359 return 1;
2362 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2364 static void
2365 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2367 struct overlapping_mems coms;
2369 gcc_checking_assert (GET_CODE (loc) == MEM);
2371 coms.set = set;
2372 coms.loc = canon_rtx (loc);
2373 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2375 set->traversed_vars = set->vars;
2376 shared_hash_htab (set->vars)
2377 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2378 set->traversed_vars = NULL;
2381 /* Set the location of DV, OFFSET as the MEM LOC. */
2383 static void
2384 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2385 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2386 enum insert_option iopt)
2388 if (dv_is_decl_p (dv))
2389 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2391 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2394 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2395 SET to LOC.
2396 Adjust the address first if it is stack pointer based. */
2398 static void
2399 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2400 rtx set_src)
2402 tree decl = MEM_EXPR (loc);
2403 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2405 var_mem_decl_set (set, loc, initialized,
2406 dv_from_decl (decl), offset, set_src, INSERT);
2409 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2410 dataflow set SET to LOC. If MODIFY is true, any other live copies
2411 of the same variable part are also deleted from the dataflow set,
2412 otherwise the variable part is assumed to be copied from another
2413 location holding the same part.
2414 Adjust the address first if it is stack pointer based. */
2416 static void
2417 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2418 enum var_init_status initialized, rtx set_src)
2420 tree decl = MEM_EXPR (loc);
2421 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2423 clobber_overlapping_mems (set, loc);
2424 decl = var_debug_decl (decl);
2426 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2427 initialized = get_init_value (set, loc, dv_from_decl (decl));
2429 if (modify)
2430 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2431 var_mem_set (set, loc, initialized, set_src);
2434 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2435 true, also delete any other live copies of the same variable part.
2436 Adjust the address first if it is stack pointer based. */
2438 static void
2439 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2441 tree decl = MEM_EXPR (loc);
2442 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2444 clobber_overlapping_mems (set, loc);
2445 decl = var_debug_decl (decl);
2446 if (clobber)
2447 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2448 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2451 /* Return true if LOC should not be expanded for location expressions,
2452 or used in them. */
2454 static inline bool
2455 unsuitable_loc (rtx loc)
2457 switch (GET_CODE (loc))
2459 case PC:
2460 case SCRATCH:
2461 case CC0:
2462 case ASM_INPUT:
2463 case ASM_OPERANDS:
2464 return true;
2466 default:
2467 return false;
2471 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2472 bound to it. */
2474 static inline void
2475 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2477 if (REG_P (loc))
2479 if (modified)
2480 var_regno_delete (set, REGNO (loc));
2481 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2482 dv_from_value (val), 0, NULL_RTX, INSERT);
2484 else if (MEM_P (loc))
2486 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2488 if (modified)
2489 clobber_overlapping_mems (set, loc);
2491 if (l && GET_CODE (l->loc) == VALUE)
2492 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2494 /* If this MEM is a global constant, we don't need it in the
2495 dynamic tables. ??? We should test this before emitting the
2496 micro-op in the first place. */
2497 while (l)
2498 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2499 break;
2500 else
2501 l = l->next;
2503 if (!l)
2504 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2505 dv_from_value (val), 0, NULL_RTX, INSERT);
2507 else
2509 /* Other kinds of equivalences are necessarily static, at least
2510 so long as we do not perform substitutions while merging
2511 expressions. */
2512 gcc_unreachable ();
2513 set_variable_part (set, loc, dv_from_value (val), 0,
2514 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2518 /* Bind a value to a location it was just stored in. If MODIFIED
2519 holds, assume the location was modified, detaching it from any
2520 values bound to it. */
2522 static void
2523 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2524 bool modified)
2526 cselib_val *v = CSELIB_VAL_PTR (val);
2528 gcc_assert (cselib_preserved_value_p (v));
2530 if (dump_file)
2532 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2533 print_inline_rtx (dump_file, loc, 0);
2534 fprintf (dump_file, " evaluates to ");
2535 print_inline_rtx (dump_file, val, 0);
2536 if (v->locs)
2538 struct elt_loc_list *l;
2539 for (l = v->locs; l; l = l->next)
2541 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2542 print_inline_rtx (dump_file, l->loc, 0);
2545 fprintf (dump_file, "\n");
2548 gcc_checking_assert (!unsuitable_loc (loc));
2550 val_bind (set, val, loc, modified);
2553 /* Clear (canonical address) slots that reference X. */
2555 bool
2556 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2558 if (vt_get_canonicalize_base (*slot) == x)
2559 *slot = NULL;
2560 return true;
2563 /* Reset this node, detaching all its equivalences and redirecting
2564 them to the most canonical remaining value, if there is one. */
2566 static void
2567 val_reset (dataflow_set *set, decl_or_value dv)
2569 variable var = shared_hash_find (set->vars, dv);
2570 location_chain node;
2571 rtx cval;
2573 if (!var || !var->n_var_parts)
2574 return;
2576 gcc_assert (var->n_var_parts == 1);
2578 if (var->onepart == ONEPART_VALUE)
2580 rtx x = dv_as_value (dv);
2582 /* Relationships in the global cache don't change, so reset the
2583 local cache entry only. */
2584 rtx *slot = local_get_addr_cache->get (x);
2585 if (slot)
2587 /* If the value resolved back to itself, odds are that other
2588 values may have cached it too. These entries now refer
2589 to the old X, so detach them too. Entries that used the
2590 old X but resolved to something else remain ok as long as
2591 that something else isn't also reset. */
2592 if (*slot == x)
2593 local_get_addr_cache
2594 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2595 *slot = NULL;
2599 cval = NULL;
2600 for (node = var->var_part[0].loc_chain; node; node = node->next)
2601 if (GET_CODE (node->loc) == VALUE
2602 && canon_value_cmp (node->loc, cval))
2603 cval = node->loc;
2605 for (node = var->var_part[0].loc_chain; node; node = node->next)
2606 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2608 /* Redirect the equivalence link to the new canonical
2609 value, or simply remove it if it would point at
2610 itself. */
2611 if (cval)
2612 set_variable_part (set, cval, dv_from_value (node->loc),
2613 0, node->init, node->set_src, NO_INSERT);
2614 delete_variable_part (set, dv_as_value (dv),
2615 dv_from_value (node->loc), 0);
2618 if (cval)
2620 decl_or_value cdv = dv_from_value (cval);
2622 /* Keep the remaining values connected, accumulating links
2623 in the canonical value. */
2624 for (node = var->var_part[0].loc_chain; node; node = node->next)
2626 if (node->loc == cval)
2627 continue;
2628 else if (GET_CODE (node->loc) == REG)
2629 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2630 node->set_src, NO_INSERT);
2631 else if (GET_CODE (node->loc) == MEM)
2632 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2633 node->set_src, NO_INSERT);
2634 else
2635 set_variable_part (set, node->loc, cdv, 0,
2636 node->init, node->set_src, NO_INSERT);
2640 /* We remove this last, to make sure that the canonical value is not
2641 removed to the point of requiring reinsertion. */
2642 if (cval)
2643 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2645 clobber_variable_part (set, NULL, dv, 0, NULL);
2648 /* Find the values in a given location and map VAL to another
2649 value, if the mapping is unique, or add the location as one
2650 holding the value. */
2652 static void
2653 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2655 decl_or_value dv = dv_from_value (val);
2657 if (dump_file && (dump_flags & TDF_DETAILS))
2659 if (insn)
2660 fprintf (dump_file, "%i: ", INSN_UID (insn));
2661 else
2662 fprintf (dump_file, "head: ");
2663 print_inline_rtx (dump_file, val, 0);
2664 fputs (" is at ", dump_file);
2665 print_inline_rtx (dump_file, loc, 0);
2666 fputc ('\n', dump_file);
2669 val_reset (set, dv);
2671 gcc_checking_assert (!unsuitable_loc (loc));
2673 if (REG_P (loc))
2675 attrs node, found = NULL;
2677 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2678 if (dv_is_value_p (node->dv)
2679 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2681 found = node;
2683 /* Map incoming equivalences. ??? Wouldn't it be nice if
2684 we just started sharing the location lists? Maybe a
2685 circular list ending at the value itself or some
2686 such. */
2687 set_variable_part (set, dv_as_value (node->dv),
2688 dv_from_value (val), node->offset,
2689 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2690 set_variable_part (set, val, node->dv, node->offset,
2691 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2694 /* If we didn't find any equivalence, we need to remember that
2695 this value is held in the named register. */
2696 if (found)
2697 return;
2699 /* ??? Attempt to find and merge equivalent MEMs or other
2700 expressions too. */
2702 val_bind (set, val, loc, false);
2705 /* Initialize dataflow set SET to be empty.
2706 VARS_SIZE is the initial size of hash table VARS. */
2708 static void
2709 dataflow_set_init (dataflow_set *set)
2711 init_attrs_list_set (set->regs);
2712 set->vars = shared_hash_copy (empty_shared_hash);
2713 set->stack_adjust = 0;
2714 set->traversed_vars = NULL;
2717 /* Delete the contents of dataflow set SET. */
2719 static void
2720 dataflow_set_clear (dataflow_set *set)
2722 int i;
2724 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2725 attrs_list_clear (&set->regs[i]);
2727 shared_hash_destroy (set->vars);
2728 set->vars = shared_hash_copy (empty_shared_hash);
2731 /* Copy the contents of dataflow set SRC to DST. */
2733 static void
2734 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2736 int i;
2738 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2739 attrs_list_copy (&dst->regs[i], src->regs[i]);
2741 shared_hash_destroy (dst->vars);
2742 dst->vars = shared_hash_copy (src->vars);
2743 dst->stack_adjust = src->stack_adjust;
2746 /* Information for merging lists of locations for a given offset of a variable.
2748 struct variable_union_info
2750 /* Node of the location chain. */
2751 location_chain lc;
2753 /* The sum of positions in the input chains. */
2754 int pos;
2756 /* The position in the chain of DST dataflow set. */
2757 int pos_dst;
2760 /* Buffer for location list sorting and its allocated size. */
2761 static struct variable_union_info *vui_vec;
2762 static int vui_allocated;
2764 /* Compare function for qsort, order the structures by POS element. */
2766 static int
2767 variable_union_info_cmp_pos (const void *n1, const void *n2)
2769 const struct variable_union_info *const i1 =
2770 (const struct variable_union_info *) n1;
2771 const struct variable_union_info *const i2 =
2772 (const struct variable_union_info *) n2;
2774 if (i1->pos != i2->pos)
2775 return i1->pos - i2->pos;
2777 return (i1->pos_dst - i2->pos_dst);
2780 /* Compute the union of the location parts of variable SRC and of the
2781 corresponding variable in dataflow set SET. Compute the "sorted" union
2782 of the location chains for common offsets, i.e. sort the locations of
2783 a variable part by a priority that is the sum of their positions in
2784 the two chains (if a location is present in only one list, its position
2785 in the other is defined to be larger than the length of the chains).
2786 When we are updating the location parts the newest location is in the
2787 beginning of the chain, so when we do the described "sorted" union
2788 we keep the newest locations in the beginning. */
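/* Editorial worked example: for SRC chain (A B C) and DST chain
   (B A D), with src_l == dst_l == 3, the priorities are A: 1 + 0,
   B: 0 + 1, C: 2 + 6 (SRC only) and D: 2 + 6 (DST only), so the
   merged chain is (B A D C), ties being broken by the DST
   position.  */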
2790 static int
2791 variable_union (variable src, dataflow_set *set)
2793 variable dst;
2794 variable_def **dstp;
2795 int i, j, k;
2797 dstp = shared_hash_find_slot (set->vars, src->dv);
2798 if (!dstp || !*dstp)
2800 src->refcount++;
2802 dst_can_be_shared = false;
2803 if (!dstp)
2804 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2806 *dstp = src;
2808 /* Continue traversing the hash table. */
2809 return 1;
2811 else
2812 dst = *dstp;
2814 gcc_assert (src->n_var_parts);
2815 gcc_checking_assert (src->onepart == dst->onepart);
2817 /* We can combine one-part variables very efficiently, because their
2818 entries are in canonical order. */
2819 if (src->onepart)
2821 location_chain *nodep, dnode, snode;
2823 gcc_assert (src->n_var_parts == 1
2824 && dst->n_var_parts == 1);
2826 snode = src->var_part[0].loc_chain;
2827 gcc_assert (snode);
2829 restart_onepart_unshared:
2830 nodep = &dst->var_part[0].loc_chain;
2831 dnode = *nodep;
2832 gcc_assert (dnode);
2834 while (snode)
2836 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2838 if (r > 0)
2840 location_chain nnode;
2842 if (shared_var_p (dst, set->vars))
2844 dstp = unshare_variable (set, dstp, dst,
2845 VAR_INIT_STATUS_INITIALIZED);
2846 dst = *dstp;
2847 goto restart_onepart_unshared;
2850 *nodep = nnode = new location_chain_def;
2851 nnode->loc = snode->loc;
2852 nnode->init = snode->init;
2853 if (!snode->set_src || MEM_P (snode->set_src))
2854 nnode->set_src = NULL;
2855 else
2856 nnode->set_src = snode->set_src;
2857 nnode->next = dnode;
2858 dnode = nnode;
2860 else if (r == 0)
2861 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2863 if (r >= 0)
2864 snode = snode->next;
2866 nodep = &dnode->next;
2867 dnode = *nodep;
2870 return 1;
2873 gcc_checking_assert (!src->onepart);
2875 /* Count the number of location parts, result is K. */
2876 for (i = 0, j = 0, k = 0;
2877 i < src->n_var_parts && j < dst->n_var_parts; k++)
2879 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2881 i++;
2882 j++;
2884 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2885 i++;
2886 else
2887 j++;
2889 k += src->n_var_parts - i;
2890 k += dst->n_var_parts - j;
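/* Editorial example: for SRC offsets {0, 8} and DST offsets {0, 4},
   the loop above counts the union {0, 4, 8}, giving k == 3.  */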
2892 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
2893 thus there are at most MAX_VAR_PARTS different offsets. */
2894 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2896 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2898 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2899 dst = *dstp;
2902 i = src->n_var_parts - 1;
2903 j = dst->n_var_parts - 1;
2904 dst->n_var_parts = k;
2906 for (k--; k >= 0; k--)
2908 location_chain node, node2;
2910 if (i >= 0 && j >= 0
2911 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2913 /* Compute the "sorted" union of the chains, i.e. the locations which
2914 are in both chains go first, they are sorted by the sum of
2915 positions in the chains. */
2916 int dst_l, src_l;
2917 int ii, jj, n;
2918 struct variable_union_info *vui;
2920 /* If DST is shared, compare the location chains.
2921 If they differ, we will most likely modify the chain in
2922 DST, so make a copy of DST. */
2923 if (shared_var_p (dst, set->vars))
2925 for (node = src->var_part[i].loc_chain,
2926 node2 = dst->var_part[j].loc_chain; node && node2;
2927 node = node->next, node2 = node2->next)
2929 if (!((REG_P (node2->loc)
2930 && REG_P (node->loc)
2931 && REGNO (node2->loc) == REGNO (node->loc))
2932 || rtx_equal_p (node2->loc, node->loc)))
2934 if (node2->init < node->init)
2935 node2->init = node->init;
2936 break;
2939 if (node || node2)
2941 dstp = unshare_variable (set, dstp, dst,
2942 VAR_INIT_STATUS_UNKNOWN);
2943 dst = (variable)*dstp;
2947 src_l = 0;
2948 for (node = src->var_part[i].loc_chain; node; node = node->next)
2949 src_l++;
2950 dst_l = 0;
2951 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2952 dst_l++;
2954 if (dst_l == 1)
2956 /* The most common case, and a much simpler one: no qsort is needed. */
2957 location_chain dstnode = dst->var_part[j].loc_chain;
2958 dst->var_part[k].loc_chain = dstnode;
2959 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2960 node2 = dstnode;
2961 for (node = src->var_part[i].loc_chain; node; node = node->next)
2962 if (!((REG_P (dstnode->loc)
2963 && REG_P (node->loc)
2964 && REGNO (dstnode->loc) == REGNO (node->loc))
2965 || rtx_equal_p (dstnode->loc, node->loc)))
2967 location_chain new_node;
2969 /* Copy the location from SRC. */
2970 new_node = new location_chain_def;
2971 new_node->loc = node->loc;
2972 new_node->init = node->init;
2973 if (!node->set_src || MEM_P (node->set_src))
2974 new_node->set_src = NULL;
2975 else
2976 new_node->set_src = node->set_src;
2977 node2->next = new_node;
2978 node2 = new_node;
2980 node2->next = NULL;
2982 else
2984 if (src_l + dst_l > vui_allocated)
2986 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2987 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2988 vui_allocated);
2990 vui = vui_vec;
2992 /* Fill in the locations from DST. */
2993 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2994 node = node->next, jj++)
2996 vui[jj].lc = node;
2997 vui[jj].pos_dst = jj;
2999 /* Use a position larger than any sum of two valid positions. */
3000 vui[jj].pos = jj + src_l + dst_l;
3003 /* Fill in the locations from SRC. */
3004 n = dst_l;
3005 for (node = src->var_part[i].loc_chain, ii = 0; node;
3006 node = node->next, ii++)
3008 /* Find location from NODE. */
3009 for (jj = 0; jj < dst_l; jj++)
3011 if ((REG_P (vui[jj].lc->loc)
3012 && REG_P (node->loc)
3013 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3014 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3016 vui[jj].pos = jj + ii;
3017 break;
3020 if (jj >= dst_l) /* The location has not been found. */
3022 location_chain new_node;
3024 /* Copy the location from SRC. */
3025 new_node = new location_chain_def;
3026 new_node->loc = node->loc;
3027 new_node->init = node->init;
3028 if (!node->set_src || MEM_P (node->set_src))
3029 new_node->set_src = NULL;
3030 else
3031 new_node->set_src = node->set_src;
3032 vui[n].lc = new_node;
3033 vui[n].pos_dst = src_l + dst_l;
3034 vui[n].pos = ii + src_l + dst_l;
3035 n++;
3039 if (dst_l == 2)
3041 /* A special case that is still very common. For dst_l == 2,
3042 all entries dst_l ... n-1 are sorted, with vui[i].pos
3043 == i + src_l + dst_l for i >= dst_l. */
3044 if (vui[0].pos > vui[1].pos)
3046 /* Order should be 1, 0, 2... */
3047 dst->var_part[k].loc_chain = vui[1].lc;
3048 vui[1].lc->next = vui[0].lc;
3049 if (n >= 3)
3051 vui[0].lc->next = vui[2].lc;
3052 vui[n - 1].lc->next = NULL;
3054 else
3055 vui[0].lc->next = NULL;
3056 ii = 3;
3058 else
3060 dst->var_part[k].loc_chain = vui[0].lc;
3061 if (n >= 3 && vui[2].pos < vui[1].pos)
3063 /* Order should be 0, 2, 1, 3... */
3064 vui[0].lc->next = vui[2].lc;
3065 vui[2].lc->next = vui[1].lc;
3066 if (n >= 4)
3068 vui[1].lc->next = vui[3].lc;
3069 vui[n - 1].lc->next = NULL;
3071 else
3072 vui[1].lc->next = NULL;
3073 ii = 4;
3075 else
3077 /* Order should be 0, 1, 2... */
3078 ii = 1;
3079 vui[n - 1].lc->next = NULL;
3082 for (; ii < n; ii++)
3083 vui[ii - 1].lc->next = vui[ii].lc;
3085 else
3087 qsort (vui, n, sizeof (struct variable_union_info),
3088 variable_union_info_cmp_pos);
3090 /* Reconnect the nodes in sorted order. */
3091 for (ii = 1; ii < n; ii++)
3092 vui[ii - 1].lc->next = vui[ii].lc;
3093 vui[n - 1].lc->next = NULL;
3094 dst->var_part[k].loc_chain = vui[0].lc;
3097 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3099 i--;
3100 j--;
3102 else if ((i >= 0 && j >= 0
3103 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3104 || i < 0)
3106 dst->var_part[k] = dst->var_part[j];
3107 j--;
3109 else if ((i >= 0 && j >= 0
3110 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3111 || j < 0)
3113 location_chain *nextp;
3115 /* Copy the chain from SRC. */
3116 nextp = &dst->var_part[k].loc_chain;
3117 for (node = src->var_part[i].loc_chain; node; node = node->next)
3119 location_chain new_lc;
3121 new_lc = new location_chain_def;
3122 new_lc->next = NULL;
3123 new_lc->init = node->init;
3124 if (!node->set_src || MEM_P (node->set_src))
3125 new_lc->set_src = NULL;
3126 else
3127 new_lc->set_src = node->set_src;
3128 new_lc->loc = node->loc;
3130 *nextp = new_lc;
3131 nextp = &new_lc->next;
3134 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3135 i--;
3137 dst->var_part[k].cur_loc = NULL;
3140 if (flag_var_tracking_uninit)
3141 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3143 location_chain node, node2;
3144 for (node = src->var_part[i].loc_chain; node; node = node->next)
3145 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3146 if (rtx_equal_p (node->loc, node2->loc))
3148 if (node->init > node2->init)
3149 node2->init = node->init;
3153 /* Continue traversing the hash table. */
3154 return 1;
3157 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3159 static void
3160 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3162 int i;
3164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3165 attrs_list_union (&dst->regs[i], src->regs[i]);
3167 if (dst->vars == empty_shared_hash)
3169 shared_hash_destroy (dst->vars);
3170 dst->vars = shared_hash_copy (src->vars);
3172 else
3174 variable_iterator_type hi;
3175 variable var;
3177 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3178 var, variable, hi)
3179 variable_union (var, dst);
3183 /* Whether the value is currently being expanded. */
3184 #define VALUE_RECURSED_INTO(x) \
3185 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3187 /* Whether no expansion was found, saving useless lookups.
3188 It must only be set when VALUE_CHANGED is clear. */
3189 #define NO_LOC_P(x) \
3190 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3192 /* Whether cur_loc in the value needs to be (re)computed. */
3193 #define VALUE_CHANGED(x) \
3194 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3195 /* Whether cur_loc in the decl needs to be (re)computed. */
3196 #define DECL_CHANGED(x) TREE_VISITED (x)
3198 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3199 user DECLs, this means they're in changed_variables. Values and
3200 debug exprs may be left with this flag set if no user variable
3201 requires them to be evaluated. */
3203 static inline void
3204 set_dv_changed (decl_or_value dv, bool newv)
3206 switch (dv_onepart_p (dv))
3208 case ONEPART_VALUE:
3209 if (newv)
3210 NO_LOC_P (dv_as_value (dv)) = false;
3211 VALUE_CHANGED (dv_as_value (dv)) = newv;
3212 break;
3214 case ONEPART_DEXPR:
3215 if (newv)
3216 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3217 /* Fall through... */
3219 default:
3220 DECL_CHANGED (dv_as_decl (dv)) = newv;
3221 break;
3225 /* Return true if DV needs to have its cur_loc recomputed. */
3227 static inline bool
3228 dv_changed_p (decl_or_value dv)
3230 return (dv_is_value_p (dv)
3231 ? VALUE_CHANGED (dv_as_value (dv))
3232 : DECL_CHANGED (dv_as_decl (dv)));
3235 /* Return a location list node whose loc is rtx_equal to LOC, in the
3236 location list of a one-part variable or value VAR, or in that of
3237 any values recursively mentioned in the location lists. VARS must
3238 be in star-canonical form. */
3240 static location_chain
3241 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3243 location_chain node;
3244 enum rtx_code loc_code;
3246 if (!var)
3247 return NULL;
3249 gcc_checking_assert (var->onepart);
3251 if (!var->n_var_parts)
3252 return NULL;
3254 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3256 loc_code = GET_CODE (loc);
3257 for (node = var->var_part[0].loc_chain; node; node = node->next)
3259 decl_or_value dv;
3260 variable rvar;
3262 if (GET_CODE (node->loc) != loc_code)
3264 if (GET_CODE (node->loc) != VALUE)
3265 continue;
3267 else if (loc == node->loc)
3268 return node;
3269 else if (loc_code != VALUE)
3271 if (rtx_equal_p (loc, node->loc))
3272 return node;
3273 continue;
3276 /* Since we're in star-canonical form, we don't need to visit
3277 non-canonical nodes: one-part variables and non-canonical
3278 values would only point back to the canonical node. */
3279 if (dv_is_value_p (var->dv)
3280 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3282 /* Skip all subsequent VALUEs. */
3283 while (node->next && GET_CODE (node->next->loc) == VALUE)
3285 node = node->next;
3286 gcc_checking_assert (!canon_value_cmp (node->loc,
3287 dv_as_value (var->dv)));
3288 if (loc == node->loc)
3289 return node;
3291 continue;
3294 gcc_checking_assert (node == var->var_part[0].loc_chain);
3295 gcc_checking_assert (!node->next);
3297 dv = dv_from_value (node->loc);
3298 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3299 return find_loc_in_1pdv (loc, rvar, vars);
3302 /* ??? Gotta look in cselib_val locations too. */
3304 return NULL;
3307 /* Hash table iteration argument passed to variable_merge. */
3308 struct dfset_merge
3310 /* The set in which the merge is to be inserted. */
3311 dataflow_set *dst;
3312 /* The set that we're iterating in. */
3313 dataflow_set *cur;
3314 /* The set that may contain the other dv we are to merge with. */
3315 dataflow_set *src;
3316 /* Number of onepart dvs in src. */
3317 int src_onepart_cnt;
3320 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3321 loc_cmp order, and it is maintained as such. */
3323 static void
3324 insert_into_intersection (location_chain *nodep, rtx loc,
3325 enum var_init_status status)
3327 location_chain node;
3328 int r;
3330 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3331 if ((r = loc_cmp (node->loc, loc)) == 0)
3333 node->init = MIN (node->init, status);
3334 return;
3336 else if (r > 0)
3337 break;
3339 node = new location_chain_def;
3341 node->loc = loc;
3342 node->set_src = NULL;
3343 node->init = status;
3344 node->next = *nodep;
3345 *nodep = node;
3348 /* Insert in DEST the intersection of the locations present in both
3349 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3350 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3351 DSM->dst. */
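/* Editorial example: intersecting the chain ((reg:SI 1) (mem:SI x))
   with a chain holding ((mem:SI x) (reg:SI 2)) keeps only
   (mem:SI x); a location also counts as present when it is found in
   the chain of a VALUE reachable from either side.  x stands for a
   hypothetical address.  */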
3353 static void
3354 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3355 location_chain s1node, variable s2var)
3357 dataflow_set *s1set = dsm->cur;
3358 dataflow_set *s2set = dsm->src;
3359 location_chain found;
3361 if (s2var)
3363 location_chain s2node;
3365 gcc_checking_assert (s2var->onepart);
3367 if (s2var->n_var_parts)
3369 s2node = s2var->var_part[0].loc_chain;
3371 for (; s1node && s2node;
3372 s1node = s1node->next, s2node = s2node->next)
3373 if (s1node->loc != s2node->loc)
3374 break;
3375 else if (s1node->loc == val)
3376 continue;
3377 else
3378 insert_into_intersection (dest, s1node->loc,
3379 MIN (s1node->init, s2node->init));
3383 for (; s1node; s1node = s1node->next)
3385 if (s1node->loc == val)
3386 continue;
3388 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3389 shared_hash_htab (s2set->vars))))
3391 insert_into_intersection (dest, s1node->loc,
3392 MIN (s1node->init, found->init));
3393 continue;
3396 if (GET_CODE (s1node->loc) == VALUE
3397 && !VALUE_RECURSED_INTO (s1node->loc))
3399 decl_or_value dv = dv_from_value (s1node->loc);
3400 variable svar = shared_hash_find (s1set->vars, dv);
3401 if (svar)
3403 if (svar->n_var_parts == 1)
3405 VALUE_RECURSED_INTO (s1node->loc) = true;
3406 intersect_loc_chains (val, dest, dsm,
3407 svar->var_part[0].loc_chain,
3408 s2var);
3409 VALUE_RECURSED_INTO (s1node->loc) = false;
3414 /* ??? gotta look in cselib_val locations too. */
3416 /* ??? if the location is equivalent to any location in src,
3417 searched recursively
3419 add to dst the values needed to represent the equivalence
3421 telling whether location S is equivalent to another dv's
3422 location list:
3424 for each location D in the list
3426 if S and D satisfy rtx_equal_p, then it is present
3428 else if D is a value, recurse without cycles
3430 else if S and D have the same CODE and MODE
3432 for each operand oS and the corresponding oD
3434 if oS and oD are not equivalent, then S an D are not equivalent
3436 else if they are RTX vectors
3438 if any vector oS element is not equivalent to its respective oD,
3439 then S and D are not equivalent
3447 /* Return -1 if X should be before Y in a location list for a 1-part
3448 variable, 1 if Y should be before X, and 0 if they're equivalent
3449 and should not appear in the list. */
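/* Editorial summary of the total order this comparator induces:
   registers sort first (by REGNO), then MEMs (by address), then
   VALUEs (more canonical ones first), then the remaining expressions
   by code and operands, with ENTRY_VALUEs last of all.  */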
3451 static int
3452 loc_cmp (rtx x, rtx y)
3454 int i, j, r;
3455 RTX_CODE code = GET_CODE (x);
3456 const char *fmt;
3458 if (x == y)
3459 return 0;
3461 if (REG_P (x))
3463 if (!REG_P (y))
3464 return -1;
3465 gcc_assert (GET_MODE (x) == GET_MODE (y));
3466 if (REGNO (x) == REGNO (y))
3467 return 0;
3468 else if (REGNO (x) < REGNO (y))
3469 return -1;
3470 else
3471 return 1;
3474 if (REG_P (y))
3475 return 1;
3477 if (MEM_P (x))
3479 if (!MEM_P (y))
3480 return -1;
3481 gcc_assert (GET_MODE (x) == GET_MODE (y));
3482 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3485 if (MEM_P (y))
3486 return 1;
3488 if (GET_CODE (x) == VALUE)
3490 if (GET_CODE (y) != VALUE)
3491 return -1;
3492 /* Don't assert the modes are the same, that is true only
3493 when not recursing. (subreg:QI (value:SI 1:1) 0)
3494 and (subreg:QI (value:DI 2:2) 0) can be compared,
3495 even when the modes are different. */
3496 if (canon_value_cmp (x, y))
3497 return -1;
3498 else
3499 return 1;
3502 if (GET_CODE (y) == VALUE)
3503 return 1;
3505 /* Entry value is the least preferable kind of expression. */
3506 if (GET_CODE (x) == ENTRY_VALUE)
3508 if (GET_CODE (y) != ENTRY_VALUE)
3509 return 1;
3510 gcc_assert (GET_MODE (x) == GET_MODE (y));
3511 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3514 if (GET_CODE (y) == ENTRY_VALUE)
3515 return -1;
3517 if (GET_CODE (x) == GET_CODE (y))
3518 /* Compare operands below. */;
3519 else if (GET_CODE (x) < GET_CODE (y))
3520 return -1;
3521 else
3522 return 1;
3524 gcc_assert (GET_MODE (x) == GET_MODE (y));
3526 if (GET_CODE (x) == DEBUG_EXPR)
3528 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3529 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3530 return -1;
3531 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3532 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3533 return 1;
3536 fmt = GET_RTX_FORMAT (code);
3537 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3538 switch (fmt[i])
3540 case 'w':
3541 if (XWINT (x, i) == XWINT (y, i))
3542 break;
3543 else if (XWINT (x, i) < XWINT (y, i))
3544 return -1;
3545 else
3546 return 1;
3548 case 'n':
3549 case 'i':
3550 if (XINT (x, i) == XINT (y, i))
3551 break;
3552 else if (XINT (x, i) < XINT (y, i))
3553 return -1;
3554 else
3555 return 1;
3557 case 'V':
3558 case 'E':
3559 /* Compare the vector length first. */
3560 if (XVECLEN (x, i) == XVECLEN (y, i))
3561 /* Compare the vectors' elements. */;
3562 else if (XVECLEN (x, i) < XVECLEN (y, i))
3563 return -1;
3564 else
3565 return 1;
3567 for (j = 0; j < XVECLEN (x, i); j++)
3568 if ((r = loc_cmp (XVECEXP (x, i, j),
3569 XVECEXP (y, i, j))))
3570 return r;
3571 break;
3573 case 'e':
3574 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3575 return r;
3576 break;
3578 case 'S':
3579 case 's':
3580 if (XSTR (x, i) == XSTR (y, i))
3581 break;
3582 if (!XSTR (x, i))
3583 return -1;
3584 if (!XSTR (y, i))
3585 return 1;
3586 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3587 break;
3588 else if (r < 0)
3589 return -1;
3590 else
3591 return 1;
3593 case 'u':
3594 /* These are just backpointers, so they don't matter. */
3595 break;
3597 case '0':
3598 case 't':
3599 break;
3601 /* It is believed that rtx's at this level will never
3602 contain anything but integers and other rtx's,
3603 except for within LABEL_REFs and SYMBOL_REFs. */
3604 default:
3605 gcc_unreachable ();
3607 if (CONST_WIDE_INT_P (x))
3609 /* Compare the vector length first. */
3610 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3611 return 1;
3612 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3613 return -1;
3615 /* Compare the vectors' elements. */
3616 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0; j--)
3618 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3619 return -1;
3620 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3621 return 1;
3625 return 0;
3628 #if ENABLE_CHECKING
3629 /* Check the order of entries in one-part variables. */
3632 canonicalize_loc_order_check (variable_def **slot,
3633 dataflow_set *data ATTRIBUTE_UNUSED)
3635 variable var = *slot;
3636 location_chain node, next;
3638 #ifdef ENABLE_RTL_CHECKING
3639 int i;
3640 for (i = 0; i < var->n_var_parts; i++)
3641 gcc_assert (var->var_part[i].cur_loc == NULL);
3642 gcc_assert (!var->in_changed_variables);
3643 #endif
3645 if (!var->onepart)
3646 return 1;
3648 gcc_assert (var->n_var_parts == 1);
3649 node = var->var_part[0].loc_chain;
3650 gcc_assert (node);
3652 while ((next = node->next))
3654 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3655 node = next;
3658 return 1;
3660 #endif
3662 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3663 more likely to be chosen as canonical for an equivalence set.
3664 Ensure less likely values can reach more likely neighbors, making
3665 the connections bidirectional. */
3668 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3670 variable var = *slot;
3671 decl_or_value dv = var->dv;
3672 rtx val;
3673 location_chain node;
3675 if (!dv_is_value_p (dv))
3676 return 1;
3678 gcc_checking_assert (var->n_var_parts == 1);
3680 val = dv_as_value (dv);
3682 for (node = var->var_part[0].loc_chain; node; node = node->next)
3683 if (GET_CODE (node->loc) == VALUE)
3685 if (canon_value_cmp (node->loc, val))
3686 VALUE_RECURSED_INTO (val) = true;
3687 else
3689 decl_or_value odv = dv_from_value (node->loc);
3690 variable_def **oslot;
3691 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3693 set_slot_part (set, val, oslot, odv, 0,
3694 node->init, NULL_RTX);
3696 VALUE_RECURSED_INTO (node->loc) = true;
3700 return 1;
3703 /* Remove redundant entries from equivalence lists in onepart
3704 variables, canonicalizing equivalence sets into star shapes. */
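/* Editorial example: starting from the chains v3 -> (v2 (reg r)) and
   v2 -> (v1), the marking pass above and the star pass below push
   both the register and the equivalence links to the canonical v1,
   leaving v1 -> (v2 v3 (reg r)) while v2 and v3 keep only the
   canonical v1 -- the star shape that find_loc_in_1pdv relies on.
   v1, v2, v3 and r are hypothetical.  */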
3707 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3709 variable var = *slot;
3710 decl_or_value dv = var->dv;
3711 location_chain node;
3712 decl_or_value cdv;
3713 rtx val, cval;
3714 variable_def **cslot;
3715 bool has_value;
3716 bool has_marks;
3718 if (!var->onepart)
3719 return 1;
3721 gcc_checking_assert (var->n_var_parts == 1);
3723 if (dv_is_value_p (dv))
3725 cval = dv_as_value (dv);
3726 if (!VALUE_RECURSED_INTO (cval))
3727 return 1;
3728 VALUE_RECURSED_INTO (cval) = false;
3730 else
3731 cval = NULL_RTX;
3733 restart:
3734 val = cval;
3735 has_value = false;
3736 has_marks = false;
3738 gcc_assert (var->n_var_parts == 1);
3740 for (node = var->var_part[0].loc_chain; node; node = node->next)
3741 if (GET_CODE (node->loc) == VALUE)
3743 has_value = true;
3744 if (VALUE_RECURSED_INTO (node->loc))
3745 has_marks = true;
3746 if (canon_value_cmp (node->loc, cval))
3747 cval = node->loc;
3750 if (!has_value)
3751 return 1;
3753 if (cval == val)
3755 if (!has_marks || dv_is_decl_p (dv))
3756 return 1;
3758 /* Keep it marked so that we revisit it, either after visiting a
3759 child node, or after visiting a new parent that might be
3760 found out. */
3761 VALUE_RECURSED_INTO (val) = true;
3763 for (node = var->var_part[0].loc_chain; node; node = node->next)
3764 if (GET_CODE (node->loc) == VALUE
3765 && VALUE_RECURSED_INTO (node->loc))
3767 cval = node->loc;
3768 restart_with_cval:
3769 VALUE_RECURSED_INTO (cval) = false;
3770 dv = dv_from_value (cval);
3771 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3772 if (!slot)
3774 gcc_assert (dv_is_decl_p (var->dv));
3775 /* The canonical value was reset and dropped.
3776 Remove it. */
3777 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3778 return 1;
3780 var = *slot;
3781 gcc_assert (dv_is_value_p (var->dv));
3782 if (var->n_var_parts == 0)
3783 return 1;
3784 gcc_assert (var->n_var_parts == 1);
3785 goto restart;
3788 VALUE_RECURSED_INTO (val) = false;
3790 return 1;
3793 /* Push values to the canonical one. */
3794 cdv = dv_from_value (cval);
3795 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3797 for (node = var->var_part[0].loc_chain; node; node = node->next)
3798 if (node->loc != cval)
3800 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3801 node->init, NULL_RTX);
3802 if (GET_CODE (node->loc) == VALUE)
3804 decl_or_value ndv = dv_from_value (node->loc);
3806 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3807 NO_INSERT);
3809 if (canon_value_cmp (node->loc, val))
3811 /* If it could have been a local minimum, it's not any more,
3812 since it is now a neighbor of cval, so it may have to push
3813 to it. Conversely, if it wouldn't have prevailed over
3814 val, then whatever mark it has is fine: if it was to
3815 push, it will now push to a more canonical node, but if
3816 it wasn't, then it has already pushed any values it might
3817 have to. */
3818 VALUE_RECURSED_INTO (node->loc) = true;
3819 /* Make sure we visit node->loc by ensuring that cval is
3820 visited too. */
3821 VALUE_RECURSED_INTO (cval) = true;
3823 else if (!VALUE_RECURSED_INTO (node->loc))
3824 /* If we have no need to "recurse" into this node, it's
3825 already "canonicalized", so drop the link to the old
3826 parent. */
3827 clobber_variable_part (set, cval, ndv, 0, NULL);
3829 else if (GET_CODE (node->loc) == REG)
3831 attrs list = set->regs[REGNO (node->loc)], *listp;
3833 /* Change an existing attribute referring to dv so that it
3834 refers to cdv, removing any duplicate this might
3835 introduce, and checking that no previous duplicates
3836 existed, all in a single pass. */
3838 while (list)
3840 if (list->offset == 0
3841 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3842 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3843 break;
3845 list = list->next;
3848 gcc_assert (list);
3849 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3851 list->dv = cdv;
3852 for (listp = &list->next; (list = *listp); listp = &list->next)
3854 if (list->offset)
3855 continue;
3857 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3859 *listp = list->next;
3860 delete list;
3861 list = *listp;
3862 break;
3865 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3868 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3870 for (listp = &list->next; (list = *listp); listp = &list->next)
3872 if (list->offset)
3873 continue;
3875 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3877 *listp = list->next;
3878 delete list;
3879 list = *listp;
3880 break;
3883 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3886 else
3887 gcc_unreachable ();
3889 #if ENABLE_CHECKING
3890 while (list)
3892 if (list->offset == 0
3893 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3894 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3895 gcc_unreachable ();
3897 list = list->next;
3899 #endif
3903 if (val)
3904 set_slot_part (set, val, cslot, cdv, 0,
3905 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3907 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3909 /* Variable may have been unshared. */
3910 var = *slot;
3911 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3912 && var->var_part[0].loc_chain->next == NULL);
3914 if (VALUE_RECURSED_INTO (cval))
3915 goto restart_with_cval;
3917 return 1;
3920 /* Bind one-part variables to the canonical value in an equivalence
3921 set. Not doing this causes dataflow convergence failure in rare
3922 circumstances, see PR42873. Unfortunately we can't do this
3923 efficiently as part of canonicalize_values_star, since we may not
3924 have determined or even seen the canonical value of a set when we
3925 get to a variable that references another member of the set. */
3928 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3930 variable var = *slot;
3931 decl_or_value dv = var->dv;
3932 location_chain node;
3933 rtx cval;
3934 decl_or_value cdv;
3935 variable_def **cslot;
3936 variable cvar;
3937 location_chain cnode;
3939 if (!var->onepart || var->onepart == ONEPART_VALUE)
3940 return 1;
3942 gcc_assert (var->n_var_parts == 1);
3944 node = var->var_part[0].loc_chain;
3946 if (GET_CODE (node->loc) != VALUE)
3947 return 1;
3949 gcc_assert (!node->next);
3950 cval = node->loc;
3952 /* Push values to the canonical one. */
3953 cdv = dv_from_value (cval);
3954 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3955 if (!cslot)
3956 return 1;
3957 cvar = *cslot;
3958 gcc_assert (cvar->n_var_parts == 1);
3960 cnode = cvar->var_part[0].loc_chain;
3962 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3963 that are not more canonical than it. */
3964 if (GET_CODE (cnode->loc) != VALUE
3965 || !canon_value_cmp (cnode->loc, cval))
3966 return 1;
3968 /* CVAL was found to be non-canonical. Change the variable to point
3969 to the canonical VALUE. */
3970 gcc_assert (!cnode->next);
3971 cval = cnode->loc;
3973 slot = set_slot_part (set, cval, slot, dv, 0,
3974 node->init, node->set_src);
3975 clobber_slot_part (set, cval, slot, 0, node->set_src);
3977 return 1;
3980 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3981 corresponding entry in DSM->src. Multi-part variables are combined
3982 with variable_union, whereas onepart dvs are combined with
3983 intersection. */
3985 static int
3986 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3988 dataflow_set *dst = dsm->dst;
3989 variable_def **dstslot;
3990 variable s2var, dvar = NULL;
3991 decl_or_value dv = s1var->dv;
3992 onepart_enum_t onepart = s1var->onepart;
3993 rtx val;
3994 hashval_t dvhash;
3995 location_chain node, *nodep;
3997 /* If the incoming onepart variable has an empty location list, then
3998 the intersection will be just as empty. For other variables,
3999 it's always union. */
4000 gcc_checking_assert (s1var->n_var_parts
4001 && s1var->var_part[0].loc_chain);
4003 if (!onepart)
4004 return variable_union (s1var, dst);
4006 gcc_checking_assert (s1var->n_var_parts == 1);
4008 dvhash = dv_htab_hash (dv);
4009 if (dv_is_value_p (dv))
4010 val = dv_as_value (dv);
4011 else
4012 val = NULL;
4014 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4015 if (!s2var)
4017 dst_can_be_shared = false;
4018 return 1;
4021 dsm->src_onepart_cnt--;
4022 gcc_assert (s2var->var_part[0].loc_chain
4023 && s2var->onepart == onepart
4024 && s2var->n_var_parts == 1);
4026 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4027 if (dstslot)
4029 dvar = *dstslot;
4030 gcc_assert (dvar->refcount == 1
4031 && dvar->onepart == onepart
4032 && dvar->n_var_parts == 1);
4033 nodep = &dvar->var_part[0].loc_chain;
4035 else
4037 nodep = &node;
4038 node = NULL;
4041 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4043 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4044 dvhash, INSERT);
4045 *dstslot = dvar = s2var;
4046 dvar->refcount++;
4048 else
4050 dst_can_be_shared = false;
4052 intersect_loc_chains (val, nodep, dsm,
4053 s1var->var_part[0].loc_chain, s2var);
4055 if (!dstslot)
4057 if (node)
4059 dvar = onepart_pool (onepart).allocate ();
4060 dvar->dv = dv;
4061 dvar->refcount = 1;
4062 dvar->n_var_parts = 1;
4063 dvar->onepart = onepart;
4064 dvar->in_changed_variables = false;
4065 dvar->var_part[0].loc_chain = node;
4066 dvar->var_part[0].cur_loc = NULL;
4067 if (onepart)
4068 VAR_LOC_1PAUX (dvar) = NULL;
4069 else
4070 VAR_PART_OFFSET (dvar, 0) = 0;
4072 dstslot
4073 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4074 INSERT);
4075 gcc_assert (!*dstslot);
4076 *dstslot = dvar;
4078 else
4079 return 1;
4083 nodep = &dvar->var_part[0].loc_chain;
4084 while ((node = *nodep))
4086 location_chain *nextp = &node->next;
4088 if (GET_CODE (node->loc) == REG)
4090 attrs list;
4092 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4093 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4094 && dv_is_value_p (list->dv))
4095 break;
4097 if (!list)
4098 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4099 dv, 0, node->loc);
4100 /* If this value became canonical for another value that had
4101 this register, we want to leave it alone. */
4102 else if (dv_as_value (list->dv) != val)
4104 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4105 dstslot, dv, 0,
4106 node->init, NULL_RTX);
4107 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4109 /* Since nextp points into the removed node, we can't
4110 use it. The pointer to the next node moved to nodep.
4111 However, if the variable we're walking is unshared
4112 during our walk, we'll keep walking the location list
4113 of the previously-shared variable, in which case the
4114 node won't have been removed, and we'll want to skip
4115 it. That's why we test *nodep here. */
4116 if (*nodep != node)
4117 nextp = nodep;
4120 else
4121 /* Canonicalization puts registers first, so we don't have to
4122 walk it all. */
4123 break;
4124 nodep = nextp;
4127 if (dvar != *dstslot)
4128 dvar = *dstslot;
4129 nodep = &dvar->var_part[0].loc_chain;
4131 if (val)
4133 /* Mark all referenced nodes for canonicalization, and make sure
4134 we have mutual equivalence links. */
4135 VALUE_RECURSED_INTO (val) = true;
4136 for (node = *nodep; node; node = node->next)
4137 if (GET_CODE (node->loc) == VALUE)
4139 VALUE_RECURSED_INTO (node->loc) = true;
4140 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4141 node->init, NULL, INSERT);
4144 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4145 gcc_assert (*dstslot == dvar);
4146 canonicalize_values_star (dstslot, dst);
4147 gcc_checking_assert (dstslot
4148 == shared_hash_find_slot_noinsert_1 (dst->vars,
4149 dv, dvhash));
4150 dvar = *dstslot;
4152 else
4154 bool has_value = false, has_other = false;
4156 /* If we have one value and anything else, we're going to
4157 canonicalize this, so make sure all values have an entry in
4158 the table and are marked for canonicalization. */
4159 for (node = *nodep; node; node = node->next)
4161 if (GET_CODE (node->loc) == VALUE)
4163 /* If this was marked during register canonicalization,
4164 we know we have to canonicalize values. */
4165 if (has_value)
4166 has_other = true;
4167 has_value = true;
4168 if (has_other)
4169 break;
4171 else
4173 has_other = true;
4174 if (has_value)
4175 break;
4179 if (has_value && has_other)
4181 for (node = *nodep; node; node = node->next)
4183 if (GET_CODE (node->loc) == VALUE)
4185 decl_or_value dv = dv_from_value (node->loc);
4186 variable_def **slot = NULL;
4188 if (shared_hash_shared (dst->vars))
4189 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4190 if (!slot)
4191 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4192 INSERT);
4193 if (!*slot)
4195 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4196 var->dv = dv;
4197 var->refcount = 1;
4198 var->n_var_parts = 1;
4199 var->onepart = ONEPART_VALUE;
4200 var->in_changed_variables = false;
4201 var->var_part[0].loc_chain = NULL;
4202 var->var_part[0].cur_loc = NULL;
4203 VAR_LOC_1PAUX (var) = NULL;
4204 *slot = var;
4207 VALUE_RECURSED_INTO (node->loc) = true;
4211 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4212 gcc_assert (*dstslot == dvar);
4213 canonicalize_values_star (dstslot, dst);
4214 gcc_checking_assert (dstslot
4215 == shared_hash_find_slot_noinsert_1 (dst->vars,
4216 dv, dvhash));
4217 dvar = *dstslot;
4221 if (!onepart_variable_different_p (dvar, s2var))
4223 variable_htab_free (dvar);
4224 *dstslot = dvar = s2var;
4225 dvar->refcount++;
4227 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4229 variable_htab_free (dvar);
4230 *dstslot = dvar = s1var;
4231 dvar->refcount++;
4232 dst_can_be_shared = false;
4234 else
4235 dst_can_be_shared = false;
4237 return 1;
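/* Editorial illustration (not from the sources): the one-part
   intersection above behaves roughly as follows. If DSM->cur binds
   VALUE V to {(reg:SI 3), (mem:SI (value A))} and DSM->src binds V
   to {(reg:SI 3), (mem:SI (value B))}, only (reg:SI 3) survives in
   DST; the register attributes and value equivalences are then
   re-canonicalized via canonicalize_values_star.  */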
4240 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4241 multi-part variable. Unions of multi-part variables and
4242 intersections of one-part ones will be handled in
4243 variable_merge_over_cur(). */
4245 static int
4246 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4248 dataflow_set *dst = dsm->dst;
4249 decl_or_value dv = s2var->dv;
4251 if (!s2var->onepart)
4253 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4254 *dstp = s2var;
4255 s2var->refcount++;
4256 return 1;
4259 dsm->src_onepart_cnt++;
4260 return 1;
4263 /* Combine dataflow set information from SRC2 into DST. */
4266 static void
4267 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4269 dataflow_set cur = *dst;
4270 dataflow_set *src1 = &cur;
4271 struct dfset_merge dsm;
4272 int i;
4273 size_t src1_elems, src2_elems;
4274 variable_iterator_type hi;
4275 variable var;
4277 src1_elems = shared_hash_htab (src1->vars)->elements ();
4278 src2_elems = shared_hash_htab (src2->vars)->elements ();
4279 dataflow_set_init (dst);
4280 dst->stack_adjust = cur.stack_adjust;
4281 shared_hash_destroy (dst->vars);
4282 dst->vars = new shared_hash_def;
4283 dst->vars->refcount = 1;
4284 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4286 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4287 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4289 dsm.dst = dst;
4290 dsm.src = src2;
4291 dsm.cur = src1;
4292 dsm.src_onepart_cnt = 0;
4294 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4295 var, variable, hi)
4296 variable_merge_over_src (var, &dsm);
4297 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4298 var, variable, hi)
4299 variable_merge_over_cur (var, &dsm);
4301 if (dsm.src_onepart_cnt)
4302 dst_can_be_shared = false;
4304 dataflow_set_destroy (src1);
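/* Editorial note (a sketch, not from the sources): the merge is
   asymmetric in implementation but symmetric in effect.  DST's old
   contents become the temporary set CUR; multi-part variables are
   unioned by variable_merge_over_src, while one-part variables and
   values are intersected by variable_merge_over_cur.  A one-part
   entry present only in SRC2 is counted but never consumed, so
   src_onepart_cnt stays nonzero and dst_can_be_shared is cleared.  */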
4307 /* Mark register equivalences. */
4309 static void
4310 dataflow_set_equiv_regs (dataflow_set *set)
4312 int i;
4313 attrs list, *listp;
4315 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4317 rtx canon[NUM_MACHINE_MODES];
4319 /* If the list is empty or one entry, no need to canonicalize
4320 anything. */
4321 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4322 continue;
4324 memset (canon, 0, sizeof (canon));
4326 for (list = set->regs[i]; list; list = list->next)
4327 if (list->offset == 0 && dv_is_value_p (list->dv))
4329 rtx val = dv_as_value (list->dv);
4330 rtx *cvalp = &canon[(int)GET_MODE (val)];
4331 rtx cval = *cvalp;
4333 if (canon_value_cmp (val, cval))
4334 *cvalp = val;
4337 for (list = set->regs[i]; list; list = list->next)
4338 if (list->offset == 0 && dv_onepart_p (list->dv))
4340 rtx cval = canon[(int)GET_MODE (list->loc)];
4342 if (!cval)
4343 continue;
4345 if (dv_is_value_p (list->dv))
4347 rtx val = dv_as_value (list->dv);
4349 if (val == cval)
4350 continue;
4352 VALUE_RECURSED_INTO (val) = true;
4353 set_variable_part (set, val, dv_from_value (cval), 0,
4354 VAR_INIT_STATUS_INITIALIZED,
4355 NULL, NO_INSERT);
4358 VALUE_RECURSED_INTO (cval) = true;
4359 set_variable_part (set, cval, list->dv, 0,
4360 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4363 for (listp = &set->regs[i]; (list = *listp);
4364 listp = list ? &list->next : listp)
4365 if (list->offset == 0 && dv_onepart_p (list->dv))
4367 rtx cval = canon[(int)GET_MODE (list->loc)];
4368 variable_def **slot;
4370 if (!cval)
4371 continue;
4373 if (dv_is_value_p (list->dv))
4375 rtx val = dv_as_value (list->dv);
4376 if (!VALUE_RECURSED_INTO (val))
4377 continue;
4380 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4381 canonicalize_values_star (slot, set);
4382 if (*listp != list)
4383 list = NULL;
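/* Editorial illustration (assumed example): if register r0 carries
   two VALUEs V1 and V2 of the same mode at offset 0, the
   canon_value_cmp-preferred one (say V1) becomes canonical for that
   mode; mutual equivalences between V2 and V1 are recorded with
   set_variable_part, and canonicalize_values_star then collapses
   the location lists around V1.  */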
4388 /* Remove any redundant values in the location list of VAR, which must
4389 be unshared and 1-part. */
4391 static void
4392 remove_duplicate_values (variable var)
4394 location_chain node, *nodep;
4396 gcc_assert (var->onepart);
4397 gcc_assert (var->n_var_parts == 1);
4398 gcc_assert (var->refcount == 1);
4400 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4402 if (GET_CODE (node->loc) == VALUE)
4404 if (VALUE_RECURSED_INTO (node->loc))
4406 /* Remove duplicate value node. */
4407 *nodep = node->next;
4408 delete node;
4409 continue;
4411 else
4412 VALUE_RECURSED_INTO (node->loc) = true;
4414 nodep = &node->next;
4417 for (node = var->var_part[0].loc_chain; node; node = node->next)
4418 if (GET_CODE (node->loc) == VALUE)
4420 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4421 VALUE_RECURSED_INTO (node->loc) = false;
4426 /* Hash table iteration argument passed to the variable_post_merge_* functions. */
4427 struct dfset_post_merge
4429 /* The new input set for the current block. */
4430 dataflow_set *set;
4431 /* Pointer to the permanent input set for the current block, or
4432 NULL. */
4433 dataflow_set **permp;
4436 /* Create values for incoming expressions associated with one-part
4437 variables that don't have value numbers for them. */
4439 int
4440 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4442 dataflow_set *set = dfpm->set;
4443 variable var = *slot;
4444 location_chain node;
4446 if (!var->onepart || !var->n_var_parts)
4447 return 1;
4449 gcc_assert (var->n_var_parts == 1);
4451 if (dv_is_decl_p (var->dv))
4453 bool check_dupes = false;
4455 restart:
4456 for (node = var->var_part[0].loc_chain; node; node = node->next)
4458 if (GET_CODE (node->loc) == VALUE)
4459 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4460 else if (GET_CODE (node->loc) == REG)
4462 attrs att, *attp, *curp = NULL;
4464 if (var->refcount != 1)
4466 slot = unshare_variable (set, slot, var,
4467 VAR_INIT_STATUS_INITIALIZED);
4468 var = *slot;
4469 goto restart;
4472 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4473 attp = &att->next)
4474 if (att->offset == 0
4475 && GET_MODE (att->loc) == GET_MODE (node->loc))
4477 if (dv_is_value_p (att->dv))
4479 rtx cval = dv_as_value (att->dv);
4480 node->loc = cval;
4481 check_dupes = true;
4482 break;
4484 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4485 curp = attp;
4488 if (!curp)
4490 curp = attp;
4491 while (*curp)
4492 if ((*curp)->offset == 0
4493 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4494 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4495 break;
4496 else
4497 curp = &(*curp)->next;
4498 gcc_assert (*curp);
4501 if (!att)
4503 decl_or_value cdv;
4504 rtx cval;
4506 if (!*dfpm->permp)
4508 *dfpm->permp = XNEW (dataflow_set);
4509 dataflow_set_init (*dfpm->permp);
4512 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4513 att; att = att->next)
4514 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4516 gcc_assert (att->offset == 0
4517 && dv_is_value_p (att->dv));
4518 val_reset (set, att->dv);
4519 break;
4522 if (att)
4524 cdv = att->dv;
4525 cval = dv_as_value (cdv);
4527 else
4529 /* Create a unique value to hold this register,
4530 that ought to be found and reused in
4531 subsequent rounds. */
4532 cselib_val *v;
4533 gcc_assert (!cselib_lookup (node->loc,
4534 GET_MODE (node->loc), 0,
4535 VOIDmode));
4536 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4537 VOIDmode);
4538 cselib_preserve_value (v);
4539 cselib_invalidate_rtx (node->loc);
4540 cval = v->val_rtx;
4541 cdv = dv_from_value (cval);
4542 if (dump_file)
4543 fprintf (dump_file,
4544 "Created new value %u:%u for reg %i\n",
4545 v->uid, v->hash, REGNO (node->loc));
4548 var_reg_decl_set (*dfpm->permp, node->loc,
4549 VAR_INIT_STATUS_INITIALIZED,
4550 cdv, 0, NULL, INSERT);
4552 node->loc = cval;
4553 check_dupes = true;
4556 /* Remove attribute referring to the decl, which now
4557 uses the value for the register, already existing or
4558 to be added when we bring perm in. */
4559 att = *curp;
4560 *curp = att->next;
4561 delete att;
4565 if (check_dupes)
4566 remove_duplicate_values (var);
4569 return 1;
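/* Editorial illustration (hypothetical names): suppose decl X came
   out of the merge bound directly to (reg:SI 3) and no VALUE for
   that register is live.  A fresh VALUE, say V, is created with
   cselib_lookup, preserved, and recorded in the permanent set; the
   entry in X's location chain is rewritten from (reg:SI 3) to V,
   and the register attribute that pointed at the decl is dropped.  */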
4572 /* Reset values in the permanent set that are not associated with the
4573 chosen expression. */
4575 int
4576 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4578 dataflow_set *set = dfpm->set;
4579 variable pvar = *pslot, var;
4580 location_chain pnode;
4581 decl_or_value dv;
4582 attrs att;
4584 gcc_assert (dv_is_value_p (pvar->dv)
4585 && pvar->n_var_parts == 1);
4586 pnode = pvar->var_part[0].loc_chain;
4587 gcc_assert (pnode
4588 && !pnode->next
4589 && REG_P (pnode->loc));
4591 dv = pvar->dv;
4593 var = shared_hash_find (set->vars, dv);
4594 if (var)
4596 /* Although variable_post_merge_new_vals may have made decls
4597 non-star-canonical, values that pre-existed in canonical form
4598 remain canonical, and newly-created values reference a single
4599 REG, so they are canonical as well. Since VAR has the
4600 location list for a VALUE, using find_loc_in_1pdv for it is
4601 fine, since VALUEs don't map back to DECLs. */
4602 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4603 return 1;
4604 val_reset (set, dv);
4607 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4608 if (att->offset == 0
4609 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4610 && dv_is_value_p (att->dv))
4611 break;
4613 /* If there is a value associated with this register already, create
4614 an equivalence. */
4615 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4617 rtx cval = dv_as_value (att->dv);
4618 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4619 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4620 NULL, INSERT);
4622 else if (!att)
4624 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4625 dv, 0, pnode->loc);
4626 variable_union (pvar, set);
4629 return 1;
4632 /* Adjust the merged set: create values for new incoming expressions,
4633 bring in entries from the permanent set, and canonicalize values
4634 and variables. */
4635 static void
4636 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4638 struct dfset_post_merge dfpm;
4640 dfpm.set = set;
4641 dfpm.permp = permp;
4643 shared_hash_htab (set->vars)
4644 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4645 if (*permp)
4646 shared_hash_htab ((*permp)->vars)
4647 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4648 shared_hash_htab (set->vars)
4649 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4650 shared_hash_htab (set->vars)
4651 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4654 /* Return a node whose loc is a MEM that refers to EXPR in the
4655 location list of a one-part variable or value VAL, or in that of
4656 any values recursively mentioned in the location lists. */
4658 static location_chain
4659 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4661 location_chain node;
4662 decl_or_value dv;
4663 variable var;
4664 location_chain where = NULL;
4666 if (!val)
4667 return NULL;
4669 gcc_assert (GET_CODE (val) == VALUE
4670 && !VALUE_RECURSED_INTO (val));
4672 dv = dv_from_value (val);
4673 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4675 if (!var)
4676 return NULL;
4678 gcc_assert (var->onepart);
4680 if (!var->n_var_parts)
4681 return NULL;
4683 VALUE_RECURSED_INTO (val) = true;
4685 for (node = var->var_part[0].loc_chain; node; node = node->next)
4686 if (MEM_P (node->loc)
4687 && MEM_EXPR (node->loc) == expr
4688 && INT_MEM_OFFSET (node->loc) == 0)
4690 where = node;
4691 break;
4693 else if (GET_CODE (node->loc) == VALUE
4694 && !VALUE_RECURSED_INTO (node->loc)
4695 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4696 break;
4698 VALUE_RECURSED_INTO (val) = false;
4700 return where;
4703 /* Return TRUE if the value of MEM may vary across a call. */
4705 static bool
4706 mem_dies_at_call (rtx mem)
4708 tree expr = MEM_EXPR (mem);
4709 tree decl;
4711 if (!expr)
4712 return true;
4714 decl = get_base_address (expr);
4716 if (!decl)
4717 return true;
4719 if (!DECL_P (decl))
4720 return true;
4722 return (may_be_aliased (decl)
4723 || (!TREE_READONLY (decl) && is_global_var (decl)));
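/* Editorial illustration (not from the sources):

     int g;                    // global: dies at calls
     void f (int *);
     int x; f (&x);            // address escapes: may_be_aliased, dies
     static const int c = 1;   // read-only, never aliased: survives

   A MEM with no MEM_EXPR, or whose base is not a DECL, is
   conservatively assumed to die at calls as well.  */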
4726 /* Remove all MEMs from the location list of a hash table entry for a
4727 one-part variable, except those whose MEM attributes map back to
4728 the variable itself, directly or within a VALUE. */
4730 int
4731 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4733 variable var = *slot;
4735 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4737 tree decl = dv_as_decl (var->dv);
4738 location_chain loc, *locp;
4739 bool changed = false;
4741 if (!var->n_var_parts)
4742 return 1;
4744 gcc_assert (var->n_var_parts == 1);
4746 if (shared_var_p (var, set->vars))
4748 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4750 /* We want to remove dying MEMs that don't refer to DECL. */
4751 if (GET_CODE (loc->loc) == MEM
4752 && (MEM_EXPR (loc->loc) != decl
4753 || INT_MEM_OFFSET (loc->loc) != 0)
4754 && !mem_dies_at_call (loc->loc))
4755 break;
4756 /* We want to move MEMs that do refer to DECL here. */
4757 else if (GET_CODE (loc->loc) == VALUE
4758 && find_mem_expr_in_1pdv (decl, loc->loc,
4759 shared_hash_htab (set->vars)))
4760 break;
4763 if (!loc)
4764 return 1;
4766 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4767 var = *slot;
4768 gcc_assert (var->n_var_parts == 1);
4771 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4772 loc; loc = *locp)
4774 rtx old_loc = loc->loc;
4775 if (GET_CODE (old_loc) == VALUE)
4777 location_chain mem_node
4778 = find_mem_expr_in_1pdv (decl, loc->loc,
4779 shared_hash_htab (set->vars));
4781 /* ??? This picks up only one out of multiple MEMs that
4782 refer to the same variable. Do we ever need to be
4783 concerned about dealing with more than one, or can we
4784 assume that, since they should all map to the same
4785 variable location, their addresses will have been merged
4786 and they will be regarded as equivalent? */
4787 if (mem_node)
4789 loc->loc = mem_node->loc;
4790 loc->set_src = mem_node->set_src;
4791 loc->init = MIN (loc->init, mem_node->init);
4795 if (GET_CODE (loc->loc) != MEM
4796 || (MEM_EXPR (loc->loc) == decl
4797 && INT_MEM_OFFSET (loc->loc) == 0)
4798 || !mem_dies_at_call (loc->loc))
4800 if (old_loc != loc->loc && emit_notes)
4802 if (old_loc == var->var_part[0].cur_loc)
4804 changed = true;
4805 var->var_part[0].cur_loc = NULL;
4808 locp = &loc->next;
4809 continue;
4812 if (emit_notes)
4814 if (old_loc == var->var_part[0].cur_loc)
4816 changed = true;
4817 var->var_part[0].cur_loc = NULL;
4820 *locp = loc->next;
4821 delete loc;
4824 if (!var->var_part[0].loc_chain)
4826 var->n_var_parts--;
4827 changed = true;
4829 if (changed)
4830 variable_was_changed (var, set);
4833 return 1;
4836 /* Remove all MEMs from the location list of a hash table entry for a
4837 value. */
4839 int
4840 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4842 variable var = *slot;
4844 if (var->onepart == ONEPART_VALUE)
4846 location_chain loc, *locp;
4847 bool changed = false;
4848 rtx cur_loc;
4850 gcc_assert (var->n_var_parts == 1);
4852 if (shared_var_p (var, set->vars))
4854 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4855 if (GET_CODE (loc->loc) == MEM
4856 && mem_dies_at_call (loc->loc))
4857 break;
4859 if (!loc)
4860 return 1;
4862 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4863 var = *slot;
4864 gcc_assert (var->n_var_parts == 1);
4867 if (VAR_LOC_1PAUX (var))
4868 cur_loc = VAR_LOC_FROM (var);
4869 else
4870 cur_loc = var->var_part[0].cur_loc;
4872 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4873 loc; loc = *locp)
4875 if (GET_CODE (loc->loc) != MEM
4876 || !mem_dies_at_call (loc->loc))
4878 locp = &loc->next;
4879 continue;
4882 *locp = loc->next;
4883 /* If we have deleted the location which was last emitted,
4884 we have to emit a new location, so add the variable to the
4885 set of changed variables. */
4886 if (cur_loc == loc->loc)
4888 changed = true;
4889 var->var_part[0].cur_loc = NULL;
4890 if (VAR_LOC_1PAUX (var))
4891 VAR_LOC_FROM (var) = NULL;
4893 delete loc;
4896 if (!var->var_part[0].loc_chain)
4898 var->n_var_parts--;
4899 changed = true;
4901 if (changed)
4902 variable_was_changed (var, set);
4905 return 1;
4908 /* Remove all variable-location information about call-clobbered
4909 registers, as well as associations between MEMs and VALUEs. */
4911 static void
4912 dataflow_set_clear_at_call (dataflow_set *set)
4914 unsigned int r;
4915 hard_reg_set_iterator hrsi;
4917 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4918 var_regno_delete (set, r);
4920 if (MAY_HAVE_DEBUG_INSNS)
4922 set->traversed_vars = set->vars;
4923 shared_hash_htab (set->vars)
4924 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4925 set->traversed_vars = set->vars;
4926 shared_hash_htab (set->vars)
4927 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4928 set->traversed_vars = NULL;
4932 static bool
4933 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4935 location_chain lc1, lc2;
4937 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4939 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4941 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4943 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4944 break;
4946 if (rtx_equal_p (lc1->loc, lc2->loc))
4947 break;
4949 if (!lc2)
4950 return true;
4952 return false;
4955 /* Return true if one-part variables VAR1 and VAR2 are different.
4956 They must be in canonical order. */
4958 static bool
4959 onepart_variable_different_p (variable var1, variable var2)
4961 location_chain lc1, lc2;
4963 if (var1 == var2)
4964 return false;
4966 gcc_assert (var1->n_var_parts == 1
4967 && var2->n_var_parts == 1);
4969 lc1 = var1->var_part[0].loc_chain;
4970 lc2 = var2->var_part[0].loc_chain;
4972 gcc_assert (lc1 && lc2);
4974 while (lc1 && lc2)
4976 if (loc_cmp (lc1->loc, lc2->loc))
4977 return true;
4978 lc1 = lc1->next;
4979 lc2 = lc2->next;
4982 return lc1 != lc2;
4985 /* Return true if variables VAR1 and VAR2 are different. */
4987 static bool
4988 variable_different_p (variable var1, variable var2)
4990 int i;
4992 if (var1 == var2)
4993 return false;
4995 if (var1->onepart != var2->onepart)
4996 return true;
4998 if (var1->n_var_parts != var2->n_var_parts)
4999 return true;
5001 if (var1->onepart && var1->n_var_parts)
5003 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5004 && var1->n_var_parts == 1);
5005 /* One-part values have locations in a canonical order. */
5006 return onepart_variable_different_p (var1, var2);
5009 for (i = 0; i < var1->n_var_parts; i++)
5011 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5012 return true;
5013 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5014 return true;
5015 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5016 return true;
5018 return false;
5021 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5023 static bool
5024 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5026 variable_iterator_type hi;
5027 variable var1;
5029 if (old_set->vars == new_set->vars)
5030 return false;
5032 if (shared_hash_htab (old_set->vars)->elements ()
5033 != shared_hash_htab (new_set->vars)->elements ())
5034 return true;
5036 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5037 var1, variable, hi)
5039 variable_table_type *htab = shared_hash_htab (new_set->vars);
5040 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5041 if (!var2)
5043 if (dump_file && (dump_flags & TDF_DETAILS))
5045 fprintf (dump_file, "dataflow difference found: removal of:\n");
5046 dump_var (var1);
5048 return true;
5051 if (variable_different_p (var1, var2))
5053 if (dump_file && (dump_flags & TDF_DETAILS))
5055 fprintf (dump_file, "dataflow difference found: "
5056 "old and new follow:\n");
5057 dump_var (var1);
5058 dump_var (var2);
5060 return true;
5064 /* No need to traverse the second hashtab, if both have the same number
5065 of elements and the second one had all entries found in the first one,
5066 then it can't have any extra entries. */
5067 return false;
5070 /* Free the contents of dataflow set SET. */
5072 static void
5073 dataflow_set_destroy (dataflow_set *set)
5075 int i;
5077 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5078 attrs_list_clear (&set->regs[i]);
5080 shared_hash_destroy (set->vars);
5081 set->vars = NULL;
5084 /* Return true if RTL X contains a SYMBOL_REF. */
5086 static bool
5087 contains_symbol_ref (rtx x)
5089 const char *fmt;
5090 RTX_CODE code;
5091 int i;
5093 if (!x)
5094 return false;
5096 code = GET_CODE (x);
5097 if (code == SYMBOL_REF)
5098 return true;
5100 fmt = GET_RTX_FORMAT (code);
5101 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5103 if (fmt[i] == 'e')
5105 if (contains_symbol_ref (XEXP (x, i)))
5106 return true;
5108 else if (fmt[i] == 'E')
5110 int j;
5111 for (j = 0; j < XVECLEN (x, i); j++)
5112 if (contains_symbol_ref (XVECEXP (x, i, j)))
5113 return true;
5117 return false;
5120 /* Should EXPR be tracked? */
5122 static bool
5123 track_expr_p (tree expr, bool need_rtl)
5125 rtx decl_rtl;
5126 tree realdecl;
5128 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5129 return DECL_RTL_SET_P (expr);
5131 /* If EXPR is not a parameter or a variable do not track it. */
5132 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5133 return 0;
5135 /* It also must have a name... */
5136 if (!DECL_NAME (expr) && need_rtl)
5137 return 0;
5139 /* ... and RTL assigned to it. */
5140 decl_rtl = DECL_RTL_IF_SET (expr);
5141 if (!decl_rtl && need_rtl)
5142 return 0;
5144 /* If this expression is really a debug alias of some other declaration, we
5145 don't need to track this expression if the ultimate declaration is
5146 ignored. */
5147 realdecl = expr;
5148 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5150 realdecl = DECL_DEBUG_EXPR (realdecl);
5151 if (!DECL_P (realdecl))
5153 if (handled_component_p (realdecl)
5154 || (TREE_CODE (realdecl) == MEM_REF
5155 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5157 HOST_WIDE_INT bitsize, bitpos, maxsize;
5158 tree innerdecl
5159 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5160 &maxsize);
5161 if (!DECL_P (innerdecl)
5162 || DECL_IGNORED_P (innerdecl)
5163 /* Do not track declarations for parts of tracked parameters
5164 since we want to track them as a whole instead. */
5165 || (TREE_CODE (innerdecl) == PARM_DECL
5166 && DECL_MODE (innerdecl) != BLKmode
5167 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5168 || TREE_STATIC (innerdecl)
5169 || bitsize <= 0
5170 || bitpos + bitsize > 256
5171 || bitsize != maxsize)
5172 return 0;
5173 else
5174 realdecl = expr;
5176 else
5177 return 0;
5181 /* Do not track EXPR if its REALDECL should be ignored for
5182 debugging purposes. */
5183 if (DECL_IGNORED_P (realdecl))
5184 return 0;
5186 /* Do not track global variables until we are able to emit correct location
5187 lists for them. */
5188 if (TREE_STATIC (realdecl))
5189 return 0;
5191 /* When EXPR is a DECL for an alias of some variable (see the example)
5192 the TREE_STATIC flag is not used. Disable tracking for all DECLs
5193 whose DECL_RTL contains a SYMBOL_REF.
5195 Example:
5196 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5197 char **_dl_argv;
5198 */
5199 if (decl_rtl && MEM_P (decl_rtl)
5200 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5201 return 0;
5203 /* If the RTL is a memory, it should not be very large (because that
5204 would mean an array or a struct). */
5205 if (decl_rtl && MEM_P (decl_rtl))
5207 /* Do not track structures and arrays. */
5208 if (GET_MODE (decl_rtl) == BLKmode
5209 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5210 return 0;
5211 if (MEM_SIZE_KNOWN_P (decl_rtl)
5212 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5213 return 0;
5216 DECL_CHANGED (expr) = 0;
5217 DECL_CHANGED (realdecl) = 0;
5218 return 1;
5221 /* Determine whether a given LOC refers to the same variable part as
5222 EXPR+OFFSET. */
5224 static bool
5225 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5227 tree expr2;
5228 HOST_WIDE_INT offset2;
5230 if (! DECL_P (expr))
5231 return false;
5233 if (REG_P (loc))
5235 expr2 = REG_EXPR (loc);
5236 offset2 = REG_OFFSET (loc);
5238 else if (MEM_P (loc))
5240 expr2 = MEM_EXPR (loc);
5241 offset2 = INT_MEM_OFFSET (loc);
5243 else
5244 return false;
5246 if (! expr2 || ! DECL_P (expr2))
5247 return false;
5249 expr = var_debug_decl (expr);
5250 expr2 = var_debug_decl (expr2);
5252 return (expr == expr2 && offset == offset2);
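/* Editorial example (assumed): a store to (reg:SI 3) whose REG_EXPR
   is X with REG_OFFSET 4 is "the same variable part" as EXPR == X,
   OFFSET == 4, even when X is a debug alias of another decl: both
   sides are mapped through var_debug_decl before comparing.  */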
5255 /* LOC is a REG or MEM that we would like to track if possible.
5256 If EXPR is null, we don't know what expression LOC refers to,
5257 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5258 LOC is an lvalue register.
5260 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5261 is something we can track. When returning true, store the mode of
5262 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5263 from EXPR in *OFFSET_OUT (if nonnull). */
5265 static bool
5266 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5267 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5269 machine_mode mode;
5271 if (expr == NULL || !track_expr_p (expr, true))
5272 return false;
5274 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5275 whole subreg, but only the old inner part is really relevant. */
5276 mode = GET_MODE (loc);
5277 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5279 machine_mode pseudo_mode;
5281 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5282 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5284 offset += byte_lowpart_offset (pseudo_mode, mode);
5285 mode = pseudo_mode;
5289 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5290 Do the same if we are storing to a register and EXPR occupies
5291 the whole of register LOC; in that case, the whole of EXPR is
5292 being changed. We exclude complex modes from the second case
5293 because the real and imaginary parts are represented as separate
5294 pseudo registers, even if the whole complex value fits into one
5295 hard register. */
5296 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5297 || (store_reg_p
5298 && !COMPLEX_MODE_P (DECL_MODE (expr))
5299 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5300 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5302 mode = DECL_MODE (expr);
5303 offset = 0;
5306 if (offset < 0 || offset >= MAX_VAR_PARTS)
5307 return false;
5309 if (mode_out)
5310 *mode_out = mode;
5311 if (offset_out)
5312 *offset_out = offset;
5313 return true;
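/* Editorial illustration (assumed modes): if a HImode pseudo was
   assigned an SImode hard register via a paradoxical subreg, the
   REG_ATTRS describe the whole SImode reg; the code above narrows
   MODE back to HImode and adjusts OFFSET with byte_lowpart_offset,
   so only the meaningful lowpart is tracked.  */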
5316 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5317 want to track. When returning nonnull, make sure that the attributes
5318 on the returned value are updated. */
5320 static rtx
5321 var_lowpart (machine_mode mode, rtx loc)
5323 unsigned int offset, reg_offset, regno;
5325 if (GET_MODE (loc) == mode)
5326 return loc;
5328 if (!REG_P (loc) && !MEM_P (loc))
5329 return NULL;
5331 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5333 if (MEM_P (loc))
5334 return adjust_address_nv (loc, mode, offset);
5336 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5337 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5338 reg_offset, mode);
5339 return gen_rtx_REG_offset (loc, mode, regno, offset);
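/* Editorial example (assumed target layout): var_lowpart (SImode,
   (reg:DI 0)) yields an SImode REG whose register number and
   REG_OFFSET are adjusted for the lowpart, which on a big-endian
   target is not byte offset 0; for a MEM the address is simply
   readjusted with adjust_address_nv.  */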
5342 /* Carry information about uses and stores while walking rtx. */
5344 struct count_use_info
5346 /* The insn where the RTX is. */
5347 rtx_insn *insn;
5349 /* The basic block where insn is. */
5350 basic_block bb;
5352 /* The array of n_sets sets in the insn, as determined by cselib. */
5353 struct cselib_set *sets;
5354 int n_sets;
5356 /* True if we're counting stores, false otherwise. */
5357 bool store_p;
5360 /* Find a VALUE corresponding to X. */
5362 static inline cselib_val *
5363 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5365 int i;
5367 if (cui->sets)
5369 /* This is called after uses are set up and before stores are
5370 processed by cselib, so it's safe to look up srcs, but not
5371 dsts. So we look up expressions that appear in srcs or in
5372 dest expressions, but we search the sets array for dests of
5373 stores. */
5374 if (cui->store_p)
5376 /* Some targets represent memset and memcpy patterns
5377 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5378 (set (mem:BLK ...) (const_int ...)) or
5379 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5380 in that case, otherwise we end up with mode mismatches. */
5381 if (mode == BLKmode && MEM_P (x))
5382 return NULL;
5383 for (i = 0; i < cui->n_sets; i++)
5384 if (cui->sets[i].dest == x)
5385 return cui->sets[i].src_elt;
5387 else
5388 return cselib_lookup (x, mode, 0, VOIDmode);
5391 return NULL;
5394 /* Replace all registers and addresses in an expression with VALUE
5395 expressions that map back to them, unless the expression is a
5396 register. If no mapping is or can be performed, returns NULL. */
5398 static rtx
5399 replace_expr_with_values (rtx loc)
5401 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5402 return NULL;
5403 else if (MEM_P (loc))
5405 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5406 get_address_mode (loc), 0,
5407 GET_MODE (loc));
5408 if (addr)
5409 return replace_equiv_address_nv (loc, addr->val_rtx);
5410 else
5411 return NULL;
5413 else
5414 return cselib_subst_to_values (loc, VOIDmode);
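/* Editorial illustration (hypothetical VALUE names): a stack slot
   (mem:SI (plus (reg:P fp) (const_int -4))) becomes
   (mem:SI (value:P V)) once cselib knows a VALUE V for the address;
   a plain REG or ENTRY_VALUE is deliberately left unmapped (NULL),
   since registers are tracked directly.  */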
5417 /* Return true if X contains a DEBUG_EXPR. */
5419 static bool
5420 rtx_debug_expr_p (const_rtx x)
5422 subrtx_iterator::array_type array;
5423 FOR_EACH_SUBRTX (iter, array, x, ALL)
5424 if (GET_CODE (*iter) == DEBUG_EXPR)
5425 return true;
5426 return false;
5429 /* Determine what kind of micro operation to choose for a USE. Return
5430 MO_CLOBBER if no micro operation is to be generated. */
5432 static enum micro_operation_type
5433 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5435 tree expr;
5437 if (cui && cui->sets)
5439 if (GET_CODE (loc) == VAR_LOCATION)
5441 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5443 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5444 if (! VAR_LOC_UNKNOWN_P (ploc))
5446 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5447 VOIDmode);
5449 /* ??? flag_float_store and volatile mems are never
5450 given values, but we could in theory use them for
5451 locations. */
5452 gcc_assert (val || 1);
5454 return MO_VAL_LOC;
5456 else
5457 return MO_CLOBBER;
5460 if (REG_P (loc) || MEM_P (loc))
5462 if (modep)
5463 *modep = GET_MODE (loc);
5464 if (cui->store_p)
5466 if (REG_P (loc)
5467 || (find_use_val (loc, GET_MODE (loc), cui)
5468 && cselib_lookup (XEXP (loc, 0),
5469 get_address_mode (loc), 0,
5470 GET_MODE (loc))))
5471 return MO_VAL_SET;
5473 else
5475 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5477 if (val && !cselib_preserved_value_p (val))
5478 return MO_VAL_USE;
5483 if (REG_P (loc))
5485 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5487 if (loc == cfa_base_rtx)
5488 return MO_CLOBBER;
5489 expr = REG_EXPR (loc);
5491 if (!expr)
5492 return MO_USE_NO_VAR;
5493 else if (target_for_debug_bind (var_debug_decl (expr)))
5494 return MO_CLOBBER;
5495 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5496 false, modep, NULL))
5497 return MO_USE;
5498 else
5499 return MO_USE_NO_VAR;
5501 else if (MEM_P (loc))
5503 expr = MEM_EXPR (loc);
5505 if (!expr)
5506 return MO_CLOBBER;
5507 else if (target_for_debug_bind (var_debug_decl (expr)))
5508 return MO_CLOBBER;
5509 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5510 false, modep, NULL)
5511 /* Multi-part variables shouldn't refer to one-part
5512 variable names such as VALUEs (never happens) or
5513 DEBUG_EXPRs (only happens in the presence of debug
5514 insns). */
5515 && (!MAY_HAVE_DEBUG_INSNS
5516 || !rtx_debug_expr_p (XEXP (loc, 0))))
5517 return MO_USE;
5518 else
5519 return MO_CLOBBER;
5522 return MO_CLOBBER;
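/* Editorial summary (a sketch of the dispatch above): with cselib
   sets available, a trackable VAR_LOCATION yields MO_VAL_LOC, a
   store with a known value MO_VAL_SET, and a use of an unpreserved
   value MO_VAL_USE; otherwise a REG/MEM with a trackable expression
   is MO_USE, one without is MO_USE_NO_VAR (or MO_CLOBBER for MEMs
   and for cfa_base_rtx).  */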
5525 /* Log to OUT information about micro-operation MOPT involving X in
5526 INSN of BB. */
5528 static inline void
5529 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5530 enum micro_operation_type mopt, FILE *out)
5532 fprintf (out, "bb %i op %i insn %i %s ",
5533 bb->index, VTI (bb)->mos.length (),
5534 INSN_UID (insn), micro_operation_type_name[mopt]);
5535 print_inline_rtx (out, x, 2);
5536 fputc ('\n', out);
5539 /* Tell whether the CONCAT used to hold a VALUE and its location
5540 needs value resolution, i.e., an attempt at mapping the location
5541 back to other incoming values. */
5542 #define VAL_NEEDS_RESOLUTION(x) \
5543 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5544 /* Whether the location in the CONCAT is a tracked expression that
5545 should also be handled like a MO_USE. */
5546 #define VAL_HOLDS_TRACK_EXPR(x) \
5547 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5548 /* Whether the location in the CONCAT should be handled like a MO_COPY
5549 as well. */
5550 #define VAL_EXPR_IS_COPIED(x) \
5551 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5552 /* Whether the location in the CONCAT should be handled like a
5553 MO_CLOBBER as well. */
5554 #define VAL_EXPR_IS_CLOBBERED(x) \
5555 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5557 /* All preserved VALUEs. */
5558 static vec<rtx> preserved_values;
5560 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5562 static void
5563 preserve_value (cselib_val *val)
5565 cselib_preserve_value (val);
5566 preserved_values.safe_push (val->val_rtx);
5569 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5570 we discover any rtxes that are not suitable for CONST use and
5571 have not been replaced by VALUEs. */
5573 static bool
5574 non_suitable_const (const_rtx x)
5576 subrtx_iterator::array_type array;
5577 FOR_EACH_SUBRTX (iter, array, x, ALL)
5579 const_rtx x = *iter;
5580 switch (GET_CODE (x))
5582 case REG:
5583 case DEBUG_EXPR:
5584 case PC:
5585 case SCRATCH:
5586 case CC0:
5587 case ASM_INPUT:
5588 case ASM_OPERANDS:
5589 return true;
5590 case MEM:
5591 if (!MEM_READONLY_P (x))
5592 return true;
5593 break;
5594 default:
5595 break;
5598 return false;
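/* Editorial example (not from the sources):
   (const (plus (symbol_ref "x") (const_int 8))) is acceptable for
   CONST use, whereas anything containing a REG, a DEBUG_EXPR, or a
   writable MEM is rejected above.  */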
5601 /* Add the use (register or memory reference) LOC, if it is to be
5602 tracked, to VTI (bb)->mos. */
5604 static void
5605 add_uses (rtx loc, struct count_use_info *cui)
5607 machine_mode mode = VOIDmode;
5608 enum micro_operation_type type = use_type (loc, cui, &mode);
5610 if (type != MO_CLOBBER)
5612 basic_block bb = cui->bb;
5613 micro_operation mo;
5615 mo.type = type;
5616 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5617 mo.insn = cui->insn;
5619 if (type == MO_VAL_LOC)
5621 rtx oloc = loc;
5622 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5623 cselib_val *val;
5625 gcc_assert (cui->sets);
5627 if (MEM_P (vloc)
5628 && !REG_P (XEXP (vloc, 0))
5629 && !MEM_P (XEXP (vloc, 0)))
5631 rtx mloc = vloc;
5632 machine_mode address_mode = get_address_mode (mloc);
5633 cselib_val *val
5634 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5635 GET_MODE (mloc));
5637 if (val && !cselib_preserved_value_p (val))
5638 preserve_value (val);
5641 if (CONSTANT_P (vloc)
5642 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5643 /* For constants don't look up any value. */;
5644 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5645 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5647 machine_mode mode2;
5648 enum micro_operation_type type2;
5649 rtx nloc = NULL;
5650 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5652 if (resolvable)
5653 nloc = replace_expr_with_values (vloc);
5655 if (nloc)
5657 oloc = shallow_copy_rtx (oloc);
5658 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5661 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5663 type2 = use_type (vloc, 0, &mode2);
5665 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5666 || type2 == MO_CLOBBER);
5668 if (type2 == MO_CLOBBER
5669 && !cselib_preserved_value_p (val))
5671 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5672 preserve_value (val);
5675 else if (!VAR_LOC_UNKNOWN_P (vloc))
5677 oloc = shallow_copy_rtx (oloc);
5678 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5681 mo.u.loc = oloc;
5683 else if (type == MO_VAL_USE)
5685 machine_mode mode2 = VOIDmode;
5686 enum micro_operation_type type2;
5687 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5688 rtx vloc, oloc = loc, nloc;
5690 gcc_assert (cui->sets);
5692 if (MEM_P (oloc)
5693 && !REG_P (XEXP (oloc, 0))
5694 && !MEM_P (XEXP (oloc, 0)))
5696 rtx mloc = oloc;
5697 machine_mode address_mode = get_address_mode (mloc);
5698 cselib_val *val
5699 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5700 GET_MODE (mloc));
5702 if (val && !cselib_preserved_value_p (val))
5703 preserve_value (val);
5706 type2 = use_type (loc, 0, &mode2);
5708 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5709 || type2 == MO_CLOBBER);
5711 if (type2 == MO_USE)
5712 vloc = var_lowpart (mode2, loc);
5713 else
5714 vloc = oloc;
5716 /* The loc of a MO_VAL_USE may have two forms:
5718 (concat val src): val is at src, a value-based
5719 representation.
5721 (concat (concat val use) src): same as above, with use as
5722 the MO_USE tracked value, if it differs from src.
5724 */
5726 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5727 nloc = replace_expr_with_values (loc);
5728 if (!nloc)
5729 nloc = oloc;
5731 if (vloc != nloc)
5732 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5733 else
5734 oloc = val->val_rtx;
5736 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5738 if (type2 == MO_USE)
5739 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5740 if (!cselib_preserved_value_p (val))
5742 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5743 preserve_value (val);
5746 else
5747 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5749 if (dump_file && (dump_flags & TDF_DETAILS))
5750 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5751 VTI (bb)->mos.safe_push (mo);
5755 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5757 static void
5758 add_uses_1 (rtx *x, void *cui)
5760 subrtx_var_iterator::array_type array;
5761 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5762 add_uses (*iter, (struct count_use_info *) cui);
5765 /* This is the value used during expansion of locations. We want it
5766 to be unbounded, so that variables expanded deep in a recursion
5767 nest are fully evaluated, so that their values are cached
5768 correctly. We avoid recursion cycles through other means, and we
5769 don't unshare RTL, so excess complexity is not a problem. */
5770 #define EXPR_DEPTH (INT_MAX)
5771 /* We use this to keep too-complex expressions from being emitted as
5772 location notes and then into debug information. Users can trade
5773 compile time for ridiculously complex expressions, although they're
5774 seldom useful, and they may often have to be discarded as not
5775 representable anyway. */
5776 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5778 /* Attempt to reverse the EXPR operation in the debug info and record
5779 it in the cselib table. For example, for reg1 = reg2 + 6, even when
5780 reg2 is no longer live we can express its value as VAL - 6. */
5782 static void
5783 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5785 rtx src, arg, ret;
5786 cselib_val *v;
5787 struct elt_loc_list *l;
5788 enum rtx_code code;
5789 int count;
5791 if (GET_CODE (expr) != SET)
5792 return;
5794 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5795 return;
5797 src = SET_SRC (expr);
5798 switch (GET_CODE (src))
5800 case PLUS:
5801 case MINUS:
5802 case XOR:
5803 case NOT:
5804 case NEG:
5805 if (!REG_P (XEXP (src, 0)))
5806 return;
5807 break;
5808 case SIGN_EXTEND:
5809 case ZERO_EXTEND:
5810 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5811 return;
5812 break;
5813 default:
5814 return;
5817 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5818 return;
5820 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5821 if (!v || !cselib_preserved_value_p (v))
5822 return;
5824 /* Use canonical V to avoid creating multiple redundant expressions
5825 for different VALUES equivalent to V. */
5826 v = canonical_cselib_val (v);
5828 /* Adding a reverse op isn't useful if V already has an always valid
5829 location. Ignore ENTRY_VALUE, while it is always constant, we should
5830 prefer non-ENTRY_VALUE locations whenever possible. */
5831 for (l = v->locs, count = 0; l; l = l->next, count++)
5832 if (CONSTANT_P (l->loc)
5833 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5834 return;
5835 /* Avoid creating overly large locs lists. */
5836 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5837 return;
5839 switch (GET_CODE (src))
5841 case NOT:
5842 case NEG:
5843 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5844 return;
5845 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5846 break;
5847 case SIGN_EXTEND:
5848 case ZERO_EXTEND:
5849 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5850 break;
5851 case XOR:
5852 code = XOR;
5853 goto binary;
5854 case PLUS:
5855 code = MINUS;
5856 goto binary;
5857 case MINUS:
5858 code = PLUS;
5859 goto binary;
5860 binary:
5861 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5862 return;
5863 arg = XEXP (src, 1);
5864 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5866 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5867 if (arg == NULL_RTX)
5868 return;
5869 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5870 return;
5872 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5873 if (ret == val)
5874 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5875 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5876 breaks a lot of routines during var-tracking. */
5877 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5878 break;
5879 default:
5880 gcc_unreachable ();
5883 cselib_add_permanent_equiv (v, ret, insn);
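/* Editorial illustration (hypothetical VALUEs): for
   (set (reg:SI 1) (plus (reg:SI 2) (const_int 6))), with VAL the
   VALUE of reg1 and V the preserved canonical VALUE of reg2, the
   permanent equivalence V == (minus:SI VAL (const_int 6)) is
   recorded; for (set (reg:SI 1) (zero_extend:SI (reg:HI 2))) the
   equivalence is a lowpart SUBREG of VAL instead.  */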
5886 /* Add the store (register or memory reference) LOC, if it is to be
5887 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing
5888 the store. CUIP->insn is the instruction that LOC is part of. */
5890 static void
5891 add_stores (rtx loc, const_rtx expr, void *cuip)
5893 machine_mode mode = VOIDmode, mode2;
5894 struct count_use_info *cui = (struct count_use_info *)cuip;
5895 basic_block bb = cui->bb;
5896 micro_operation mo;
5897 rtx oloc = loc, nloc, src = NULL;
5898 enum micro_operation_type type = use_type (loc, cui, &mode);
5899 bool track_p = false;
5900 cselib_val *v;
5901 bool resolve, preserve;
5903 if (type == MO_CLOBBER)
5904 return;
5906 mode2 = mode;
5908 if (REG_P (loc))
5910 gcc_assert (loc != cfa_base_rtx);
5911 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5912 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5913 || GET_CODE (expr) == CLOBBER)
5915 mo.type = MO_CLOBBER;
5916 mo.u.loc = loc;
5917 if (GET_CODE (expr) == SET
5918 && SET_DEST (expr) == loc
5919 && !unsuitable_loc (SET_SRC (expr))
5920 && find_use_val (loc, mode, cui))
5922 gcc_checking_assert (type == MO_VAL_SET);
5923 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5926 else
5928 if (GET_CODE (expr) == SET
5929 && SET_DEST (expr) == loc
5930 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5931 src = var_lowpart (mode2, SET_SRC (expr));
5932 loc = var_lowpart (mode2, loc);
5934 if (src == NULL)
5936 mo.type = MO_SET;
5937 mo.u.loc = loc;
5939 else
5941 rtx xexpr = gen_rtx_SET (loc, src);
5942 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5944 /* If this is an instruction copying (part of) a parameter
5945 passed by invisible reference to its register location,
5946 pretend it's a SET so that the initial memory location
5947 is discarded, as the parameter register can be reused
5948 for other purposes and we do not track locations based
5949 on generic registers. */
5950 if (MEM_P (src)
5951 && REG_EXPR (loc)
5952 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5953 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5954 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5955 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5956 != arg_pointer_rtx)
5957 mo.type = MO_SET;
5958 else
5959 mo.type = MO_COPY;
5961 else
5962 mo.type = MO_SET;
5963 mo.u.loc = xexpr;
5966 mo.insn = cui->insn;
5968 else if (MEM_P (loc)
5969 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5970 || cui->sets))
5972 if (MEM_P (loc) && type == MO_VAL_SET
5973 && !REG_P (XEXP (loc, 0))
5974 && !MEM_P (XEXP (loc, 0)))
5976 rtx mloc = loc;
5977 machine_mode address_mode = get_address_mode (mloc);
5978 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5979 address_mode, 0,
5980 GET_MODE (mloc));
5982 if (val && !cselib_preserved_value_p (val))
5983 preserve_value (val);
5986 if (GET_CODE (expr) == CLOBBER || !track_p)
5988 mo.type = MO_CLOBBER;
5989 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5991 else
5993 if (GET_CODE (expr) == SET
5994 && SET_DEST (expr) == loc
5995 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5996 src = var_lowpart (mode2, SET_SRC (expr));
5997 loc = var_lowpart (mode2, loc);
5999 if (src == NULL)
6001 mo.type = MO_SET;
6002 mo.u.loc = loc;
6004 else
6006 rtx xexpr = gen_rtx_SET (loc, src);
6007 if (same_variable_part_p (SET_SRC (xexpr),
6008 MEM_EXPR (loc),
6009 INT_MEM_OFFSET (loc)))
6010 mo.type = MO_COPY;
6011 else
6012 mo.type = MO_SET;
6013 mo.u.loc = xexpr;
6016 mo.insn = cui->insn;
6018 else
6019 return;
6021 if (type != MO_VAL_SET)
6022 goto log_and_return;
6024 v = find_use_val (oloc, mode, cui);
6026 if (!v)
6027 goto log_and_return;
6029 resolve = preserve = !cselib_preserved_value_p (v);
6031 /* We cannot track values for multiple-part variables, so we track only
6032 locations for tracked parameters passed either by invisible reference
6033 or directly in multiple locations. */
6034 if (track_p
6035 && REG_P (loc)
6036 && REG_EXPR (loc)
6037 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6038 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6039 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6040 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6041 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6042 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6043 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6045 /* Although we don't use the value here, it could be used later by the
6046 mere virtue of its existence as the operand of the reverse operation
6047 that gave rise to it (typically extension/truncation). Make sure it
6048 is preserved as required by vt_expand_var_loc_chain. */
6049 if (preserve)
6050 preserve_value (v);
6051 goto log_and_return;
6054 if (loc == stack_pointer_rtx
6055 && hard_frame_pointer_adjustment != -1
6056 && preserve)
6057 cselib_set_value_sp_based (v);
6059 nloc = replace_expr_with_values (oloc);
6060 if (nloc)
6061 oloc = nloc;
6063 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6065 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6067 if (oval == v)
6068 return;
6069 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6071 if (oval && !cselib_preserved_value_p (oval))
6073 micro_operation moa;
6075 preserve_value (oval);
6077 moa.type = MO_VAL_USE;
6078 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6079 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6080 moa.insn = cui->insn;
6082 if (dump_file && (dump_flags & TDF_DETAILS))
6083 log_op_type (moa.u.loc, cui->bb, cui->insn,
6084 moa.type, dump_file);
6085 VTI (bb)->mos.safe_push (moa);
6088 resolve = false;
6090 else if (resolve && GET_CODE (mo.u.loc) == SET)
6092 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6093 nloc = replace_expr_with_values (SET_SRC (expr));
6094 else
6095 nloc = NULL_RTX;
6097 /* Avoid the mode mismatch between oexpr and expr. */
6098 if (!nloc && mode != mode2)
6100 nloc = SET_SRC (expr);
6101 gcc_assert (oloc == SET_DEST (expr));
6104 if (nloc && nloc != SET_SRC (mo.u.loc))
6105 oloc = gen_rtx_SET (oloc, nloc);
6106 else
6108 if (oloc == SET_DEST (mo.u.loc))
6109 /* No point in duplicating. */
6110 oloc = mo.u.loc;
6111 if (!REG_P (SET_SRC (mo.u.loc)))
6112 resolve = false;
6115 else if (!resolve)
6117 if (GET_CODE (mo.u.loc) == SET
6118 && oloc == SET_DEST (mo.u.loc))
6119 /* No point in duplicating. */
6120 oloc = mo.u.loc;
6122 else
6123 resolve = false;
6125 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6127 if (mo.u.loc != oloc)
6128 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6130 /* The loc of a MO_VAL_SET may have various forms:
6132 (concat val dst): dst now holds val
6134 (concat val (set dst src)): dst now holds val, copied from src
6136 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6137 after replacing mems and non-top-level regs with values.
6139 (concat (concat val dstv) (set dst src)): dst now holds val,
6140 copied from src. dstv is a value-based representation of dst, if
6141 it differs from dst. If resolution is needed, src is a REG, and
6142 its mode is the same as that of val.
6144 (concat (concat val (set dstv srcv)) (set dst src)): src
6145 copied to dst, holding val. dstv and srcv are value-based
6146 representations of dst and src, respectively.
6148 */
6150 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6151 reverse_op (v->val_rtx, expr, cui->insn);
6153 mo.u.loc = loc;
6155 if (track_p)
6156 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6157 if (preserve)
6159 VAL_NEEDS_RESOLUTION (loc) = resolve;
6160 preserve_value (v);
6162 if (mo.type == MO_CLOBBER)
6163 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6164 if (mo.type == MO_COPY)
6165 VAL_EXPR_IS_COPIED (loc) = 1;
6167 mo.type = MO_VAL_SET;
6169 log_and_return:
6170 if (dump_file && (dump_flags & TDF_DETAILS))
6171 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6172 VTI (bb)->mos.safe_push (mo);
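/* Editorial example (assumed RTL): for a simple copy
   (set (reg:SI 1) (reg:SI 2)) where both registers map to the same
   decl part, the micro operation becomes MO_COPY; once wrapped with
   the store's VALUE it is pushed as MO_VAL_SET, with
   VAL_EXPR_IS_COPIED set on the CONCAT so the note emitter knows
   the old location remains valid.  */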
6175 /* Arguments to the call. */
6176 static rtx call_arguments;
6178 /* Compute call_arguments. */
6180 static void
6181 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6183 rtx link, x, call;
6184 rtx prev, cur, next;
6185 rtx this_arg = NULL_RTX;
6186 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6187 tree obj_type_ref = NULL_TREE;
6188 CUMULATIVE_ARGS args_so_far_v;
6189 cumulative_args_t args_so_far;
6191 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6192 args_so_far = pack_cumulative_args (&args_so_far_v);
6193 call = get_call_rtx_from (insn);
6194 if (call)
6196 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6198 rtx symbol = XEXP (XEXP (call, 0), 0);
6199 if (SYMBOL_REF_DECL (symbol))
6200 fndecl = SYMBOL_REF_DECL (symbol);
6202 if (fndecl == NULL_TREE)
6203 fndecl = MEM_EXPR (XEXP (call, 0));
6204 if (fndecl
6205 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6206 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6207 fndecl = NULL_TREE;
6208 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6209 type = TREE_TYPE (fndecl);
6210 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6212 if (TREE_CODE (fndecl) == INDIRECT_REF
6213 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6214 obj_type_ref = TREE_OPERAND (fndecl, 0);
6215 fndecl = NULL_TREE;
6217 if (type)
6219 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6220 t = TREE_CHAIN (t))
6221 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6222 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6223 break;
6224 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6225 type = NULL;
6226 else
6228 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6229 link = CALL_INSN_FUNCTION_USAGE (insn);
6230 #ifndef PCC_STATIC_STRUCT_RETURN
6231 if (aggregate_value_p (TREE_TYPE (type), type)
6232 && targetm.calls.struct_value_rtx (type, 0) == 0)
6234 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6235 machine_mode mode = TYPE_MODE (struct_addr);
6236 rtx reg;
6237 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6238 nargs + 1);
6239 reg = targetm.calls.function_arg (args_so_far, mode,
6240 struct_addr, true);
6241 targetm.calls.function_arg_advance (args_so_far, mode,
6242 struct_addr, true);
6243 if (reg == NULL_RTX)
6245 for (; link; link = XEXP (link, 1))
6246 if (GET_CODE (XEXP (link, 0)) == USE
6247 && MEM_P (XEXP (XEXP (link, 0), 0)))
6249 link = XEXP (link, 1);
6250 break;
6254 else
6255 #endif
6256 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6257 nargs);
6258 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6260 machine_mode mode;
6261 t = TYPE_ARG_TYPES (type);
6262 mode = TYPE_MODE (TREE_VALUE (t));
6263 this_arg = targetm.calls.function_arg (args_so_far, mode,
6264 TREE_VALUE (t), true);
6265 if (this_arg && !REG_P (this_arg))
6266 this_arg = NULL_RTX;
6267 else if (this_arg == NULL_RTX)
6269 for (; link; link = XEXP (link, 1))
6270 if (GET_CODE (XEXP (link, 0)) == USE
6271 && MEM_P (XEXP (XEXP (link, 0), 0)))
6273 this_arg = XEXP (XEXP (link, 0), 0);
6274 break;
6281 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6283 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6284 if (GET_CODE (XEXP (link, 0)) == USE)
6286 rtx item = NULL_RTX;
6287 x = XEXP (XEXP (link, 0), 0);
6288 if (GET_MODE (link) == VOIDmode
6289 || GET_MODE (link) == BLKmode
6290 || (GET_MODE (link) != GET_MODE (x)
6291 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6292 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6293 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6294 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6295 /* Can't do anything for these if the original type mode
6296 isn't known or can't be converted. */;
6297 else if (REG_P (x))
6299 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6300 if (val && cselib_preserved_value_p (val))
6301 item = val->val_rtx;
6302 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6303 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6305 machine_mode mode = GET_MODE (x);
6307 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6308 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6310 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6312 if (reg == NULL_RTX || !REG_P (reg))
6313 continue;
6314 val = cselib_lookup (reg, mode, 0, VOIDmode);
6315 if (val && cselib_preserved_value_p (val))
6317 item = val->val_rtx;
6318 break;
6323 else if (MEM_P (x))
6325 rtx mem = x;
6326 cselib_val *val;
6328 if (!frame_pointer_needed)
6330 struct adjust_mem_data amd;
6331 amd.mem_mode = VOIDmode;
6332 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6333 amd.side_effects = NULL;
6334 amd.store = true;
6335 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6336 &amd);
6337 gcc_assert (amd.side_effects == NULL_RTX);
6339 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6340 if (val && cselib_preserved_value_p (val))
6341 item = val->val_rtx;
6342 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6343 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6345 /* For a non-integer stack argument, also check whether it
6346 was initialized with an integer. */
6347 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6348 if (imode != GET_MODE (mem) && imode != BLKmode)
6350 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6351 imode, 0, VOIDmode);
6352 if (val && cselib_preserved_value_p (val))
6353 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6354 imode);
6358 if (item)
6360 rtx x2 = x;
6361 if (GET_MODE (item) != GET_MODE (link))
6362 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6363 if (GET_MODE (x2) != GET_MODE (link))
6364 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6365 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6366 call_arguments
6367 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6369 if (t && t != void_list_node)
6371 tree argtype = TREE_VALUE (t);
6372 machine_mode mode = TYPE_MODE (argtype);
6373 rtx reg;
6374 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6376 argtype = build_pointer_type (argtype);
6377 mode = TYPE_MODE (argtype);
6379 reg = targetm.calls.function_arg (args_so_far, mode,
6380 argtype, true);
6381 if (TREE_CODE (argtype) == REFERENCE_TYPE
6382 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6383 && reg
6384 && REG_P (reg)
6385 && GET_MODE (reg) == mode
6386 && (GET_MODE_CLASS (mode) == MODE_INT
6387 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6388 && REG_P (x)
6389 && REGNO (x) == REGNO (reg)
6390 && GET_MODE (x) == mode
6391 && item)
6393 machine_mode indmode
6394 = TYPE_MODE (TREE_TYPE (argtype));
6395 rtx mem = gen_rtx_MEM (indmode, x);
6396 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6397 if (val && cselib_preserved_value_p (val))
6399 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6400 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6401 call_arguments);
6403 else
6405 struct elt_loc_list *l;
6406 tree initial;
6408 /* Try harder: when passing the address of a constant-pool
6409 integer, its value can easily be read back. */
6410 item = XEXP (item, 1);
6411 if (GET_CODE (item) == SUBREG)
6412 item = SUBREG_REG (item);
6413 gcc_assert (GET_CODE (item) == VALUE);
6414 val = CSELIB_VAL_PTR (item);
6415 for (l = val->locs; l; l = l->next)
6416 if (GET_CODE (l->loc) == SYMBOL_REF
6417 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6418 && SYMBOL_REF_DECL (l->loc)
6419 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6421 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6422 if (tree_fits_shwi_p (initial))
6424 item = GEN_INT (tree_to_shwi (initial));
6425 item = gen_rtx_CONCAT (indmode, mem, item);
6426 call_arguments
6427 = gen_rtx_EXPR_LIST (VOIDmode, item,
6428 call_arguments);
6430 break;
6434 targetm.calls.function_arg_advance (args_so_far, mode,
6435 argtype, true);
6436 t = TREE_CHAIN (t);
6440 /* Add debug arguments. */
6441 if (fndecl
6442 && TREE_CODE (fndecl) == FUNCTION_DECL
6443 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6445 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6446 if (debug_args)
6448 unsigned int ix;
6449 tree param;
6450 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6452 rtx item;
6453 tree dtemp = (**debug_args)[ix + 1];
6454 machine_mode mode = DECL_MODE (dtemp);
6455 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6456 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6457 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6458 call_arguments);
6463 /* Reverse call_arguments chain. */
6464 prev = NULL_RTX;
6465 for (cur = call_arguments; cur; cur = next)
6467 next = XEXP (cur, 1);
6468 XEXP (cur, 1) = prev;
6469 prev = cur;
6471 call_arguments = prev;
6473 x = get_call_rtx_from (insn);
6474 if (x)
6476 x = XEXP (XEXP (x, 0), 0);
6477 if (GET_CODE (x) == SYMBOL_REF)
6478 /* Don't record anything. */;
6479 else if (CONSTANT_P (x))
6481 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6482 pc_rtx, x);
6483 call_arguments
6484 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6486 else
6488 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6489 if (val && cselib_preserved_value_p (val))
6491 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6492 call_arguments
6493 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6497 if (this_arg)
6499 machine_mode mode
6500 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6501 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6502 HOST_WIDE_INT token
6503 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6504 if (token)
6505 clobbered = plus_constant (mode, clobbered,
6506 token * GET_MODE_SIZE (mode));
6507 clobbered = gen_rtx_MEM (mode, clobbered);
6508 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6509 call_arguments
6510 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
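/* Illustrative sketch, not part of the pass: the chain reversal a few
   lines above is the standard in-place reversal of a singly linked
   list.  In plain C, with a hypothetical `node' type:  */
#if 0
#include <stddef.h>

struct node { struct node *next; int payload; };

/* Reverse LIST in place and return the new head: O(n) time, O(1)
   extra space.  */
static struct node *
reverse_list (struct node *list)
{
  struct node *prev = NULL, *cur, *next;

  for (cur = list; cur; cur = next)
    {
      next = cur->next;		/* Save the rest of the list.  */
      cur->next = prev;		/* Point this node backwards.  */
      prev = cur;		/* Grow the reversed prefix.  */
    }
  return prev;			/* The old tail is the new head.  */
}
#endif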
6514 /* Callback for cselib_record_sets_hook that records the uses and
6515 stores in an insn as micro operations, after cselib_record_sets has
6516 analyzed the sets in the insn but before it modifies the stored
6517 values in its internal tables. It may also be reached without
6518 cselib_record_sets calling it (when we're not doing cselib in the
6519 first place), in which case SETS and N_SETS will be 0. */
6521 static void
6522 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6524 basic_block bb = BLOCK_FOR_INSN (insn);
6525 int n1, n2;
6526 struct count_use_info cui;
6527 micro_operation *mos;
6529 cselib_hook_called = true;
6531 cui.insn = insn;
6532 cui.bb = bb;
6533 cui.sets = sets;
6534 cui.n_sets = n_sets;
6536 n1 = VTI (bb)->mos.length ();
6537 cui.store_p = false;
6538 note_uses (&PATTERN (insn), add_uses_1, &cui);
6539 n2 = VTI (bb)->mos.length () - 1;
6540 mos = VTI (bb)->mos.address ();
6542 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6543 MO_VAL_LOC last. */
6544 while (n1 < n2)
6546 while (n1 < n2 && mos[n1].type == MO_USE)
6547 n1++;
6548 while (n1 < n2 && mos[n2].type != MO_USE)
6549 n2--;
6550 if (n1 < n2)
6551 std::swap (mos[n1], mos[n2]);
6554 n2 = VTI (bb)->mos.length () - 1;
6555 while (n1 < n2)
6557 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6558 n1++;
6559 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6560 n2--;
6561 if (n1 < n2)
6562 std::swap (mos[n1], mos[n2]);
6565 if (CALL_P (insn))
6567 micro_operation mo;
6569 mo.type = MO_CALL;
6570 mo.insn = insn;
6571 mo.u.loc = call_arguments;
6572 call_arguments = NULL_RTX;
6574 if (dump_file && (dump_flags & TDF_DETAILS))
6575 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6576 VTI (bb)->mos.safe_push (mo);
6579 n1 = VTI (bb)->mos.length ();
6580 /* This will record NEXT_INSN (insn), such that we can
6581 insert notes before it without worrying about any
6582 notes that MO_USEs might emit after the insn. */
6583 cui.store_p = true;
6584 note_stores (PATTERN (insn), add_stores, &cui);
6585 n2 = VTI (bb)->mos.length () - 1;
6586 mos = VTI (bb)->mos.address ();
6588 /* Order the MO_VAL_USEs first (note_stores does nothing
6589 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6590 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6591 while (n1 < n2)
6593 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6594 n1++;
6595 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6596 n2--;
6597 if (n1 < n2)
6598 std::swap (mos[n1], mos[n2]);
6601 n2 = VTI (bb)->mos.length () - 1;
6602 while (n1 < n2)
6604 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6605 n1++;
6606 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6607 n2--;
6608 if (n1 < n2)
6609 std::swap (mos[n1], mos[n2]);
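/* Illustrative sketch, not part of the pass: the ordering loops above
   are two-pointer partitions.  N1 advances past elements already in
   the first class, N2 retreats past elements already in the second,
   and the two misplaced elements are swapped.  A stand-alone version
   over ints, with a hypothetical IS_FIRST_CLASS predicate standing in
   for the mos[i].type tests (call with N1 = 0, N2 = length - 1):  */
#if 0
#include <stdbool.h>

static bool
is_first_class (int x)
{
  return x == 0;	/* Stand-in for e.g. mos[n].type == MO_USE.  */
}

static void
partition (int *a, int n1, int n2)
{
  while (n1 < n2)
    {
      while (n1 < n2 && is_first_class (a[n1]))
	n1++;		/* A[N1] is the leftmost misplaced element.  */
      while (n1 < n2 && !is_first_class (a[n2]))
	n2--;		/* A[N2] is the rightmost misplaced element.  */
      if (n1 < n2)
	{
	  int tmp = a[n1];
	  a[n1] = a[n2];
	  a[n2] = tmp;
	}
    }
}
#endif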
6613 static enum var_init_status
6614 find_src_status (dataflow_set *in, rtx src)
6616 tree decl = NULL_TREE;
6617 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6619 if (! flag_var_tracking_uninit)
6620 status = VAR_INIT_STATUS_INITIALIZED;
6622 if (src && REG_P (src))
6623 decl = var_debug_decl (REG_EXPR (src));
6624 else if (src && MEM_P (src))
6625 decl = var_debug_decl (MEM_EXPR (src));
6627 if (src && decl)
6628 status = get_init_value (in, src, dv_from_decl (decl));
6630 return status;
6633 /* SRC is the source of an assignment. Use SET to try to find what
6634 was ultimately assigned to SRC. Return that value if known,
6635 otherwise return SRC itself. */
6637 static rtx
6638 find_src_set_src (dataflow_set *set, rtx src)
6640 tree decl = NULL_TREE; /* The variable being copied around. */
6641 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6642 variable var;
6643 location_chain nextp;
6644 int i;
6645 bool found;
6647 if (src && REG_P (src))
6648 decl = var_debug_decl (REG_EXPR (src));
6649 else if (src && MEM_P (src))
6650 decl = var_debug_decl (MEM_EXPR (src));
6652 if (src && decl)
6654 decl_or_value dv = dv_from_decl (decl);
6656 var = shared_hash_find (set->vars, dv);
6657 if (var)
6659 found = false;
6660 for (i = 0; i < var->n_var_parts && !found; i++)
6661 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6662 nextp = nextp->next)
6663 if (rtx_equal_p (nextp->loc, src))
6665 set_src = nextp->set_src;
6666 found = true;
6672 return set_src;
6675 /* Compute the changes of variable locations in the basic block BB. */
6677 static bool
6678 compute_bb_dataflow (basic_block bb)
6680 unsigned int i;
6681 micro_operation *mo;
6682 bool changed;
6683 dataflow_set old_out;
6684 dataflow_set *in = &VTI (bb)->in;
6685 dataflow_set *out = &VTI (bb)->out;
6687 dataflow_set_init (&old_out);
6688 dataflow_set_copy (&old_out, out);
6689 dataflow_set_copy (out, in);
6691 if (MAY_HAVE_DEBUG_INSNS)
6692 local_get_addr_cache = new hash_map<rtx, rtx>;
6694 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6696 rtx_insn *insn = mo->insn;
6698 switch (mo->type)
6700 case MO_CALL:
6701 dataflow_set_clear_at_call (out);
6702 break;
6704 case MO_USE:
6706 rtx loc = mo->u.loc;
6708 if (REG_P (loc))
6709 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6710 else if (MEM_P (loc))
6711 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6713 break;
6715 case MO_VAL_LOC:
6717 rtx loc = mo->u.loc;
6718 rtx val, vloc;
6719 tree var;
6721 if (GET_CODE (loc) == CONCAT)
6723 val = XEXP (loc, 0);
6724 vloc = XEXP (loc, 1);
6726 else
6728 val = NULL_RTX;
6729 vloc = loc;
6732 var = PAT_VAR_LOCATION_DECL (vloc);
6734 clobber_variable_part (out, NULL_RTX,
6735 dv_from_decl (var), 0, NULL_RTX);
6736 if (val)
6738 if (VAL_NEEDS_RESOLUTION (loc))
6739 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6740 set_variable_part (out, val, dv_from_decl (var), 0,
6741 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6742 INSERT);
6744 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6745 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6746 dv_from_decl (var), 0,
6747 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6748 INSERT);
6750 break;
6752 case MO_VAL_USE:
6754 rtx loc = mo->u.loc;
6755 rtx val, vloc, uloc;
6757 vloc = uloc = XEXP (loc, 1);
6758 val = XEXP (loc, 0);
6760 if (GET_CODE (val) == CONCAT)
6762 uloc = XEXP (val, 1);
6763 val = XEXP (val, 0);
6766 if (VAL_NEEDS_RESOLUTION (loc))
6767 val_resolve (out, val, vloc, insn);
6768 else
6769 val_store (out, val, uloc, insn, false);
6771 if (VAL_HOLDS_TRACK_EXPR (loc))
6773 if (GET_CODE (uloc) == REG)
6774 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6775 NULL);
6776 else if (GET_CODE (uloc) == MEM)
6777 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6778 NULL);
6781 break;
6783 case MO_VAL_SET:
6785 rtx loc = mo->u.loc;
6786 rtx val, vloc, uloc;
6787 rtx dstv, srcv;
6789 vloc = loc;
6790 uloc = XEXP (vloc, 1);
6791 val = XEXP (vloc, 0);
6792 vloc = uloc;
6794 if (GET_CODE (uloc) == SET)
6796 dstv = SET_DEST (uloc);
6797 srcv = SET_SRC (uloc);
6799 else
6801 dstv = uloc;
6802 srcv = NULL;
6805 if (GET_CODE (val) == CONCAT)
6807 dstv = vloc = XEXP (val, 1);
6808 val = XEXP (val, 0);
6811 if (GET_CODE (vloc) == SET)
6813 srcv = SET_SRC (vloc);
6815 gcc_assert (val != srcv);
6816 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6818 dstv = vloc = SET_DEST (vloc);
6820 if (VAL_NEEDS_RESOLUTION (loc))
6821 val_resolve (out, val, srcv, insn);
6823 else if (VAL_NEEDS_RESOLUTION (loc))
6825 gcc_assert (GET_CODE (uloc) == SET
6826 && GET_CODE (SET_SRC (uloc)) == REG);
6827 val_resolve (out, val, SET_SRC (uloc), insn);
6830 if (VAL_HOLDS_TRACK_EXPR (loc))
6832 if (VAL_EXPR_IS_CLOBBERED (loc))
6834 if (REG_P (uloc))
6835 var_reg_delete (out, uloc, true);
6836 else if (MEM_P (uloc))
6838 gcc_assert (MEM_P (dstv));
6839 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6840 var_mem_delete (out, dstv, true);
6843 else
6845 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6846 rtx src = NULL, dst = uloc;
6847 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6849 if (GET_CODE (uloc) == SET)
6851 src = SET_SRC (uloc);
6852 dst = SET_DEST (uloc);
6855 if (copied_p)
6857 if (flag_var_tracking_uninit)
6859 status = find_src_status (in, src);
6861 if (status == VAR_INIT_STATUS_UNKNOWN)
6862 status = find_src_status (out, src);
6865 src = find_src_set_src (in, src);
6868 if (REG_P (dst))
6869 var_reg_delete_and_set (out, dst, !copied_p,
6870 status, srcv);
6871 else if (MEM_P (dst))
6873 gcc_assert (MEM_P (dstv));
6874 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6875 var_mem_delete_and_set (out, dstv, !copied_p,
6876 status, srcv);
6880 else if (REG_P (uloc))
6881 var_regno_delete (out, REGNO (uloc));
6882 else if (MEM_P (uloc))
6884 gcc_checking_assert (GET_CODE (vloc) == MEM);
6885 gcc_checking_assert (dstv == vloc);
6886 if (dstv != vloc)
6887 clobber_overlapping_mems (out, vloc);
6890 val_store (out, val, dstv, insn, true);
6892 break;
6894 case MO_SET:
6896 rtx loc = mo->u.loc;
6897 rtx set_src = NULL;
6899 if (GET_CODE (loc) == SET)
6901 set_src = SET_SRC (loc);
6902 loc = SET_DEST (loc);
6905 if (REG_P (loc))
6906 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6907 set_src);
6908 else if (MEM_P (loc))
6909 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6910 set_src);
6912 break;
6914 case MO_COPY:
6916 rtx loc = mo->u.loc;
6917 enum var_init_status src_status;
6918 rtx set_src = NULL;
6920 if (GET_CODE (loc) == SET)
6922 set_src = SET_SRC (loc);
6923 loc = SET_DEST (loc);
6926 if (! flag_var_tracking_uninit)
6927 src_status = VAR_INIT_STATUS_INITIALIZED;
6928 else
6930 src_status = find_src_status (in, set_src);
6932 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6933 src_status = find_src_status (out, set_src);
6936 set_src = find_src_set_src (in, set_src);
6938 if (REG_P (loc))
6939 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6940 else if (MEM_P (loc))
6941 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6943 break;
6945 case MO_USE_NO_VAR:
6947 rtx loc = mo->u.loc;
6949 if (REG_P (loc))
6950 var_reg_delete (out, loc, false);
6951 else if (MEM_P (loc))
6952 var_mem_delete (out, loc, false);
6954 break;
6956 case MO_CLOBBER:
6958 rtx loc = mo->u.loc;
6960 if (REG_P (loc))
6961 var_reg_delete (out, loc, true);
6962 else if (MEM_P (loc))
6963 var_mem_delete (out, loc, true);
6965 break;
6967 case MO_ADJUST:
6968 out->stack_adjust += mo->u.adjust;
6969 break;
6973 if (MAY_HAVE_DEBUG_INSNS)
6975 delete local_get_addr_cache;
6976 local_get_addr_cache = NULL;
6978 dataflow_set_equiv_regs (out);
6979 shared_hash_htab (out->vars)
6980 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6981 shared_hash_htab (out->vars)
6982 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6983 #if ENABLE_CHECKING
6984 shared_hash_htab (out->vars)
6985 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6986 #endif
6988 changed = dataflow_set_different (&old_out, out);
6989 dataflow_set_destroy (&old_out);
6990 return changed;
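/* Illustrative sketch, not part of the pass: compute_bb_dataflow is
   the transfer function of the analysis.  OUT is seeded from IN, each
   micro operation updates it in order, and the caller needs to know
   whether OUT changed.  The same shape over plain gen/kill bit sets
   (hypothetical types; the real pass threads much richer state):  */
#if 0
#include <stdbool.h>
#include <string.h>

#define NWORDS 4

struct bset { unsigned long w[NWORDS]; };
struct mop { struct bset kill, gen; };	/* One "micro operation".  */

static void
transfer (struct bset *out, const struct bset *in,
	  const struct mop *ops, int n_ops)
{
  *out = *in;				/* OUT starts as a copy of IN.  */
  for (int i = 0; i < n_ops; i++)	/* Apply each op in order.  */
    for (int j = 0; j < NWORDS; j++)
      out->w[j] = (out->w[j] & ~ops[i].kill.w[j]) | ops[i].gen.w[j];
}

static bool
set_changed (const struct bset *old_out, const struct bset *out)
{
  return memcmp (old_out->w, out->w, sizeof out->w) != 0;
}
#endif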
6993 /* Find the locations of variables in the whole function. */
6995 static bool
6996 vt_find_locations (void)
6998 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
6999 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7000 sbitmap visited, in_worklist, in_pending;
7001 basic_block bb;
7002 edge e;
7003 int *bb_order;
7004 int *rc_order;
7005 int i;
7006 int htabsz = 0;
7007 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7008 bool success = true;
7010 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7011 /* Compute the reverse completion order of a depth-first search of
7012 the CFG so that the data-flow analysis converges faster. */
7013 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7014 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7015 pre_and_rev_post_order_compute (NULL, rc_order, false);
7016 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7017 bb_order[rc_order[i]] = i;
7018 free (rc_order);
7020 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7021 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7022 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7023 bitmap_clear (in_worklist);
7025 FOR_EACH_BB_FN (bb, cfun)
7026 pending->insert (bb_order[bb->index], bb);
7027 bitmap_ones (in_pending);
7029 while (success && !pending->empty ())
7031 std::swap (worklist, pending);
7032 std::swap (in_worklist, in_pending);
7034 bitmap_clear (visited);
7036 while (!worklist->empty ())
7038 bb = worklist->extract_min ();
7039 bitmap_clear_bit (in_worklist, bb->index);
7040 gcc_assert (!bitmap_bit_p (visited, bb->index));
7041 if (!bitmap_bit_p (visited, bb->index))
7043 bool changed;
7044 edge_iterator ei;
7045 int oldinsz, oldoutsz;
7047 bitmap_set_bit (visited, bb->index);
7049 if (VTI (bb)->in.vars)
7051 htabsz
7052 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7053 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7054 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7055 oldoutsz
7056 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7058 else
7059 oldinsz = oldoutsz = 0;
7061 if (MAY_HAVE_DEBUG_INSNS)
7063 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7064 bool first = true, adjust = false;
7066 /* Calculate the IN set as the intersection of
7067 predecessor OUT sets. */
7069 dataflow_set_clear (in);
7070 dst_can_be_shared = true;
7072 FOR_EACH_EDGE (e, ei, bb->preds)
7073 if (!VTI (e->src)->flooded)
7074 gcc_assert (bb_order[bb->index]
7075 <= bb_order[e->src->index]);
7076 else if (first)
7078 dataflow_set_copy (in, &VTI (e->src)->out);
7079 first_out = &VTI (e->src)->out;
7080 first = false;
7082 else
7084 dataflow_set_merge (in, &VTI (e->src)->out);
7085 adjust = true;
7088 if (adjust)
7090 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7091 #if ENABLE_CHECKING
7092 /* Merge and merge_adjust should keep entries in
7093 canonical order. */
7094 shared_hash_htab (in->vars)
7095 ->traverse <dataflow_set *,
7096 canonicalize_loc_order_check> (in);
7097 #endif
7098 if (dst_can_be_shared)
7100 shared_hash_destroy (in->vars);
7101 in->vars = shared_hash_copy (first_out->vars);
7105 VTI (bb)->flooded = true;
7107 else
7109 /* Calculate the IN set as the union of predecessor OUT sets. */
7110 dataflow_set_clear (&VTI (bb)->in);
7111 FOR_EACH_EDGE (e, ei, bb->preds)
7112 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7115 changed = compute_bb_dataflow (bb);
7116 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7117 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7119 if (htabmax && htabsz > htabmax)
7121 if (MAY_HAVE_DEBUG_INSNS)
7122 inform (DECL_SOURCE_LOCATION (cfun->decl),
7123 "variable tracking size limit exceeded with "
7124 "-fvar-tracking-assignments, retrying without");
7125 else
7126 inform (DECL_SOURCE_LOCATION (cfun->decl),
7127 "variable tracking size limit exceeded");
7128 success = false;
7129 break;
7132 if (changed)
7134 FOR_EACH_EDGE (e, ei, bb->succs)
7136 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7137 continue;
7139 if (bitmap_bit_p (visited, e->dest->index))
7141 if (!bitmap_bit_p (in_pending, e->dest->index))
7143 /* Send E->DEST to next round. */
7144 bitmap_set_bit (in_pending, e->dest->index);
7145 pending->insert (bb_order[e->dest->index],
7146 e->dest);
7149 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7151 /* Add E->DEST to current round. */
7152 bitmap_set_bit (in_worklist, e->dest->index);
7153 worklist->insert (bb_order[e->dest->index],
7154 e->dest);
7159 if (dump_file)
7160 fprintf (dump_file,
7161 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7162 bb->index,
7163 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7164 oldinsz,
7165 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7166 oldoutsz,
7167 (int)worklist->nodes (), (int)pending->nodes (),
7168 htabsz);
7170 if (dump_file && (dump_flags & TDF_DETAILS))
7172 fprintf (dump_file, "BB %i IN:\n", bb->index);
7173 dump_dataflow_set (&VTI (bb)->in);
7174 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7175 dump_dataflow_set (&VTI (bb)->out);
7181 if (success && MAY_HAVE_DEBUG_INSNS)
7182 FOR_EACH_BB_FN (bb, cfun)
7183 gcc_assert (VTI (bb)->flooded);
7185 free (bb_order);
7186 delete worklist;
7187 delete pending;
7188 sbitmap_free (visited);
7189 sbitmap_free (in_worklist);
7190 sbitmap_free (in_pending);
7192 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7193 return success;
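/* Illustrative sketch, not part of the pass: vt_find_locations drives
   the iteration with two priority queues keyed by reverse-postorder
   number.  The current round (WORKLIST) is drained in RPO order;
   successors whose input may still change are re-queued, either into
   the current round or, if already visited this round, into the next
   one (PENDING).  A stand-alone version over a tiny array-based CFG,
   with hypothetical TRANSFER and SUCCS callbacks:  */
#if 0
#include <stdbool.h>
#include <string.h>

#define NBB 64

extern int rpo[NBB];			/* Reverse-postorder numbers.  */
extern bool transfer (int bb);		/* Recompute OUT (bb); did it change?  */
extern int succs (int bb, int *out);	/* Fill OUT; return the count.  */

static void
iterate (void)
{
  bool worklist[NBB], pending[NBB], visited[NBB];
  bool more = true;

  memset (pending, true, sizeof pending);	/* Seed with every block.  */
  memset (worklist, false, sizeof worklist);
  while (more)
    {
      /* The next round becomes the current one.  */
      memcpy (worklist, pending, sizeof worklist);
      memset (pending, false, sizeof pending);
      memset (visited, false, sizeof visited);
      more = false;
      for (;;)
	{
	  /* Extract the unvisited block with the lowest RPO number,
	     standing in for the heap extract_min above.  */
	  int bb = -1;
	  for (int i = 0; i < NBB; i++)
	    if (worklist[i] && (bb < 0 || rpo[i] < rpo[bb]))
	      bb = i;
	  if (bb < 0)
	    break;
	  worklist[bb] = false;
	  visited[bb] = true;
	  if (transfer (bb))
	    {
	      int s[NBB], n = succs (bb, s);
	      for (int i = 0; i < n; i++)
		if (visited[s[i]])
		  pending[s[i]] = more = true;	/* Defer to next round.  */
		else
		  worklist[s[i]] = true;	/* Still this round.  */
	    }
	}
    }
}
#endif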
7196 /* Print the contents of LIST to the dump file. */
7198 static void
7199 dump_attrs_list (attrs list)
7201 for (; list; list = list->next)
7203 if (dv_is_decl_p (list->dv))
7204 print_mem_expr (dump_file, dv_as_decl (list->dv));
7205 else
7206 print_rtl_single (dump_file, dv_as_value (list->dv));
7207 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7209 fprintf (dump_file, "\n");
7212 /* Print the information about variable *SLOT to dump file. */
7215 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7217 variable var = *slot;
7219 dump_var (var);
7221 /* Continue traversing the hash table. */
7222 return 1;
7225 /* Print the information about variable VAR to dump file. */
7227 static void
7228 dump_var (variable var)
7230 int i;
7231 location_chain node;
7233 if (dv_is_decl_p (var->dv))
7235 const_tree decl = dv_as_decl (var->dv);
7237 if (DECL_NAME (decl))
7239 fprintf (dump_file, " name: %s",
7240 IDENTIFIER_POINTER (DECL_NAME (decl)));
7241 if (dump_flags & TDF_UID)
7242 fprintf (dump_file, "D.%u", DECL_UID (decl));
7244 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7245 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7246 else
7247 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7248 fprintf (dump_file, "\n");
7250 else
7252 fputc (' ', dump_file);
7253 print_rtl_single (dump_file, dv_as_value (var->dv));
7256 for (i = 0; i < var->n_var_parts; i++)
7258 fprintf (dump_file, " offset %ld\n",
7259 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7260 for (node = var->var_part[i].loc_chain; node; node = node->next)
7262 fprintf (dump_file, " ");
7263 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7264 fprintf (dump_file, "[uninit]");
7265 print_rtl_single (dump_file, node->loc);
7270 /* Print the information about variables from hash table VARS to dump file. */
7272 static void
7273 dump_vars (variable_table_type *vars)
7275 if (vars->elements () > 0)
7277 fprintf (dump_file, "Variables:\n");
7278 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7282 /* Print the dataflow set SET to dump file. */
7284 static void
7285 dump_dataflow_set (dataflow_set *set)
7287 int i;
7289 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7290 set->stack_adjust);
7291 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7293 if (set->regs[i])
7295 fprintf (dump_file, "Reg %d:", i);
7296 dump_attrs_list (set->regs[i]);
7299 dump_vars (shared_hash_htab (set->vars));
7300 fprintf (dump_file, "\n");
7303 /* Print the IN and OUT sets for each basic block to dump file. */
7305 static void
7306 dump_dataflow_sets (void)
7308 basic_block bb;
7310 FOR_EACH_BB_FN (bb, cfun)
7312 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7313 fprintf (dump_file, "IN:\n");
7314 dump_dataflow_set (&VTI (bb)->in);
7315 fprintf (dump_file, "OUT:\n");
7316 dump_dataflow_set (&VTI (bb)->out);
7320 /* Return the variable for DV in dropped_values, inserting one if
7321 requested with INSERT. */
7323 static inline variable
7324 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7326 variable_def **slot;
7327 variable empty_var;
7328 onepart_enum_t onepart;
7330 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7332 if (!slot)
7333 return NULL;
7335 if (*slot)
7336 return *slot;
7338 gcc_checking_assert (insert == INSERT);
7340 onepart = dv_onepart_p (dv);
7342 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7344 empty_var = onepart_pool (onepart).allocate ();
7345 empty_var->dv = dv;
7346 empty_var->refcount = 1;
7347 empty_var->n_var_parts = 0;
7348 empty_var->onepart = onepart;
7349 empty_var->in_changed_variables = false;
7350 empty_var->var_part[0].loc_chain = NULL;
7351 empty_var->var_part[0].cur_loc = NULL;
7352 VAR_LOC_1PAUX (empty_var) = NULL;
7353 set_dv_changed (dv, true);
7355 *slot = empty_var;
7357 return empty_var;
7360 /* Recover the one-part aux from dropped_values. */
7362 static struct onepart_aux *
7363 recover_dropped_1paux (variable var)
7365 variable dvar;
7367 gcc_checking_assert (var->onepart);
7369 if (VAR_LOC_1PAUX (var))
7370 return VAR_LOC_1PAUX (var);
7372 if (var->onepart == ONEPART_VDECL)
7373 return NULL;
7375 dvar = variable_from_dropped (var->dv, NO_INSERT);
7377 if (!dvar)
7378 return NULL;
7380 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7381 VAR_LOC_1PAUX (dvar) = NULL;
7383 return VAR_LOC_1PAUX (var);
7386 /* Add variable VAR to the hash table of changed variables, and
7387 if it has no locations, delete it from SET's hash table. */
7389 static void
7390 variable_was_changed (variable var, dataflow_set *set)
7392 hashval_t hash = dv_htab_hash (var->dv);
7394 if (emit_notes)
7396 variable_def **slot;
7398 /* Remember this decl or VALUE has been added to changed_variables. */
7399 set_dv_changed (var->dv, true);
7401 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7403 if (*slot)
7405 variable old_var = *slot;
7406 gcc_assert (old_var->in_changed_variables);
7407 old_var->in_changed_variables = false;
7408 if (var != old_var && var->onepart)
7410 /* Restore the auxiliary info from an empty variable
7411 previously created for changed_variables, so it is
7412 not lost. */
7413 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7414 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7415 VAR_LOC_1PAUX (old_var) = NULL;
7417 variable_htab_free (*slot);
7420 if (set && var->n_var_parts == 0)
7422 onepart_enum_t onepart = var->onepart;
7423 variable empty_var = NULL;
7424 variable_def **dslot = NULL;
7426 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7428 dslot = dropped_values->find_slot_with_hash (var->dv,
7429 dv_htab_hash (var->dv),
7430 INSERT);
7431 empty_var = *dslot;
7433 if (empty_var)
7435 gcc_checking_assert (!empty_var->in_changed_variables);
7436 if (!VAR_LOC_1PAUX (var))
7438 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7439 VAR_LOC_1PAUX (empty_var) = NULL;
7441 else
7442 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7446 if (!empty_var)
7448 empty_var = onepart_pool (onepart).allocate ();
7449 empty_var->dv = var->dv;
7450 empty_var->refcount = 1;
7451 empty_var->n_var_parts = 0;
7452 empty_var->onepart = onepart;
7453 if (dslot)
7455 empty_var->refcount++;
7456 *dslot = empty_var;
7459 else
7460 empty_var->refcount++;
7461 empty_var->in_changed_variables = true;
7462 *slot = empty_var;
7463 if (onepart)
7465 empty_var->var_part[0].loc_chain = NULL;
7466 empty_var->var_part[0].cur_loc = NULL;
7467 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7468 VAR_LOC_1PAUX (var) = NULL;
7470 goto drop_var;
7472 else
7474 if (var->onepart && !VAR_LOC_1PAUX (var))
7475 recover_dropped_1paux (var);
7476 var->refcount++;
7477 var->in_changed_variables = true;
7478 *slot = var;
7481 else
7483 gcc_assert (set);
7484 if (var->n_var_parts == 0)
7486 variable_def **slot;
7488 drop_var:
7489 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7490 if (slot)
7492 if (shared_hash_shared (set->vars))
7493 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7494 NO_INSERT);
7495 shared_hash_htab (set->vars)->clear_slot (slot);
7501 /* Look for the index in VAR->var_part corresponding to OFFSET.
7502 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7503 referenced int will be set to the index that the part has or should
7504 have, if it should be inserted. */
7506 static inline int
7507 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7508 int *insertion_point)
7510 int pos, low, high;
7512 if (var->onepart)
7514 if (offset != 0)
7515 return -1;
7517 if (insertion_point)
7518 *insertion_point = 0;
7520 return var->n_var_parts - 1;
7523 /* Find the location part. */
7524 low = 0;
7525 high = var->n_var_parts;
7526 while (low != high)
7528 pos = (low + high) / 2;
7529 if (VAR_PART_OFFSET (var, pos) < offset)
7530 low = pos + 1;
7531 else
7532 high = pos;
7534 pos = low;
7536 if (insertion_point)
7537 *insertion_point = pos;
7539 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7540 return pos;
7542 return -1;
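/* Illustrative sketch, not part of the pass: the loop above is a
   classic lower-bound binary search over the sorted VAR_PART_OFFSET
   array.  Stand-alone over longs:  */
#if 0
#include <stddef.h>

/* Return the first index in the sorted array A[0..N) whose value is
   >= KEY, i.e. the position KEY occupies or should be inserted at.  */
static size_t
lower_bound (const long *a, size_t n, long key)
{
  size_t low = 0, high = n;

  while (low != high)
    {
      size_t mid = low + (high - low) / 2;	/* Overflow-safe midpoint.  */
      if (a[mid] < key)
	low = mid + 1;		/* KEY lies strictly to the right.  */
      else
	high = mid;		/* A[MID] remains a candidate.  */
    }
  return low;
}
#endif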
7545 static variable_def **
7546 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7547 decl_or_value dv, HOST_WIDE_INT offset,
7548 enum var_init_status initialized, rtx set_src)
7550 int pos;
7551 location_chain node, next;
7552 location_chain *nextp;
7553 variable var;
7554 onepart_enum_t onepart;
7556 var = *slot;
7558 if (var)
7559 onepart = var->onepart;
7560 else
7561 onepart = dv_onepart_p (dv);
7563 gcc_checking_assert (offset == 0 || !onepart);
7564 gcc_checking_assert (loc != dv_as_opaque (dv));
7566 if (! flag_var_tracking_uninit)
7567 initialized = VAR_INIT_STATUS_INITIALIZED;
7569 if (!var)
7571 /* Create new variable information. */
7572 var = onepart_pool (onepart).allocate ();
7573 var->dv = dv;
7574 var->refcount = 1;
7575 var->n_var_parts = 1;
7576 var->onepart = onepart;
7577 var->in_changed_variables = false;
7578 if (var->onepart)
7579 VAR_LOC_1PAUX (var) = NULL;
7580 else
7581 VAR_PART_OFFSET (var, 0) = offset;
7582 var->var_part[0].loc_chain = NULL;
7583 var->var_part[0].cur_loc = NULL;
7584 *slot = var;
7585 pos = 0;
7586 nextp = &var->var_part[0].loc_chain;
7588 else if (onepart)
7590 int r = -1, c = 0;
7592 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7594 pos = 0;
7596 if (GET_CODE (loc) == VALUE)
7598 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7599 nextp = &node->next)
7600 if (GET_CODE (node->loc) == VALUE)
7602 if (node->loc == loc)
7604 r = 0;
7605 break;
7607 if (canon_value_cmp (node->loc, loc))
7608 c++;
7609 else
7611 r = 1;
7612 break;
7615 else if (REG_P (node->loc) || MEM_P (node->loc))
7616 c++;
7617 else
7619 r = 1;
7620 break;
7623 else if (REG_P (loc))
7625 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7626 nextp = &node->next)
7627 if (REG_P (node->loc))
7629 if (REGNO (node->loc) < REGNO (loc))
7630 c++;
7631 else
7633 if (REGNO (node->loc) == REGNO (loc))
7634 r = 0;
7635 else
7636 r = 1;
7637 break;
7640 else
7642 r = 1;
7643 break;
7646 else if (MEM_P (loc))
7648 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7649 nextp = &node->next)
7650 if (REG_P (node->loc))
7651 c++;
7652 else if (MEM_P (node->loc))
7654 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7655 break;
7656 else
7657 c++;
7659 else
7661 r = 1;
7662 break;
7665 else
7666 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7667 nextp = &node->next)
7668 if ((r = loc_cmp (node->loc, loc)) >= 0)
7669 break;
7670 else
7671 c++;
7673 if (r == 0)
7674 return slot;
7676 if (shared_var_p (var, set->vars))
7678 slot = unshare_variable (set, slot, var, initialized);
7679 var = *slot;
7680 for (nextp = &var->var_part[0].loc_chain; c;
7681 nextp = &(*nextp)->next)
7682 c--;
7683 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7686 else
7688 int inspos = 0;
7690 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7692 pos = find_variable_location_part (var, offset, &inspos);
7694 if (pos >= 0)
7696 node = var->var_part[pos].loc_chain;
7698 if (node
7699 && ((REG_P (node->loc) && REG_P (loc)
7700 && REGNO (node->loc) == REGNO (loc))
7701 || rtx_equal_p (node->loc, loc)))
7703 /* LOC is at the beginning of the chain, so there is nothing
7704 to do. */
7705 if (node->init < initialized)
7706 node->init = initialized;
7707 if (set_src != NULL)
7708 node->set_src = set_src;
7710 return slot;
7712 else
7714 /* We have to make a copy of a shared variable. */
7715 if (shared_var_p (var, set->vars))
7717 slot = unshare_variable (set, slot, var, initialized);
7718 var = *slot;
7722 else
7724 /* We have not found the location part; a new one will be created. */
7726 /* We have to make a copy of the shared variable. */
7727 if (shared_var_p (var, set->vars))
7729 slot = unshare_variable (set, slot, var, initialized);
7730 var = *slot;
7733 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7734 so there are at most MAX_VAR_PARTS different offsets. */
7735 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7736 && (!var->n_var_parts || !onepart));
7738 /* We have to move the elements of the array starting at index
7739 INSPOS to the next position. */
7740 for (pos = var->n_var_parts; pos > inspos; pos--)
7741 var->var_part[pos] = var->var_part[pos - 1];
7743 var->n_var_parts++;
7744 gcc_checking_assert (!onepart);
7745 VAR_PART_OFFSET (var, pos) = offset;
7746 var->var_part[pos].loc_chain = NULL;
7747 var->var_part[pos].cur_loc = NULL;
7750 /* Delete the location from the list. */
7751 nextp = &var->var_part[pos].loc_chain;
7752 for (node = var->var_part[pos].loc_chain; node; node = next)
7754 next = node->next;
7755 if ((REG_P (node->loc) && REG_P (loc)
7756 && REGNO (node->loc) == REGNO (loc))
7757 || rtx_equal_p (node->loc, loc))
7759 /* Save these values, to assign to the new node, before
7760 deleting this one. */
7761 if (node->init > initialized)
7762 initialized = node->init;
7763 if (node->set_src != NULL && set_src == NULL)
7764 set_src = node->set_src;
7765 if (var->var_part[pos].cur_loc == node->loc)
7766 var->var_part[pos].cur_loc = NULL;
7767 delete node;
7768 *nextp = next;
7769 break;
7771 else
7772 nextp = &node->next;
7775 nextp = &var->var_part[pos].loc_chain;
7778 /* Add the location to the beginning. */
7779 node = new location_chain_def;
7780 node->loc = loc;
7781 node->init = initialized;
7782 node->set_src = set_src;
7783 node->next = *nextp;
7784 *nextp = node;
7786 /* If no location was emitted for this part yet, mark the variable as changed so one will be. */
7787 if (var->var_part[pos].cur_loc == NULL)
7788 variable_was_changed (var, set);
7790 return slot;
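/* Illustrative sketch, not part of the pass: set_slot_part splices
   into its location chains through a pointer-to-pointer cursor
   (NEXTP), which avoids special-casing insertion at the head.  The
   idiom stand-alone, for a sorted insert into a hypothetical list:  */
#if 0
#include <stdlib.h>

struct node { struct node *next; long key; };

static void
sorted_insert (struct node **headp, long key)
{
  struct node **nextp = headp;
  struct node *n;

  while (*nextp && (*nextp)->key < key)
    nextp = &(*nextp)->next;	/* Advance the cursor past smaller keys.  */
  n = malloc (sizeof *n);
  if (!n)
    abort ();
  n->key = key;
  n->next = *nextp;		/* Splice in before *NEXTP...  */
  *nextp = n;			/* ... whether that is the head or not.  */
}
#endif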
7793 /* Set the part of variable's location in the dataflow set SET. The
7794 variable part is specified by variable's declaration in DV and
7795 offset OFFSET and the part's location by LOC. IOPT should be
7796 NO_INSERT if the variable is known to be in SET already and the
7797 variable hash table must not be resized, and INSERT otherwise. */
7799 static void
7800 set_variable_part (dataflow_set *set, rtx loc,
7801 decl_or_value dv, HOST_WIDE_INT offset,
7802 enum var_init_status initialized, rtx set_src,
7803 enum insert_option iopt)
7805 variable_def **slot;
7807 if (iopt == NO_INSERT)
7808 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7809 else
7811 slot = shared_hash_find_slot (set->vars, dv);
7812 if (!slot)
7813 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7815 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7818 /* Remove all recorded register locations for the given variable part
7819 from dataflow set SET, except for those that are identical to LOC.
7820 The variable part is specified by variable's declaration or value
7821 DV and offset OFFSET. */
7823 static variable_def **
7824 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7825 HOST_WIDE_INT offset, rtx set_src)
7827 variable var = *slot;
7828 int pos = find_variable_location_part (var, offset, NULL);
7830 if (pos >= 0)
7832 location_chain node, next;
7834 /* Remove the register locations from the dataflow set. */
7835 next = var->var_part[pos].loc_chain;
7836 for (node = next; node; node = next)
7838 next = node->next;
7839 if (node->loc != loc
7840 && (!flag_var_tracking_uninit
7841 || !set_src
7842 || MEM_P (set_src)
7843 || !rtx_equal_p (set_src, node->set_src)))
7845 if (REG_P (node->loc))
7847 attrs anode, anext;
7848 attrs *anextp;
7850 /* Remove the variable part from the register's
7851 list, but preserve any other variable parts
7852 that might be regarded as live in that same
7853 register. */
7854 anextp = &set->regs[REGNO (node->loc)];
7855 for (anode = *anextp; anode; anode = anext)
7857 anext = anode->next;
7858 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7859 && anode->offset == offset)
7861 delete anode;
7862 *anextp = anext;
7864 else
7865 anextp = &anode->next;
7869 slot = delete_slot_part (set, node->loc, slot, offset);
7874 return slot;
7877 /* Remove all recorded register locations for the given variable part
7878 from dataflow set SET, except for those that are identical to LOC.
7879 The variable part is specified by variable's declaration or value
7880 DV and offset OFFSET. */
7882 static void
7883 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7884 HOST_WIDE_INT offset, rtx set_src)
7886 variable_def **slot;
7888 if (!dv_as_opaque (dv)
7889 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7890 return;
7892 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7893 if (!slot)
7894 return;
7896 clobber_slot_part (set, loc, slot, offset, set_src);
7899 /* Delete the part of variable's location from dataflow set SET. The
7900 variable part is specified by its SET->vars slot SLOT and offset
7901 OFFSET and the part's location by LOC. */
7903 static variable_def **
7904 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7905 HOST_WIDE_INT offset)
7907 variable var = *slot;
7908 int pos = find_variable_location_part (var, offset, NULL);
7910 if (pos >= 0)
7912 location_chain node, next;
7913 location_chain *nextp;
7914 bool changed;
7915 rtx cur_loc;
7917 if (shared_var_p (var, set->vars))
7919 /* If the variable's chain contains the location part, we have to
7920 make a copy of the variable. */
7921 for (node = var->var_part[pos].loc_chain; node;
7922 node = node->next)
7924 if ((REG_P (node->loc) && REG_P (loc)
7925 && REGNO (node->loc) == REGNO (loc))
7926 || rtx_equal_p (node->loc, loc))
7928 slot = unshare_variable (set, slot, var,
7929 VAR_INIT_STATUS_UNKNOWN);
7930 var = *slot;
7931 break;
7936 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7937 cur_loc = VAR_LOC_FROM (var);
7938 else
7939 cur_loc = var->var_part[pos].cur_loc;
7941 /* Delete the location part. */
7942 changed = false;
7943 nextp = &var->var_part[pos].loc_chain;
7944 for (node = *nextp; node; node = next)
7946 next = node->next;
7947 if ((REG_P (node->loc) && REG_P (loc)
7948 && REGNO (node->loc) == REGNO (loc))
7949 || rtx_equal_p (node->loc, loc))
7951 /* If we have deleted the location which was last emitted,
7952 we have to emit a new location, so add the variable to the
7953 set of changed variables. */
7954 if (cur_loc == node->loc)
7956 changed = true;
7957 var->var_part[pos].cur_loc = NULL;
7958 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7959 VAR_LOC_FROM (var) = NULL;
7961 delete node;
7962 *nextp = next;
7963 break;
7965 else
7966 nextp = &node->next;
7969 if (var->var_part[pos].loc_chain == NULL)
7971 changed = true;
7972 var->n_var_parts--;
7973 while (pos < var->n_var_parts)
7975 var->var_part[pos] = var->var_part[pos + 1];
7976 pos++;
7979 if (changed)
7980 variable_was_changed (var, set);
7983 return slot;
7986 /* Delete the part of variable's location from dataflow set SET. The
7987 variable part is specified by variable's declaration or value DV
7988 and offset OFFSET and the part's location by LOC. */
7990 static void
7991 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7992 HOST_WIDE_INT offset)
7994 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7995 if (!slot)
7996 return;
7998 delete_slot_part (set, loc, slot, offset);
8002 /* Structure for passing some other parameters to function
8003 vt_expand_loc_callback. */
8004 struct expand_loc_callback_data
8006 /* The variables and values active at this point. */
8007 variable_table_type *vars;
8009 /* Stack of values and debug_exprs under expansion, and their
8010 children. */
8011 auto_vec<rtx, 4> expanding;
8013 /* Stack of values and debug_exprs whose expansion hit recursion
8014 cycles. They will have VALUE_RECURSED_INTO marked when added to
8015 this list. This flag will be cleared if any of its dependencies
8016 resolves to a valid location. So, if the flag remains set at the
8017 end of the search, we know no valid location for this one can
8018 possibly exist. */
8019 auto_vec<rtx, 4> pending;
8021 /* The maximum depth among the sub-expressions under expansion.
8022 Zero indicates no expansion so far. */
8023 expand_depth depth;
8026 /* Allocate the one-part auxiliary data structure for VAR, with enough
8027 room for COUNT dependencies. */
8029 static void
8030 loc_exp_dep_alloc (variable var, int count)
8032 size_t allocsize;
8034 gcc_checking_assert (var->onepart);
8036 /* We can be called with COUNT == 0 to allocate the data structure
8037 without any dependencies, e.g. for the backlinks only. However,
8038 if we are specifying a COUNT, then the dependency list must have
8039 been emptied before. It would be possible to adjust pointers or
8040 force it empty here, but this is better done at an earlier point
8041 in the algorithm, so we instead leave an assertion to catch
8042 errors. */
8043 gcc_checking_assert (!count
8044 || VAR_LOC_DEP_VEC (var) == NULL
8045 || VAR_LOC_DEP_VEC (var)->is_empty ());
8047 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8048 return;
8050 allocsize = offsetof (struct onepart_aux, deps)
8051 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8053 if (VAR_LOC_1PAUX (var))
8055 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8056 VAR_LOC_1PAUX (var), allocsize);
8057 /* If the reallocation moves the onepaux structure, the
8058 back-pointer to BACKLINKS in the first list member will still
8059 point to its old location. Adjust it. */
8060 if (VAR_LOC_DEP_LST (var))
8061 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8063 else
8065 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8066 *VAR_LOC_DEP_LSTP (var) = NULL;
8067 VAR_LOC_FROM (var) = NULL;
8068 VAR_LOC_DEPTH (var).complexity = 0;
8069 VAR_LOC_DEPTH (var).entryvals = 0;
8071 VAR_LOC_DEP_VEC (var)->embedded_init (count);
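/* Illustrative sketch, not part of the pass: sizing the allocation
   with offsetof of the trailing member, as done above, is the usual
   way to allocate a header plus an embedded variable-length array in
   one block.  In plain C with a flexible array member (hypothetical
   `aux' type):  */
#if 0
#include <stddef.h>
#include <stdlib.h>

struct aux
{
  struct aux **backlink;	/* Fixed part of the structure.  */
  size_t n;			/* Number of trailing elements.  */
  int deps[];			/* Flexible array member (C99).  */
};

static struct aux *
aux_alloc (size_t count)
{
  /* offsetof accounts exactly for the fixed part plus any padding
     before the array, unlike sizeof (struct aux), which may include
     trailing padding.  */
  size_t sz = offsetof (struct aux, deps) + count * sizeof (int);
  struct aux *p = malloc (sz);

  if (p)
    p->n = count;
  return p;
}
#endif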
8074 /* Remove all entries from the vector of active dependencies of VAR,
8075 removing them from the back-links lists too. */
8077 static void
8078 loc_exp_dep_clear (variable var)
8080 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8082 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8083 if (led->next)
8084 led->next->pprev = led->pprev;
8085 if (led->pprev)
8086 *led->pprev = led->next;
8087 VAR_LOC_DEP_VEC (var)->pop ();
8091 /* Insert an active dependency from VAR on X to the vector of
8092 dependencies, and add the corresponding back-link to X's list of
8093 back-links in VARS. */
8095 static void
8096 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8098 decl_or_value dv;
8099 variable xvar;
8100 loc_exp_dep *led;
8102 dv = dv_from_rtx (x);
8104 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8105 an additional look up? */
8106 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8108 if (!xvar)
8110 xvar = variable_from_dropped (dv, NO_INSERT);
8111 gcc_checking_assert (xvar);
8114 /* No point in adding the same backlink more than once. This may
8115 arise if, say, the same value appears in two complex expressions in
8116 the same loc_list, or even more than once in a single
8117 expression. */
8118 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8119 return;
8121 if (var->onepart == NOT_ONEPART)
8122 led = new loc_exp_dep;
8123 else
8125 loc_exp_dep empty;
8126 memset (&empty, 0, sizeof (empty));
8127 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8128 led = &VAR_LOC_DEP_VEC (var)->last ();
8130 led->dv = var->dv;
8131 led->value = x;
8133 loc_exp_dep_alloc (xvar, 0);
8134 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8135 led->next = *led->pprev;
8136 if (led->next)
8137 led->next->pprev = &led->next;
8138 *led->pprev = led;
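/* Illustrative sketch, not part of the pass: the back-links use the
   "pprev" idiom seen just above.  Each node stores, instead of a
   pointer to the previous node, the address of whatever pointer
   points at it (the list head or the previous node's NEXT), so
   unlinking needs neither the head nor a special case for the first
   node.  Stand-alone:  */
#if 0
struct link
{
  struct link *next;
  struct link **pprev;	/* Either &head or &prev->next.  */
};

static void
link_insert (struct link **headp, struct link *l)
{
  l->pprev = headp;
  l->next = *headp;
  if (l->next)
    l->next->pprev = &l->next;	/* The old head is now second.  */
  *headp = l;
}

static void
link_remove (struct link *l)
{
  if (l->next)
    l->next->pprev = l->pprev;	/* Successor now hangs off our slot.  */
  *l->pprev = l->next;		/* Bypass L, wherever it was.  */
}
#endif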
8141 /* Create active dependencies of VAR on COUNT values starting at
8142 VALUE, and corresponding back-links to the entries in VARS. Return
8143 true if we found any pending-recursion results. */
8145 static bool
8146 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8147 variable_table_type *vars)
8149 bool pending_recursion = false;
8151 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8152 || VAR_LOC_DEP_VEC (var)->is_empty ());
8154 /* Set up all dependencies from last_child (as set up at the end of
8155 the loop above) to the end. */
8156 loc_exp_dep_alloc (var, count);
8158 while (count--)
8160 rtx x = *value++;
8162 if (!pending_recursion)
8163 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8165 loc_exp_insert_dep (var, x, vars);
8168 return pending_recursion;
8171 /* Notify the back-links of IVAR that are pending recursion that we
8172 have found a non-NIL value for it, so they are cleared for another
8173 attempt to compute a current location. */
8175 static void
8176 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8178 loc_exp_dep *led, *next;
8180 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8182 decl_or_value dv = led->dv;
8183 variable var;
8185 next = led->next;
8187 if (dv_is_value_p (dv))
8189 rtx value = dv_as_value (dv);
8191 /* If we have already resolved it, leave it alone. */
8192 if (!VALUE_RECURSED_INTO (value))
8193 continue;
8195 /* Check that VALUE_RECURSED_INTO, true from the test above,
8196 implies NO_LOC_P. */
8197 gcc_checking_assert (NO_LOC_P (value));
8199 /* We won't notify variables that are being expanded,
8200 because their dependency list is cleared before
8201 recursing. */
8202 NO_LOC_P (value) = false;
8203 VALUE_RECURSED_INTO (value) = false;
8205 gcc_checking_assert (dv_changed_p (dv));
8207 else
8209 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8210 if (!dv_changed_p (dv))
8211 continue;
8214 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8216 if (!var)
8217 var = variable_from_dropped (dv, NO_INSERT);
8219 if (var)
8220 notify_dependents_of_resolved_value (var, vars);
8222 if (next)
8223 next->pprev = led->pprev;
8224 if (led->pprev)
8225 *led->pprev = next;
8226 led->next = NULL;
8227 led->pprev = NULL;
8231 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8232 int max_depth, void *data);
8234 /* Return the combined depth, when one sub-expression evaluated to
8235 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8237 static inline expand_depth
8238 update_depth (expand_depth saved_depth, expand_depth best_depth)
8240 /* If we didn't find anything, stick with what we had. */
8241 if (!best_depth.complexity)
8242 return saved_depth;
8244 /* If we hadn't found anything before, use the depth of the current
8245 expression. Do NOT add one extra level; we want to compute the
8246 maximum depth among sub-expressions. We'll increment it later,
8247 if appropriate. */
8248 if (!saved_depth.complexity)
8249 return best_depth;
8251 /* Combine the entryval count so that regardless of which one we
8252 return, the entryval count is accurate. */
8253 best_depth.entryvals = saved_depth.entryvals
8254 = best_depth.entryvals + saved_depth.entryvals;
8256 if (saved_depth.complexity < best_depth.complexity)
8257 return best_depth;
8258 else
8259 return saved_depth;
8262 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8263 DATA for cselib expand callback. If PENDRECP is given, indicate in
8264 it whether any sub-expression couldn't be fully evaluated because
8265 it is pending recursion resolution. */
8267 static inline rtx
8268 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8270 struct expand_loc_callback_data *elcd
8271 = (struct expand_loc_callback_data *) data;
8272 location_chain loc, next;
8273 rtx result = NULL;
8274 int first_child, result_first_child, last_child;
8275 bool pending_recursion;
8276 rtx loc_from = NULL;
8277 struct elt_loc_list *cloc = NULL;
8278 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8279 int wanted_entryvals, found_entryvals = 0;
8281 /* Clear all backlinks pointing at this, so that we're not notified
8282 while we're active. */
8283 loc_exp_dep_clear (var);
8285 retry:
8286 if (var->onepart == ONEPART_VALUE)
8288 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8290 gcc_checking_assert (cselib_preserved_value_p (val));
8292 cloc = val->locs;
8295 first_child = result_first_child = last_child
8296 = elcd->expanding.length ();
8298 wanted_entryvals = found_entryvals;
8300 /* Attempt to expand each available location in turn. */
8301 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8302 loc || cloc; loc = next)
8304 result_first_child = last_child;
8306 if (!loc)
8308 loc_from = cloc->loc;
8309 next = loc;
8310 cloc = cloc->next;
8311 if (unsuitable_loc (loc_from))
8312 continue;
8314 else
8316 loc_from = loc->loc;
8317 next = loc->next;
8320 gcc_checking_assert (!unsuitable_loc (loc_from));
8322 elcd->depth.complexity = elcd->depth.entryvals = 0;
8323 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8324 vt_expand_loc_callback, data);
8325 last_child = elcd->expanding.length ();
8327 if (result)
8329 depth = elcd->depth;
8331 gcc_checking_assert (depth.complexity
8332 || result_first_child == last_child);
8334 if (last_child - result_first_child != 1)
8336 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8337 depth.entryvals++;
8338 depth.complexity++;
8341 if (depth.complexity <= EXPR_USE_DEPTH)
8343 if (depth.entryvals <= wanted_entryvals)
8344 break;
8345 else if (!found_entryvals || depth.entryvals < found_entryvals)
8346 found_entryvals = depth.entryvals;
8349 result = NULL;
8352 /* Set it up in case we leave the loop. */
8353 depth.complexity = depth.entryvals = 0;
8354 loc_from = NULL;
8355 result_first_child = first_child;
8358 if (!loc_from && wanted_entryvals < found_entryvals)
8360 /* We found entries with ENTRY_VALUEs and skipped them. Since
8361 we could not find any expansions without ENTRY_VALUEs, but we
8362 found at least one with them, go back and get an entry with
8363 the minimum number ENTRY_VALUE count that we found. We could
8364 avoid looping, but since each sub-loc is already resolved,
8365 the re-expansion should be trivial. ??? Should we record all
8366 attempted locs as dependencies, so that we retry the
8367 expansion should any of them change, in the hope it can give
8368 us a new entry without an ENTRY_VALUE? */
8369 elcd->expanding.truncate (first_child);
8370 goto retry;
8373 /* Register all encountered dependencies as active. */
8374 pending_recursion = loc_exp_dep_set
8375 (var, result, elcd->expanding.address () + result_first_child,
8376 last_child - result_first_child, elcd->vars);
8378 elcd->expanding.truncate (first_child);
8380 /* Record where the expansion came from. */
8381 gcc_checking_assert (!result || !pending_recursion);
8382 VAR_LOC_FROM (var) = loc_from;
8383 VAR_LOC_DEPTH (var) = depth;
8385 gcc_checking_assert (!depth.complexity == !result);
8387 elcd->depth = update_depth (saved_depth, depth);
8389 /* Indicate whether any of the dependencies are pending recursion
8390 resolution. */
8391 if (pendrecp)
8392 *pendrecp = pending_recursion;
8394 if (!pendrecp || !pending_recursion)
8395 var->var_part[0].cur_loc = result;
8397 return result;
8400 /* Callback for cselib_expand_value, that looks for expressions
8401 holding the value in the var-tracking hash tables. Return X for
8402 standard processing, anything else is to be used as-is. */
8404 static rtx
8405 vt_expand_loc_callback (rtx x, bitmap regs,
8406 int max_depth ATTRIBUTE_UNUSED,
8407 void *data)
8409 struct expand_loc_callback_data *elcd
8410 = (struct expand_loc_callback_data *) data;
8411 decl_or_value dv;
8412 variable var;
8413 rtx result, subreg;
8414 bool pending_recursion = false;
8415 bool from_empty = false;
8417 switch (GET_CODE (x))
8419 case SUBREG:
8420 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8421 EXPR_DEPTH,
8422 vt_expand_loc_callback, data);
8424 if (!subreg)
8425 return NULL;
8427 result = simplify_gen_subreg (GET_MODE (x), subreg,
8428 GET_MODE (SUBREG_REG (x)),
8429 SUBREG_BYTE (x));
8431 /* Invalid SUBREGs are ok in debug info. ??? We could try
8432 alternate expansions for the VALUE as well. */
8433 if (!result)
8434 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8436 return result;
8438 case DEBUG_EXPR:
8439 case VALUE:
8440 dv = dv_from_rtx (x);
8441 break;
8443 default:
8444 return x;
8447 elcd->expanding.safe_push (x);
8449 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8450 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8452 if (NO_LOC_P (x))
8454 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8455 return NULL;
8458 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8460 if (!var)
8462 from_empty = true;
8463 var = variable_from_dropped (dv, INSERT);
8466 gcc_checking_assert (var);
8468 if (!dv_changed_p (dv))
8470 gcc_checking_assert (!NO_LOC_P (x));
8471 gcc_checking_assert (var->var_part[0].cur_loc);
8472 gcc_checking_assert (VAR_LOC_1PAUX (var));
8473 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8475 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8477 return var->var_part[0].cur_loc;
8480 VALUE_RECURSED_INTO (x) = true;
8481 /* This is tentative, but it makes some tests simpler. */
8482 NO_LOC_P (x) = true;
8484 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8486 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8488 if (pending_recursion)
8490 gcc_checking_assert (!result);
8491 elcd->pending.safe_push (x);
8493 else
8495 NO_LOC_P (x) = !result;
8496 VALUE_RECURSED_INTO (x) = false;
8497 set_dv_changed (dv, false);
8499 if (result)
8500 notify_dependents_of_resolved_value (var, elcd->vars);
8503 return result;
8506 /* While expanding variables, we may encounter recursion cycles
8507 because of mutual (possibly indirect) dependencies between two
8508 particular variables (or values), say A and B. If we're trying to
8509 expand A when we get to B, which in turn attempts to expand A, if
8510 we can't find any other expansion for B, we'll add B to this
8511 pending-recursion stack, and tentatively return NULL for its
8512 location. This tentative value will be used for any other
8513 occurrences of B, unless A gets some other location, in which case
8514 it will notify B that it is worth another try at computing a
8515 location for it, and it will use the location computed for A then.
8516 At the end of the expansion, the tentative NULL locations become
8517 final for all members of PENDING that didn't get a notification.
8518 This function performs this finalization of NULL locations. */
8520 static void
8521 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8523 while (!pending->is_empty ())
8525 rtx x = pending->pop ();
8526 decl_or_value dv;
8528 if (!VALUE_RECURSED_INTO (x))
8529 continue;
8531 gcc_checking_assert (NO_LOC_P (x));
8532 VALUE_RECURSED_INTO (x) = false;
8533 dv = dv_from_rtx (x);
8534 gcc_checking_assert (dv_changed_p (dv));
8535 set_dv_changed (dv, false);
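/* Editor's note, not part of the original file: a worked example of
   the cycle described above, for two VALUEs A and B whose only
   locations mention each other:

     expand A        -> VALUE_RECURSED_INTO (A), NO_LOC_P (A)
       expand B      -> VALUE_RECURSED_INTO (B), NO_LOC_P (B)
         expand A    -> already recursed into: return NULL
       B has no other location: B goes on PENDING with a tentative
       NULL location
     A has no other location either: A joins PENDING as well

   If a location is later found for A, B is notified through
   notify_dependents_of_resolved_value and retried; otherwise
   resolve_expansions_pending_recursion above runs at FINI_ELCD time
   and makes the tentative NULLs final (NO_LOC_P stays set).  */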
8539 /* Initialize expand_loc_callback_data D with variable hash table V.
8540 It must be a macro because of alloca (vec stack). */
8541 #define INIT_ELCD(d, v) \
8542 do \
8544 (d).vars = (v); \
8545 (d).depth.complexity = (d).depth.entryvals = 0; \
8547 while (0)
8548 /* Finalize expand_loc_callback_data D, resolved to location L. */
8549 #define FINI_ELCD(d, l) \
8550 do \
8552 resolve_expansions_pending_recursion (&(d).pending); \
8553 (d).pending.release (); \
8554 (d).expanding.release (); \
8556 if ((l) && MEM_P (l)) \
8557 (l) = targetm.delegitimize_address (l); \
8559 while (0)
8561 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8562 equivalences in VARS, updating their CUR_LOCs in the process. */
8564 static rtx
8565 vt_expand_loc (rtx loc, variable_table_type *vars)
8567 struct expand_loc_callback_data data;
8568 rtx result;
8570 if (!MAY_HAVE_DEBUG_INSNS)
8571 return loc;
8573 INIT_ELCD (data, vars);
8575 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8576 vt_expand_loc_callback, &data);
8578 FINI_ELCD (data, result);
8580 return result;
8583 /* Expand the one-part VARiable to a location, using the equivalences
8584 in VARS, updating their CUR_LOCs in the process. */
8586 static rtx
8587 vt_expand_1pvar (variable var, variable_table_type *vars)
8589 struct expand_loc_callback_data data;
8590 rtx loc;
8592 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8594 if (!dv_changed_p (var->dv))
8595 return var->var_part[0].cur_loc;
8597 INIT_ELCD (data, vars);
8599 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8601 gcc_checking_assert (data.expanding.is_empty ());
8603 FINI_ELCD (data, loc);
8605 return loc;
8608 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8609 additional parameters: WHERE specifies whether the note shall be emitted
8610 before or after instruction INSN. */
8612 static int
8613 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8615 variable var = *varp;
8616 rtx_insn *insn = data->insn;
8617 enum emit_note_where where = data->where;
8618 variable_table_type *vars = data->vars;
8619 rtx_note *note;
8620 rtx note_vl;
8621 int i, j, n_var_parts;
8622 bool complete;
8623 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8624 HOST_WIDE_INT last_limit;
8625 tree type_size_unit;
8626 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8627 rtx loc[MAX_VAR_PARTS];
8628 tree decl;
8629 location_chain lc;
8631 gcc_checking_assert (var->onepart == NOT_ONEPART
8632 || var->onepart == ONEPART_VDECL);
8634 decl = dv_as_decl (var->dv);
8636 complete = true;
8637 last_limit = 0;
8638 n_var_parts = 0;
8639 if (!var->onepart)
8640 for (i = 0; i < var->n_var_parts; i++)
8641 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8642 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8643 for (i = 0; i < var->n_var_parts; i++)
8645 machine_mode mode, wider_mode;
8646 rtx loc2;
8647 HOST_WIDE_INT offset;
8649 if (i == 0 && var->onepart)
8651 gcc_checking_assert (var->n_var_parts == 1);
8652 offset = 0;
8653 initialized = VAR_INIT_STATUS_INITIALIZED;
8654 loc2 = vt_expand_1pvar (var, vars);
8656 else
8658 if (last_limit < VAR_PART_OFFSET (var, i))
8660 complete = false;
8661 break;
8663 else if (last_limit > VAR_PART_OFFSET (var, i))
8664 continue;
8665 offset = VAR_PART_OFFSET (var, i);
8666 loc2 = var->var_part[i].cur_loc;
8667 if (loc2 && GET_CODE (loc2) == MEM
8668 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8670 rtx depval = XEXP (loc2, 0);
8672 loc2 = vt_expand_loc (loc2, vars);
8674 if (loc2)
8675 loc_exp_insert_dep (var, depval, vars);
8677 if (!loc2)
8679 complete = false;
8680 continue;
8682 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8683 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8684 if (var->var_part[i].cur_loc == lc->loc)
8686 initialized = lc->init;
8687 break;
8689 gcc_assert (lc);
8692 offsets[n_var_parts] = offset;
8693 if (!loc2)
8695 complete = false;
8696 continue;
8698 loc[n_var_parts] = loc2;
8699 mode = GET_MODE (var->var_part[i].cur_loc);
8700 if (mode == VOIDmode && var->onepart)
8701 mode = DECL_MODE (decl);
8702 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8704 /* Attempt to merge adjacent registers or memory. */
8705 wider_mode = GET_MODE_WIDER_MODE (mode);
8706 for (j = i + 1; j < var->n_var_parts; j++)
8707 if (last_limit <= VAR_PART_OFFSET (var, j))
8708 break;
8709 if (j < var->n_var_parts
8710 && wider_mode != VOIDmode
8711 && var->var_part[j].cur_loc
8712 && mode == GET_MODE (var->var_part[j].cur_loc)
8713 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8714 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8715 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8716 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8718 rtx new_loc = NULL;
8720 if (REG_P (loc[n_var_parts])
8721 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8722 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8723 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8724 == REGNO (loc2))
8726 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8727 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8728 mode, 0);
8729 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8730 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8731 if (new_loc)
8733 if (!REG_P (new_loc)
8734 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8735 new_loc = NULL;
8736 else
8737 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8740 else if (MEM_P (loc[n_var_parts])
8741 && GET_CODE (XEXP (loc2, 0)) == PLUS
8742 && REG_P (XEXP (XEXP (loc2, 0), 0))
8743 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8745 if ((REG_P (XEXP (loc[n_var_parts], 0))
8746 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8747 XEXP (XEXP (loc2, 0), 0))
8748 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8749 == GET_MODE_SIZE (mode))
8750 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8751 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8752 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8753 XEXP (XEXP (loc2, 0), 0))
8754 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8755 + GET_MODE_SIZE (mode)
8756 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8757 new_loc = adjust_address_nv (loc[n_var_parts],
8758 wider_mode, 0);
8761 if (new_loc)
8763 loc[n_var_parts] = new_loc;
8764 mode = wider_mode;
8765 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8766 i = j;
8769 ++n_var_parts;
8771 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8772 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8773 complete = false;
8775 if (! flag_var_tracking_uninit)
8776 initialized = VAR_INIT_STATUS_INITIALIZED;
8778 note_vl = NULL_RTX;
8779 if (!complete)
8780 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8781 else if (n_var_parts == 1)
8783 rtx expr_list;
8785 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8786 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8787 else
8788 expr_list = loc[0];
8790 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8792 else if (n_var_parts)
8794 rtx parallel;
8796 for (i = 0; i < n_var_parts; i++)
8797 loc[i]
8798 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8800 parallel = gen_rtx_PARALLEL (VOIDmode,
8801 gen_rtvec_v (n_var_parts, loc));
8802 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8803 parallel, initialized);
8806 if (where != EMIT_NOTE_BEFORE_INSN)
8808 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8809 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8810 NOTE_DURING_CALL_P (note) = true;
8812 else
8814 /* Make sure that the call-related notes come first. */
8815 while (NEXT_INSN (insn)
8816 && NOTE_P (insn)
8817 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8818 && NOTE_DURING_CALL_P (insn))
8819 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8820 insn = NEXT_INSN (insn);
8821 if (NOTE_P (insn)
8822 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8823 && NOTE_DURING_CALL_P (insn))
8824 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8825 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8826 else
8827 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8829 NOTE_VAR_LOCATION (note) = note_vl;
8831 set_dv_changed (var->dv, false);
8832 gcc_assert (var->in_changed_variables);
8833 var->in_changed_variables = false;
8834 changed_variables->clear_slot (varp);
8836 /* Continue traversing the hash table. */
8837 return 1;
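/* Editor's note, not part of the original file: the payload shapes
   the function above can hang on a NOTE_INSN_VAR_LOCATION, in rtl
   dump notation (D is the tracked decl):

     location unknown:     (var_location D (nil))
     one part, offset 0:   (var_location D (reg:SI ax))
     one part, offset 4:   (var_location D (expr_list (reg:SI ax)
                                                       (const_int 4)))
     several parts:        (var_location D
                             (parallel [(expr_list (reg:SI ax)
                                                   (const_int 0))
                                        (expr_list (mem:SI ...)
                                                   (const_int 4))]))

   with the initialization status stored alongside in each case.  */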
8840 /* While traversing changed_variables, push onto DATA (a stack of RTX
8841 values) entries that aren't user variables. */
8843 static int
8844 var_track_values_to_stack (variable_def **slot,
8845 vec<rtx, va_heap> *changed_values_stack)
8847 variable var = *slot;
8849 if (var->onepart == ONEPART_VALUE)
8850 changed_values_stack->safe_push (dv_as_value (var->dv));
8851 else if (var->onepart == ONEPART_DEXPR)
8852 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8854 return 1;
8857 /* Remove from changed_variables the entry whose DV corresponds to
8858 value or debug_expr VAL. */
8859 static void
8860 remove_value_from_changed_variables (rtx val)
8862 decl_or_value dv = dv_from_rtx (val);
8863 variable_def **slot;
8864 variable var;
8866 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8867 NO_INSERT);
8868 var = *slot;
8869 var->in_changed_variables = false;
8870 changed_variables->clear_slot (slot);
8873 /* If VAL (a value or debug_expr) has backlinks to variables actively
8874 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8875 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8876 have dependencies of their own to notify. */
8878 static void
8879 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8880 vec<rtx, va_heap> *changed_values_stack)
8882 variable_def **slot;
8883 variable var;
8884 loc_exp_dep *led;
8885 decl_or_value dv = dv_from_rtx (val);
8887 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8888 NO_INSERT);
8889 if (!slot)
8890 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8891 if (!slot)
8892 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8893 NO_INSERT);
8894 var = *slot;
8896 while ((led = VAR_LOC_DEP_LST (var)))
8898 decl_or_value ldv = led->dv;
8899 variable ivar;
8901 /* Deactivate and remove the backlink, as it was "used up". It
8902 makes no sense to attempt to notify the same entity again:
8903 either it will be recomputed and re-register an active
8904 dependency, or it will still have the changed mark. */
8905 if (led->next)
8906 led->next->pprev = led->pprev;
8907 if (led->pprev)
8908 *led->pprev = led->next;
8909 led->next = NULL;
8910 led->pprev = NULL;
8912 if (dv_changed_p (ldv))
8913 continue;
8915 switch (dv_onepart_p (ldv))
8917 case ONEPART_VALUE:
8918 case ONEPART_DEXPR:
8919 set_dv_changed (ldv, true);
8920 changed_values_stack->safe_push (dv_as_rtx (ldv));
8921 break;
8923 case ONEPART_VDECL:
8924 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8925 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8926 variable_was_changed (ivar, NULL);
8927 break;
8929 case NOT_ONEPART:
8930 delete led;
8931 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8932 if (ivar)
8934 int i = ivar->n_var_parts;
8935 while (i--)
8937 rtx loc = ivar->var_part[i].cur_loc;
8939 if (loc && GET_CODE (loc) == MEM
8940 && XEXP (loc, 0) == val)
8942 variable_was_changed (ivar, NULL);
8943 break;
8947 break;
8949 default:
8950 gcc_unreachable ();
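/* Editor's note, not part of the original file: the backlink removal
   at the top of the loop above is the classic pointer-to-pointer
   unlink, where PPREV holds the address of whatever NEXT field (or
   list head) currently points at the node:

     struct node { struct node *next, **pprev; };

     static void
     unlink_node (struct node *n)
     {
       if (n->next)
         n->next->pprev = n->pprev;
       if (n->pprev)
         *n->pprev = n->next;
       n->next = NULL;
       n->pprev = NULL;
     }

   This lets a loc_exp_dep delete itself in O(1) without knowing
   which list it is on.  */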
8955 /* Take out of changed_variables any entries that don't refer to user
8956 variables. Back-propagate change notifications from values and
8957 debug_exprs to their active dependencies in HTAB or in
8958 CHANGED_VARIABLES. */
8960 static void
8961 process_changed_values (variable_table_type *htab)
8963 int i, n;
8964 rtx val;
8965 auto_vec<rtx, 20> changed_values_stack;
8967 /* Move values from changed_variables to changed_values_stack. */
8968 changed_variables
8969 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8970 (&changed_values_stack);
8972 /* Back-propagate change notifications in values while popping
8973 them from the stack. */
8974 for (n = i = changed_values_stack.length ();
8975 i > 0; i = changed_values_stack.length ())
8977 val = changed_values_stack.pop ();
8978 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8980 /* This condition will hold when visiting each of the entries
8981 originally in changed_variables. We can't remove them
8982 earlier because this could drop the backlinks before we got a
8983 chance to use them. */
8984 if (i == n)
8986 remove_value_from_changed_variables (val);
8987 n--;
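/* Editor's note, not part of the original file: the loop above is a
   worklist with a bookkeeping twist.  N counts the entries that were
   initially moved onto the stack; those, and only those, still have a
   twin entry in changed_variables.  Whenever the pre-pop length I
   drops back to N, the value just popped must be one of the
   originals, so it is removed from changed_variables and N is
   decremented; values pushed later by the notification step exist
   only on the stack and need no removal.  */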
8992 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable from the chain
8993 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8994 the notes shall be emitted before or after instruction INSN. */
8996 static void
8997 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8998 shared_hash vars)
9000 emit_note_data data;
9001 variable_table_type *htab = shared_hash_htab (vars);
9003 if (!changed_variables->elements ())
9004 return;
9006 if (MAY_HAVE_DEBUG_INSNS)
9007 process_changed_values (htab);
9009 data.insn = insn;
9010 data.where = where;
9011 data.vars = htab;
9013 changed_variables
9014 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9017 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9018 same variable in hash table DATA or is not there at all. */
9020 static int
9021 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9023 variable old_var, new_var;
9025 old_var = *slot;
9026 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9028 if (!new_var)
9030 /* Variable has disappeared. */
9031 variable empty_var = NULL;
9033 if (old_var->onepart == ONEPART_VALUE
9034 || old_var->onepart == ONEPART_DEXPR)
9036 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9037 if (empty_var)
9039 gcc_checking_assert (!empty_var->in_changed_variables);
9040 if (!VAR_LOC_1PAUX (old_var))
9042 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9043 VAR_LOC_1PAUX (empty_var) = NULL;
9045 else
9046 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9050 if (!empty_var)
9052 empty_var = onepart_pool (old_var->onepart).allocate ();
9053 empty_var->dv = old_var->dv;
9054 empty_var->refcount = 0;
9055 empty_var->n_var_parts = 0;
9056 empty_var->onepart = old_var->onepart;
9057 empty_var->in_changed_variables = false;
9060 if (empty_var->onepart)
9062 /* Propagate the auxiliary data to (ultimately)
9063 changed_variables. */
9064 empty_var->var_part[0].loc_chain = NULL;
9065 empty_var->var_part[0].cur_loc = NULL;
9066 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9067 VAR_LOC_1PAUX (old_var) = NULL;
9069 variable_was_changed (empty_var, NULL);
9070 /* Continue traversing the hash table. */
9071 return 1;
9073 /* Update cur_loc and one-part auxiliary data, before new_var goes
9074 through variable_was_changed. */
9075 if (old_var != new_var && new_var->onepart)
9077 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9078 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9079 VAR_LOC_1PAUX (old_var) = NULL;
9080 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9082 if (variable_different_p (old_var, new_var))
9083 variable_was_changed (new_var, NULL);
9085 /* Continue traversing the hash table. */
9086 return 1;
9089 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9090 table DATA. */
9092 static int
9093 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9095 variable old_var, new_var;
9097 new_var = *slot;
9098 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9099 if (!old_var)
9101 int i;
9102 for (i = 0; i < new_var->n_var_parts; i++)
9103 new_var->var_part[i].cur_loc = NULL;
9104 variable_was_changed (new_var, NULL);
9107 /* Continue traversing the hash table. */
9108 return 1;
9111 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9112 NEW_SET. */
9114 static void
9115 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9116 dataflow_set *new_set)
9118 shared_hash_htab (old_set->vars)
9119 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9120 (shared_hash_htab (new_set->vars));
9121 shared_hash_htab (new_set->vars)
9122 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9123 (shared_hash_htab (old_set->vars));
9124 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
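/* Editor's note, not part of the original file: the two traversals
   above amount to a symmetric difference of the variable tables:

     for each VAR in OLD_SET: mark it changed if it disappeared from
       NEW_SET or the two copies differ  (emit_notes_for_differences_1)
     for each VAR in NEW_SET: mark it changed if it has no
       counterpart in OLD_SET            (emit_notes_for_differences_2)

   Every marked variable ends up in changed_variables, and the single
   emit_notes_for_changes call then materializes the whole difference
   as notes before INSN.  */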
9127 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9129 static rtx_insn *
9130 next_non_note_insn_var_location (rtx_insn *insn)
9132 while (insn)
9134 insn = NEXT_INSN (insn);
9135 if (insn == 0
9136 || !NOTE_P (insn)
9137 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9138 break;
9141 return insn;
9144 /* Emit the notes for changes of location parts in the basic block BB. */
9146 static void
9147 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9149 unsigned int i;
9150 micro_operation *mo;
9152 dataflow_set_clear (set);
9153 dataflow_set_copy (set, &VTI (bb)->in);
9155 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9157 rtx_insn *insn = mo->insn;
9158 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9160 switch (mo->type)
9162 case MO_CALL:
9163 dataflow_set_clear_at_call (set);
9164 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9166 rtx arguments = mo->u.loc, *p = &arguments;
9167 rtx_note *note;
9168 while (*p)
9170 XEXP (XEXP (*p, 0), 1)
9171 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9172 shared_hash_htab (set->vars));
9173 /* If expansion is successful, keep it in the list. */
9174 if (XEXP (XEXP (*p, 0), 1))
9175 p = &XEXP (*p, 1);
9176 /* Otherwise, if the following item is the data_value for it,
9177 drop it too. */
9178 else if (XEXP (*p, 1)
9179 && REG_P (XEXP (XEXP (*p, 0), 0))
9180 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9181 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9182 0))
9183 && REGNO (XEXP (XEXP (*p, 0), 0))
9184 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9185 0), 0)))
9186 *p = XEXP (XEXP (*p, 1), 1);
9187 /* Just drop this item. */
9188 else
9189 *p = XEXP (*p, 1);
9191 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9192 NOTE_VAR_LOCATION (note) = arguments;
9194 break;
9196 case MO_USE:
9198 rtx loc = mo->u.loc;
9200 if (REG_P (loc))
9201 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9202 else
9203 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9205 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9207 break;
9209 case MO_VAL_LOC:
9211 rtx loc = mo->u.loc;
9212 rtx val, vloc;
9213 tree var;
9215 if (GET_CODE (loc) == CONCAT)
9217 val = XEXP (loc, 0);
9218 vloc = XEXP (loc, 1);
9220 else
9222 val = NULL_RTX;
9223 vloc = loc;
9226 var = PAT_VAR_LOCATION_DECL (vloc);
9228 clobber_variable_part (set, NULL_RTX,
9229 dv_from_decl (var), 0, NULL_RTX);
9230 if (val)
9232 if (VAL_NEEDS_RESOLUTION (loc))
9233 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9234 set_variable_part (set, val, dv_from_decl (var), 0,
9235 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9236 INSERT);
9238 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9239 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9240 dv_from_decl (var), 0,
9241 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9242 INSERT);
9244 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9246 break;
9248 case MO_VAL_USE:
9250 rtx loc = mo->u.loc;
9251 rtx val, vloc, uloc;
9253 vloc = uloc = XEXP (loc, 1);
9254 val = XEXP (loc, 0);
9256 if (GET_CODE (val) == CONCAT)
9258 uloc = XEXP (val, 1);
9259 val = XEXP (val, 0);
9262 if (VAL_NEEDS_RESOLUTION (loc))
9263 val_resolve (set, val, vloc, insn);
9264 else
9265 val_store (set, val, uloc, insn, false);
9267 if (VAL_HOLDS_TRACK_EXPR (loc))
9269 if (GET_CODE (uloc) == REG)
9270 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9271 NULL);
9272 else if (GET_CODE (uloc) == MEM)
9273 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9274 NULL);
9277 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9279 break;
9281 case MO_VAL_SET:
9283 rtx loc = mo->u.loc;
9284 rtx val, vloc, uloc;
9285 rtx dstv, srcv;
9287 vloc = loc;
9288 uloc = XEXP (vloc, 1);
9289 val = XEXP (vloc, 0);
9290 vloc = uloc;
9292 if (GET_CODE (uloc) == SET)
9294 dstv = SET_DEST (uloc);
9295 srcv = SET_SRC (uloc);
9297 else
9299 dstv = uloc;
9300 srcv = NULL;
9303 if (GET_CODE (val) == CONCAT)
9305 dstv = vloc = XEXP (val, 1);
9306 val = XEXP (val, 0);
9309 if (GET_CODE (vloc) == SET)
9311 srcv = SET_SRC (vloc);
9313 gcc_assert (val != srcv);
9314 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9316 dstv = vloc = SET_DEST (vloc);
9318 if (VAL_NEEDS_RESOLUTION (loc))
9319 val_resolve (set, val, srcv, insn);
9321 else if (VAL_NEEDS_RESOLUTION (loc))
9323 gcc_assert (GET_CODE (uloc) == SET
9324 && GET_CODE (SET_SRC (uloc)) == REG);
9325 val_resolve (set, val, SET_SRC (uloc), insn);
9328 if (VAL_HOLDS_TRACK_EXPR (loc))
9330 if (VAL_EXPR_IS_CLOBBERED (loc))
9332 if (REG_P (uloc))
9333 var_reg_delete (set, uloc, true);
9334 else if (MEM_P (uloc))
9336 gcc_assert (MEM_P (dstv));
9337 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9338 var_mem_delete (set, dstv, true);
9341 else
9343 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9344 rtx src = NULL, dst = uloc;
9345 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9347 if (GET_CODE (uloc) == SET)
9349 src = SET_SRC (uloc);
9350 dst = SET_DEST (uloc);
9353 if (copied_p)
9355 status = find_src_status (set, src);
9357 src = find_src_set_src (set, src);
9360 if (REG_P (dst))
9361 var_reg_delete_and_set (set, dst, !copied_p,
9362 status, srcv);
9363 else if (MEM_P (dst))
9365 gcc_assert (MEM_P (dstv));
9366 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9367 var_mem_delete_and_set (set, dstv, !copied_p,
9368 status, srcv);
9372 else if (REG_P (uloc))
9373 var_regno_delete (set, REGNO (uloc));
9374 else if (MEM_P (uloc))
9376 gcc_checking_assert (GET_CODE (vloc) == MEM);
9377 gcc_checking_assert (vloc == dstv);
9378 if (vloc != dstv)
9379 clobber_overlapping_mems (set, vloc);
9382 val_store (set, val, dstv, insn, true);
9384 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9385 set->vars);
9387 break;
9389 case MO_SET:
9391 rtx loc = mo->u.loc;
9392 rtx set_src = NULL;
9394 if (GET_CODE (loc) == SET)
9396 set_src = SET_SRC (loc);
9397 loc = SET_DEST (loc);
9400 if (REG_P (loc))
9401 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9402 set_src);
9403 else
9404 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9405 set_src);
9407 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9408 set->vars);
9410 break;
9412 case MO_COPY:
9414 rtx loc = mo->u.loc;
9415 enum var_init_status src_status;
9416 rtx set_src = NULL;
9418 if (GET_CODE (loc) == SET)
9420 set_src = SET_SRC (loc);
9421 loc = SET_DEST (loc);
9424 src_status = find_src_status (set, set_src);
9425 set_src = find_src_set_src (set, set_src);
9427 if (REG_P (loc))
9428 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9429 else
9430 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9432 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9433 set->vars);
9435 break;
9437 case MO_USE_NO_VAR:
9439 rtx loc = mo->u.loc;
9441 if (REG_P (loc))
9442 var_reg_delete (set, loc, false);
9443 else
9444 var_mem_delete (set, loc, false);
9446 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9448 break;
9450 case MO_CLOBBER:
9452 rtx loc = mo->u.loc;
9454 if (REG_P (loc))
9455 var_reg_delete (set, loc, true);
9456 else
9457 var_mem_delete (set, loc, true);
9459 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9460 set->vars);
9462 break;
9464 case MO_ADJUST:
9465 set->stack_adjust += mo->u.adjust;
9466 break;
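/* Editor's note, not part of the original file: where the switch
   above places the notes, relative to the micro operation's insn:

     MO_USE, MO_VAL_USE         before INSN
     MO_VAL_LOC, MO_USE_NO_VAR  after INSN
     MO_SET, MO_COPY,
     MO_CLOBBER, MO_VAL_SET     before the next insn that is not a
                                var-location note, i.e. effectively
                                after INSN and its existing notes
     MO_CALL                    after the call, with NOTE_DURING_CALL_P
                                set, plus a NOTE_INSN_CALL_ARG_LOCATION
     MO_ADJUST                  no note; only stack_adjust changes  */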
9471 /* Emit notes for the whole function. */
9473 static void
9474 vt_emit_notes (void)
9476 basic_block bb;
9477 dataflow_set cur;
9479 gcc_assert (!changed_variables->elements ());
9481 /* Free memory occupied by the out hash tables, as they aren't used
9482 anymore. */
9483 FOR_EACH_BB_FN (bb, cfun)
9484 dataflow_set_clear (&VTI (bb)->out);
9486 /* Enable emitting notes by functions (mainly by set_variable_part and
9487 delete_variable_part). */
9488 emit_notes = true;
9490 if (MAY_HAVE_DEBUG_INSNS)
9492 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9495 dataflow_set_init (&cur);
9497 FOR_EACH_BB_FN (bb, cfun)
9499 /* Emit the notes for changes of variable locations between two
9500 consecutive basic blocks. */
9501 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9503 if (MAY_HAVE_DEBUG_INSNS)
9504 local_get_addr_cache = new hash_map<rtx, rtx>;
9506 /* Emit the notes for the changes in the basic block itself. */
9507 emit_notes_in_bb (bb, &cur);
9509 if (MAY_HAVE_DEBUG_INSNS)
9510 delete local_get_addr_cache;
9511 local_get_addr_cache = NULL;
9513 /* Free memory occupied by the in hash table; we won't need it
9514 again. */
9515 dataflow_set_clear (&VTI (bb)->in);
9517 #ifdef ENABLE_CHECKING
9518 shared_hash_htab (cur.vars)
9519 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9520 (shared_hash_htab (empty_shared_hash));
9521 #endif
9522 dataflow_set_destroy (&cur);
9524 if (MAY_HAVE_DEBUG_INSNS)
9525 delete dropped_values;
9526 dropped_values = NULL;
9528 emit_notes = false;
9531 /* If there is a declaration and offset associated with register/memory RTL,
9532 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9534 static bool
9535 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9537 if (REG_P (rtl))
9539 if (REG_ATTRS (rtl))
9541 *declp = REG_EXPR (rtl);
9542 *offsetp = REG_OFFSET (rtl);
9543 return true;
9546 else if (GET_CODE (rtl) == PARALLEL)
9548 tree decl = NULL_TREE;
9549 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9550 int len = XVECLEN (rtl, 0), i;
9552 for (i = 0; i < len; i++)
9554 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9555 if (!REG_P (reg) || !REG_ATTRS (reg))
9556 break;
9557 if (!decl)
9558 decl = REG_EXPR (reg);
9559 if (REG_EXPR (reg) != decl)
9560 break;
9561 if (REG_OFFSET (reg) < offset)
9562 offset = REG_OFFSET (reg);
9565 if (i == len)
9567 *declp = decl;
9568 *offsetp = offset;
9569 return true;
9572 else if (MEM_P (rtl))
9574 if (MEM_ATTRS (rtl))
9576 *declp = MEM_EXPR (rtl);
9577 *offsetp = INT_MEM_OFFSET (rtl);
9578 return true;
9581 return false;
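/* Editor's note, not part of the original file: the three rtl shapes
   the function above accepts and what each yields:

     (reg ...) with REG_ATTRS   -> REG_EXPR (reg), REG_OFFSET (reg)
     (parallel [(expr_list (reg ...) ...) ...])
                                -> the decl shared by every member
                                   register and the minimum of their
                                   REG_OFFSETs, provided all of them
                                   carry attributes naming the same
                                   decl
     (mem ...) with MEM_ATTRS   -> MEM_EXPR (mem), INT_MEM_OFFSET (mem)

   Anything else makes it return false.  */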
9584 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9585 of VAL. */
9587 static void
9588 record_entry_value (cselib_val *val, rtx rtl)
9590 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9592 ENTRY_VALUE_EXP (ev) = rtl;
9594 cselib_add_permanent_equiv (val, ev, get_insns ());
9597 /* Insert function parameter PARM into the IN and OUT sets of ENTRY_BLOCK. */
9599 static void
9600 vt_add_function_parameter (tree parm)
9602 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9603 rtx incoming = DECL_INCOMING_RTL (parm);
9604 tree decl;
9605 machine_mode mode;
9606 HOST_WIDE_INT offset;
9607 dataflow_set *out;
9608 decl_or_value dv;
9610 if (TREE_CODE (parm) != PARM_DECL)
9611 return;
9613 if (!decl_rtl || !incoming)
9614 return;
9616 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9617 return;
9619 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9620 rewrite the incoming location of parameters passed on the stack
9621 into MEMs based on the argument pointer, so that incoming doesn't
9622 depend on a pseudo. */
9623 if (MEM_P (incoming)
9624 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9625 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9626 && XEXP (XEXP (incoming, 0), 0)
9627 == crtl->args.internal_arg_pointer
9628 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9630 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9631 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9632 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9633 incoming
9634 = replace_equiv_address_nv (incoming,
9635 plus_constant (Pmode,
9636 arg_pointer_rtx, off));
9639 #ifdef HAVE_window_save
9640 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9641 If the target machine has an explicit window save instruction, the
9642 actual entry value is the corresponding OUTGOING_REGNO instead. */
9643 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9645 if (REG_P (incoming)
9646 && HARD_REGISTER_P (incoming)
9647 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9649 parm_reg_t p;
9650 p.incoming = incoming;
9651 incoming
9652 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9653 OUTGOING_REGNO (REGNO (incoming)), 0);
9654 p.outgoing = incoming;
9655 vec_safe_push (windowed_parm_regs, p);
9657 else if (GET_CODE (incoming) == PARALLEL)
9659 rtx outgoing
9660 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9661 int i;
9663 for (i = 0; i < XVECLEN (incoming, 0); i++)
9665 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9666 parm_reg_t p;
9667 p.incoming = reg;
9668 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9669 OUTGOING_REGNO (REGNO (reg)), 0);
9670 p.outgoing = reg;
9671 XVECEXP (outgoing, 0, i)
9672 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9673 XEXP (XVECEXP (incoming, 0, i), 1));
9674 vec_safe_push (windowed_parm_regs, p);
9677 incoming = outgoing;
9679 else if (MEM_P (incoming)
9680 && REG_P (XEXP (incoming, 0))
9681 && HARD_REGISTER_P (XEXP (incoming, 0)))
9683 rtx reg = XEXP (incoming, 0);
9684 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9686 parm_reg_t p;
9687 p.incoming = reg;
9688 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9689 p.outgoing = reg;
9690 vec_safe_push (windowed_parm_regs, p);
9691 incoming = replace_equiv_address_nv (incoming, reg);
9695 #endif
9697 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9699 if (MEM_P (incoming))
9701 /* This means the argument is passed by invisible reference. */
9702 offset = 0;
9703 decl = parm;
9705 else
9707 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9708 return;
9709 offset += byte_lowpart_offset (GET_MODE (incoming),
9710 GET_MODE (decl_rtl));
9714 if (!decl)
9715 return;
9717 if (parm != decl)
9719 /* If that DECL_RTL wasn't a pseudo that got spilled to
9720 memory, bail out. Otherwise, the spill slot sharing code
9721 will force the memory to reference spill_slot_decl (%sfp),
9722 so we don't match above. That's ok, the pseudo must have
9723 referenced the entire parameter, so just reset OFFSET. */
9724 if (decl != get_spill_slot_decl (false))
9725 return;
9726 offset = 0;
9729 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9730 return;
9732 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9734 dv = dv_from_decl (parm);
9736 if (target_for_debug_bind (parm)
9737 /* We can't deal with these right now, because this kind of
9738 variable is single-part. ??? We could handle parallels
9739 that describe multiple locations for the same single
9740 value, but ATM we don't. */
9741 && GET_CODE (incoming) != PARALLEL)
9743 cselib_val *val;
9744 rtx lowpart;
9746 /* ??? We shouldn't ever hit this, but it may happen because
9747 arguments passed by invisible reference aren't dealt with
9748 above: incoming-rtl will have Pmode rather than the
9749 expected mode for the type. */
9750 if (offset)
9751 return;
9753 lowpart = var_lowpart (mode, incoming);
9754 if (!lowpart)
9755 return;
9757 val = cselib_lookup_from_insn (lowpart, mode, true,
9758 VOIDmode, get_insns ());
9760 /* ??? Float-typed values in memory are not handled by
9761 cselib. */
9762 if (val)
9764 preserve_value (val);
9765 set_variable_part (out, val->val_rtx, dv, offset,
9766 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9767 dv = dv_from_value (val->val_rtx);
9770 if (MEM_P (incoming))
9772 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9773 VOIDmode, get_insns ());
9774 if (val)
9776 preserve_value (val);
9777 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9782 if (REG_P (incoming))
9784 incoming = var_lowpart (mode, incoming);
9785 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9786 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9787 incoming);
9788 set_variable_part (out, incoming, dv, offset,
9789 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9790 if (dv_is_value_p (dv))
9792 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9793 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9794 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9796 machine_mode indmode
9797 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9798 rtx mem = gen_rtx_MEM (indmode, incoming);
9799 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9800 VOIDmode,
9801 get_insns ());
9802 if (val)
9804 preserve_value (val);
9805 record_entry_value (val, mem);
9806 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9807 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9812 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9814 int i;
9816 for (i = 0; i < XVECLEN (incoming, 0); i++)
9818 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9819 offset = REG_OFFSET (reg);
9820 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9821 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9822 set_variable_part (out, reg, dv, offset,
9823 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9826 else if (MEM_P (incoming))
9828 incoming = var_lowpart (mode, incoming);
9829 set_variable_part (out, incoming, dv, offset,
9830 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
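/* Editor's note, not part of the original file: by the end of the
   function above, the OUT set of ENTRY_BLOCK records the parameter's
   incoming location in one of three forms: a hard register (narrowed
   to the tracked lowpart), a PARALLEL of hard registers, or a stack
   MEM rewritten to be based on the argument pointer.  With debug
   insns enabled, the location is additionally bound to a preserved
   cselib VALUE, and record_entry_value gives that VALUE an
   ENTRY_VALUE equivalence, i.e. "whatever this register held on
   entry to the function".  */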
9834 /* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9836 static void
9837 vt_add_function_parameters (void)
9839 tree parm;
9841 for (parm = DECL_ARGUMENTS (current_function_decl);
9842 parm; parm = DECL_CHAIN (parm))
9843 if (!POINTER_BOUNDS_P (parm))
9844 vt_add_function_parameter (parm);
9846 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9848 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9850 if (TREE_CODE (vexpr) == INDIRECT_REF)
9851 vexpr = TREE_OPERAND (vexpr, 0);
9853 if (TREE_CODE (vexpr) == PARM_DECL
9854 && DECL_ARTIFICIAL (vexpr)
9855 && !DECL_IGNORED_P (vexpr)
9856 && DECL_NAMELESS (vexpr))
9857 vt_add_function_parameter (vexpr);
9861 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9862 ensure it isn't flushed during cselib_reset_table.
9863 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9864 has been eliminated. */
9866 static void
9867 vt_init_cfa_base (void)
9869 cselib_val *val;
9871 #ifdef FRAME_POINTER_CFA_OFFSET
9872 cfa_base_rtx = frame_pointer_rtx;
9873 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9874 #else
9875 cfa_base_rtx = arg_pointer_rtx;
9876 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9877 #endif
9878 if (cfa_base_rtx == hard_frame_pointer_rtx
9879 || !fixed_regs[REGNO (cfa_base_rtx)])
9881 cfa_base_rtx = NULL_RTX;
9882 return;
9884 if (!MAY_HAVE_DEBUG_INSNS)
9885 return;
9887 /* Tell alias analysis that cfa_base_rtx should share
9888 find_base_term value with stack pointer or hard frame pointer. */
9889 if (!frame_pointer_needed)
9890 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9891 else if (!crtl->stack_realign_tried)
9892 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9894 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9895 VOIDmode, get_insns ());
9896 preserve_value (val);
9897 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
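/* Editor's note, not part of the original file: after the function
   above runs, cfa_base_rtx (the eliminated frame or arg pointer,
   reused as a virtual CFA pointer) serves as a stable anchor:
   cfa_base_offset records its constant distance from the CFA, alias
   analysis treats it as sharing a base with the stack or hard frame
   pointer, and its VALUE is preserved so it survives every
   cselib_reset_table.  MEMs rewritten to be based on it therefore
   keep one meaning across the whole function.  */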
9900 /* Allocate and initialize the data structures for variable tracking
9901 and parse the RTL to get the micro operations. */
9903 static bool
9904 vt_initialize (void)
9906 basic_block bb;
9907 HOST_WIDE_INT fp_cfa_offset = -1;
9909 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9911 empty_shared_hash = new shared_hash_def;
9912 empty_shared_hash->refcount = 1;
9913 empty_shared_hash->htab = new variable_table_type (1);
9914 changed_variables = new variable_table_type (10);
9916 /* Init the IN and OUT sets. */
9917 FOR_ALL_BB_FN (bb, cfun)
9919 VTI (bb)->visited = false;
9920 VTI (bb)->flooded = false;
9921 dataflow_set_init (&VTI (bb)->in);
9922 dataflow_set_init (&VTI (bb)->out);
9923 VTI (bb)->permp = NULL;
9926 if (MAY_HAVE_DEBUG_INSNS)
9928 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9929 scratch_regs = BITMAP_ALLOC (NULL);
9930 preserved_values.create (256);
9931 global_get_addr_cache = new hash_map<rtx, rtx>;
9933 else
9935 scratch_regs = NULL;
9936 global_get_addr_cache = NULL;
9939 if (MAY_HAVE_DEBUG_INSNS)
9941 rtx reg, expr;
9942 int ofst;
9943 cselib_val *val;
9945 #ifdef FRAME_POINTER_CFA_OFFSET
9946 reg = frame_pointer_rtx;
9947 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9948 #else
9949 reg = arg_pointer_rtx;
9950 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9951 #endif
9953 ofst -= INCOMING_FRAME_SP_OFFSET;
9955 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9956 VOIDmode, get_insns ());
9957 preserve_value (val);
9958 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9959 cselib_preserve_cfa_base_value (val, REGNO (reg));
9960 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9961 stack_pointer_rtx, -ofst);
9962 cselib_add_permanent_equiv (val, expr, get_insns ());
9964 if (ofst)
9966 val = cselib_lookup_from_insn (stack_pointer_rtx,
9967 GET_MODE (stack_pointer_rtx), 1,
9968 VOIDmode, get_insns ());
9969 preserve_value (val);
9970 expr = plus_constant (GET_MODE (reg), reg, ofst);
9971 cselib_add_permanent_equiv (val, expr, get_insns ());
9975 /* In order to factor out the adjustments made to the stack pointer or to
9976 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9977 instead of individual location lists, we're going to rewrite MEMs based
9978 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9979 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9980 resp. arg_pointer_rtx. We can do this either when there is no frame
9981 pointer in the function and stack adjustments are consistent for all
9982 basic blocks or when there is a frame pointer and no stack realignment.
9983 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9984 has been eliminated. */
9985 if (!frame_pointer_needed)
9987 rtx reg, elim;
9989 if (!vt_stack_adjustments ())
9990 return false;
9992 #ifdef FRAME_POINTER_CFA_OFFSET
9993 reg = frame_pointer_rtx;
9994 #else
9995 reg = arg_pointer_rtx;
9996 #endif
9997 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9998 if (elim != reg)
10000 if (GET_CODE (elim) == PLUS)
10001 elim = XEXP (elim, 0);
10002 if (elim == stack_pointer_rtx)
10003 vt_init_cfa_base ();
10006 else if (!crtl->stack_realign_tried)
10008 rtx reg, elim;
10010 #ifdef FRAME_POINTER_CFA_OFFSET
10011 reg = frame_pointer_rtx;
10012 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10013 #else
10014 reg = arg_pointer_rtx;
10015 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10016 #endif
10017 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10018 if (elim != reg)
10020 if (GET_CODE (elim) == PLUS)
10022 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10023 elim = XEXP (elim, 0);
10025 if (elim != hard_frame_pointer_rtx)
10026 fp_cfa_offset = -1;
10028 else
10029 fp_cfa_offset = -1;
10032 /* If the stack is realigned and a DRAP register is used, we're going to
10033 rewrite MEMs based on it representing incoming locations of parameters
10034 passed on the stack into MEMs based on the argument pointer. Although
10035 we aren't going to rewrite other MEMs, we still need to initialize the
10036 virtual CFA pointer in order to ensure that the argument pointer will
10037 be seen as a constant throughout the function.
10039 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10040 else if (stack_realign_drap)
10042 rtx reg, elim;
10044 #ifdef FRAME_POINTER_CFA_OFFSET
10045 reg = frame_pointer_rtx;
10046 #else
10047 reg = arg_pointer_rtx;
10048 #endif
10049 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10050 if (elim != reg)
10052 if (GET_CODE (elim) == PLUS)
10053 elim = XEXP (elim, 0);
10054 if (elim == hard_frame_pointer_rtx)
10055 vt_init_cfa_base ();
10059 hard_frame_pointer_adjustment = -1;
10061 vt_add_function_parameters ();
10063 FOR_EACH_BB_FN (bb, cfun)
10065 rtx_insn *insn;
10066 HOST_WIDE_INT pre, post = 0;
10067 basic_block first_bb, last_bb;
10069 if (MAY_HAVE_DEBUG_INSNS)
10071 cselib_record_sets_hook = add_with_sets;
10072 if (dump_file && (dump_flags & TDF_DETAILS))
10073 fprintf (dump_file, "first value: %i\n",
10074 cselib_get_next_uid ());
10077 first_bb = bb;
10078 for (;;)
10080 edge e;
10081 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10082 || ! single_pred_p (bb->next_bb))
10083 break;
10084 e = find_edge (bb, bb->next_bb);
10085 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10086 break;
10087 bb = bb->next_bb;
10089 last_bb = bb;
10091 /* Add the micro-operations to the vector. */
10092 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10094 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10095 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10096 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10097 insn = NEXT_INSN (insn))
10099 if (INSN_P (insn))
10101 if (!frame_pointer_needed)
10103 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10104 if (pre)
10106 micro_operation mo;
10107 mo.type = MO_ADJUST;
10108 mo.u.adjust = pre;
10109 mo.insn = insn;
10110 if (dump_file && (dump_flags & TDF_DETAILS))
10111 log_op_type (PATTERN (insn), bb, insn,
10112 MO_ADJUST, dump_file);
10113 VTI (bb)->mos.safe_push (mo);
10114 VTI (bb)->out.stack_adjust += pre;
10118 cselib_hook_called = false;
10119 adjust_insn (bb, insn);
10120 if (MAY_HAVE_DEBUG_INSNS)
10122 if (CALL_P (insn))
10123 prepare_call_arguments (bb, insn);
10124 cselib_process_insn (insn);
10125 if (dump_file && (dump_flags & TDF_DETAILS))
10127 print_rtl_single (dump_file, insn);
10128 dump_cselib_table (dump_file);
10131 if (!cselib_hook_called)
10132 add_with_sets (insn, 0, 0);
10133 cancel_changes (0);
10135 if (!frame_pointer_needed && post)
10137 micro_operation mo;
10138 mo.type = MO_ADJUST;
10139 mo.u.adjust = post;
10140 mo.insn = insn;
10141 if (dump_file && (dump_flags & TDF_DETAILS))
10142 log_op_type (PATTERN (insn), bb, insn,
10143 MO_ADJUST, dump_file);
10144 VTI (bb)->mos.safe_push (mo);
10145 VTI (bb)->out.stack_adjust += post;
10148 if (fp_cfa_offset != -1
10149 && hard_frame_pointer_adjustment == -1
10150 && fp_setter_insn (insn))
10152 vt_init_cfa_base ();
10153 hard_frame_pointer_adjustment = fp_cfa_offset;
10154 /* Disassociate sp from fp now. */
10155 if (MAY_HAVE_DEBUG_INSNS)
10157 cselib_val *v;
10158 cselib_invalidate_rtx (stack_pointer_rtx);
10159 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10160 VOIDmode);
10161 if (v && !cselib_preserved_value_p (v))
10163 cselib_set_value_sp_based (v);
10164 preserve_value (v);
10170 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10173 bb = last_bb;
10175 if (MAY_HAVE_DEBUG_INSNS)
10177 cselib_preserve_only_values ();
10178 cselib_reset_table (cselib_get_next_uid ());
10179 cselib_record_sets_hook = NULL;
10183 hard_frame_pointer_adjustment = -1;
10184 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10185 cfa_base_rtx = NULL_RTX;
10186 return true;
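/* Editor's note, not part of the original file: the main scan above
   deliberately walks maximal chains of basic blocks linked by
   single-predecessor fallthru edges as one unit (FIRST_BB..LAST_BB),
   so the cselib table feeding VALUE tracking is flushed
   (cselib_preserve_only_values followed by cselib_reset_table) only
   at real control-flow joins rather than at every block boundary.  */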
10189 /* This is *not* reset after each function. It gives each
10190 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10191 a unique label number. */
10193 static int debug_label_num = 1;
10195 /* Get rid of all debug insns from the insn stream. */
10197 static void
10198 delete_debug_insns (void)
10200 basic_block bb;
10201 rtx_insn *insn, *next;
10203 if (!MAY_HAVE_DEBUG_INSNS)
10204 return;
10206 FOR_EACH_BB_FN (bb, cfun)
10208 FOR_BB_INSNS_SAFE (bb, insn, next)
10209 if (DEBUG_INSN_P (insn))
10211 tree decl = INSN_VAR_LOCATION_DECL (insn);
10212 if (TREE_CODE (decl) == LABEL_DECL
10213 && DECL_NAME (decl)
10214 && !DECL_RTL_SET_P (decl))
10216 PUT_CODE (insn, NOTE);
10217 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10218 NOTE_DELETED_LABEL_NAME (insn)
10219 = IDENTIFIER_POINTER (DECL_NAME (decl));
10220 SET_DECL_RTL (decl, insn);
10221 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10223 else
10224 delete_insn (insn);
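/* Editor's note, not part of the original file: debug binds of named
   labels are not merely deleted above; they are demoted in place into
   NOTE_INSN_DELETED_DEBUG_LABEL notes carrying the label's name and a
   unique CODE_LABEL_NUMBER, so the debug-info back end can still emit
   the label even though the debug insn itself is gone.  */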
10229 /* Run a fast, BB-local only version of var tracking, to take care of
10230 information that we don't do global analysis on, so that not all
10231 information is lost. If SKIPPED holds, we're skipping the global
10232 pass entirely, so we should try to use information it would have
10233 handled as well. */
10235 static void
10236 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10238 /* ??? Just skip it all for now. */
10239 delete_debug_insns ();
10242 /* Free the data structures needed for variable tracking. */
10244 static void
10245 vt_finalize (void)
10247 basic_block bb;
10249 FOR_EACH_BB_FN (bb, cfun)
10251 VTI (bb)->mos.release ();
10254 FOR_ALL_BB_FN (bb, cfun)
10256 dataflow_set_destroy (&VTI (bb)->in);
10257 dataflow_set_destroy (&VTI (bb)->out);
10258 if (VTI (bb)->permp)
10260 dataflow_set_destroy (VTI (bb)->permp);
10261 XDELETE (VTI (bb)->permp);
10264 free_aux_for_blocks ();
10265 delete empty_shared_hash->htab;
10266 empty_shared_hash->htab = NULL;
10267 delete changed_variables;
10268 changed_variables = NULL;
10269 attrs_def::pool.release ();
10270 var_pool.release ();
10271 location_chain_def::pool.release ();
10272 shared_hash_def::pool.release ();
10274 if (MAY_HAVE_DEBUG_INSNS)
10276 if (global_get_addr_cache)
10277 delete global_get_addr_cache;
10278 global_get_addr_cache = NULL;
10279 loc_exp_dep::pool.release ();
10280 valvar_pool.release ();
10281 preserved_values.release ();
10282 cselib_finish ();
10283 BITMAP_FREE (scratch_regs);
10284 scratch_regs = NULL;
10287 #ifdef HAVE_window_save
10288 vec_free (windowed_parm_regs);
10289 #endif
10291 if (vui_vec)
10292 XDELETEVEC (vui_vec);
10293 vui_vec = NULL;
10294 vui_allocated = 0;
10297 /* The entry point to the variable tracking pass. */
10299 static inline unsigned int
10300 variable_tracking_main_1 (void)
10302 bool success;
10304 if (flag_var_tracking_assignments < 0
10305 /* Var-tracking right now assumes the IR doesn't contain
10306 any pseudos at this point. */
10307 || targetm.no_register_allocation)
10309 delete_debug_insns ();
10310 return 0;
10313 if (n_basic_blocks_for_fn (cfun) > 500
10314 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10316 vt_debug_insns_local (true);
10317 return 0;
10320 mark_dfs_back_edges ();
10321 if (!vt_initialize ())
10323 vt_finalize ();
10324 vt_debug_insns_local (true);
10325 return 0;
10328 success = vt_find_locations ();
10330 if (!success && flag_var_tracking_assignments > 0)
10332 vt_finalize ();
10334 delete_debug_insns ();
10336 /* This is later restored by our caller. */
10337 flag_var_tracking_assignments = 0;
10339 success = vt_initialize ();
10340 gcc_assert (success);
10342 success = vt_find_locations ();
10345 if (!success)
10347 vt_finalize ();
10348 vt_debug_insns_local (false);
10349 return 0;
10352 if (dump_file && (dump_flags & TDF_DETAILS))
10354 dump_dataflow_sets ();
10355 dump_reg_info (dump_file);
10356 dump_flow_info (dump_file, dump_flags);
10359 timevar_push (TV_VAR_TRACKING_EMIT);
10360 vt_emit_notes ();
10361 timevar_pop (TV_VAR_TRACKING_EMIT);
10363 vt_finalize ();
10364 vt_debug_insns_local (false);
10365 return 0;
10368 unsigned int
10369 variable_tracking_main (void)
10371 unsigned int ret;
10372 int save = flag_var_tracking_assignments;
10374 ret = variable_tracking_main_1 ();
10376 flag_var_tracking_assignments = save;
10378 return ret;
10381 namespace {
10383 const pass_data pass_data_variable_tracking =
10385 RTL_PASS, /* type */
10386 "vartrack", /* name */
10387 OPTGROUP_NONE, /* optinfo_flags */
10388 TV_VAR_TRACKING, /* tv_id */
10389 0, /* properties_required */
10390 0, /* properties_provided */
10391 0, /* properties_destroyed */
10392 0, /* todo_flags_start */
10393 0, /* todo_flags_finish */
10396 class pass_variable_tracking : public rtl_opt_pass
10398 public:
10399 pass_variable_tracking (gcc::context *ctxt)
10400 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10403 /* opt_pass methods: */
10404 virtual bool gate (function *)
10406 return (flag_var_tracking && !targetm.delay_vartrack);
10409 virtual unsigned int execute (function *)
10411 return variable_tracking_main ();
10414 }; // class pass_variable_tracking
10416 } // anon namespace
10418 rtl_opt_pass *
10419 make_pass_variable_tracking (gcc::context *ctxt)
10421 return new pass_variable_tracking (ctxt);