/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list of the variable parts
   stored in that register, i.e. a list of triplets (reg, decl, offset)
   where decl is REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked
   list is used for efficiently deleting the appropriate variable parts when
   we set or clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so a list is a good data structure here.
   For example in the following code, the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each
   variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
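/* An illustrative sketch of the note format (the dump syntax below is
   approximate; see print-rtl.c for the authoritative form): a variable X
   living in a register would get a note roughly like

     (note (var_location X (reg:SI 3 bx)))

   while a long long D split between two registers would get a parallel
   of (location, offset) pairs such as

     (note (var_location D (parallel [(expr_list (reg:SI 0 ax) (const_int 0))
                                      (expr_list (reg:SI 1 dx) (const_int 4))])))
*/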
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "regs.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pass.h"
#include "bitmap.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that a tree code with the same value as the VALUE
   rtx code has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
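/* A minimal usage sketch of the TYPE-discriminated union above (the values
   here are hypothetical, for illustration only): an MO_ADJUST records the
   adjustment amount in U.ADJUST, every other type records an rtx location
   in U.LOC.

     micro_operation mo;
     mo.type = MO_ADJUST;
     mo.insn = insn;	   /* some rtx_insn * in scope */
     mo.u.adjust = 16;	   /* hypothetical adjustment amount */

     micro_operation mo2;
     mo2.type = MO_USE;
     mo2.insn = insn;
     mo2.u.loc = reg;	   /* some REG rtx in scope */
*/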
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
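/* Usage sketch (illustration only): thanks to the VALUE/IDENTIFIER_NODE
   code equality asserted by check_value_val above, the accessors can
   discriminate decls from VALUEs without a separate tag, and conversions
   round-trip:

     decl_or_value dv = dv_from_decl (decl);
     gcc_checking_assert (dv_is_decl_p (dv) && dv_as_decl (dv) == decl);

   (dv_from_decl and dv_from_value are defined further below.)  */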
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus a
   chain is a suitable data structure here.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((attrs_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<attrs_def> pool;
} *attrs;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((location_chain_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<location_chain_def> pool;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((loc_exp_dep_s *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<loc_exp_dep_s> pool;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of a variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)	(VAR_LOC_1PAUX (var)		  \
				 ? VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_DEP_LSTP(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_FROM(var)	(VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)	(VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)	(VAR_LOC_1PAUX (var)	     \
				 ? &VAR_LOC_1PAUX (var)->deps \
				 : NULL)
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable_def>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable_def *);
  static inline bool equal (const variable_def *, const void *);
  static inline void remove (variable_def *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const variable_def *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const variable_def *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (variable_def *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((shared_hash_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<shared_hash_def> pool;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* The variables currently being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */

static pool_allocator<variable_def> var_pool
  ("variable_def pool", 64,
   (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));

/* Alloc pool for struct variable_def with a single var_part entry.  */
static pool_allocator<variable_def> valvar_pool
  ("small variable_def pool", 256);
/* Alloc pool for struct location_chain_def.  */
pool_allocator<location_chain_def> location_chain_def::pool
  ("location_chain_def pool", 1024);

/* Alloc pool for struct shared_hash_def.  */
pool_allocator<shared_hash_def> shared_hash_def::pool
  ("shared_hash_def pool", 256);

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
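/* A worked example (illustration only): on a target whose stack grows
   downwards, the prologue-style pattern

     (set (reg sp) (plus (reg sp) (const_int -16)))

   takes the dest == stack_pointer_rtx path above with code == PLUS, so
   *post -= -16, i.e. *post ends up increased by 16 for the 16 bytes by
   which the stack grew.  */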
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* The arg_pointer_rtx or frame_pointer_rtx that stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to, together with the offset
   of that mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;
/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}
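/* For instance (hypothetical values): with cfa_base_rtx being the frame
   pointer and cfa_base_offset == -8, compute_cfa_pointer (16) yields
   (plus:P (reg fp) (const_int 8)).  */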
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for the
   transformation of wider-mode arithmetic to a narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
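/* An illustrative example of the narrowing pair above (the modes are
   hypothetical): with mode == SImode and wmode == DImode,

     (plus:DI (reg:DI 100) (const_int 8))

   becomes

     (plus:SI (subreg:SI (reg:DI 100) 0) (const_int 8))

   where the subreg byte offset shown assumes a little-endian target;
   lowpart_subreg picks the correct lowpart offset in general.  */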
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
         on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
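/* A schematic example of the side-effect rewrite performed above
   (illustration only): a post-increment store such as

     (set (mem:SI (post_inc:P (reg r))) (reg s))

   has its address simplified to plain (reg r) by adjust_mems, which
   queues (set (reg r) (plus (reg r) (const_int 4))) on AMD.SIDE_EFFECTS;
   the code above then splices the original pattern and that extra set
   into a single PARALLEL.  */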
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator <variable_def> &
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}
/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}
/* Return the entry of LIST whose DV and OFFSET match, or NULL if there
   is no such member.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list = new attrs_def;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs n = new attrs_def;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = new shared_hash_def;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
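/* For example, given VALUEs with uids 3 and 7, canon_value_cmp returns
   true when the uid-3 VALUE is TVAL: the lowest-numbered VALUE wins and
   becomes the hub of the star.  */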
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = onepart_pool (var->onepart).allocate ();
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = new location_chain_def;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
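/* Usage sketch (schematic only; real call sites vary): callers follow a
   copy-on-write discipline, unsharing before mutation,

     if (shared_var_p (var, set->vars))
       slot = unshare_variable (set, slot, var, initialized);
     var = *slot;

   so a dataflow set never modifies a variable still referenced from
   another shared set or hash table.  */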
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}
1880 /* Map a decl to its main debug decl. */
1882 static inline tree
1883 var_debug_decl (tree decl)
1885 if (decl && TREE_CODE (decl) == VAR_DECL
1886 && DECL_HAS_DEBUG_EXPR_P (decl))
1888 tree debugdecl = DECL_DEBUG_EXPR (decl);
1889 if (DECL_P (debugdecl))
1890 decl = debugdecl;
1893 return decl;
1896 /* Set the register LOC to contain DV, OFFSET. */
1898 static void
1899 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1900 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1901 enum insert_option iopt)
1903 attrs node;
1904 bool decl_p = dv_is_decl_p (dv);
1906 if (decl_p)
1907 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1909 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1910 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1911 && node->offset == offset)
1912 break;
1913 if (!node)
1914 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1915 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1918 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1920 static void
1921 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1922 rtx set_src)
1924 tree decl = REG_EXPR (loc);
1925 HOST_WIDE_INT offset = REG_OFFSET (loc);
1927 var_reg_decl_set (set, loc, initialized,
1928 dv_from_decl (decl), offset, set_src, INSERT);
1931 static enum var_init_status
1932 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1934 variable var;
1935 int i;
1936 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1938 if (! flag_var_tracking_uninit)
1939 return VAR_INIT_STATUS_INITIALIZED;
1941 var = shared_hash_find (set->vars, dv);
1942 if (var)
1944 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1946 location_chain nextp;
1947 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1948 if (rtx_equal_p (nextp->loc, loc))
1950 ret_val = nextp->init;
1951 break;
1956 return ret_val;
1959 /* Delete current content of register LOC in dataflow set SET and set
1960 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1961 MODIFY is true, any other live copies of the same variable part are
1962 also deleted from the dataflow set, otherwise the variable part is
1963 assumed to be copied from another location holding the same
1964 part. */
1966 static void
1967 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1968 enum var_init_status initialized, rtx set_src)
1970 tree decl = REG_EXPR (loc);
1971 HOST_WIDE_INT offset = REG_OFFSET (loc);
1972 attrs node, next;
1973 attrs *nextp;
1975 decl = var_debug_decl (decl);
1977 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1978 initialized = get_init_value (set, loc, dv_from_decl (decl));
1980 nextp = &set->regs[REGNO (loc)];
1981 for (node = *nextp; node; node = next)
1983 next = node->next;
1984 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1986 delete_variable_part (set, node->loc, node->dv, node->offset);
1987 delete node;
1988 *nextp = next;
1990 else
1992 node->loc = loc;
1993 nextp = &node->next;
1996 if (modify)
1997 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1998 var_reg_set (set, loc, initialized, set_src);
2001 /* Delete the association of register LOC in dataflow set SET with any
2002 variables that aren't onepart. If CLOBBER is true, also delete any
2003 other live copies of the same variable part, and delete the
2004 association with onepart dvs too. */
2006 static void
2007 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
2009 attrs *nextp = &set->regs[REGNO (loc)];
2010 attrs node, next;
2012 if (clobber)
2014 tree decl = REG_EXPR (loc);
2015 HOST_WIDE_INT offset = REG_OFFSET (loc);
2017 decl = var_debug_decl (decl);
2019 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2022 for (node = *nextp; node; node = next)
2024 next = node->next;
2025 if (clobber || !dv_onepart_p (node->dv))
2027 delete_variable_part (set, node->loc, node->dv, node->offset);
2028 delete node;
2029 *nextp = next;
2031 else
2032 nextp = &node->next;
2036 /* Delete content of register with number REGNO in dataflow set SET. */
2038 static void
2039 var_regno_delete (dataflow_set *set, int regno)
2041 attrs *reg = &set->regs[regno];
2042 attrs node, next;
2044 for (node = *reg; node; node = next)
2046 next = node->next;
2047 delete_variable_part (set, node->loc, node->dv, node->offset);
2048 delete node;
2050 *reg = NULL;
2053 /* Return true if I is the negated value of a power of two. */
2054 static bool
2055 negative_power_of_two_p (HOST_WIDE_INT i)
2057 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2058 return x == (x & -x);
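/* Editor's note: an illustrative sketch, not part of this file, of
   the bit trick above using plain types. X & -X isolates the lowest
   set bit of X, so X is a power of two exactly when it equals its
   own lowest set bit. */
#if 0
#include <stdio.h>

static int
toy_negative_power_of_two_p (long i)
{
  unsigned long x = -(unsigned long) i;
  return x == (x & -x);
}

int
main (void)
{
  printf ("%d %d %d\n",
          toy_negative_power_of_two_p (-8),    /* 1: -8 negates to 8 */
          toy_negative_power_of_two_p (-6),    /* 0: 6 has two bits set */
          toy_negative_power_of_two_p (16));   /* 0: not negative */
  return 0;
}
#endif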
2061 /* Strip constant offsets and alignments off of LOC. Return the base
2062 expression. */
2064 static rtx
2065 vt_get_canonicalize_base (rtx loc)
2067 while ((GET_CODE (loc) == PLUS
2068 || GET_CODE (loc) == AND)
2069 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2070 && (GET_CODE (loc) != AND
2071 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2072 loc = XEXP (loc, 0);
2074 return loc;
2077 /* This caches canonicalized addresses for VALUEs, computed using
2078 information in the global cselib table. */
2079 static hash_map<rtx, rtx> *global_get_addr_cache;
2081 /* This caches canonicalized addresses for VALUEs, computed using
2082 information from the global cache and information pertaining to a
2083 basic block being analyzed. */
2084 static hash_map<rtx, rtx> *local_get_addr_cache;
2086 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2088 /* Return the canonical address for LOC, which must be a VALUE, using a
2089 cached global equivalence or computing it and storing it in the
2090 global cache. */
2092 static rtx
2093 get_addr_from_global_cache (rtx const loc)
2095 rtx x;
2097 gcc_checking_assert (GET_CODE (loc) == VALUE);
2099 bool existed;
2100 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2101 if (existed)
2102 return *slot;
2104 x = canon_rtx (get_addr (loc));
2106 /* Tentative, avoiding infinite recursion. */
2107 *slot = x;
2109 if (x != loc)
2111 rtx nx = vt_canonicalize_addr (NULL, x);
2112 if (nx != x)
2114 /* The table may have moved during recursion, recompute
2115 SLOT. */
2116 *global_get_addr_cache->get (loc) = x = nx;
2120 return x;
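/* Editor's note: an illustrative sketch, not part of this file,
   isolating the "tentative slot" pattern used above: write a
   provisional answer into the cache before recursing, so that a
   cyclic chain of equivalences terminates. The toy graph below is
   made up. */
#if 0
#include <stdio.h>

#define N 4
static int next_node[N] = { 1, 2, 1, 3 };   /* 1 <-> 2 is a cycle. */
static int memo[N];
static int memo_set[N];

static int
toy_resolve (int n)
{
  if (memo_set[n])
    return memo[n];                 /* Possibly a tentative answer. */
  memo_set[n] = 1;
  memo[n] = n;                      /* Tentative, like "*slot = x". */
  if (next_node[n] != n)
    memo[n] = toy_resolve (next_node[n]);
  return memo[n];
}

int
main (void)
{
  printf ("%d %d\n", toy_resolve (0), toy_resolve (3));   /* 1 3 */
  return 0;
}
#endif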
2123 /* Return the canonical address for LOC, which must be a VALUE, using a
2124 cached local equivalence or computing it and storing it in the
2125 local cache. */
2127 static rtx
2128 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2130 rtx x;
2131 decl_or_value dv;
2132 variable var;
2133 location_chain l;
2135 gcc_checking_assert (GET_CODE (loc) == VALUE);
2137 bool existed;
2138 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2139 if (existed)
2140 return *slot;
2142 x = get_addr_from_global_cache (loc);
2144 /* Tentative, avoiding infinite recursion. */
2145 *slot = x;
2147 /* Recurse to cache local expansion of X, or if we need to search
2148 for a VALUE in the expansion. */
2149 if (x != loc)
2151 rtx nx = vt_canonicalize_addr (set, x);
2152 if (nx != x)
2154 slot = local_get_addr_cache->get (loc);
2155 *slot = x = nx;
2157 return x;
2160 dv = dv_from_rtx (x);
2161 var = shared_hash_find (set->vars, dv);
2162 if (!var)
2163 return x;
2165 /* Look for an improved equivalent expression. */
2166 for (l = var->var_part[0].loc_chain; l; l = l->next)
2168 rtx base = vt_get_canonicalize_base (l->loc);
2169 if (GET_CODE (base) == VALUE
2170 && canon_value_cmp (base, loc))
2172 rtx nx = vt_canonicalize_addr (set, l->loc);
2173 if (x != nx)
2175 slot = local_get_addr_cache->get (loc);
2176 *slot = x = nx;
2178 break;
2182 return x;
2185 /* Canonicalize LOC using equivalences from SET in addition to those
2186 in the cselib static table. It expects a VALUE-based expression,
2187 and it will only substitute VALUEs with other VALUEs or
2188 function-global equivalences, so that, if two addresses have base
2189 VALUEs that are locally or globally related in ways that
2190 memrefs_conflict_p cares about, they will both canonicalize to
2191 expressions that have the same base VALUE.
2193 The use of VALUEs as canonical base addresses enables the canonical
2194 RTXs to remain unchanged globally, if they resolve to a constant,
2195 or throughout a basic block otherwise, so that they can be cached
2196 and the cache need not be invalidated when REGs, MEMs or such
2197 change. */
2199 static rtx
2200 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2202 HOST_WIDE_INT ofst = 0;
2203 machine_mode mode = GET_MODE (oloc);
2204 rtx loc = oloc;
2205 rtx x;
2206 bool retry = true;
2208 while (retry)
2210 while (GET_CODE (loc) == PLUS
2211 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2213 ofst += INTVAL (XEXP (loc, 1));
2214 loc = XEXP (loc, 0);
2217 /* Alignment operations can't normally be combined, so just
2218 canonicalize the base and we're done. We'll normally have
2219 only one stack alignment anyway. */
2220 if (GET_CODE (loc) == AND
2221 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2222 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2224 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2225 if (x != XEXP (loc, 0))
2226 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2227 retry = false;
2230 if (GET_CODE (loc) == VALUE)
2232 if (set)
2233 loc = get_addr_from_local_cache (set, loc);
2234 else
2235 loc = get_addr_from_global_cache (loc);
2237 /* Consolidate plus_constants. */
2238 while (ofst && GET_CODE (loc) == PLUS
2239 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2241 ofst += INTVAL (XEXP (loc, 1));
2242 loc = XEXP (loc, 0);
2245 retry = false;
2247 else
2249 x = canon_rtx (loc);
2250 if (retry)
2251 retry = (x != loc);
2252 loc = x;
2256 /* Add OFST back in. */
2257 if (ofst)
2259 /* Don't build new RTL if we can help it. */
2260 if (GET_CODE (oloc) == PLUS
2261 && XEXP (oloc, 0) == loc
2262 && INTVAL (XEXP (oloc, 1)) == ofst)
2263 return oloc;
2265 loc = plus_constant (mode, loc, ofst);
2268 return loc;
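/* Editor's note: an illustrative sketch, not part of this file, of
   the offset peeling done above. A toy PLUS chain stands in for
   (plus (plus base 4) 8): every constant is accumulated into OFST on
   the way down to the base and added back once at the end. */
#if 0
#include <stdio.h>

struct toy_node { struct toy_node *inner; long c; };  /* inner + c */

static long
toy_fold_offset (struct toy_node *loc)
{
  long ofst = 0;
  while (loc->inner)                /* Like the PLUS/CONST_INT loop. */
    {
      ofst += loc->c;
      loc = loc->inner;
    }
  return ofst;
}

int
main (void)
{
  struct toy_node base = { 0, 0 };
  struct toy_node p1 = { &base, 4 };
  struct toy_node p2 = { &p1, 8 };
  printf ("base + %ld\n", toy_fold_offset (&p2));   /* base + 12 */
  return 0;
}
#endif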
2271 /* Return true iff there's a true dependence between MLOC and LOC.
2272 MADDR must be a canonicalized version of MLOC's address. */
2274 static inline bool
2275 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2277 if (GET_CODE (loc) != MEM)
2278 return false;
2280 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2281 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2282 return false;
2284 return true;
2287 /* Hold parameters for the hashtab traversal function
2288 drop_overlapping_mem_locs, see below. */
2290 struct overlapping_mems
2292 dataflow_set *set;
2293 rtx loc, addr;
2296 /* Remove all MEMs that overlap with COMS->LOC from the location list
2297 of a hash table entry for a value. COMS->ADDR must be a
2298 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2299 canonicalized itself. */
2301 static int
2302 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2304 dataflow_set *set = coms->set;
2305 rtx mloc = coms->loc, addr = coms->addr;
2306 variable var = *slot;
2308 if (var->onepart == ONEPART_VALUE)
2310 location_chain loc, *locp;
2311 bool changed = false;
2312 rtx cur_loc;
2314 gcc_assert (var->n_var_parts == 1);
2316 if (shared_var_p (var, set->vars))
2318 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2319 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2320 break;
2322 if (!loc)
2323 return 1;
2325 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2326 var = *slot;
2327 gcc_assert (var->n_var_parts == 1);
2330 if (VAR_LOC_1PAUX (var))
2331 cur_loc = VAR_LOC_FROM (var);
2332 else
2333 cur_loc = var->var_part[0].cur_loc;
2335 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2336 loc; loc = *locp)
2338 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2340 locp = &loc->next;
2341 continue;
2344 *locp = loc->next;
2345 /* If we have deleted the location which was last emitted,
2346 we have to emit a new location, so add the variable to the
2347 set of changed variables. */
2348 if (cur_loc == loc->loc)
2350 changed = true;
2351 var->var_part[0].cur_loc = NULL;
2352 if (VAR_LOC_1PAUX (var))
2353 VAR_LOC_FROM (var) = NULL;
2355 delete loc;
2358 if (!var->var_part[0].loc_chain)
2360 var->n_var_parts--;
2361 changed = true;
2363 if (changed)
2364 variable_was_changed (var, set);
2367 return 1;
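/* Editor's note: an illustrative sketch, not part of this file. The
   loop above deletes list nodes with the pointer-to-pointer idiom,
   never needing a "previous node" variable; the toy below isolates
   that idiom on a list of ints. */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_node { int v; struct toy_node *next; };

static void
toy_drop_even (struct toy_node **locp)
{
  struct toy_node *loc;
  while ((loc = *locp))
    if (loc->v % 2 == 0)
      {
        *locp = loc->next;          /* Unlink; LOCP stays put. */
        free (loc);
      }
    else
      locp = &loc->next;            /* Keep; advance to next link. */
}

int
main (void)
{
  struct toy_node *head = NULL, **tail = &head;
  for (int i = 1; i <= 5; i++)
    {
      *tail = calloc (1, sizeof **tail);
      (*tail)->v = i;
      tail = &(*tail)->next;
    }
  toy_drop_even (&head);
  for (struct toy_node *n = head; n; n = n->next)
    printf ("%d ", n->v);           /* 1 3 5 */
  printf ("\n");
  return 0;
}
#endif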
2370 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2372 static void
2373 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2375 struct overlapping_mems coms;
2377 gcc_checking_assert (GET_CODE (loc) == MEM);
2379 coms.set = set;
2380 coms.loc = canon_rtx (loc);
2381 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2383 set->traversed_vars = set->vars;
2384 shared_hash_htab (set->vars)
2385 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2386 set->traversed_vars = NULL;
2389 /* Set the location of DV, OFFSET as the MEM LOC. */
2391 static void
2392 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2393 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2394 enum insert_option iopt)
2396 if (dv_is_decl_p (dv))
2397 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2399 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2402 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2403 SET to LOC.
2404 Adjust the address first if it is stack pointer based. */
2406 static void
2407 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2408 rtx set_src)
2410 tree decl = MEM_EXPR (loc);
2411 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2413 var_mem_decl_set (set, loc, initialized,
2414 dv_from_decl (decl), offset, set_src, INSERT);
2417 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2418 dataflow set SET to LOC. If MODIFY is true, any other live copies
2419 of the same variable part are also deleted from the dataflow set,
2420 otherwise the variable part is assumed to be copied from another
2421 location holding the same part.
2422 Adjust the address first if it is stack pointer based. */
2424 static void
2425 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2426 enum var_init_status initialized, rtx set_src)
2428 tree decl = MEM_EXPR (loc);
2429 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2431 clobber_overlapping_mems (set, loc);
2432 decl = var_debug_decl (decl);
2434 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2435 initialized = get_init_value (set, loc, dv_from_decl (decl));
2437 if (modify)
2438 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2439 var_mem_set (set, loc, initialized, set_src);
2442 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2443 true, also delete any other live copies of the same variable part.
2444 Adjust the address first if it is stack pointer based. */
2446 static void
2447 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2449 tree decl = MEM_EXPR (loc);
2450 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2452 clobber_overlapping_mems (set, loc);
2453 decl = var_debug_decl (decl);
2454 if (clobber)
2455 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2456 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2459 /* Return true if LOC should not be expanded for location expressions,
2460 or used in them. */
2462 static inline bool
2463 unsuitable_loc (rtx loc)
2465 switch (GET_CODE (loc))
2467 case PC:
2468 case SCRATCH:
2469 case CC0:
2470 case ASM_INPUT:
2471 case ASM_OPERANDS:
2472 return true;
2474 default:
2475 return false;
2479 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2480 bound to it. */
2482 static inline void
2483 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2485 if (REG_P (loc))
2487 if (modified)
2488 var_regno_delete (set, REGNO (loc));
2489 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2490 dv_from_value (val), 0, NULL_RTX, INSERT);
2492 else if (MEM_P (loc))
2494 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2496 if (modified)
2497 clobber_overlapping_mems (set, loc);
2499 if (l && GET_CODE (l->loc) == VALUE)
2500 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2502 /* If this MEM is a global constant, we don't need it in the
2503 dynamic tables. ??? We should test this before emitting the
2504 micro-op in the first place. */
2505 while (l)
2506 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2507 break;
2508 else
2509 l = l->next;
2511 if (!l)
2512 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2513 dv_from_value (val), 0, NULL_RTX, INSERT);
2515 else
2517 /* Other kinds of equivalences are necessarily static, at least
2518 so long as we do not perform substitutions while merging
2519 expressions. */
2520 gcc_unreachable ();
2521 set_variable_part (set, loc, dv_from_value (val), 0,
2522 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2526 /* Bind a value to a location it was just stored in. If MODIFIED
2527 holds, assume the location was modified, detaching it from any
2528 values bound to it. */
2530 static void
2531 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2532 bool modified)
2534 cselib_val *v = CSELIB_VAL_PTR (val);
2536 gcc_assert (cselib_preserved_value_p (v));
2538 if (dump_file)
2540 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2541 print_inline_rtx (dump_file, loc, 0);
2542 fprintf (dump_file, " evaluates to ");
2543 print_inline_rtx (dump_file, val, 0);
2544 if (v->locs)
2546 struct elt_loc_list *l;
2547 for (l = v->locs; l; l = l->next)
2549 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2550 print_inline_rtx (dump_file, l->loc, 0);
2553 fprintf (dump_file, "\n");
2556 gcc_checking_assert (!unsuitable_loc (loc));
2558 val_bind (set, val, loc, modified);
2561 /* Clear (canonical address) slots that reference X. */
2563 bool
2564 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2566 if (vt_get_canonicalize_base (*slot) == x)
2567 *slot = NULL;
2568 return true;
2571 /* Reset this node, detaching all its equivalences. */
2574 static void
2575 val_reset (dataflow_set *set, decl_or_value dv)
2577 variable var = shared_hash_find (set->vars, dv);
2578 location_chain node;
2579 rtx cval;
2581 if (!var || !var->n_var_parts)
2582 return;
2584 gcc_assert (var->n_var_parts == 1);
2586 if (var->onepart == ONEPART_VALUE)
2588 rtx x = dv_as_value (dv);
2590 /* Relationships in the global cache don't change, so reset the
2591 local cache entry only. */
2592 rtx *slot = local_get_addr_cache->get (x);
2593 if (slot)
2595 /* If the value resolved back to itself, odds are that other
2596 values may have cached it too. These entries now refer
2597 to the old X, so detach them too. Entries that used the
2598 old X but resolved to something else remain ok as long as
2599 that something else isn't also reset. */
2600 if (*slot == x)
2601 local_get_addr_cache
2602 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2603 *slot = NULL;
2607 cval = NULL;
2608 for (node = var->var_part[0].loc_chain; node; node = node->next)
2609 if (GET_CODE (node->loc) == VALUE
2610 && canon_value_cmp (node->loc, cval))
2611 cval = node->loc;
2613 for (node = var->var_part[0].loc_chain; node; node = node->next)
2614 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2616 /* Redirect the equivalence link to the new canonical
2617 value, or simply remove it if it would point at
2618 itself. */
2619 if (cval)
2620 set_variable_part (set, cval, dv_from_value (node->loc),
2621 0, node->init, node->set_src, NO_INSERT);
2622 delete_variable_part (set, dv_as_value (dv),
2623 dv_from_value (node->loc), 0);
2626 if (cval)
2628 decl_or_value cdv = dv_from_value (cval);
2630 /* Keep the remaining values connected, accumulating links
2631 in the canonical value. */
2632 for (node = var->var_part[0].loc_chain; node; node = node->next)
2634 if (node->loc == cval)
2635 continue;
2636 else if (GET_CODE (node->loc) == REG)
2637 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2638 node->set_src, NO_INSERT);
2639 else if (GET_CODE (node->loc) == MEM)
2640 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2641 node->set_src, NO_INSERT);
2642 else
2643 set_variable_part (set, node->loc, cdv, 0,
2644 node->init, node->set_src, NO_INSERT);
2648 /* We remove this last, to make sure that the canonical value is not
2649 removed to the point of requiring reinsertion. */
2650 if (cval)
2651 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2653 clobber_variable_part (set, NULL, dv, 0, NULL);
2656 /* Find the values in a given location and map the val to another
2657 value, if it is unique, or add the location as one holding the
2658 value. */
2660 static void
2661 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2663 decl_or_value dv = dv_from_value (val);
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2667 if (insn)
2668 fprintf (dump_file, "%i: ", INSN_UID (insn));
2669 else
2670 fprintf (dump_file, "head: ");
2671 print_inline_rtx (dump_file, val, 0);
2672 fputs (" is at ", dump_file);
2673 print_inline_rtx (dump_file, loc, 0);
2674 fputc ('\n', dump_file);
2677 val_reset (set, dv);
2679 gcc_checking_assert (!unsuitable_loc (loc));
2681 if (REG_P (loc))
2683 attrs node, found = NULL;
2685 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2686 if (dv_is_value_p (node->dv)
2687 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2689 found = node;
2691 /* Map incoming equivalences. ??? Wouldn't it be nice if
2692 we just started sharing the location lists? Maybe a
2693 circular list ending at the value itself or some
2694 such. */
2695 set_variable_part (set, dv_as_value (node->dv),
2696 dv_from_value (val), node->offset,
2697 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2698 set_variable_part (set, val, node->dv, node->offset,
2699 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2702 /* If we didn't find any equivalence, we need to remember that
2703 this value is held in the named register. */
2704 if (found)
2705 return;
2707 /* ??? Attempt to find and merge equivalent MEMs or other
2708 expressions too. */
2710 val_bind (set, val, loc, false);
2713 /* Initialize dataflow set SET to be empty. */
2716 static void
2717 dataflow_set_init (dataflow_set *set)
2719 init_attrs_list_set (set->regs);
2720 set->vars = shared_hash_copy (empty_shared_hash);
2721 set->stack_adjust = 0;
2722 set->traversed_vars = NULL;
2725 /* Delete the contents of dataflow set SET. */
2727 static void
2728 dataflow_set_clear (dataflow_set *set)
2730 int i;
2732 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2733 attrs_list_clear (&set->regs[i]);
2735 shared_hash_destroy (set->vars);
2736 set->vars = shared_hash_copy (empty_shared_hash);
2739 /* Copy the contents of dataflow set SRC to DST. */
2741 static void
2742 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2744 int i;
2746 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2747 attrs_list_copy (&dst->regs[i], src->regs[i]);
2749 shared_hash_destroy (dst->vars);
2750 dst->vars = shared_hash_copy (src->vars);
2751 dst->stack_adjust = src->stack_adjust;
2754 /* Information for merging lists of locations for a given offset of variable. */
2756 struct variable_union_info
2758 /* Node of the location chain. */
2759 location_chain lc;
2761 /* The sum of positions in the input chains. */
2762 int pos;
2764 /* The position in the chain of DST dataflow set. */
2765 int pos_dst;
2768 /* Buffer for location list sorting and its allocated size. */
2769 static struct variable_union_info *vui_vec;
2770 static int vui_allocated;
2772 /* Compare function for qsort, order the structures by POS element. */
2774 static int
2775 variable_union_info_cmp_pos (const void *n1, const void *n2)
2777 const struct variable_union_info *const i1 =
2778 (const struct variable_union_info *) n1;
2779 const struct variable_union_info *const i2 =
2780 (const struct variable_union_info *) n2;
2782 if (i1->pos != i2->pos)
2783 return i1->pos - i2->pos;
2785 return (i1->pos_dst - i2->pos_dst);
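/* Editor's note: an illustrative sketch, not part of this file, of
   the priority scheme the comparator above sorts by. The data is
   made up: DST holds locations A B C (positions 0 1 2), SRC holds
   C A (positions 0 1), and a location missing from one chain is
   ranked past its end (src_l + dst_l = 5). */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_info { const char *name; int pos; };

static int
toy_cmp_pos (const void *p1, const void *p2)
{
  const struct toy_info *i1 = p1, *i2 = p2;
  return i1->pos - i2->pos;
}

int
main (void)
{
  struct toy_info vui[] = {
    { "A", 0 + 1 },   /* In both chains. */
    { "B", 1 + 5 },   /* Only in DST: SRC position defaults to 5. */
    { "C", 2 + 0 },
  };
  qsort (vui, 3, sizeof vui[0], toy_cmp_pos);
  for (int i = 0; i < 3; i++)
    printf ("%s ", vui[i].name);    /* A C B */
  printf ("\n");
  return 0;
}
#endif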
2788 /* Compute union of location parts of variable *SLOT and the same variable
2789 from hash table DATA. Compute "sorted" union of the location chains
2790 for common offsets, i.e. the locations of a variable part are sorted by
2791 a priority, where the priority is the sum of the positions in the two chains
2792 (if a location is only in one list, its position in the second list is
2793 defined to be larger than the length of the chains).
2794 When we update the location parts, the newest location is at the
2795 beginning of the chain, so the described "sorted" union keeps the
2796 newest locations at the beginning. */
2798 static int
2799 variable_union (variable src, dataflow_set *set)
2801 variable dst;
2802 variable_def **dstp;
2803 int i, j, k;
2805 dstp = shared_hash_find_slot (set->vars, src->dv);
2806 if (!dstp || !*dstp)
2808 src->refcount++;
2810 dst_can_be_shared = false;
2811 if (!dstp)
2812 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2814 *dstp = src;
2816 /* Continue traversing the hash table. */
2817 return 1;
2819 else
2820 dst = *dstp;
2822 gcc_assert (src->n_var_parts);
2823 gcc_checking_assert (src->onepart == dst->onepart);
2825 /* We can combine one-part variables very efficiently, because their
2826 entries are in canonical order. */
2827 if (src->onepart)
2829 location_chain *nodep, dnode, snode;
2831 gcc_assert (src->n_var_parts == 1
2832 && dst->n_var_parts == 1);
2834 snode = src->var_part[0].loc_chain;
2835 gcc_assert (snode);
2837 restart_onepart_unshared:
2838 nodep = &dst->var_part[0].loc_chain;
2839 dnode = *nodep;
2840 gcc_assert (dnode);
2842 while (snode)
2844 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2846 if (r > 0)
2848 location_chain nnode;
2850 if (shared_var_p (dst, set->vars))
2852 dstp = unshare_variable (set, dstp, dst,
2853 VAR_INIT_STATUS_INITIALIZED);
2854 dst = *dstp;
2855 goto restart_onepart_unshared;
2858 *nodep = nnode = new location_chain_def;
2859 nnode->loc = snode->loc;
2860 nnode->init = snode->init;
2861 if (!snode->set_src || MEM_P (snode->set_src))
2862 nnode->set_src = NULL;
2863 else
2864 nnode->set_src = snode->set_src;
2865 nnode->next = dnode;
2866 dnode = nnode;
2868 else if (r == 0)
2869 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2871 if (r >= 0)
2872 snode = snode->next;
2874 nodep = &dnode->next;
2875 dnode = *nodep;
2878 return 1;
2881 gcc_checking_assert (!src->onepart);
2883 /* Count the number of location parts, result is K. */
2884 for (i = 0, j = 0, k = 0;
2885 i < src->n_var_parts && j < dst->n_var_parts; k++)
2887 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2889 i++;
2890 j++;
2892 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2893 i++;
2894 else
2895 j++;
2897 k += src->n_var_parts - i;
2898 k += dst->n_var_parts - j;
2900 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2901 thus there are at most MAX_VAR_PARTS different offsets. */
2902 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2904 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2906 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2907 dst = *dstp;
2910 i = src->n_var_parts - 1;
2911 j = dst->n_var_parts - 1;
2912 dst->n_var_parts = k;
2914 for (k--; k >= 0; k--)
2916 location_chain node, node2;
2918 if (i >= 0 && j >= 0
2919 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2921 /* Compute the "sorted" union of the chains, i.e. the locations which
2922 are in both chains go first, they are sorted by the sum of
2923 positions in the chains. */
2924 int dst_l, src_l;
2925 int ii, jj, n;
2926 struct variable_union_info *vui;
2928 /* If DST is shared, compare the location chains.
2929 If they are different, we will probably modify the chain in
2930 DST, so make a copy of DST beforehand. */
2931 if (shared_var_p (dst, set->vars))
2933 for (node = src->var_part[i].loc_chain,
2934 node2 = dst->var_part[j].loc_chain; node && node2;
2935 node = node->next, node2 = node2->next)
2937 if (!((REG_P (node2->loc)
2938 && REG_P (node->loc)
2939 && REGNO (node2->loc) == REGNO (node->loc))
2940 || rtx_equal_p (node2->loc, node->loc)))
2942 if (node2->init < node->init)
2943 node2->init = node->init;
2944 break;
2947 if (node || node2)
2949 dstp = unshare_variable (set, dstp, dst,
2950 VAR_INIT_STATUS_UNKNOWN);
2951 dst = (variable)*dstp;
2955 src_l = 0;
2956 for (node = src->var_part[i].loc_chain; node; node = node->next)
2957 src_l++;
2958 dst_l = 0;
2959 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2960 dst_l++;
2962 if (dst_l == 1)
2964 /* The most common case, much simpler, no qsort is needed. */
2965 location_chain dstnode = dst->var_part[j].loc_chain;
2966 dst->var_part[k].loc_chain = dstnode;
2967 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2968 node2 = dstnode;
2969 for (node = src->var_part[i].loc_chain; node; node = node->next)
2970 if (!((REG_P (dstnode->loc)
2971 && REG_P (node->loc)
2972 && REGNO (dstnode->loc) == REGNO (node->loc))
2973 || rtx_equal_p (dstnode->loc, node->loc)))
2975 location_chain new_node;
2977 /* Copy the location from SRC. */
2978 new_node = new location_chain_def;
2979 new_node->loc = node->loc;
2980 new_node->init = node->init;
2981 if (!node->set_src || MEM_P (node->set_src))
2982 new_node->set_src = NULL;
2983 else
2984 new_node->set_src = node->set_src;
2985 node2->next = new_node;
2986 node2 = new_node;
2988 node2->next = NULL;
2990 else
2992 if (src_l + dst_l > vui_allocated)
2994 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2995 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2996 vui_allocated);
2998 vui = vui_vec;
3000 /* Fill in the locations from DST. */
3001 for (node = dst->var_part[j].loc_chain, jj = 0; node;
3002 node = node->next, jj++)
3004 vui[jj].lc = node;
3005 vui[jj].pos_dst = jj;
3007 /* Position plus a value larger than any sum of two valid positions. */
3008 vui[jj].pos = jj + src_l + dst_l;
3011 /* Fill in the locations from SRC. */
3012 n = dst_l;
3013 for (node = src->var_part[i].loc_chain, ii = 0; node;
3014 node = node->next, ii++)
3016 /* Find location from NODE. */
3017 for (jj = 0; jj < dst_l; jj++)
3019 if ((REG_P (vui[jj].lc->loc)
3020 && REG_P (node->loc)
3021 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3022 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3024 vui[jj].pos = jj + ii;
3025 break;
3028 if (jj >= dst_l) /* The location has not been found. */
3030 location_chain new_node;
3032 /* Copy the location from SRC. */
3033 new_node = new location_chain_def;
3034 new_node->loc = node->loc;
3035 new_node->init = node->init;
3036 if (!node->set_src || MEM_P (node->set_src))
3037 new_node->set_src = NULL;
3038 else
3039 new_node->set_src = node->set_src;
3040 vui[n].lc = new_node;
3041 vui[n].pos_dst = src_l + dst_l;
3042 vui[n].pos = ii + src_l + dst_l;
3043 n++;
3047 if (dst_l == 2)
3049 /* Special case of a still very common case. For dst_l == 2
3050 all entries dst_l ... n-1 are sorted, with
3051 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
3052 if (vui[0].pos > vui[1].pos)
3054 /* Order should be 1, 0, 2... */
3055 dst->var_part[k].loc_chain = vui[1].lc;
3056 vui[1].lc->next = vui[0].lc;
3057 if (n >= 3)
3059 vui[0].lc->next = vui[2].lc;
3060 vui[n - 1].lc->next = NULL;
3062 else
3063 vui[0].lc->next = NULL;
3064 ii = 3;
3066 else
3068 dst->var_part[k].loc_chain = vui[0].lc;
3069 if (n >= 3 && vui[2].pos < vui[1].pos)
3071 /* Order should be 0, 2, 1, 3... */
3072 vui[0].lc->next = vui[2].lc;
3073 vui[2].lc->next = vui[1].lc;
3074 if (n >= 4)
3076 vui[1].lc->next = vui[3].lc;
3077 vui[n - 1].lc->next = NULL;
3079 else
3080 vui[1].lc->next = NULL;
3081 ii = 4;
3083 else
3085 /* Order should be 0, 1, 2... */
3086 ii = 1;
3087 vui[n - 1].lc->next = NULL;
3090 for (; ii < n; ii++)
3091 vui[ii - 1].lc->next = vui[ii].lc;
3093 else
3095 qsort (vui, n, sizeof (struct variable_union_info),
3096 variable_union_info_cmp_pos);
3098 /* Reconnect the nodes in sorted order. */
3099 for (ii = 1; ii < n; ii++)
3100 vui[ii - 1].lc->next = vui[ii].lc;
3101 vui[n - 1].lc->next = NULL;
3102 dst->var_part[k].loc_chain = vui[0].lc;
3105 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3107 i--;
3108 j--;
3110 else if ((i >= 0 && j >= 0
3111 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3112 || i < 0)
3114 dst->var_part[k] = dst->var_part[j];
3115 j--;
3117 else if ((i >= 0 && j >= 0
3118 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3119 || j < 0)
3121 location_chain *nextp;
3123 /* Copy the chain from SRC. */
3124 nextp = &dst->var_part[k].loc_chain;
3125 for (node = src->var_part[i].loc_chain; node; node = node->next)
3127 location_chain new_lc;
3129 new_lc = new location_chain_def;
3130 new_lc->next = NULL;
3131 new_lc->init = node->init;
3132 if (!node->set_src || MEM_P (node->set_src))
3133 new_lc->set_src = NULL;
3134 else
3135 new_lc->set_src = node->set_src;
3136 new_lc->loc = node->loc;
3138 *nextp = new_lc;
3139 nextp = &new_lc->next;
3142 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3143 i--;
3145 dst->var_part[k].cur_loc = NULL;
3148 if (flag_var_tracking_uninit)
3149 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3151 location_chain node, node2;
3152 for (node = src->var_part[i].loc_chain; node; node = node->next)
3153 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3154 if (rtx_equal_p (node->loc, node2->loc))
3156 if (node->init > node2->init)
3157 node2->init = node->init;
3161 /* Continue traversing the hash table. */
3162 return 1;
3165 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3167 static void
3168 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3170 int i;
3172 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3173 attrs_list_union (&dst->regs[i], src->regs[i]);
3175 if (dst->vars == empty_shared_hash)
3177 shared_hash_destroy (dst->vars);
3178 dst->vars = shared_hash_copy (src->vars);
3180 else
3182 variable_iterator_type hi;
3183 variable var;
3185 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3186 var, variable, hi)
3187 variable_union (var, dst);
3191 /* Whether the value is currently being expanded. */
3192 #define VALUE_RECURSED_INTO(x) \
3193 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3195 /* Whether no expansion was found, saving useless lookups.
3196 It must only be set when VALUE_CHANGED is clear. */
3197 #define NO_LOC_P(x) \
3198 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3200 /* Whether cur_loc in the value needs to be (re)computed. */
3201 #define VALUE_CHANGED(x) \
3202 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3203 /* Whether cur_loc in the decl needs to be (re)computed. */
3204 #define DECL_CHANGED(x) TREE_VISITED (x)
3206 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3207 user DECLs, this means they're in changed_variables. Values and
3208 debug exprs may be left with this flag set if no user variable
3209 requires them to be evaluated. */
3211 static inline void
3212 set_dv_changed (decl_or_value dv, bool newv)
3214 switch (dv_onepart_p (dv))
3216 case ONEPART_VALUE:
3217 if (newv)
3218 NO_LOC_P (dv_as_value (dv)) = false;
3219 VALUE_CHANGED (dv_as_value (dv)) = newv;
3220 break;
3222 case ONEPART_DEXPR:
3223 if (newv)
3224 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3225 /* Fall through... */
3227 default:
3228 DECL_CHANGED (dv_as_decl (dv)) = newv;
3229 break;
3233 /* Return true if DV needs to have its cur_loc recomputed. */
3235 static inline bool
3236 dv_changed_p (decl_or_value dv)
3238 return (dv_is_value_p (dv)
3239 ? VALUE_CHANGED (dv_as_value (dv))
3240 : DECL_CHANGED (dv_as_decl (dv)));
3243 /* Return a location list node whose loc is rtx_equal to LOC, in the
3244 location list of a one-part variable or value VAR, or in that of
3245 any values recursively mentioned in the location lists. VARS must
3246 be in star-canonical form. */
3248 static location_chain
3249 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3251 location_chain node;
3252 enum rtx_code loc_code;
3254 if (!var)
3255 return NULL;
3257 gcc_checking_assert (var->onepart);
3259 if (!var->n_var_parts)
3260 return NULL;
3262 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3264 loc_code = GET_CODE (loc);
3265 for (node = var->var_part[0].loc_chain; node; node = node->next)
3267 decl_or_value dv;
3268 variable rvar;
3270 if (GET_CODE (node->loc) != loc_code)
3272 if (GET_CODE (node->loc) != VALUE)
3273 continue;
3275 else if (loc == node->loc)
3276 return node;
3277 else if (loc_code != VALUE)
3279 if (rtx_equal_p (loc, node->loc))
3280 return node;
3281 continue;
3284 /* Since we're in star-canonical form, we don't need to visit
3285 non-canonical nodes: one-part variables and non-canonical
3286 values would only point back to the canonical node. */
3287 if (dv_is_value_p (var->dv)
3288 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3290 /* Skip all subsequent VALUEs. */
3291 while (node->next && GET_CODE (node->next->loc) == VALUE)
3293 node = node->next;
3294 gcc_checking_assert (!canon_value_cmp (node->loc,
3295 dv_as_value (var->dv)));
3296 if (loc == node->loc)
3297 return node;
3299 continue;
3302 gcc_checking_assert (node == var->var_part[0].loc_chain);
3303 gcc_checking_assert (!node->next);
3305 dv = dv_from_value (node->loc);
3306 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3307 return find_loc_in_1pdv (loc, rvar, vars);
3310 /* ??? Gotta look in cselib_val locations too. */
3312 return NULL;
3315 /* Hash table iteration argument passed to variable_merge. */
3316 struct dfset_merge
3318 /* The set in which the merge is to be inserted. */
3319 dataflow_set *dst;
3320 /* The set that we're iterating in. */
3321 dataflow_set *cur;
3322 /* The set that may contain the other dv we are to merge with. */
3323 dataflow_set *src;
3324 /* Number of onepart dvs in src. */
3325 int src_onepart_cnt;
3328 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3329 loc_cmp order, and it is maintained as such. */
3331 static void
3332 insert_into_intersection (location_chain *nodep, rtx loc,
3333 enum var_init_status status)
3335 location_chain node;
3336 int r;
3338 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3339 if ((r = loc_cmp (node->loc, loc)) == 0)
3341 node->init = MIN (node->init, status);
3342 return;
3344 else if (r > 0)
3345 break;
3347 node = new location_chain_def;
3349 node->loc = loc;
3350 node->set_src = NULL;
3351 node->init = status;
3352 node->next = *nodep;
3353 *nodep = node;
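/* Editor's note: an illustrative sketch, not part of this file, of
   the ordered-insert-with-merge shape of insert_into_intersection.
   Integers stand in for rtx locations and init statuses; duplicates
   keep the weaker (MIN) status, and new nodes land at the comparison
   point so the list stays sorted. */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_node { int loc, status; struct toy_node *next; };

static void
toy_insert (struct toy_node **nodep, int loc, int status)
{
  struct toy_node *node;
  for (node = *nodep; node; nodep = &node->next, node = *nodep)
    if (node->loc == loc)
      {
        if (status < node->status)
          node->status = status;    /* Merge with MIN. */
        return;
      }
    else if (node->loc > loc)
      break;                        /* Insertion point found. */
  node = calloc (1, sizeof *node);
  node->loc = loc;
  node->status = status;
  node->next = *nodep;
  *nodep = node;
}

int
main (void)
{
  struct toy_node *list = NULL;
  toy_insert (&list, 3, 1);
  toy_insert (&list, 1, 2);
  toy_insert (&list, 3, 0);         /* Duplicate: status drops to 0. */
  for (struct toy_node *n = list; n; n = n->next)
    printf ("(%d,%d) ", n->loc, n->status);   /* (1,2) (3,0) */
  printf ("\n");
  return 0;
}
#endif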
3356 /* Insert in DEST the intersection of the locations present in both
3357 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3358 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3359 DSM->dst. */
3361 static void
3362 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3363 location_chain s1node, variable s2var)
3365 dataflow_set *s1set = dsm->cur;
3366 dataflow_set *s2set = dsm->src;
3367 location_chain found;
3369 if (s2var)
3371 location_chain s2node;
3373 gcc_checking_assert (s2var->onepart);
3375 if (s2var->n_var_parts)
3377 s2node = s2var->var_part[0].loc_chain;
3379 for (; s1node && s2node;
3380 s1node = s1node->next, s2node = s2node->next)
3381 if (s1node->loc != s2node->loc)
3382 break;
3383 else if (s1node->loc == val)
3384 continue;
3385 else
3386 insert_into_intersection (dest, s1node->loc,
3387 MIN (s1node->init, s2node->init));
3391 for (; s1node; s1node = s1node->next)
3393 if (s1node->loc == val)
3394 continue;
3396 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3397 shared_hash_htab (s2set->vars))))
3399 insert_into_intersection (dest, s1node->loc,
3400 MIN (s1node->init, found->init));
3401 continue;
3404 if (GET_CODE (s1node->loc) == VALUE
3405 && !VALUE_RECURSED_INTO (s1node->loc))
3407 decl_or_value dv = dv_from_value (s1node->loc);
3408 variable svar = shared_hash_find (s1set->vars, dv);
3409 if (svar)
3411 if (svar->n_var_parts == 1)
3413 VALUE_RECURSED_INTO (s1node->loc) = true;
3414 intersect_loc_chains (val, dest, dsm,
3415 svar->var_part[0].loc_chain,
3416 s2var);
3417 VALUE_RECURSED_INTO (s1node->loc) = false;
3422 /* ??? gotta look in cselib_val locations too. */
3424 /* ??? if the location is equivalent to any location in src,
3425 searched recursively
3427 add to dst the values needed to represent the equivalence
3429 telling whether location S is equivalent to another dv's
3430 location list:
3432 for each location D in the list
3434 if S and D satisfy rtx_equal_p, then it is present
3436 else if D is a value, recurse without cycles
3438 else if S and D have the same CODE and MODE
3440 for each operand oS and the corresponding oD
3442 if oS and oD are not equivalent, then S and D are not equivalent
3444 else if they are RTX vectors
3446 if any vector oS element is not equivalent to its respective oD,
3447 then S and D are not equivalent
3455 /* Return -1 if X should be before Y in a location list for a 1-part
3456 variable, 1 if Y should be before X, and 0 if they're equivalent
3457 and should not appear in the list. */
3459 static int
3460 loc_cmp (rtx x, rtx y)
3462 int i, j, r;
3463 RTX_CODE code = GET_CODE (x);
3464 const char *fmt;
3466 if (x == y)
3467 return 0;
3469 if (REG_P (x))
3471 if (!REG_P (y))
3472 return -1;
3473 gcc_assert (GET_MODE (x) == GET_MODE (y));
3474 if (REGNO (x) == REGNO (y))
3475 return 0;
3476 else if (REGNO (x) < REGNO (y))
3477 return -1;
3478 else
3479 return 1;
3482 if (REG_P (y))
3483 return 1;
3485 if (MEM_P (x))
3487 if (!MEM_P (y))
3488 return -1;
3489 gcc_assert (GET_MODE (x) == GET_MODE (y));
3490 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3493 if (MEM_P (y))
3494 return 1;
3496 if (GET_CODE (x) == VALUE)
3498 if (GET_CODE (y) != VALUE)
3499 return -1;
3500 /* Don't assert the modes are the same; that is true only
3501 when not recursing. (subreg:QI (value:SI 1:1) 0)
3502 and (subreg:QI (value:DI 2:2) 0) can be compared,
3503 even when the modes are different. */
3504 if (canon_value_cmp (x, y))
3505 return -1;
3506 else
3507 return 1;
3510 if (GET_CODE (y) == VALUE)
3511 return 1;
3513 /* Entry value is the least preferable kind of expression. */
3514 if (GET_CODE (x) == ENTRY_VALUE)
3516 if (GET_CODE (y) != ENTRY_VALUE)
3517 return 1;
3518 gcc_assert (GET_MODE (x) == GET_MODE (y));
3519 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3522 if (GET_CODE (y) == ENTRY_VALUE)
3523 return -1;
3525 if (GET_CODE (x) == GET_CODE (y))
3526 /* Compare operands below. */;
3527 else if (GET_CODE (x) < GET_CODE (y))
3528 return -1;
3529 else
3530 return 1;
3532 gcc_assert (GET_MODE (x) == GET_MODE (y));
3534 if (GET_CODE (x) == DEBUG_EXPR)
3536 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3537 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3538 return -1;
3539 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3540 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3541 return 1;
3544 fmt = GET_RTX_FORMAT (code);
3545 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3546 switch (fmt[i])
3548 case 'w':
3549 if (XWINT (x, i) == XWINT (y, i))
3550 break;
3551 else if (XWINT (x, i) < XWINT (y, i))
3552 return -1;
3553 else
3554 return 1;
3556 case 'n':
3557 case 'i':
3558 if (XINT (x, i) == XINT (y, i))
3559 break;
3560 else if (XINT (x, i) < XINT (y, i))
3561 return -1;
3562 else
3563 return 1;
3565 case 'V':
3566 case 'E':
3567 /* Compare the vector length first. */
3568 if (XVECLEN (x, i) == XVECLEN (y, i))
3569 /* Compare the vector elements. */;
3570 else if (XVECLEN (x, i) < XVECLEN (y, i))
3571 return -1;
3572 else
3573 return 1;
3575 for (j = 0; j < XVECLEN (x, i); j++)
3576 if ((r = loc_cmp (XVECEXP (x, i, j),
3577 XVECEXP (y, i, j))))
3578 return r;
3579 break;
3581 case 'e':
3582 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3583 return r;
3584 break;
3586 case 'S':
3587 case 's':
3588 if (XSTR (x, i) == XSTR (y, i))
3589 break;
3590 if (!XSTR (x, i))
3591 return -1;
3592 if (!XSTR (y, i))
3593 return 1;
3594 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3595 break;
3596 else if (r < 0)
3597 return -1;
3598 else
3599 return 1;
3601 case 'u':
3602 /* These are just backpointers, so they don't matter. */
3603 break;
3605 case '0':
3606 case 't':
3607 break;
3609 /* It is believed that rtx's at this level will never
3610 contain anything but integers and other rtx's,
3611 except for within LABEL_REFs and SYMBOL_REFs. */
3612 default:
3613 gcc_unreachable ();
3615 if (CONST_WIDE_INT_P (x))
3617 /* Compare the vector length first. */
3618 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3619 return 1;
3620 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3621 return -1;
3623 /* Compare the vector elements. */
3624 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3626 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3627 return -1;
3628 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3629 return 1;
3633 return 0;
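/* Editor's note: an illustrative sketch, not part of this file.
   loc_cmp implements a strict -1/0/1 total order, which is what lets
   one-part location chains stay canonically sorted and be merged
   linearly. The toy comparator below only mimics that contract for
   ints; it is not the real loc_cmp. */
#if 0
#include <stdio.h>
#include <stdlib.h>

static int
toy_cmp (const void *a, const void *b)
{
  int x = *(const int *) a, y = *(const int *) b;
  return (x > y) - (x < y);         /* -1, 0 or 1; never overflows. */
}

int
main (void)
{
  int locs[] = { 42, 7, 19, 7 };
  qsort (locs, 4, sizeof locs[0], toy_cmp);
  for (int i = 0; i < 4; i++)
    printf ("%d ", locs[i]);        /* 7 7 19 42 */
  printf ("\n");
  return 0;
}
#endif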
3636 #if ENABLE_CHECKING
3637 /* Check the order of entries in one-part variables. */
3639 static int
3640 canonicalize_loc_order_check (variable_def **slot,
3641 dataflow_set *data ATTRIBUTE_UNUSED)
3643 variable var = *slot;
3644 location_chain node, next;
3646 #ifdef ENABLE_RTL_CHECKING
3647 int i;
3648 for (i = 0; i < var->n_var_parts; i++)
3649 gcc_assert (var->var_part[i].cur_loc == NULL);
3650 gcc_assert (!var->in_changed_variables);
3651 #endif
3653 if (!var->onepart)
3654 return 1;
3656 gcc_assert (var->n_var_parts == 1);
3657 node = var->var_part[0].loc_chain;
3658 gcc_assert (node);
3660 while ((next = node->next))
3662 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3663 node = next;
3666 return 1;
3668 #endif
3670 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3671 more likely to be chosen as canonical for an equivalence set.
3672 Ensure less likely values can reach more likely neighbors, making
3673 the connections bidirectional. */
3675 static int
3676 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3678 variable var = *slot;
3679 decl_or_value dv = var->dv;
3680 rtx val;
3681 location_chain node;
3683 if (!dv_is_value_p (dv))
3684 return 1;
3686 gcc_checking_assert (var->n_var_parts == 1);
3688 val = dv_as_value (dv);
3690 for (node = var->var_part[0].loc_chain; node; node = node->next)
3691 if (GET_CODE (node->loc) == VALUE)
3693 if (canon_value_cmp (node->loc, val))
3694 VALUE_RECURSED_INTO (val) = true;
3695 else
3697 decl_or_value odv = dv_from_value (node->loc);
3698 variable_def **oslot;
3699 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3701 set_slot_part (set, val, oslot, odv, 0,
3702 node->init, NULL_RTX);
3704 VALUE_RECURSED_INTO (node->loc) = true;
3708 return 1;
3711 /* Remove redundant entries from equivalence lists in onepart
3712 variables, canonicalizing equivalence sets into star shapes. */
3714 static int
3715 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3717 variable var = *slot;
3718 decl_or_value dv = var->dv;
3719 location_chain node;
3720 decl_or_value cdv;
3721 rtx val, cval;
3722 variable_def **cslot;
3723 bool has_value;
3724 bool has_marks;
3726 if (!var->onepart)
3727 return 1;
3729 gcc_checking_assert (var->n_var_parts == 1);
3731 if (dv_is_value_p (dv))
3733 cval = dv_as_value (dv);
3734 if (!VALUE_RECURSED_INTO (cval))
3735 return 1;
3736 VALUE_RECURSED_INTO (cval) = false;
3738 else
3739 cval = NULL_RTX;
3741 restart:
3742 val = cval;
3743 has_value = false;
3744 has_marks = false;
3746 gcc_assert (var->n_var_parts == 1);
3748 for (node = var->var_part[0].loc_chain; node; node = node->next)
3749 if (GET_CODE (node->loc) == VALUE)
3751 has_value = true;
3752 if (VALUE_RECURSED_INTO (node->loc))
3753 has_marks = true;
3754 if (canon_value_cmp (node->loc, cval))
3755 cval = node->loc;
3758 if (!has_value)
3759 return 1;
3761 if (cval == val)
3763 if (!has_marks || dv_is_decl_p (dv))
3764 return 1;
3766 /* Keep it marked so that we revisit it, either after visiting a
3767 child node, or after visiting a new parent that might be
3768 found out. */
3769 VALUE_RECURSED_INTO (val) = true;
3771 for (node = var->var_part[0].loc_chain; node; node = node->next)
3772 if (GET_CODE (node->loc) == VALUE
3773 && VALUE_RECURSED_INTO (node->loc))
3775 cval = node->loc;
3776 restart_with_cval:
3777 VALUE_RECURSED_INTO (cval) = false;
3778 dv = dv_from_value (cval);
3779 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3780 if (!slot)
3782 gcc_assert (dv_is_decl_p (var->dv));
3783 /* The canonical value was reset and dropped.
3784 Remove it. */
3785 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3786 return 1;
3788 var = *slot;
3789 gcc_assert (dv_is_value_p (var->dv));
3790 if (var->n_var_parts == 0)
3791 return 1;
3792 gcc_assert (var->n_var_parts == 1);
3793 goto restart;
3796 VALUE_RECURSED_INTO (val) = false;
3798 return 1;
3801 /* Push values to the canonical one. */
3802 cdv = dv_from_value (cval);
3803 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3805 for (node = var->var_part[0].loc_chain; node; node = node->next)
3806 if (node->loc != cval)
3808 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3809 node->init, NULL_RTX);
3810 if (GET_CODE (node->loc) == VALUE)
3812 decl_or_value ndv = dv_from_value (node->loc);
3814 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3815 NO_INSERT);
3817 if (canon_value_cmp (node->loc, val))
3819 /* If it could have been a local minimum, it's not any more,
3820 since it's now neighbor to cval, so it may have to push
3821 to it. Conversely, if it wouldn't have prevailed over
3822 val, then whatever mark it has is fine: if it was to
3823 push, it will now push to a more canonical node, but if
3824 it wasn't, then it has already pushed any values it might
3825 have to. */
3826 VALUE_RECURSED_INTO (node->loc) = true;
3827 /* Make sure we visit node->loc by ensuring cval is
3828 visited too. */
3829 VALUE_RECURSED_INTO (cval) = true;
3831 else if (!VALUE_RECURSED_INTO (node->loc))
3832 /* If we have no need to "recurse" into this node, it's
3833 already "canonicalized", so drop the link to the old
3834 parent. */
3835 clobber_variable_part (set, cval, ndv, 0, NULL);
3837 else if (GET_CODE (node->loc) == REG)
3839 attrs list = set->regs[REGNO (node->loc)], *listp;
3841 /* Change an existing attribute referring to dv so that it
3842 refers to cdv, removing any duplicate this might
3843 introduce, and checking that no previous duplicates
3844 existed, all in a single pass. */
3846 while (list)
3848 if (list->offset == 0
3849 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3850 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3851 break;
3853 list = list->next;
3856 gcc_assert (list);
3857 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3859 list->dv = cdv;
3860 for (listp = &list->next; (list = *listp); listp = &list->next)
3862 if (list->offset)
3863 continue;
3865 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3867 *listp = list->next;
3868 delete list;
3869 list = *listp;
3870 break;
3873 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3876 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3878 for (listp = &list->next; (list = *listp); listp = &list->next)
3880 if (list->offset)
3881 continue;
3883 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3885 *listp = list->next;
3886 delete list;
3887 list = *listp;
3888 break;
3891 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3894 else
3895 gcc_unreachable ();
3897 #if ENABLE_CHECKING
3898 while (list)
3900 if (list->offset == 0
3901 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3902 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3903 gcc_unreachable ();
3905 list = list->next;
3907 #endif
3911 if (val)
3912 set_slot_part (set, val, cslot, cdv, 0,
3913 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3915 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3917 /* Variable may have been unshared. */
3918 var = *slot;
3919 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3920 && var->var_part[0].loc_chain->next == NULL);
3922 if (VALUE_RECURSED_INTO (cval))
3923 goto restart_with_cval;
3925 return 1;
3928 /* Bind one-part variables to the canonical value in an equivalence
3929 set. Not doing this causes dataflow convergence failure in rare
3930 circumstances, see PR42873. Unfortunately we can't do this
3931 efficiently as part of canonicalize_values_star, since we may not
3932 have determined or even seen the canonical value of a set when we
3933 get to a variable that references another member of the set. */
3935 static int
3936 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3938 variable var = *slot;
3939 decl_or_value dv = var->dv;
3940 location_chain node;
3941 rtx cval;
3942 decl_or_value cdv;
3943 variable_def **cslot;
3944 variable cvar;
3945 location_chain cnode;
3947 if (!var->onepart || var->onepart == ONEPART_VALUE)
3948 return 1;
3950 gcc_assert (var->n_var_parts == 1);
3952 node = var->var_part[0].loc_chain;
3954 if (GET_CODE (node->loc) != VALUE)
3955 return 1;
3957 gcc_assert (!node->next);
3958 cval = node->loc;
3960 /* Push values to the canonical one. */
3961 cdv = dv_from_value (cval);
3962 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3963 if (!cslot)
3964 return 1;
3965 cvar = *cslot;
3966 gcc_assert (cvar->n_var_parts == 1);
3968 cnode = cvar->var_part[0].loc_chain;
3970 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3971 that are not more canonical than it. */
3972 if (GET_CODE (cnode->loc) != VALUE
3973 || !canon_value_cmp (cnode->loc, cval))
3974 return 1;
3976 /* CVAL was found to be non-canonical. Change the variable to point
3977 to the canonical VALUE. */
3978 gcc_assert (!cnode->next);
3979 cval = cnode->loc;
3981 slot = set_slot_part (set, cval, slot, dv, 0,
3982 node->init, node->set_src);
3983 clobber_slot_part (set, cval, slot, 0, node->set_src);
3985 return 1;
3988 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3989 corresponding entry in DSM->src. Multi-part variables are combined
3990 with variable_union, whereas onepart dvs are combined with
3991 intersection. */
3993 static int
3994 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3996 dataflow_set *dst = dsm->dst;
3997 variable_def **dstslot;
3998 variable s2var, dvar = NULL;
3999 decl_or_value dv = s1var->dv;
4000 onepart_enum_t onepart = s1var->onepart;
4001 rtx val;
4002 hashval_t dvhash;
4003 location_chain node, *nodep;
4005 /* If the incoming onepart variable has an empty location list, then
4006 the intersection will be just as empty. For other variables,
4007 it's always union. */
4008 gcc_checking_assert (s1var->n_var_parts
4009 && s1var->var_part[0].loc_chain);
4011 if (!onepart)
4012 return variable_union (s1var, dst);
4014 gcc_checking_assert (s1var->n_var_parts == 1);
4016 dvhash = dv_htab_hash (dv);
4017 if (dv_is_value_p (dv))
4018 val = dv_as_value (dv);
4019 else
4020 val = NULL;
4022 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4023 if (!s2var)
4025 dst_can_be_shared = false;
4026 return 1;
4029 dsm->src_onepart_cnt--;
4030 gcc_assert (s2var->var_part[0].loc_chain
4031 && s2var->onepart == onepart
4032 && s2var->n_var_parts == 1);
4034 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4035 if (dstslot)
4037 dvar = *dstslot;
4038 gcc_assert (dvar->refcount == 1
4039 && dvar->onepart == onepart
4040 && dvar->n_var_parts == 1);
4041 nodep = &dvar->var_part[0].loc_chain;
4043 else
4045 nodep = &node;
4046 node = NULL;
4049 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4051 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4052 dvhash, INSERT);
4053 *dstslot = dvar = s2var;
4054 dvar->refcount++;
4056 else
4058 dst_can_be_shared = false;
4060 intersect_loc_chains (val, nodep, dsm,
4061 s1var->var_part[0].loc_chain, s2var);
4063 if (!dstslot)
4065 if (node)
4067 dvar = onepart_pool (onepart).allocate ();
4068 dvar->dv = dv;
4069 dvar->refcount = 1;
4070 dvar->n_var_parts = 1;
4071 dvar->onepart = onepart;
4072 dvar->in_changed_variables = false;
4073 dvar->var_part[0].loc_chain = node;
4074 dvar->var_part[0].cur_loc = NULL;
4075 if (onepart)
4076 VAR_LOC_1PAUX (dvar) = NULL;
4077 else
4078 VAR_PART_OFFSET (dvar, 0) = 0;
4080 dstslot
4081 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4082 INSERT);
4083 gcc_assert (!*dstslot);
4084 *dstslot = dvar;
4086 else
4087 return 1;
4091 nodep = &dvar->var_part[0].loc_chain;
4092 while ((node = *nodep))
4094 location_chain *nextp = &node->next;
4096 if (GET_CODE (node->loc) == REG)
4098 attrs list;
4100 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4101 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4102 && dv_is_value_p (list->dv))
4103 break;
4105 if (!list)
4106 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4107 dv, 0, node->loc);
4108 /* If this value became canonical for another value that had
4109 this register, we want to leave it alone. */
4110 else if (dv_as_value (list->dv) != val)
4112 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4113 dstslot, dv, 0,
4114 node->init, NULL_RTX);
4115 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4117 /* Since nextp points into the removed node, we can't
4118 use it. The pointer to the next node moved to nodep.
4119 However, if the variable we're walking is unshared
4120 during our walk, we'll keep walking the location list
4121 of the previously-shared variable, in which case the
4122 node won't have been removed, and we'll want to skip
4123 it. That's why we test *nodep here. */
4124 if (*nodep != node)
4125 nextp = nodep;
4128 else
4129 /* Canonicalization puts registers first, so we don't have to
4130 walk it all. */
4131 break;
4132 nodep = nextp;
4135 if (dvar != *dstslot)
4136 dvar = *dstslot;
4137 nodep = &dvar->var_part[0].loc_chain;
4139 if (val)
4141 /* Mark all referenced nodes for canonicalization, and make sure
4142 we have mutual equivalence links. */
4143 VALUE_RECURSED_INTO (val) = true;
4144 for (node = *nodep; node; node = node->next)
4145 if (GET_CODE (node->loc) == VALUE)
4147 VALUE_RECURSED_INTO (node->loc) = true;
4148 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4149 node->init, NULL, INSERT);
4152 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4153 gcc_assert (*dstslot == dvar);
4154 canonicalize_values_star (dstslot, dst);
4155 gcc_checking_assert (dstslot
4156 == shared_hash_find_slot_noinsert_1 (dst->vars,
4157 dv, dvhash));
4158 dvar = *dstslot;
4160 else
4162 bool has_value = false, has_other = false;
4164 /* If we have one value and anything else, we're going to
4165 canonicalize this, so make sure all values have an entry in
4166 the table and are marked for canonicalization. */
4167 for (node = *nodep; node; node = node->next)
4169 if (GET_CODE (node->loc) == VALUE)
4171 /* If this was marked during register canonicalization,
4172 we know we have to canonicalize values. */
4173 if (has_value)
4174 has_other = true;
4175 has_value = true;
4176 if (has_other)
4177 break;
4179 else
4181 has_other = true;
4182 if (has_value)
4183 break;
4187 if (has_value && has_other)
4189 for (node = *nodep; node; node = node->next)
4191 if (GET_CODE (node->loc) == VALUE)
4193 decl_or_value dv = dv_from_value (node->loc);
4194 variable_def **slot = NULL;
4196 if (shared_hash_shared (dst->vars))
4197 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4198 if (!slot)
4199 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4200 INSERT);
4201 if (!*slot)
4203 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4204 var->dv = dv;
4205 var->refcount = 1;
4206 var->n_var_parts = 1;
4207 var->onepart = ONEPART_VALUE;
4208 var->in_changed_variables = false;
4209 var->var_part[0].loc_chain = NULL;
4210 var->var_part[0].cur_loc = NULL;
4211 VAR_LOC_1PAUX (var) = NULL;
4212 *slot = var;
4215 VALUE_RECURSED_INTO (node->loc) = true;
4219 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4220 gcc_assert (*dstslot == dvar);
4221 canonicalize_values_star (dstslot, dst);
4222 gcc_checking_assert (dstslot
4223 == shared_hash_find_slot_noinsert_1 (dst->vars,
4224 dv, dvhash));
4225 dvar = *dstslot;
4229 if (!onepart_variable_different_p (dvar, s2var))
4231 variable_htab_free (dvar);
4232 *dstslot = dvar = s2var;
4233 dvar->refcount++;
4235 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4237 variable_htab_free (dvar);
4238 *dstslot = dvar = s1var;
4239 dvar->refcount++;
4240 dst_can_be_shared = false;
4242 else
4243 dst_can_be_shared = false;
4245 return 1;
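/* A hypothetical example of the intersection above: if in DSM->cur
   one-part variable A has locations {v1, (reg 3)} and in DSM->src it
   has {v1, (mem ...)}, the merged entry in DST keeps only what both
   inputs agree on, here v1.  A multi-part variable would instead have
   been unioned by variable_union at the top of the function.  */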
4248 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4249 multi-part variable. Unions of multi-part variables and
4250 intersections of one-part ones will be handled in
4251 variable_merge_over_cur(). */
4253 static int
4254 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4256 dataflow_set *dst = dsm->dst;
4257 decl_or_value dv = s2var->dv;
4259 if (!s2var->onepart)
4261 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4262 *dstp = s2var;
4263 s2var->refcount++;
4264 return 1;
4267 dsm->src_onepart_cnt++;
4268 return 1;
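/* E.g.: a two-part variable found in DSM->src is simply shared into
   DST with its refcount bumped, while a one-part dv only bumps
   src_onepart_cnt here; it is intersected later, from the DSM->cur
   side, by variable_merge_over_cur.  */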
4271 /* Combine dataflow set information from SRC2 into DST, merging it
4272 with a copy of the current DST contents. */
4274 static void
4275 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4277 dataflow_set cur = *dst;
4278 dataflow_set *src1 = &cur;
4279 struct dfset_merge dsm;
4280 int i;
4281 size_t src1_elems, src2_elems;
4282 variable_iterator_type hi;
4283 variable var;
4285 src1_elems = shared_hash_htab (src1->vars)->elements ();
4286 src2_elems = shared_hash_htab (src2->vars)->elements ();
4287 dataflow_set_init (dst);
4288 dst->stack_adjust = cur.stack_adjust;
4289 shared_hash_destroy (dst->vars);
4290 dst->vars = new shared_hash_def;
4291 dst->vars->refcount = 1;
4292 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4294 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4295 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4297 dsm.dst = dst;
4298 dsm.src = src2;
4299 dsm.cur = src1;
4300 dsm.src_onepart_cnt = 0;
4302 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4303 var, variable, hi)
4304 variable_merge_over_src (var, &dsm);
4305 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4306 var, variable, hi)
4307 variable_merge_over_cur (var, &dsm);
4309 if (dsm.src_onepart_cnt)
4310 dst_can_be_shared = false;
4312 dataflow_set_destroy (src1);
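/* Sketch of the overall merge: DST is first saved as CUR, then
   reinitialized with a fresh table sized for the larger input;
   register attributes are unioned, multi-part variables are copied in
   from SRC2 by variable_merge_over_src, and each CUR entry is
   combined with its SRC2 counterpart by variable_merge_over_cur.
   For blocks B1 and B2 flowing into B3, this computes
   IN (B3) = merge (OUT (B1), OUT (B2)).  */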
4315 /* Mark register equivalences. */
4317 static void
4318 dataflow_set_equiv_regs (dataflow_set *set)
4320 int i;
4321 attrs list, *listp;
4323 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4325 rtx canon[NUM_MACHINE_MODES];
4327 /* If the list is empty or has only one entry, there is no need to
4328 canonicalize anything. */
4329 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4330 continue;
4332 memset (canon, 0, sizeof (canon));
4334 for (list = set->regs[i]; list; list = list->next)
4335 if (list->offset == 0 && dv_is_value_p (list->dv))
4337 rtx val = dv_as_value (list->dv);
4338 rtx *cvalp = &canon[(int)GET_MODE (val)];
4339 rtx cval = *cvalp;
4341 if (canon_value_cmp (val, cval))
4342 *cvalp = val;
4345 for (list = set->regs[i]; list; list = list->next)
4346 if (list->offset == 0 && dv_onepart_p (list->dv))
4348 rtx cval = canon[(int)GET_MODE (list->loc)];
4350 if (!cval)
4351 continue;
4353 if (dv_is_value_p (list->dv))
4355 rtx val = dv_as_value (list->dv);
4357 if (val == cval)
4358 continue;
4360 VALUE_RECURSED_INTO (val) = true;
4361 set_variable_part (set, val, dv_from_value (cval), 0,
4362 VAR_INIT_STATUS_INITIALIZED,
4363 NULL, NO_INSERT);
4366 VALUE_RECURSED_INTO (cval) = true;
4367 set_variable_part (set, cval, list->dv, 0,
4368 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4371 for (listp = &set->regs[i]; (list = *listp);
4372 listp = list ? &list->next : listp)
4373 if (list->offset == 0 && dv_onepart_p (list->dv))
4375 rtx cval = canon[(int)GET_MODE (list->loc)];
4376 variable_def **slot;
4378 if (!cval)
4379 continue;
4381 if (dv_is_value_p (list->dv))
4383 rtx val = dv_as_value (list->dv);
4384 if (!VALUE_RECURSED_INTO (val))
4385 continue;
4388 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4389 canonicalize_values_star (slot, set);
4390 if (*listp != list)
4391 list = NULL;
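/* For instance (hypothetical VALUEs): if register 2 carries both v5
   and v9 in SImode at offset 0, the first loop picks the more
   canonical of the two, say v5, as canon[SImode]; the second loop
   records the mutual equivalence between v9 and v5; and the final
   loop star-canonicalizes every affected entry.  */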
4396 /* Remove any redundant values in the location list of VAR, which must
4397 be unshared and 1-part. */
4399 static void
4400 remove_duplicate_values (variable var)
4402 location_chain node, *nodep;
4404 gcc_assert (var->onepart);
4405 gcc_assert (var->n_var_parts == 1);
4406 gcc_assert (var->refcount == 1);
4408 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4410 if (GET_CODE (node->loc) == VALUE)
4412 if (VALUE_RECURSED_INTO (node->loc))
4414 /* Remove duplicate value node. */
4415 *nodep = node->next;
4416 delete node;
4417 continue;
4419 else
4420 VALUE_RECURSED_INTO (node->loc) = true;
4422 nodep = &node->next;
4425 for (node = var->var_part[0].loc_chain; node; node = node->next)
4426 if (GET_CODE (node->loc) == VALUE)
4428 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4429 VALUE_RECURSED_INTO (node->loc) = false;
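/* E.g. (hypothetical chain): given the loc chain v7 -> (reg 1) -> v7,
   the first pass marks v7 via VALUE_RECURSED_INTO and so deletes the
   second v7 node as a duplicate; the second pass merely clears the
   marks again.  */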
4434 /* Hash table iteration argument passed to variable_post_merge. */
4435 struct dfset_post_merge
4437 /* The new input set for the current block. */
4438 dataflow_set *set;
4439 /* Pointer to the permanent input set for the current block, or
4440 NULL. */
4441 dataflow_set **permp;
4444 /* Create values for incoming expressions associated with one-part
4445 variables that don't have value numbers for them. */
4447 int
4448 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4450 dataflow_set *set = dfpm->set;
4451 variable var = *slot;
4452 location_chain node;
4454 if (!var->onepart || !var->n_var_parts)
4455 return 1;
4457 gcc_assert (var->n_var_parts == 1);
4459 if (dv_is_decl_p (var->dv))
4461 bool check_dupes = false;
4463 restart:
4464 for (node = var->var_part[0].loc_chain; node; node = node->next)
4466 if (GET_CODE (node->loc) == VALUE)
4467 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4468 else if (GET_CODE (node->loc) == REG)
4470 attrs att, *attp, *curp = NULL;
4472 if (var->refcount != 1)
4474 slot = unshare_variable (set, slot, var,
4475 VAR_INIT_STATUS_INITIALIZED);
4476 var = *slot;
4477 goto restart;
4480 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4481 attp = &att->next)
4482 if (att->offset == 0
4483 && GET_MODE (att->loc) == GET_MODE (node->loc))
4485 if (dv_is_value_p (att->dv))
4487 rtx cval = dv_as_value (att->dv);
4488 node->loc = cval;
4489 check_dupes = true;
4490 break;
4492 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4493 curp = attp;
4496 if (!curp)
4498 curp = attp;
4499 while (*curp)
4500 if ((*curp)->offset == 0
4501 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4502 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4503 break;
4504 else
4505 curp = &(*curp)->next;
4506 gcc_assert (*curp);
4509 if (!att)
4511 decl_or_value cdv;
4512 rtx cval;
4514 if (!*dfpm->permp)
4516 *dfpm->permp = XNEW (dataflow_set);
4517 dataflow_set_init (*dfpm->permp);
4520 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4521 att; att = att->next)
4522 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4524 gcc_assert (att->offset == 0
4525 && dv_is_value_p (att->dv));
4526 val_reset (set, att->dv);
4527 break;
4530 if (att)
4532 cdv = att->dv;
4533 cval = dv_as_value (cdv);
4535 else
4537 /* Create a unique value to hold this register,
4538 that ought to be found and reused in
4539 subsequent rounds. */
4540 cselib_val *v;
4541 gcc_assert (!cselib_lookup (node->loc,
4542 GET_MODE (node->loc), 0,
4543 VOIDmode));
4544 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4545 VOIDmode);
4546 cselib_preserve_value (v);
4547 cselib_invalidate_rtx (node->loc);
4548 cval = v->val_rtx;
4549 cdv = dv_from_value (cval);
4550 if (dump_file)
4551 fprintf (dump_file,
4552 "Created new value %u:%u for reg %i\n",
4553 v->uid, v->hash, REGNO (node->loc));
4556 var_reg_decl_set (*dfpm->permp, node->loc,
4557 VAR_INIT_STATUS_INITIALIZED,
4558 cdv, 0, NULL, INSERT);
4560 node->loc = cval;
4561 check_dupes = true;
4564 /* Remove the attribute referring to the decl, which now
4565 uses the value for the register, either already existing
4566 or to be added when we bring the perm set in. */
4567 att = *curp;
4568 *curp = att->next;
4569 delete att;
4573 if (check_dupes)
4574 remove_duplicate_values (var);
4577 return 1;
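/* As a hypothetical example: after a merge, decl D's loc chain holds
   a bare (reg:SI 4) with no VALUE recorded for that register at
   offset 0.  The code above then finds or creates a unique VALUE for
   it via cselib (reusing the permanent set across rounds), rewrites
   D's node to that VALUE, and drops the reg attribute that named D
   directly.  */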
4580 /* Reset values in the permanent set that are not associated with the
4581 chosen expression. */
4583 int
4584 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4586 dataflow_set *set = dfpm->set;
4587 variable pvar = *pslot, var;
4588 location_chain pnode;
4589 decl_or_value dv;
4590 attrs att;
4592 gcc_assert (dv_is_value_p (pvar->dv)
4593 && pvar->n_var_parts == 1);
4594 pnode = pvar->var_part[0].loc_chain;
4595 gcc_assert (pnode
4596 && !pnode->next
4597 && REG_P (pnode->loc));
4599 dv = pvar->dv;
4601 var = shared_hash_find (set->vars, dv);
4602 if (var)
4604 /* Although variable_post_merge_new_vals may have made decls
4605 non-star-canonical, values that pre-existed in canonical form
4606 remain canonical, and newly-created values reference a single
4607 REG, so they are canonical as well. Since VAR has the
4608 location list for a VALUE, using find_loc_in_1pdv for it is
4609 fine, since VALUEs don't map back to DECLs. */
4610 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4611 return 1;
4612 val_reset (set, dv);
4615 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4616 if (att->offset == 0
4617 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4618 && dv_is_value_p (att->dv))
4619 break;
4621 /* If there is a value associated with this register already, create
4622 an equivalence. */
4623 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4625 rtx cval = dv_as_value (att->dv);
4626 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4627 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4628 NULL, INSERT);
4630 else if (!att)
4632 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4633 dv, 0, pnode->loc);
4634 variable_union (pvar, set);
4637 return 1;
4640 /* After a merge: create values for new expressions, reset values in the
4641 permanent set, and canonicalize the values and variables in SET. */
4643 static void
4644 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4646 struct dfset_post_merge dfpm;
4648 dfpm.set = set;
4649 dfpm.permp = permp;
4651 shared_hash_htab (set->vars)
4652 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4653 if (*permp)
4654 shared_hash_htab ((*permp)->vars)
4655 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4656 shared_hash_htab (set->vars)
4657 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4658 shared_hash_htab (set->vars)
4659 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4662 /* Return a node whose loc is a MEM that refers to EXPR in the
4663 location list of a one-part variable or value VAR, or in that of
4664 any values recursively mentioned in the location lists. */
4666 static location_chain
4667 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4669 location_chain node;
4670 decl_or_value dv;
4671 variable var;
4672 location_chain where = NULL;
4674 if (!val)
4675 return NULL;
4677 gcc_assert (GET_CODE (val) == VALUE
4678 && !VALUE_RECURSED_INTO (val));
4680 dv = dv_from_value (val);
4681 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4683 if (!var)
4684 return NULL;
4686 gcc_assert (var->onepart);
4688 if (!var->n_var_parts)
4689 return NULL;
4691 VALUE_RECURSED_INTO (val) = true;
4693 for (node = var->var_part[0].loc_chain; node; node = node->next)
4694 if (MEM_P (node->loc)
4695 && MEM_EXPR (node->loc) == expr
4696 && INT_MEM_OFFSET (node->loc) == 0)
4698 where = node;
4699 break;
4701 else if (GET_CODE (node->loc) == VALUE
4702 && !VALUE_RECURSED_INTO (node->loc)
4703 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4704 break;
4706 VALUE_RECURSED_INTO (val) = false;
4708 return where;
4711 /* Return TRUE if the value of MEM may vary across a call. */
4713 static bool
4714 mem_dies_at_call (rtx mem)
4716 tree expr = MEM_EXPR (mem);
4717 tree decl;
4719 if (!expr)
4720 return true;
4722 decl = get_base_address (expr);
4724 if (!decl)
4725 return true;
4727 if (!DECL_P (decl))
4728 return true;
4730 return (may_be_aliased (decl)
4731 || (!TREE_READONLY (decl) && is_global_var (decl)));
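/* For example: a MEM whose base decl is global or may be aliased
   (say (mem (symbol_ref "g")) for "int g;") may change across a
   call, so this returns true; a MEM based on a non-aliased local
   returns false.  MEMs with no MEM_EXPR are conservatively assumed
   to die.  */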
4734 /* Remove all MEMs from the location list of a hash table entry for a
4735 one-part variable, except those whose MEM attributes map back to
4736 the variable itself, directly or within a VALUE. */
4738 int
4739 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4741 variable var = *slot;
4743 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4745 tree decl = dv_as_decl (var->dv);
4746 location_chain loc, *locp;
4747 bool changed = false;
4749 if (!var->n_var_parts)
4750 return 1;
4752 gcc_assert (var->n_var_parts == 1);
4754 if (shared_var_p (var, set->vars))
4756 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4758 /* We want to remove dying MEMs that don't refer to DECL. */
4759 if (GET_CODE (loc->loc) == MEM
4760 && (MEM_EXPR (loc->loc) != decl
4761 || INT_MEM_OFFSET (loc->loc) != 0)
4762 && !mem_dies_at_call (loc->loc))
4763 break;
4764 /* We want to move here MEMs that do refer to DECL. */
4765 else if (GET_CODE (loc->loc) == VALUE
4766 && find_mem_expr_in_1pdv (decl, loc->loc,
4767 shared_hash_htab (set->vars)))
4768 break;
4771 if (!loc)
4772 return 1;
4774 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4775 var = *slot;
4776 gcc_assert (var->n_var_parts == 1);
4779 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4780 loc; loc = *locp)
4782 rtx old_loc = loc->loc;
4783 if (GET_CODE (old_loc) == VALUE)
4785 location_chain mem_node
4786 = find_mem_expr_in_1pdv (decl, loc->loc,
4787 shared_hash_htab (set->vars));
4789 /* ??? This picks up only one out of multiple MEMs that
4790 refer to the same variable. Do we ever need to be
4791 concerned about dealing with more than one, or, given
4792 that they should all map to the same variable
4793 location, their addresses will have been merged and
4794 they will be regarded as equivalent? */
4795 if (mem_node)
4797 loc->loc = mem_node->loc;
4798 loc->set_src = mem_node->set_src;
4799 loc->init = MIN (loc->init, mem_node->init);
4803 if (GET_CODE (loc->loc) != MEM
4804 || (MEM_EXPR (loc->loc) == decl
4805 && INT_MEM_OFFSET (loc->loc) == 0)
4806 || !mem_dies_at_call (loc->loc))
4808 if (old_loc != loc->loc && emit_notes)
4810 if (old_loc == var->var_part[0].cur_loc)
4812 changed = true;
4813 var->var_part[0].cur_loc = NULL;
4816 locp = &loc->next;
4817 continue;
4820 if (emit_notes)
4822 if (old_loc == var->var_part[0].cur_loc)
4824 changed = true;
4825 var->var_part[0].cur_loc = NULL;
4828 *locp = loc->next;
4829 delete loc;
4832 if (!var->var_part[0].loc_chain)
4834 var->n_var_parts--;
4835 changed = true;
4837 if (changed)
4838 variable_was_changed (var, set);
4841 return 1;
4844 /* Remove all MEMs from the location list of a hash table entry for a
4845 value. */
4847 int
4848 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4850 variable var = *slot;
4852 if (var->onepart == ONEPART_VALUE)
4854 location_chain loc, *locp;
4855 bool changed = false;
4856 rtx cur_loc;
4858 gcc_assert (var->n_var_parts == 1);
4860 if (shared_var_p (var, set->vars))
4862 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4863 if (GET_CODE (loc->loc) == MEM
4864 && mem_dies_at_call (loc->loc))
4865 break;
4867 if (!loc)
4868 return 1;
4870 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4871 var = *slot;
4872 gcc_assert (var->n_var_parts == 1);
4875 if (VAR_LOC_1PAUX (var))
4876 cur_loc = VAR_LOC_FROM (var);
4877 else
4878 cur_loc = var->var_part[0].cur_loc;
4880 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4881 loc; loc = *locp)
4883 if (GET_CODE (loc->loc) != MEM
4884 || !mem_dies_at_call (loc->loc))
4886 locp = &loc->next;
4887 continue;
4890 *locp = loc->next;
4891 /* If we have deleted the location which was last emitted,
4892 we have to emit a new location, so add the variable to the
4893 set of changed variables. */
4894 if (cur_loc == loc->loc)
4896 changed = true;
4897 var->var_part[0].cur_loc = NULL;
4898 if (VAR_LOC_1PAUX (var))
4899 VAR_LOC_FROM (var) = NULL;
4901 delete loc;
4904 if (!var->var_part[0].loc_chain)
4906 var->n_var_parts--;
4907 changed = true;
4909 if (changed)
4910 variable_was_changed (var, set);
4913 return 1;
4916 /* Remove all variable-location information about call-clobbered
4917 registers, as well as associations between MEMs and VALUEs. */
4919 static void
4920 dataflow_set_clear_at_call (dataflow_set *set)
4922 unsigned int r;
4923 hard_reg_set_iterator hrsi;
4925 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4926 var_regno_delete (set, r);
4928 if (MAY_HAVE_DEBUG_INSNS)
4930 set->traversed_vars = set->vars;
4931 shared_hash_htab (set->vars)
4932 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4933 set->traversed_vars = set->vars;
4934 shared_hash_htab (set->vars)
4935 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4936 set->traversed_vars = NULL;
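/* E.g. at a call insn: the loop above drops the associations for
   every register in regs_invalidated_by_call; then, when debug insns
   are possible, MEMs that map back to their variable are first
   rescued by dataflow_set_preserve_mem_locs and the remaining dying
   MEMs are removed from VALUEs by dataflow_set_remove_mem_locs.  */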
4940 static bool
4941 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4943 location_chain lc1, lc2;
4945 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4947 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4949 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4951 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4952 break;
4954 if (rtx_equal_p (lc1->loc, lc2->loc))
4955 break;
4957 if (!lc2)
4958 return true;
4960 return false;
4963 /* Return true if one-part variables VAR1 and VAR2 are different.
4964 They must be in canonical order. */
4966 static bool
4967 onepart_variable_different_p (variable var1, variable var2)
4969 location_chain lc1, lc2;
4971 if (var1 == var2)
4972 return false;
4974 gcc_assert (var1->n_var_parts == 1
4975 && var2->n_var_parts == 1);
4977 lc1 = var1->var_part[0].loc_chain;
4978 lc2 = var2->var_part[0].loc_chain;
4980 gcc_assert (lc1 && lc2);
4982 while (lc1 && lc2)
4984 if (loc_cmp (lc1->loc, lc2->loc))
4985 return true;
4986 lc1 = lc1->next;
4987 lc2 = lc2->next;
4990 return lc1 != lc2;
4993 /* Return true if variables VAR1 and VAR2 are different. */
4995 static bool
4996 variable_different_p (variable var1, variable var2)
4998 int i;
5000 if (var1 == var2)
5001 return false;
5003 if (var1->onepart != var2->onepart)
5004 return true;
5006 if (var1->n_var_parts != var2->n_var_parts)
5007 return true;
5009 if (var1->onepart && var1->n_var_parts)
5011 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5012 && var1->n_var_parts == 1);
5013 /* One-part values have locations in a canonical order. */
5014 return onepart_variable_different_p (var1, var2);
5017 for (i = 0; i < var1->n_var_parts; i++)
5019 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5020 return true;
5021 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5022 return true;
5023 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5024 return true;
5026 return false;
5029 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5031 static bool
5032 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5034 variable_iterator_type hi;
5035 variable var1;
5037 if (old_set->vars == new_set->vars)
5038 return false;
5040 if (shared_hash_htab (old_set->vars)->elements ()
5041 != shared_hash_htab (new_set->vars)->elements ())
5042 return true;
5044 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5045 var1, variable, hi)
5047 variable_table_type *htab = shared_hash_htab (new_set->vars);
5048 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5049 if (!var2)
5051 if (dump_file && (dump_flags & TDF_DETAILS))
5053 fprintf (dump_file, "dataflow difference found: removal of:\n");
5054 dump_var (var1);
5056 return true;
5059 if (variable_different_p (var1, var2))
5061 if (dump_file && (dump_flags & TDF_DETAILS))
5063 fprintf (dump_file, "dataflow difference found: "
5064 "old and new follow:\n");
5065 dump_var (var1);
5066 dump_var (var2);
5068 return true;
5072 /* No need to traverse the second hashtab: if both have the same number
5073 of elements and every entry of the first one was found in the second,
5074 then the second can't have any extra entries. */
5075 return false;
5078 /* Free the contents of dataflow set SET. */
5080 static void
5081 dataflow_set_destroy (dataflow_set *set)
5083 int i;
5085 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5086 attrs_list_clear (&set->regs[i]);
5088 shared_hash_destroy (set->vars);
5089 set->vars = NULL;
5092 /* Return true if RTL X contains a SYMBOL_REF. */
5094 static bool
5095 contains_symbol_ref (rtx x)
5097 const char *fmt;
5098 RTX_CODE code;
5099 int i;
5101 if (!x)
5102 return false;
5104 code = GET_CODE (x);
5105 if (code == SYMBOL_REF)
5106 return true;
5108 fmt = GET_RTX_FORMAT (code);
5109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5111 if (fmt[i] == 'e')
5113 if (contains_symbol_ref (XEXP (x, i)))
5114 return true;
5116 else if (fmt[i] == 'E')
5118 int j;
5119 for (j = 0; j < XVECLEN (x, i); j++)
5120 if (contains_symbol_ref (XVECEXP (x, i, j)))
5121 return true;
5125 return false;
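/* E.g. contains_symbol_ref on (plus (symbol_ref "x") (const_int 4))
   returns true through the 'e' operand walk, whereas it returns
   false on (plus (reg 1) (const_int 4)).  */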
5128 /* Shall EXPR be tracked? */
5130 static bool
5131 track_expr_p (tree expr, bool need_rtl)
5133 rtx decl_rtl;
5134 tree realdecl;
5136 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5137 return DECL_RTL_SET_P (expr);
5139 /* If EXPR is not a parameter or a variable, do not track it. */
5140 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5141 return 0;
5143 /* It also must have a name... */
5144 if (!DECL_NAME (expr) && need_rtl)
5145 return 0;
5147 /* ... and an RTL assigned to it. */
5148 decl_rtl = DECL_RTL_IF_SET (expr);
5149 if (!decl_rtl && need_rtl)
5150 return 0;
5152 /* If this expression is really a debug alias of some other declaration, we
5153 don't need to track this expression if the ultimate declaration is
5154 ignored. */
5155 realdecl = expr;
5156 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5158 realdecl = DECL_DEBUG_EXPR (realdecl);
5159 if (!DECL_P (realdecl))
5161 if (handled_component_p (realdecl)
5162 || (TREE_CODE (realdecl) == MEM_REF
5163 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5165 HOST_WIDE_INT bitsize, bitpos, maxsize;
5166 tree innerdecl
5167 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5168 &maxsize);
5169 if (!DECL_P (innerdecl)
5170 || DECL_IGNORED_P (innerdecl)
5171 /* Do not track declarations for parts of tracked parameters
5172 since we want to track them as a whole instead. */
5173 || (TREE_CODE (innerdecl) == PARM_DECL
5174 && DECL_MODE (innerdecl) != BLKmode
5175 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5176 || TREE_STATIC (innerdecl)
5177 || bitsize <= 0
5178 || bitpos + bitsize > 256
5179 || bitsize != maxsize)
5180 return 0;
5181 else
5182 realdecl = expr;
5184 else
5185 return 0;
5189 /* Do not track EXPR if REALDECL should be ignored for debugging
5190 purposes. */
5191 if (DECL_IGNORED_P (realdecl))
5192 return 0;
5194 /* Do not track global variables until we are able to emit correct location
5195 lists for them. */
5196 if (TREE_STATIC (realdecl))
5197 return 0;
5199 /* When EXPR is a DECL for an alias of some variable (see example)
5200 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
5201 DECL_RTL contains a SYMBOL_REF.
5203 Example:
5204 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5205 char **_dl_argv;
5207 if (decl_rtl && MEM_P (decl_rtl)
5208 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5209 return 0;
5211 /* If the RTX is a memory, it should not be very large (because that
5212 would mean an array or a struct). */
5213 if (decl_rtl && MEM_P (decl_rtl))
5215 /* Do not track structures and arrays. */
5216 if (GET_MODE (decl_rtl) == BLKmode
5217 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5218 return 0;
5219 if (MEM_SIZE_KNOWN_P (decl_rtl)
5220 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5221 return 0;
5224 DECL_CHANGED (expr) = 0;
5225 DECL_CHANGED (realdecl) = 0;
5226 return 1;
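/* Illustratively: a named local scalar whose DECL_RTL is a register
   or a small stack slot passes the tests above and is tracked, while
   a global, an unnamed temporary, a BLKmode or aggregate-typed MEM,
   or a MEM larger than MAX_VAR_PARTS is rejected.  */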
5229 /* Determine whether a given LOC refers to the same variable part as
5230 EXPR+OFFSET. */
5232 static bool
5233 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5235 tree expr2;
5236 HOST_WIDE_INT offset2;
5238 if (! DECL_P (expr))
5239 return false;
5241 if (REG_P (loc))
5243 expr2 = REG_EXPR (loc);
5244 offset2 = REG_OFFSET (loc);
5246 else if (MEM_P (loc))
5248 expr2 = MEM_EXPR (loc);
5249 offset2 = INT_MEM_OFFSET (loc);
5251 else
5252 return false;
5254 if (! expr2 || ! DECL_P (expr2))
5255 return false;
5257 expr = var_debug_decl (expr);
5258 expr2 = var_debug_decl (expr2);
5260 return (expr == expr2 && offset == offset2);
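/* For instance: with LOC = (reg:SI 3) where REG_EXPR is X and
   REG_OFFSET is 0, same_variable_part_p (loc, X, 0) is true after
   both decls are mapped through var_debug_decl; an offset mismatch
   or a non-decl expression yields false.  */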
5263 /* LOC is a REG or MEM that we would like to track if possible.
5264 If EXPR is null, we don't know what expression LOC refers to,
5265 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5266 LOC is an lvalue register.
5268 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5269 is something we can track. When returning true, store the mode of
5270 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5271 from EXPR in *OFFSET_OUT (if nonnull). */
5273 static bool
5274 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5275 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5277 machine_mode mode;
5279 if (expr == NULL || !track_expr_p (expr, true))
5280 return false;
5282 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5283 whole subreg, but only the old inner part is really relevant. */
5284 mode = GET_MODE (loc);
5285 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5287 machine_mode pseudo_mode;
5289 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5290 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5292 offset += byte_lowpart_offset (pseudo_mode, mode);
5293 mode = pseudo_mode;
5297 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5298 Do the same if we are storing to a register and EXPR occupies
5299 the whole of register LOC; in that case, the whole of EXPR is
5300 being changed. We exclude complex modes from the second case
5301 because the real and imaginary parts are represented as separate
5302 pseudo registers, even if the whole complex value fits into one
5303 hard register. */
5304 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5305 || (store_reg_p
5306 && !COMPLEX_MODE_P (DECL_MODE (expr))
5307 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5308 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5310 mode = DECL_MODE (expr);
5311 offset = 0;
5314 if (offset < 0 || offset >= MAX_VAR_PARTS)
5315 return false;
5317 if (mode_out)
5318 *mode_out = mode;
5319 if (offset_out)
5320 *offset_out = offset;
5321 return true;
5324 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5325 want to track. When returning nonnull, make sure that the attributes
5326 on the returned value are updated. */
5328 static rtx
5329 var_lowpart (machine_mode mode, rtx loc)
5331 unsigned int offset, reg_offset, regno;
5333 if (GET_MODE (loc) == mode)
5334 return loc;
5336 if (!REG_P (loc) && !MEM_P (loc))
5337 return NULL;
5339 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5341 if (MEM_P (loc))
5342 return adjust_address_nv (loc, mode, offset);
5344 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5345 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5346 reg_offset, mode);
5347 return gen_rtx_REG_offset (loc, mode, regno, offset);
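/* E.g. (hypothetical, assuming the lowpart is at offset 0):
   var_lowpart (SImode, (reg:DI 5)) yields (reg:SI 5) with its
   attributes updated by gen_rtx_REG_offset, and for a MEM the
   address is adjusted by adjust_address_nv instead.  */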
5350 /* Carry information about uses and stores while walking rtx. */
5352 struct count_use_info
5354 /* The insn where the RTX is. */
5355 rtx_insn *insn;
5357 /* The basic block where insn is. */
5358 basic_block bb;
5360 /* The array of n_sets sets in the insn, as determined by cselib. */
5361 struct cselib_set *sets;
5362 int n_sets;
5364 /* True if we're counting stores, false otherwise. */
5365 bool store_p;
5368 /* Find a VALUE corresponding to X. */
5370 static inline cselib_val *
5371 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5373 int i;
5375 if (cui->sets)
5377 /* This is called after uses are set up and before stores are
5378 processed by cselib, so it's safe to look up srcs, but not
5379 dsts. So we look up expressions that appear in srcs or in
5380 dest expressions, but we search the sets array for dests of
5381 stores. */
5382 if (cui->store_p)
5384 /* Some targets represent memset and memcpy patterns
5385 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5386 (set (mem:BLK ...) (const_int ...)) or
5387 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5388 in that case, otherwise we end up with mode mismatches. */
5389 if (mode == BLKmode && MEM_P (x))
5390 return NULL;
5391 for (i = 0; i < cui->n_sets; i++)
5392 if (cui->sets[i].dest == x)
5393 return cui->sets[i].src_elt;
5395 else
5396 return cselib_lookup (x, mode, 0, VOIDmode);
5399 return NULL;
5402 /* Replace all registers and addresses in an expression with VALUE
5403 expressions that map back to them, unless the expression is a
5404 register. If no mapping is or can be performed, returns NULL. */
5406 static rtx
5407 replace_expr_with_values (rtx loc)
5409 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5410 return NULL;
5411 else if (MEM_P (loc))
5413 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5414 get_address_mode (loc), 0,
5415 GET_MODE (loc));
5416 if (addr)
5417 return replace_equiv_address_nv (loc, addr->val_rtx);
5418 else
5419 return NULL;
5421 else
5422 return cselib_subst_to_values (loc, VOIDmode);
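/* For example: a (mem:SI (reg 6)) whose address has a known VALUE v3
   becomes (mem:SI (value v3)) via replace_equiv_address_nv; a bare
   REG or ENTRY_VALUE yields NULL, and anything else is substituted
   recursively by cselib_subst_to_values.  */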
5425 /* Return true if X contains a DEBUG_EXPR. */
5427 static bool
5428 rtx_debug_expr_p (const_rtx x)
5430 subrtx_iterator::array_type array;
5431 FOR_EACH_SUBRTX (iter, array, x, ALL)
5432 if (GET_CODE (*iter) == DEBUG_EXPR)
5433 return true;
5434 return false;
5437 /* Determine what kind of micro operation to choose for a USE. Return
5438 MO_CLOBBER if no micro operation is to be generated. */
5440 static enum micro_operation_type
5441 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5443 tree expr;
5445 if (cui && cui->sets)
5447 if (GET_CODE (loc) == VAR_LOCATION)
5449 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5451 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5452 if (! VAR_LOC_UNKNOWN_P (ploc))
5454 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5455 VOIDmode);
5457 /* ??? flag_float_store and volatile mems are never
5458 given values, but we could in theory use them for
5459 locations. */
5460 gcc_assert (val || 1);
5462 return MO_VAL_LOC;
5464 else
5465 return MO_CLOBBER;
5468 if (REG_P (loc) || MEM_P (loc))
5470 if (modep)
5471 *modep = GET_MODE (loc);
5472 if (cui->store_p)
5474 if (REG_P (loc)
5475 || (find_use_val (loc, GET_MODE (loc), cui)
5476 && cselib_lookup (XEXP (loc, 0),
5477 get_address_mode (loc), 0,
5478 GET_MODE (loc))))
5479 return MO_VAL_SET;
5481 else
5483 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5485 if (val && !cselib_preserved_value_p (val))
5486 return MO_VAL_USE;
5491 if (REG_P (loc))
5493 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5495 if (loc == cfa_base_rtx)
5496 return MO_CLOBBER;
5497 expr = REG_EXPR (loc);
5499 if (!expr)
5500 return MO_USE_NO_VAR;
5501 else if (target_for_debug_bind (var_debug_decl (expr)))
5502 return MO_CLOBBER;
5503 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5504 false, modep, NULL))
5505 return MO_USE;
5506 else
5507 return MO_USE_NO_VAR;
5509 else if (MEM_P (loc))
5511 expr = MEM_EXPR (loc);
5513 if (!expr)
5514 return MO_CLOBBER;
5515 else if (target_for_debug_bind (var_debug_decl (expr)))
5516 return MO_CLOBBER;
5517 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5518 false, modep, NULL)
5519 /* Multi-part variables shouldn't refer to one-part
5520 variable names such as VALUEs (never happens) or
5521 DEBUG_EXPRs (only happens in the presence of debug
5522 insns). */
5523 && (!MAY_HAVE_DEBUG_INSNS
5524 || !rtx_debug_expr_p (XEXP (loc, 0))))
5525 return MO_USE;
5526 else
5527 return MO_CLOBBER;
5530 return MO_CLOBBER;
5533 /* Log to OUT information about micro-operation MOPT involving X in
5534 INSN of BB. */
5536 static inline void
5537 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5538 enum micro_operation_type mopt, FILE *out)
5540 fprintf (out, "bb %i op %i insn %i %s ",
5541 bb->index, VTI (bb)->mos.length (),
5542 INSN_UID (insn), micro_operation_type_name[mopt]);
5543 print_inline_rtx (out, x, 2);
5544 fputc ('\n', out);
5547 /* Tell whether the CONCAT used to hold a VALUE and its location
5548 needs value resolution, i.e., an attempt at mapping the location
5549 back to other incoming values. */
5550 #define VAL_NEEDS_RESOLUTION(x) \
5551 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5552 /* Whether the location in the CONCAT is a tracked expression, which
5553 should also be handled like a MO_USE. */
5554 #define VAL_HOLDS_TRACK_EXPR(x) \
5555 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5556 /* Whether the location in the CONCAT should be handled like a MO_COPY
5557 as well. */
5558 #define VAL_EXPR_IS_COPIED(x) \
5559 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5560 /* Whether the location in the CONCAT should be handled like a
5561 MO_CLOBBER as well. */
5562 #define VAL_EXPR_IS_CLOBBERED(x) \
5563 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5565 /* All preserved VALUEs. */
5566 static vec<rtx> preserved_values;
5568 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5570 static void
5571 preserve_value (cselib_val *val)
5573 cselib_preserve_value (val);
5574 preserved_values.safe_push (val->val_rtx);
5577 /* Helper function for MO_VAL_LOC handling. Return nonzero if we
5578 discover any rtx that is not suitable for CONST use and was not
5579 replaced by a VALUE. */
5581 static bool
5582 non_suitable_const (const_rtx x)
5584 subrtx_iterator::array_type array;
5585 FOR_EACH_SUBRTX (iter, array, x, ALL)
5587 const_rtx x = *iter;
5588 switch (GET_CODE (x))
5590 case REG:
5591 case DEBUG_EXPR:
5592 case PC:
5593 case SCRATCH:
5594 case CC0:
5595 case ASM_INPUT:
5596 case ASM_OPERANDS:
5597 return true;
5598 case MEM:
5599 if (!MEM_READONLY_P (x))
5600 return true;
5601 break;
5602 default:
5603 break;
5606 return false;
5609 /* Add uses (register and memory references) LOC which will be tracked
5610 to VTI (bb)->mos. */
5612 static void
5613 add_uses (rtx loc, struct count_use_info *cui)
5615 machine_mode mode = VOIDmode;
5616 enum micro_operation_type type = use_type (loc, cui, &mode);
5618 if (type != MO_CLOBBER)
5620 basic_block bb = cui->bb;
5621 micro_operation mo;
5623 mo.type = type;
5624 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5625 mo.insn = cui->insn;
5627 if (type == MO_VAL_LOC)
5629 rtx oloc = loc;
5630 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5631 cselib_val *val;
5633 gcc_assert (cui->sets);
5635 if (MEM_P (vloc)
5636 && !REG_P (XEXP (vloc, 0))
5637 && !MEM_P (XEXP (vloc, 0)))
5639 rtx mloc = vloc;
5640 machine_mode address_mode = get_address_mode (mloc);
5641 cselib_val *val
5642 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5643 GET_MODE (mloc));
5645 if (val && !cselib_preserved_value_p (val))
5646 preserve_value (val);
5649 if (CONSTANT_P (vloc)
5650 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5651 /* For constants don't look up any value. */;
5652 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5653 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5655 machine_mode mode2;
5656 enum micro_operation_type type2;
5657 rtx nloc = NULL;
5658 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5660 if (resolvable)
5661 nloc = replace_expr_with_values (vloc);
5663 if (nloc)
5665 oloc = shallow_copy_rtx (oloc);
5666 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5669 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5671 type2 = use_type (vloc, 0, &mode2);
5673 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5674 || type2 == MO_CLOBBER);
5676 if (type2 == MO_CLOBBER
5677 && !cselib_preserved_value_p (val))
5679 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5680 preserve_value (val);
5683 else if (!VAR_LOC_UNKNOWN_P (vloc))
5685 oloc = shallow_copy_rtx (oloc);
5686 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5689 mo.u.loc = oloc;
5691 else if (type == MO_VAL_USE)
5693 machine_mode mode2 = VOIDmode;
5694 enum micro_operation_type type2;
5695 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5696 rtx vloc, oloc = loc, nloc;
5698 gcc_assert (cui->sets);
5700 if (MEM_P (oloc)
5701 && !REG_P (XEXP (oloc, 0))
5702 && !MEM_P (XEXP (oloc, 0)))
5704 rtx mloc = oloc;
5705 machine_mode address_mode = get_address_mode (mloc);
5706 cselib_val *val
5707 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5708 GET_MODE (mloc));
5710 if (val && !cselib_preserved_value_p (val))
5711 preserve_value (val);
5714 type2 = use_type (loc, 0, &mode2);
5716 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5717 || type2 == MO_CLOBBER);
5719 if (type2 == MO_USE)
5720 vloc = var_lowpart (mode2, loc);
5721 else
5722 vloc = oloc;
5724 /* The loc of a MO_VAL_USE may have two forms:
5726 (concat val src): val is at src, a value-based
5727 representation.
5729 (concat (concat val use) src): same as above, with use as
5730 the MO_USE tracked value, if it differs from src. */
5734 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5735 nloc = replace_expr_with_values (loc);
5736 if (!nloc)
5737 nloc = oloc;
5739 if (vloc != nloc)
5740 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5741 else
5742 oloc = val->val_rtx;
5744 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5746 if (type2 == MO_USE)
5747 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5748 if (!cselib_preserved_value_p (val))
5750 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5751 preserve_value (val);
5754 else
5755 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5757 if (dump_file && (dump_flags & TDF_DETAILS))
5758 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5759 VTI (bb)->mos.safe_push (mo);
5763 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5765 static void
5766 add_uses_1 (rtx *x, void *cui)
5768 subrtx_var_iterator::array_type array;
5769 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5770 add_uses (*iter, (struct count_use_info *) cui);
5773 /* This is the value used during expansion of locations. We want it
5774 to be unbounded, so that variables expanded deep in a recursion
5775 nest are fully evaluated, so that their values are cached
5776 correctly. We avoid recursion cycles through other means, and we
5777 don't unshare RTL, so excess complexity is not a problem. */
5778 #define EXPR_DEPTH (INT_MAX)
5779 /* We use this to keep too-complex expressions from being emitted as
5780 location notes, and then into debug information. Users can trade
5781 compile time for ridiculously complex expressions, although they're
5782 seldom useful, and they may often have to be discarded as not
5783 representable anyway. */
5784 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5786 /* Attempt to reverse the EXPR operation in the debug info and record
5787 it in the cselib table. E.g. for reg1 = reg2 + 6, even when reg2 is
5788 no longer live we can express its value as VAL - 6. */
5790 static void
5791 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5793 rtx src, arg, ret;
5794 cselib_val *v;
5795 struct elt_loc_list *l;
5796 enum rtx_code code;
5797 int count;
5799 if (GET_CODE (expr) != SET)
5800 return;
5802 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5803 return;
5805 src = SET_SRC (expr);
5806 switch (GET_CODE (src))
5808 case PLUS:
5809 case MINUS:
5810 case XOR:
5811 case NOT:
5812 case NEG:
5813 if (!REG_P (XEXP (src, 0)))
5814 return;
5815 break;
5816 case SIGN_EXTEND:
5817 case ZERO_EXTEND:
5818 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5819 return;
5820 break;
5821 default:
5822 return;
5825 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5826 return;
5828 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5829 if (!v || !cselib_preserved_value_p (v))
5830 return;
5832 /* Use canonical V to avoid creating multiple redundant expressions
5833 for different VALUES equivalent to V. */
5834 v = canonical_cselib_val (v);
5836 /* Adding a reverse op isn't useful if V already has an always valid
5837 location. Ignore ENTRY_VALUE: while it is always constant, we should
5838 prefer non-ENTRY_VALUE locations whenever possible. */
5839 for (l = v->locs, count = 0; l; l = l->next, count++)
5840 if (CONSTANT_P (l->loc)
5841 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5842 return;
5843 /* Avoid creating overly large loc lists. */
5844 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5845 return;
5847 switch (GET_CODE (src))
5849 case NOT:
5850 case NEG:
5851 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5852 return;
5853 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5854 break;
5855 case SIGN_EXTEND:
5856 case ZERO_EXTEND:
5857 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5858 break;
5859 case XOR:
5860 code = XOR;
5861 goto binary;
5862 case PLUS:
5863 code = MINUS;
5864 goto binary;
5865 case MINUS:
5866 code = PLUS;
5867 goto binary;
5868 binary:
5869 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5870 return;
5871 arg = XEXP (src, 1);
5872 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5874 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5875 if (arg == NULL_RTX)
5876 return;
5877 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5878 return;
5880 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5881 if (ret == val)
5882 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5883 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5884 breaks a lot of routines during var-tracking. */
5885 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5886 break;
5887 default:
5888 gcc_unreachable ();
5891 cselib_add_permanent_equiv (v, ret, insn);
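/* Besides the PLUS example above, a hypothetical extension case: for
   (set (reg:SI 1) (sign_extend:SI (reg:HI 2))), the old value of
   register 2 is recoverable as the low part of the new VALUE, so a
   permanent equivalence to a lowpart SUBREG of VAL is recorded for
   it.  */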
5894 /* Add stores (register and memory references) LOC which will be tracked
5895 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5896 CUIP->insn is instruction which the LOC is part of. */
5898 static void
5899 add_stores (rtx loc, const_rtx expr, void *cuip)
5901 machine_mode mode = VOIDmode, mode2;
5902 struct count_use_info *cui = (struct count_use_info *)cuip;
5903 basic_block bb = cui->bb;
5904 micro_operation mo;
5905 rtx oloc = loc, nloc, src = NULL;
5906 enum micro_operation_type type = use_type (loc, cui, &mode);
5907 bool track_p = false;
5908 cselib_val *v;
5909 bool resolve, preserve;
5911 if (type == MO_CLOBBER)
5912 return;
5914 mode2 = mode;
5916 if (REG_P (loc))
5918 gcc_assert (loc != cfa_base_rtx);
5919 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5920 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5921 || GET_CODE (expr) == CLOBBER)
5923 mo.type = MO_CLOBBER;
5924 mo.u.loc = loc;
5925 if (GET_CODE (expr) == SET
5926 && SET_DEST (expr) == loc
5927 && !unsuitable_loc (SET_SRC (expr))
5928 && find_use_val (loc, mode, cui))
5930 gcc_checking_assert (type == MO_VAL_SET);
5931 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5934 else
5936 if (GET_CODE (expr) == SET
5937 && SET_DEST (expr) == loc
5938 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5939 src = var_lowpart (mode2, SET_SRC (expr));
5940 loc = var_lowpart (mode2, loc);
5942 if (src == NULL)
5944 mo.type = MO_SET;
5945 mo.u.loc = loc;
5947 else
5949 rtx xexpr = gen_rtx_SET (loc, src);
5950 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5952 /* If this is an instruction copying (part of) a parameter
5953 passed by invisible reference to its register location,
5954 pretend it's a SET so that the initial memory location
5955 is discarded, as the parameter register can be reused
5956 for other purposes and we do not track locations based
5957 on generic registers. */
5958 if (MEM_P (src)
5959 && REG_EXPR (loc)
5960 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5961 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5962 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5963 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5964 != arg_pointer_rtx)
5965 mo.type = MO_SET;
5966 else
5967 mo.type = MO_COPY;
5969 else
5970 mo.type = MO_SET;
5971 mo.u.loc = xexpr;
5974 mo.insn = cui->insn;
5976 else if (MEM_P (loc)
5977 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5978 || cui->sets))
5980 if (MEM_P (loc) && type == MO_VAL_SET
5981 && !REG_P (XEXP (loc, 0))
5982 && !MEM_P (XEXP (loc, 0)))
5984 rtx mloc = loc;
5985 machine_mode address_mode = get_address_mode (mloc);
5986 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5987 address_mode, 0,
5988 GET_MODE (mloc));
5990 if (val && !cselib_preserved_value_p (val))
5991 preserve_value (val);
5994 if (GET_CODE (expr) == CLOBBER || !track_p)
5996 mo.type = MO_CLOBBER;
5997 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5999 else
6001 if (GET_CODE (expr) == SET
6002 && SET_DEST (expr) == loc
6003 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6004 src = var_lowpart (mode2, SET_SRC (expr));
6005 loc = var_lowpart (mode2, loc);
6007 if (src == NULL)
6009 mo.type = MO_SET;
6010 mo.u.loc = loc;
6012 else
6014 rtx xexpr = gen_rtx_SET (loc, src);
6015 if (same_variable_part_p (SET_SRC (xexpr),
6016 MEM_EXPR (loc),
6017 INT_MEM_OFFSET (loc)))
6018 mo.type = MO_COPY;
6019 else
6020 mo.type = MO_SET;
6021 mo.u.loc = xexpr;
6024 mo.insn = cui->insn;
6026 else
6027 return;
6029 if (type != MO_VAL_SET)
6030 goto log_and_return;
6032 v = find_use_val (oloc, mode, cui);
6034 if (!v)
6035 goto log_and_return;
6037 resolve = preserve = !cselib_preserved_value_p (v);
6039 /* We cannot track values for multiple-part variables, so we track only
6040 locations for tracked parameters passed either by invisible reference
6041 or directly in multiple locations. */
6042 if (track_p
6043 && REG_P (loc)
6044 && REG_EXPR (loc)
6045 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6046 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6047 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6048 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6049 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6050 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6051 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6053 /* Although we don't use the value here, it could be used later by the
6054 mere virtue of its existence as the operand of the reverse operation
6055 that gave rise to it (typically extension/truncation). Make sure it
6056 is preserved as required by vt_expand_var_loc_chain. */
6057 if (preserve)
6058 preserve_value (v);
6059 goto log_and_return;
6062 if (loc == stack_pointer_rtx
6063 && hard_frame_pointer_adjustment != -1
6064 && preserve)
6065 cselib_set_value_sp_based (v);
6067 nloc = replace_expr_with_values (oloc);
6068 if (nloc)
6069 oloc = nloc;
6071 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6073 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6075 if (oval == v)
6076 return;
6077 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6079 if (oval && !cselib_preserved_value_p (oval))
6081 micro_operation moa;
6083 preserve_value (oval);
6085 moa.type = MO_VAL_USE;
6086 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6087 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6088 moa.insn = cui->insn;
6090 if (dump_file && (dump_flags & TDF_DETAILS))
6091 log_op_type (moa.u.loc, cui->bb, cui->insn,
6092 moa.type, dump_file);
6093 VTI (bb)->mos.safe_push (moa);
6096 resolve = false;
6098 else if (resolve && GET_CODE (mo.u.loc) == SET)
6100 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6101 nloc = replace_expr_with_values (SET_SRC (expr));
6102 else
6103 nloc = NULL_RTX;
6105 /* Avoid the mode mismatch between oexpr and expr. */
6106 if (!nloc && mode != mode2)
6108 nloc = SET_SRC (expr);
6109 gcc_assert (oloc == SET_DEST (expr));
6112 if (nloc && nloc != SET_SRC (mo.u.loc))
6113 oloc = gen_rtx_SET (oloc, nloc);
6114 else
6116 if (oloc == SET_DEST (mo.u.loc))
6117 /* No point in duplicating. */
6118 oloc = mo.u.loc;
6119 if (!REG_P (SET_SRC (mo.u.loc)))
6120 resolve = false;
6123 else if (!resolve)
6125 if (GET_CODE (mo.u.loc) == SET
6126 && oloc == SET_DEST (mo.u.loc))
6127 /* No point in duplicating. */
6128 oloc = mo.u.loc;
6130 else
6131 resolve = false;
6133 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6135 if (mo.u.loc != oloc)
6136 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6138 /* The loc of a MO_VAL_SET may have various forms:
6140 (concat val dst): dst now holds val
6142 (concat val (set dst src)): dst now holds val, copied from src
6144 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6145 after replacing mems and non-top-level regs with values.
6147 (concat (concat val dstv) (set dst src)): dst now holds val,
6148 copied from src. dstv is a value-based representation of dst, if
6149 it differs from dst. If resolution is needed, src is a REG, and
6150 its mode is the same as that of val.
6152 (concat (concat val (set dstv srcv)) (set dst src)): src
6153 copied to dst, holding val. dstv and srcv are value-based
6154 representations of dst and src, respectively. */
6158 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6159 reverse_op (v->val_rtx, expr, cui->insn);
6161 mo.u.loc = loc;
6163 if (track_p)
6164 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6165 if (preserve)
6167 VAL_NEEDS_RESOLUTION (loc) = resolve;
6168 preserve_value (v);
6170 if (mo.type == MO_CLOBBER)
6171 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6172 if (mo.type == MO_COPY)
6173 VAL_EXPR_IS_COPIED (loc) = 1;
6175 mo.type = MO_VAL_SET;
6177 log_and_return:
6178 if (dump_file && (dump_flags & TDF_DETAILS))
6179 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6180 VTI (bb)->mos.safe_push (mo);
6183 /* Arguments to the call. */
6184 static rtx call_arguments;
6186 /* Compute call_arguments. */
6188 static void
6189 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6191 rtx link, x, call;
6192 rtx prev, cur, next;
6193 rtx this_arg = NULL_RTX;
6194 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6195 tree obj_type_ref = NULL_TREE;
6196 CUMULATIVE_ARGS args_so_far_v;
6197 cumulative_args_t args_so_far;
6199 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6200 args_so_far = pack_cumulative_args (&args_so_far_v);
6201 call = get_call_rtx_from (insn);
6202 if (call)
6204 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6206 rtx symbol = XEXP (XEXP (call, 0), 0);
6207 if (SYMBOL_REF_DECL (symbol))
6208 fndecl = SYMBOL_REF_DECL (symbol);
6210 if (fndecl == NULL_TREE)
6211 fndecl = MEM_EXPR (XEXP (call, 0));
6212 if (fndecl
6213 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6214 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6215 fndecl = NULL_TREE;
6216 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6217 type = TREE_TYPE (fndecl);
6218 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6220 if (TREE_CODE (fndecl) == INDIRECT_REF
6221 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6222 obj_type_ref = TREE_OPERAND (fndecl, 0);
6223 fndecl = NULL_TREE;
6225 if (type)
6227 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6228 t = TREE_CHAIN (t))
6229 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6230 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6231 break;
6232 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6233 type = NULL;
6234 else
6236 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6237 link = CALL_INSN_FUNCTION_USAGE (insn);
6238 #ifndef PCC_STATIC_STRUCT_RETURN
6239 if (aggregate_value_p (TREE_TYPE (type), type)
6240 && targetm.calls.struct_value_rtx (type, 0) == 0)
6242 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6243 machine_mode mode = TYPE_MODE (struct_addr);
6244 rtx reg;
6245 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6246 nargs + 1);
6247 reg = targetm.calls.function_arg (args_so_far, mode,
6248 struct_addr, true);
6249 targetm.calls.function_arg_advance (args_so_far, mode,
6250 struct_addr, true);
6251 if (reg == NULL_RTX)
6253 for (; link; link = XEXP (link, 1))
6254 if (GET_CODE (XEXP (link, 0)) == USE
6255 && MEM_P (XEXP (XEXP (link, 0), 0)))
6257 link = XEXP (link, 1);
6258 break;
6262 else
6263 #endif
6264 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6265 nargs);
6266 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6268 machine_mode mode;
6269 t = TYPE_ARG_TYPES (type);
6270 mode = TYPE_MODE (TREE_VALUE (t));
6271 this_arg = targetm.calls.function_arg (args_so_far, mode,
6272 TREE_VALUE (t), true);
6273 if (this_arg && !REG_P (this_arg))
6274 this_arg = NULL_RTX;
6275 else if (this_arg == NULL_RTX)
6277 for (; link; link = XEXP (link, 1))
6278 if (GET_CODE (XEXP (link, 0)) == USE
6279 && MEM_P (XEXP (XEXP (link, 0), 0)))
6281 this_arg = XEXP (XEXP (link, 0), 0);
6282 break;
6289 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6291 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6292 if (GET_CODE (XEXP (link, 0)) == USE)
6294 rtx item = NULL_RTX;
6295 x = XEXP (XEXP (link, 0), 0);
6296 if (GET_MODE (link) == VOIDmode
6297 || GET_MODE (link) == BLKmode
6298 || (GET_MODE (link) != GET_MODE (x)
6299 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6300 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6301 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6302 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6303 /* Can't do anything for these, if the original type mode
6304 isn't known or can't be converted. */;
6305 else if (REG_P (x))
6307 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6308 if (val && cselib_preserved_value_p (val))
6309 item = val->val_rtx;
6310 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6311 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6313 machine_mode mode = GET_MODE (x);
6315 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6316 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6318 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6320 if (reg == NULL_RTX || !REG_P (reg))
6321 continue;
6322 val = cselib_lookup (reg, mode, 0, VOIDmode);
6323 if (val && cselib_preserved_value_p (val))
6325 item = val->val_rtx;
6326 break;
6331 else if (MEM_P (x))
6333 rtx mem = x;
6334 cselib_val *val;
6336 if (!frame_pointer_needed)
6338 struct adjust_mem_data amd;
6339 amd.mem_mode = VOIDmode;
6340 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6341 amd.side_effects = NULL;
6342 amd.store = true;
6343 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6344 &amd);
6345 gcc_assert (amd.side_effects == NULL_RTX);
6347 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6348 if (val && cselib_preserved_value_p (val))
6349 item = val->val_rtx;
6350 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6351 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6353 /* For a non-integer stack argument, also check whether it
6354 wasn't initialized by an integer. */
6355 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6356 if (imode != GET_MODE (mem) && imode != BLKmode)
6358 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6359 imode, 0, VOIDmode);
6360 if (val && cselib_preserved_value_p (val))
6361 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6362 imode);
6366 if (item)
6368 rtx x2 = x;
6369 if (GET_MODE (item) != GET_MODE (link))
6370 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6371 if (GET_MODE (x2) != GET_MODE (link))
6372 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6373 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6374 call_arguments
6375 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6377 if (t && t != void_list_node)
6379 tree argtype = TREE_VALUE (t);
6380 machine_mode mode = TYPE_MODE (argtype);
6381 rtx reg;
6382 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6384 argtype = build_pointer_type (argtype);
6385 mode = TYPE_MODE (argtype);
6387 reg = targetm.calls.function_arg (args_so_far, mode,
6388 argtype, true);
6389 if (TREE_CODE (argtype) == REFERENCE_TYPE
6390 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6391 && reg
6392 && REG_P (reg)
6393 && GET_MODE (reg) == mode
6394 && (GET_MODE_CLASS (mode) == MODE_INT
6395 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6396 && REG_P (x)
6397 && REGNO (x) == REGNO (reg)
6398 && GET_MODE (x) == mode
6399 && item)
6401 machine_mode indmode
6402 = TYPE_MODE (TREE_TYPE (argtype));
6403 rtx mem = gen_rtx_MEM (indmode, x);
6404 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6405 if (val && cselib_preserved_value_p (val))
6407 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6408 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6409 call_arguments);
6411 else
6413 struct elt_loc_list *l;
6414 tree initial;
6416 /* Try harder: when passing the address of a constant
6417 pool integer, it can easily be read back. */
6418 item = XEXP (item, 1);
6419 if (GET_CODE (item) == SUBREG)
6420 item = SUBREG_REG (item);
6421 gcc_assert (GET_CODE (item) == VALUE);
6422 val = CSELIB_VAL_PTR (item);
6423 for (l = val->locs; l; l = l->next)
6424 if (GET_CODE (l->loc) == SYMBOL_REF
6425 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6426 && SYMBOL_REF_DECL (l->loc)
6427 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6429 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6430 if (tree_fits_shwi_p (initial))
6432 item = GEN_INT (tree_to_shwi (initial));
6433 item = gen_rtx_CONCAT (indmode, mem, item);
6434 call_arguments
6435 = gen_rtx_EXPR_LIST (VOIDmode, item,
6436 call_arguments);
6438 break;
6442 targetm.calls.function_arg_advance (args_so_far, mode,
6443 argtype, true);
6444 t = TREE_CHAIN (t);
6448 /* Add debug arguments. */
6449 if (fndecl
6450 && TREE_CODE (fndecl) == FUNCTION_DECL
6451 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6453 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6454 if (debug_args)
6456 unsigned int ix;
6457 tree param;
6458 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6460 rtx item;
6461 tree dtemp = (**debug_args)[ix + 1];
6462 machine_mode mode = DECL_MODE (dtemp);
6463 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6464 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6465 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6466 call_arguments);
6471 /* Reverse call_arguments chain. */
6472 prev = NULL_RTX;
6473 for (cur = call_arguments; cur; cur = next)
6475 next = XEXP (cur, 1);
6476 XEXP (cur, 1) = prev;
6477 prev = cur;
6479 call_arguments = prev;
6481 x = get_call_rtx_from (insn);
6482 if (x)
6484 x = XEXP (XEXP (x, 0), 0);
6485 if (GET_CODE (x) == SYMBOL_REF)
6486 /* Don't record anything. */;
6487 else if (CONSTANT_P (x))
6489 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6490 pc_rtx, x);
6491 call_arguments
6492 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6494 else
6496 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6497 if (val && cselib_preserved_value_p (val))
6499 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6500 call_arguments
6501 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6505 if (this_arg)
6507 machine_mode mode
6508 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6509 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6510 HOST_WIDE_INT token
6511 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6512 if (token)
6513 clobbered = plus_constant (mode, clobbered,
6514 token * GET_MODE_SIZE (mode));
6515 clobbered = gen_rtx_MEM (mode, clobbered);
6516 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6517 call_arguments
6518 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
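/* Editor's sketch (not part of the pass): the "Reverse call_arguments
   chain" loop above is the classic three-pointer in-place reversal of
   a singly-linked list.  The same technique on a minimal, hypothetical
   node type, for reference:

   struct vt_sketch_node { struct vt_sketch_node *next; int payload; };

   static struct vt_sketch_node *
   vt_sketch_reverse (struct vt_sketch_node *head)
   {
     struct vt_sketch_node *prev = NULL, *cur, *next;
     for (cur = head; cur; cur = next)
       {
	 next = cur->next;	// Save the remainder of the list.
	 cur->next = prev;	// Point this node at the reversed prefix.
	 prev = cur;		// Grow the reversed prefix by one node.
       }
     return prev;		// PREV is the new head.
   }  */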
6522 /* Callback for cselib_record_sets_hook that records the uses and
6523 stores in an insn as micro operations, after cselib_record_sets
6524 has analyzed the sets in the insn but before it modifies the
6525 stored values in its internal tables -- unless it is not called
6526 through cselib_record_sets at all (perhaps because we're not doing
6527 cselib in the first place, in which case SETS and N_SETS will be 0). */
6529 static void
6530 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6532 basic_block bb = BLOCK_FOR_INSN (insn);
6533 int n1, n2;
6534 struct count_use_info cui;
6535 micro_operation *mos;
6537 cselib_hook_called = true;
6539 cui.insn = insn;
6540 cui.bb = bb;
6541 cui.sets = sets;
6542 cui.n_sets = n_sets;
6544 n1 = VTI (bb)->mos.length ();
6545 cui.store_p = false;
6546 note_uses (&PATTERN (insn), add_uses_1, &cui);
6547 n2 = VTI (bb)->mos.length () - 1;
6548 mos = VTI (bb)->mos.address ();
6550 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6551 MO_VAL_LOC last. */
6552 while (n1 < n2)
6554 while (n1 < n2 && mos[n1].type == MO_USE)
6555 n1++;
6556 while (n1 < n2 && mos[n2].type != MO_USE)
6557 n2--;
6558 if (n1 < n2)
6559 std::swap (mos[n1], mos[n2]);
6562 n2 = VTI (bb)->mos.length () - 1;
6563 while (n1 < n2)
6565 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6566 n1++;
6567 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6568 n2--;
6569 if (n1 < n2)
6570 std::swap (mos[n1], mos[n2]);
6573 if (CALL_P (insn))
6575 micro_operation mo;
6577 mo.type = MO_CALL;
6578 mo.insn = insn;
6579 mo.u.loc = call_arguments;
6580 call_arguments = NULL_RTX;
6582 if (dump_file && (dump_flags & TDF_DETAILS))
6583 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6584 VTI (bb)->mos.safe_push (mo);
6587 n1 = VTI (bb)->mos.length ();
6588 /* This will record NEXT_INSN (insn), such that we can
6589 insert notes before it without worrying about any
6590 notes that MO_USEs might emit after the insn. */
6591 cui.store_p = true;
6592 note_stores (PATTERN (insn), add_stores, &cui);
6593 n2 = VTI (bb)->mos.length () - 1;
6594 mos = VTI (bb)->mos.address ();
6596 /* Order the MO_VAL_USEs first (note_stores does nothing
6597 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6598 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6599 while (n1 < n2)
6601 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6602 n1++;
6603 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6604 n2--;
6605 if (n1 < n2)
6606 std::swap (mos[n1], mos[n2]);
6609 n2 = VTI (bb)->mos.length () - 1;
6610 while (n1 < n2)
6612 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6613 n1++;
6614 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6615 n2--;
6616 if (n1 < n2)
6617 std::swap (mos[n1], mos[n2]);
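/* Editor's sketch (illustrative only): each reordering loop in
   add_with_sets above is an in-place two-pointer partition: N1 scans
   forward past elements already in the desired front group, N2 scans
   backward past elements that are not, and misplaced pairs are
   swapped.  Generically, under the assumption of an int array and a
   predicate:

   static void
   vt_sketch_partition (int *a, int n, bool (*pred) (int))
   {
     int n1 = 0, n2 = n - 1;
     while (n1 < n2)
       {
	 while (n1 < n2 && pred (a[n1]))
	   n1++;			// A[N1] is the first misfit...
	 while (n1 < n2 && !pred (a[n2]))
	   n2--;			// ...A[N2] the last one.
	 if (n1 < n2)
	   std::swap (a[n1], a[n2]);	// Exchange the misplaced pair.
       }
   }

   Running one pass per group boundary yields the multi-group orders
   the comments above describe.  The partition is not stable, which is
   evidently acceptable here since only the grouping matters.  */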
6621 static enum var_init_status
6622 find_src_status (dataflow_set *in, rtx src)
6624 tree decl = NULL_TREE;
6625 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6627 if (! flag_var_tracking_uninit)
6628 status = VAR_INIT_STATUS_INITIALIZED;
6630 if (src && REG_P (src))
6631 decl = var_debug_decl (REG_EXPR (src));
6632 else if (src && MEM_P (src))
6633 decl = var_debug_decl (MEM_EXPR (src));
6635 if (src && decl)
6636 status = get_init_value (in, src, dv_from_decl (decl));
6638 return status;
6641 /* SRC is the source of an assignment. Use SET to try to find what
6642 was ultimately assigned to SRC. Return that value if known,
6643 otherwise return SRC itself. */
6645 static rtx
6646 find_src_set_src (dataflow_set *set, rtx src)
6648 tree decl = NULL_TREE; /* The variable being copied around. */
6649 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6650 variable var;
6651 location_chain nextp;
6652 int i;
6653 bool found;
6655 if (src && REG_P (src))
6656 decl = var_debug_decl (REG_EXPR (src));
6657 else if (src && MEM_P (src))
6658 decl = var_debug_decl (MEM_EXPR (src));
6660 if (src && decl)
6662 decl_or_value dv = dv_from_decl (decl);
6664 var = shared_hash_find (set->vars, dv);
6665 if (var)
6667 found = false;
6668 for (i = 0; i < var->n_var_parts && !found; i++)
6669 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6670 nextp = nextp->next)
6671 if (rtx_equal_p (nextp->loc, src))
6673 set_src = nextp->set_src;
6674 found = true;
6680 return set_src;
6683 /* Compute the changes of variable locations in the basic block BB. */
6685 static bool
6686 compute_bb_dataflow (basic_block bb)
6688 unsigned int i;
6689 micro_operation *mo;
6690 bool changed;
6691 dataflow_set old_out;
6692 dataflow_set *in = &VTI (bb)->in;
6693 dataflow_set *out = &VTI (bb)->out;
6695 dataflow_set_init (&old_out);
6696 dataflow_set_copy (&old_out, out);
6697 dataflow_set_copy (out, in);
6699 if (MAY_HAVE_DEBUG_INSNS)
6700 local_get_addr_cache = new hash_map<rtx, rtx>;
6702 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6704 rtx_insn *insn = mo->insn;
6706 switch (mo->type)
6708 case MO_CALL:
6709 dataflow_set_clear_at_call (out);
6710 break;
6712 case MO_USE:
6714 rtx loc = mo->u.loc;
6716 if (REG_P (loc))
6717 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6718 else if (MEM_P (loc))
6719 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6721 break;
6723 case MO_VAL_LOC:
6725 rtx loc = mo->u.loc;
6726 rtx val, vloc;
6727 tree var;
6729 if (GET_CODE (loc) == CONCAT)
6731 val = XEXP (loc, 0);
6732 vloc = XEXP (loc, 1);
6734 else
6736 val = NULL_RTX;
6737 vloc = loc;
6740 var = PAT_VAR_LOCATION_DECL (vloc);
6742 clobber_variable_part (out, NULL_RTX,
6743 dv_from_decl (var), 0, NULL_RTX);
6744 if (val)
6746 if (VAL_NEEDS_RESOLUTION (loc))
6747 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6748 set_variable_part (out, val, dv_from_decl (var), 0,
6749 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6750 INSERT);
6752 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6753 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6754 dv_from_decl (var), 0,
6755 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6756 INSERT);
6758 break;
6760 case MO_VAL_USE:
6762 rtx loc = mo->u.loc;
6763 rtx val, vloc, uloc;
6765 vloc = uloc = XEXP (loc, 1);
6766 val = XEXP (loc, 0);
6768 if (GET_CODE (val) == CONCAT)
6770 uloc = XEXP (val, 1);
6771 val = XEXP (val, 0);
6774 if (VAL_NEEDS_RESOLUTION (loc))
6775 val_resolve (out, val, vloc, insn);
6776 else
6777 val_store (out, val, uloc, insn, false);
6779 if (VAL_HOLDS_TRACK_EXPR (loc))
6781 if (GET_CODE (uloc) == REG)
6782 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6783 NULL);
6784 else if (GET_CODE (uloc) == MEM)
6785 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6786 NULL);
6789 break;
6791 case MO_VAL_SET:
6793 rtx loc = mo->u.loc;
6794 rtx val, vloc, uloc;
6795 rtx dstv, srcv;
6797 vloc = loc;
6798 uloc = XEXP (vloc, 1);
6799 val = XEXP (vloc, 0);
6800 vloc = uloc;
6802 if (GET_CODE (uloc) == SET)
6804 dstv = SET_DEST (uloc);
6805 srcv = SET_SRC (uloc);
6807 else
6809 dstv = uloc;
6810 srcv = NULL;
6813 if (GET_CODE (val) == CONCAT)
6815 dstv = vloc = XEXP (val, 1);
6816 val = XEXP (val, 0);
6819 if (GET_CODE (vloc) == SET)
6821 srcv = SET_SRC (vloc);
6823 gcc_assert (val != srcv);
6824 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6826 dstv = vloc = SET_DEST (vloc);
6828 if (VAL_NEEDS_RESOLUTION (loc))
6829 val_resolve (out, val, srcv, insn);
6831 else if (VAL_NEEDS_RESOLUTION (loc))
6833 gcc_assert (GET_CODE (uloc) == SET
6834 && GET_CODE (SET_SRC (uloc)) == REG);
6835 val_resolve (out, val, SET_SRC (uloc), insn);
6838 if (VAL_HOLDS_TRACK_EXPR (loc))
6840 if (VAL_EXPR_IS_CLOBBERED (loc))
6842 if (REG_P (uloc))
6843 var_reg_delete (out, uloc, true);
6844 else if (MEM_P (uloc))
6846 gcc_assert (MEM_P (dstv));
6847 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6848 var_mem_delete (out, dstv, true);
6851 else
6853 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6854 rtx src = NULL, dst = uloc;
6855 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6857 if (GET_CODE (uloc) == SET)
6859 src = SET_SRC (uloc);
6860 dst = SET_DEST (uloc);
6863 if (copied_p)
6865 if (flag_var_tracking_uninit)
6867 status = find_src_status (in, src);
6869 if (status == VAR_INIT_STATUS_UNKNOWN)
6870 status = find_src_status (out, src);
6873 src = find_src_set_src (in, src);
6876 if (REG_P (dst))
6877 var_reg_delete_and_set (out, dst, !copied_p,
6878 status, srcv);
6879 else if (MEM_P (dst))
6881 gcc_assert (MEM_P (dstv));
6882 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6883 var_mem_delete_and_set (out, dstv, !copied_p,
6884 status, srcv);
6888 else if (REG_P (uloc))
6889 var_regno_delete (out, REGNO (uloc));
6890 else if (MEM_P (uloc))
6892 gcc_checking_assert (GET_CODE (vloc) == MEM);
6893 gcc_checking_assert (dstv == vloc);
6894 if (dstv != vloc)
6895 clobber_overlapping_mems (out, vloc);
6898 val_store (out, val, dstv, insn, true);
6900 break;
6902 case MO_SET:
6904 rtx loc = mo->u.loc;
6905 rtx set_src = NULL;
6907 if (GET_CODE (loc) == SET)
6909 set_src = SET_SRC (loc);
6910 loc = SET_DEST (loc);
6913 if (REG_P (loc))
6914 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6915 set_src);
6916 else if (MEM_P (loc))
6917 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6918 set_src);
6920 break;
6922 case MO_COPY:
6924 rtx loc = mo->u.loc;
6925 enum var_init_status src_status;
6926 rtx set_src = NULL;
6928 if (GET_CODE (loc) == SET)
6930 set_src = SET_SRC (loc);
6931 loc = SET_DEST (loc);
6934 if (! flag_var_tracking_uninit)
6935 src_status = VAR_INIT_STATUS_INITIALIZED;
6936 else
6938 src_status = find_src_status (in, set_src);
6940 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6941 src_status = find_src_status (out, set_src);
6944 set_src = find_src_set_src (in, set_src);
6946 if (REG_P (loc))
6947 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6948 else if (MEM_P (loc))
6949 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6951 break;
6953 case MO_USE_NO_VAR:
6955 rtx loc = mo->u.loc;
6957 if (REG_P (loc))
6958 var_reg_delete (out, loc, false);
6959 else if (MEM_P (loc))
6960 var_mem_delete (out, loc, false);
6962 break;
6964 case MO_CLOBBER:
6966 rtx loc = mo->u.loc;
6968 if (REG_P (loc))
6969 var_reg_delete (out, loc, true);
6970 else if (MEM_P (loc))
6971 var_mem_delete (out, loc, true);
6973 break;
6975 case MO_ADJUST:
6976 out->stack_adjust += mo->u.adjust;
6977 break;
6981 if (MAY_HAVE_DEBUG_INSNS)
6983 delete local_get_addr_cache;
6984 local_get_addr_cache = NULL;
6986 dataflow_set_equiv_regs (out);
6987 shared_hash_htab (out->vars)
6988 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6989 shared_hash_htab (out->vars)
6990 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6991 #if ENABLE_CHECKING
6992 shared_hash_htab (out->vars)
6993 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6994 #endif
6996 changed = dataflow_set_different (&old_out, out);
6997 dataflow_set_destroy (&old_out);
6998 return changed;
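/* Editor's note: compute_bb_dataflow above is the transfer function of
   the forward problem sketched at the top of the file: OUT is seeded
   from IN, then each micro operation updates it in instruction order
   (e.g. MO_SET kills the destination's previous locations while
   MO_COPY only adds a new one, and MO_CALL flushes call-clobbered
   state).  The returned CHANGED flag drives the fixed-point iteration
   in vt_find_locations below.  */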
7001 /* Find the locations of variables in the whole function. */
7003 static bool
7004 vt_find_locations (void)
7006 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7007 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7008 sbitmap visited, in_worklist, in_pending;
7009 basic_block bb;
7010 edge e;
7011 int *bb_order;
7012 int *rc_order;
7013 int i;
7014 int htabsz = 0;
7015 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7016 bool success = true;
7018 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7019 /* Compute the reverse completion order of a depth-first search of
7020 the CFG so that the dataflow analysis converges faster. */
7021 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7022 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7023 pre_and_rev_post_order_compute (NULL, rc_order, false);
7024 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7025 bb_order[rc_order[i]] = i;
7026 free (rc_order);
7028 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7029 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7030 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7031 bitmap_clear (in_worklist);
7033 FOR_EACH_BB_FN (bb, cfun)
7034 pending->insert (bb_order[bb->index], bb);
7035 bitmap_ones (in_pending);
7037 while (success && !pending->empty ())
7039 std::swap (worklist, pending);
7040 std::swap (in_worklist, in_pending);
7042 bitmap_clear (visited);
7044 while (!worklist->empty ())
7046 bb = worklist->extract_min ();
7047 bitmap_clear_bit (in_worklist, bb->index);
7048 gcc_assert (!bitmap_bit_p (visited, bb->index));
7049 if (!bitmap_bit_p (visited, bb->index))
7051 bool changed;
7052 edge_iterator ei;
7053 int oldinsz, oldoutsz;
7055 bitmap_set_bit (visited, bb->index);
7057 if (VTI (bb)->in.vars)
7059 htabsz
7060 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7061 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7062 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7063 oldoutsz
7064 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7066 else
7067 oldinsz = oldoutsz = 0;
7069 if (MAY_HAVE_DEBUG_INSNS)
7071 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7072 bool first = true, adjust = false;
7074 /* Calculate the IN set as the intersection of
7075 predecessor OUT sets. */
7077 dataflow_set_clear (in);
7078 dst_can_be_shared = true;
7080 FOR_EACH_EDGE (e, ei, bb->preds)
7081 if (!VTI (e->src)->flooded)
7082 gcc_assert (bb_order[bb->index]
7083 <= bb_order[e->src->index]);
7084 else if (first)
7086 dataflow_set_copy (in, &VTI (e->src)->out);
7087 first_out = &VTI (e->src)->out;
7088 first = false;
7090 else
7092 dataflow_set_merge (in, &VTI (e->src)->out);
7093 adjust = true;
7096 if (adjust)
7098 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7099 #if ENABLE_CHECKING
7100 /* Merge and merge_adjust should keep entries in
7101 canonical order. */
7102 shared_hash_htab (in->vars)
7103 ->traverse <dataflow_set *,
7104 canonicalize_loc_order_check> (in);
7105 #endif
7106 if (dst_can_be_shared)
7108 shared_hash_destroy (in->vars);
7109 in->vars = shared_hash_copy (first_out->vars);
7113 VTI (bb)->flooded = true;
7115 else
7117 /* Calculate the IN set as the union of predecessor OUT sets. */
7118 dataflow_set_clear (&VTI (bb)->in);
7119 FOR_EACH_EDGE (e, ei, bb->preds)
7120 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7123 changed = compute_bb_dataflow (bb);
7124 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7125 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7127 if (htabmax && htabsz > htabmax)
7129 if (MAY_HAVE_DEBUG_INSNS)
7130 inform (DECL_SOURCE_LOCATION (cfun->decl),
7131 "variable tracking size limit exceeded with "
7132 "-fvar-tracking-assignments, retrying without");
7133 else
7134 inform (DECL_SOURCE_LOCATION (cfun->decl),
7135 "variable tracking size limit exceeded");
7136 success = false;
7137 break;
7140 if (changed)
7142 FOR_EACH_EDGE (e, ei, bb->succs)
7144 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7145 continue;
7147 if (bitmap_bit_p (visited, e->dest->index))
7149 if (!bitmap_bit_p (in_pending, e->dest->index))
7151 /* Send E->DEST to next round. */
7152 bitmap_set_bit (in_pending, e->dest->index);
7153 pending->insert (bb_order[e->dest->index],
7154 e->dest);
7157 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7159 /* Add E->DEST to current round. */
7160 bitmap_set_bit (in_worklist, e->dest->index);
7161 worklist->insert (bb_order[e->dest->index],
7162 e->dest);
7167 if (dump_file)
7168 fprintf (dump_file,
7169 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7170 bb->index,
7171 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7172 oldinsz,
7173 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7174 oldoutsz,
7175 (int)worklist->nodes (), (int)pending->nodes (),
7176 htabsz);
7178 if (dump_file && (dump_flags & TDF_DETAILS))
7180 fprintf (dump_file, "BB %i IN:\n", bb->index);
7181 dump_dataflow_set (&VTI (bb)->in);
7182 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7183 dump_dataflow_set (&VTI (bb)->out);
7189 if (success && MAY_HAVE_DEBUG_INSNS)
7190 FOR_EACH_BB_FN (bb, cfun)
7191 gcc_assert (VTI (bb)->flooded);
7193 free (bb_order);
7194 delete worklist;
7195 delete pending;
7196 sbitmap_free (visited);
7197 sbitmap_free (in_worklist);
7198 sbitmap_free (in_pending);
7200 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7201 return success;
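/* Editor's sketch (illustrative only): stripped of the sharing,
   size-limit and debug-insn details, vt_find_locations is a standard
   worklist-driven forward dataflow solver.  In pseudo-C, with
   hypothetical IN/OUT/TRANSFER placeholders:

   worklist = all blocks, keyed by reverse post order;
   while (!worklist_empty ())
     {
       bb = extract_min (worklist);
       IN (bb) = confluence over each predecessor P of OUT (P);
       if (TRANSFER (bb))		-- true iff OUT (bb) changed --
	 for each successor S of bb
	   worklist_insert (S);
     }

   Visiting blocks in reverse post order makes most IN sets final
   before their block is processed, so few blocks need a second round;
   the separate PENDING queue simply holds the blocks that do.  */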
7204 /* Print the contents of LIST to the dump file. */
7206 static void
7207 dump_attrs_list (attrs list)
7209 for (; list; list = list->next)
7211 if (dv_is_decl_p (list->dv))
7212 print_mem_expr (dump_file, dv_as_decl (list->dv));
7213 else
7214 print_rtl_single (dump_file, dv_as_value (list->dv));
7215 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7217 fprintf (dump_file, "\n");
7220 /* Print the information about variable *SLOT to dump file. */
7223 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7225 variable var = *slot;
7227 dump_var (var);
7229 /* Continue traversing the hash table. */
7230 return 1;
7233 /* Print the information about variable VAR to dump file. */
7235 static void
7236 dump_var (variable var)
7238 int i;
7239 location_chain node;
7241 if (dv_is_decl_p (var->dv))
7243 const_tree decl = dv_as_decl (var->dv);
7245 if (DECL_NAME (decl))
7247 fprintf (dump_file, " name: %s",
7248 IDENTIFIER_POINTER (DECL_NAME (decl)));
7249 if (dump_flags & TDF_UID)
7250 fprintf (dump_file, "D.%u", DECL_UID (decl));
7252 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7253 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7254 else
7255 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7256 fprintf (dump_file, "\n");
7258 else
7260 fputc (' ', dump_file);
7261 print_rtl_single (dump_file, dv_as_value (var->dv));
7264 for (i = 0; i < var->n_var_parts; i++)
7266 fprintf (dump_file, " offset %ld\n",
7267 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7268 for (node = var->var_part[i].loc_chain; node; node = node->next)
7270 fprintf (dump_file, " ");
7271 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7272 fprintf (dump_file, "[uninit]");
7273 print_rtl_single (dump_file, node->loc);
7278 /* Print the information about variables from hash table VARS to dump file. */
7280 static void
7281 dump_vars (variable_table_type *vars)
7283 if (vars->elements () > 0)
7285 fprintf (dump_file, "Variables:\n");
7286 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7290 /* Print the dataflow set SET to dump file. */
7292 static void
7293 dump_dataflow_set (dataflow_set *set)
7295 int i;
7297 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7298 set->stack_adjust);
7299 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7301 if (set->regs[i])
7303 fprintf (dump_file, "Reg %d:", i);
7304 dump_attrs_list (set->regs[i]);
7307 dump_vars (shared_hash_htab (set->vars));
7308 fprintf (dump_file, "\n");
7311 /* Print the IN and OUT sets for each basic block to dump file. */
7313 static void
7314 dump_dataflow_sets (void)
7316 basic_block bb;
7318 FOR_EACH_BB_FN (bb, cfun)
7320 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7321 fprintf (dump_file, "IN:\n");
7322 dump_dataflow_set (&VTI (bb)->in);
7323 fprintf (dump_file, "OUT:\n");
7324 dump_dataflow_set (&VTI (bb)->out);
7328 /* Return the variable for DV in dropped_values, inserting one if
7329 requested with INSERT. */
7331 static inline variable
7332 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7334 variable_def **slot;
7335 variable empty_var;
7336 onepart_enum_t onepart;
7338 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7340 if (!slot)
7341 return NULL;
7343 if (*slot)
7344 return *slot;
7346 gcc_checking_assert (insert == INSERT);
7348 onepart = dv_onepart_p (dv);
7350 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7352 empty_var = onepart_pool (onepart).allocate ();
7353 empty_var->dv = dv;
7354 empty_var->refcount = 1;
7355 empty_var->n_var_parts = 0;
7356 empty_var->onepart = onepart;
7357 empty_var->in_changed_variables = false;
7358 empty_var->var_part[0].loc_chain = NULL;
7359 empty_var->var_part[0].cur_loc = NULL;
7360 VAR_LOC_1PAUX (empty_var) = NULL;
7361 set_dv_changed (dv, true);
7363 *slot = empty_var;
7365 return empty_var;
7368 /* Recover the one-part aux from dropped_values. */
7370 static struct onepart_aux *
7371 recover_dropped_1paux (variable var)
7373 variable dvar;
7375 gcc_checking_assert (var->onepart);
7377 if (VAR_LOC_1PAUX (var))
7378 return VAR_LOC_1PAUX (var);
7380 if (var->onepart == ONEPART_VDECL)
7381 return NULL;
7383 dvar = variable_from_dropped (var->dv, NO_INSERT);
7385 if (!dvar)
7386 return NULL;
7388 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7389 VAR_LOC_1PAUX (dvar) = NULL;
7391 return VAR_LOC_1PAUX (var);
7394 /* Add variable VAR to the hash table of changed variables and
7395 if it has no locations delete it from SET's hash table. */
7397 static void
7398 variable_was_changed (variable var, dataflow_set *set)
7400 hashval_t hash = dv_htab_hash (var->dv);
7402 if (emit_notes)
7404 variable_def **slot;
7406 /* Remember this decl or VALUE has been added to changed_variables. */
7407 set_dv_changed (var->dv, true);
7409 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7411 if (*slot)
7413 variable old_var = *slot;
7414 gcc_assert (old_var->in_changed_variables);
7415 old_var->in_changed_variables = false;
7416 if (var != old_var && var->onepart)
7418 /* Restore the auxiliary info from an empty variable
7419 previously created for changed_variables, so it is
7420 not lost. */
7421 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7422 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7423 VAR_LOC_1PAUX (old_var) = NULL;
7425 variable_htab_free (*slot);
7428 if (set && var->n_var_parts == 0)
7430 onepart_enum_t onepart = var->onepart;
7431 variable empty_var = NULL;
7432 variable_def **dslot = NULL;
7434 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7436 dslot = dropped_values->find_slot_with_hash (var->dv,
7437 dv_htab_hash (var->dv),
7438 INSERT);
7439 empty_var = *dslot;
7441 if (empty_var)
7443 gcc_checking_assert (!empty_var->in_changed_variables);
7444 if (!VAR_LOC_1PAUX (var))
7446 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7447 VAR_LOC_1PAUX (empty_var) = NULL;
7449 else
7450 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7454 if (!empty_var)
7456 empty_var = onepart_pool (onepart).allocate ();
7457 empty_var->dv = var->dv;
7458 empty_var->refcount = 1;
7459 empty_var->n_var_parts = 0;
7460 empty_var->onepart = onepart;
7461 if (dslot)
7463 empty_var->refcount++;
7464 *dslot = empty_var;
7467 else
7468 empty_var->refcount++;
7469 empty_var->in_changed_variables = true;
7470 *slot = empty_var;
7471 if (onepart)
7473 empty_var->var_part[0].loc_chain = NULL;
7474 empty_var->var_part[0].cur_loc = NULL;
7475 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7476 VAR_LOC_1PAUX (var) = NULL;
7478 goto drop_var;
7480 else
7482 if (var->onepart && !VAR_LOC_1PAUX (var))
7483 recover_dropped_1paux (var);
7484 var->refcount++;
7485 var->in_changed_variables = true;
7486 *slot = var;
7489 else
7491 gcc_assert (set);
7492 if (var->n_var_parts == 0)
7494 variable_def **slot;
7496 drop_var:
7497 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7498 if (slot)
7500 if (shared_hash_shared (set->vars))
7501 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7502 NO_INSERT);
7503 shared_hash_htab (set->vars)->clear_slot (slot);
7509 /* Look for the index in VAR->var_part corresponding to OFFSET.
7510 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7511 referenced int will be set to the index that the part has or should
7512 have, if it should be inserted. */
7514 static inline int
7515 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7516 int *insertion_point)
7518 int pos, low, high;
7520 if (var->onepart)
7522 if (offset != 0)
7523 return -1;
7525 if (insertion_point)
7526 *insertion_point = 0;
7528 return var->n_var_parts - 1;
7531 /* Find the location part. */
7532 low = 0;
7533 high = var->n_var_parts;
7534 while (low != high)
7536 pos = (low + high) / 2;
7537 if (VAR_PART_OFFSET (var, pos) < offset)
7538 low = pos + 1;
7539 else
7540 high = pos;
7542 pos = low;
7544 if (insertion_point)
7545 *insertion_point = pos;
7547 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7548 return pos;
7550 return -1;
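/* Editor's sketch (illustrative only): the loop above is a lower-bound
   binary search over the sorted VAR_PART_OFFSET array.  The same
   technique over a plain sorted int array:

   static int
   vt_sketch_lower_bound (const int *a, int n, int key)
   {
     int low = 0, high = n;
     while (low != high)
       {
	 int mid = (low + high) / 2;
	 if (a[mid] < key)
	   low = mid + 1;	// KEY lies strictly right of MID.
	 else
	   high = mid;		// A[MID] >= KEY, keep MID in range.
       }
     return low;		// First index with A[LOW] >= KEY, or N.
   }

   Returning LOW both as the found index (when A[LOW] == KEY) and as
   the insertion point mirrors how find_variable_location_part fills
   *INSERTION_POINT even when the offset is absent.  */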
7553 static variable_def **
7554 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7555 decl_or_value dv, HOST_WIDE_INT offset,
7556 enum var_init_status initialized, rtx set_src)
7558 int pos;
7559 location_chain node, next;
7560 location_chain *nextp;
7561 variable var;
7562 onepart_enum_t onepart;
7564 var = *slot;
7566 if (var)
7567 onepart = var->onepart;
7568 else
7569 onepart = dv_onepart_p (dv);
7571 gcc_checking_assert (offset == 0 || !onepart);
7572 gcc_checking_assert (loc != dv_as_opaque (dv));
7574 if (! flag_var_tracking_uninit)
7575 initialized = VAR_INIT_STATUS_INITIALIZED;
7577 if (!var)
7579 /* Create new variable information. */
7580 var = onepart_pool (onepart).allocate ();
7581 var->dv = dv;
7582 var->refcount = 1;
7583 var->n_var_parts = 1;
7584 var->onepart = onepart;
7585 var->in_changed_variables = false;
7586 if (var->onepart)
7587 VAR_LOC_1PAUX (var) = NULL;
7588 else
7589 VAR_PART_OFFSET (var, 0) = offset;
7590 var->var_part[0].loc_chain = NULL;
7591 var->var_part[0].cur_loc = NULL;
7592 *slot = var;
7593 pos = 0;
7594 nextp = &var->var_part[0].loc_chain;
7596 else if (onepart)
7598 int r = -1, c = 0;
7600 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7602 pos = 0;
7604 if (GET_CODE (loc) == VALUE)
7606 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7607 nextp = &node->next)
7608 if (GET_CODE (node->loc) == VALUE)
7610 if (node->loc == loc)
7612 r = 0;
7613 break;
7615 if (canon_value_cmp (node->loc, loc))
7616 c++;
7617 else
7619 r = 1;
7620 break;
7623 else if (REG_P (node->loc) || MEM_P (node->loc))
7624 c++;
7625 else
7627 r = 1;
7628 break;
7631 else if (REG_P (loc))
7633 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7634 nextp = &node->next)
7635 if (REG_P (node->loc))
7637 if (REGNO (node->loc) < REGNO (loc))
7638 c++;
7639 else
7641 if (REGNO (node->loc) == REGNO (loc))
7642 r = 0;
7643 else
7644 r = 1;
7645 break;
7648 else
7650 r = 1;
7651 break;
7654 else if (MEM_P (loc))
7656 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7657 nextp = &node->next)
7658 if (REG_P (node->loc))
7659 c++;
7660 else if (MEM_P (node->loc))
7662 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7663 break;
7664 else
7665 c++;
7667 else
7669 r = 1;
7670 break;
7673 else
7674 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7675 nextp = &node->next)
7676 if ((r = loc_cmp (node->loc, loc)) >= 0)
7677 break;
7678 else
7679 c++;
7681 if (r == 0)
7682 return slot;
7684 if (shared_var_p (var, set->vars))
7686 slot = unshare_variable (set, slot, var, initialized);
7687 var = *slot;
7688 for (nextp = &var->var_part[0].loc_chain; c;
7689 nextp = &(*nextp)->next)
7690 c--;
7691 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7694 else
7696 int inspos = 0;
7698 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7700 pos = find_variable_location_part (var, offset, &inspos);
7702 if (pos >= 0)
7704 node = var->var_part[pos].loc_chain;
7706 if (node
7707 && ((REG_P (node->loc) && REG_P (loc)
7708 && REGNO (node->loc) == REGNO (loc))
7709 || rtx_equal_p (node->loc, loc)))
7711 /* LOC is at the beginning of the chain, so we have nothing
7712 to do. */
7713 if (node->init < initialized)
7714 node->init = initialized;
7715 if (set_src != NULL)
7716 node->set_src = set_src;
7718 return slot;
7720 else
7722 /* We have to make a copy of a shared variable. */
7723 if (shared_var_p (var, set->vars))
7725 slot = unshare_variable (set, slot, var, initialized);
7726 var = *slot;
7730 else
7732 /* We have not found the location part, so a new one will be created. */
7734 /* We have to make a copy of the shared variable. */
7735 if (shared_var_p (var, set->vars))
7737 slot = unshare_variable (set, slot, var, initialized);
7738 var = *slot;
7741 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7742 thus there are at most MAX_VAR_PARTS different offsets. */
7743 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7744 && (!var->n_var_parts || !onepart));
7746 /* We have to move the elements of the array starting at index
7747 INSPOS one position up, to make room for the new part. */
7748 for (pos = var->n_var_parts; pos > inspos; pos--)
7749 var->var_part[pos] = var->var_part[pos - 1];
7751 var->n_var_parts++;
7752 gcc_checking_assert (!onepart);
7753 VAR_PART_OFFSET (var, pos) = offset;
7754 var->var_part[pos].loc_chain = NULL;
7755 var->var_part[pos].cur_loc = NULL;
7758 /* Delete the location from the list. */
7759 nextp = &var->var_part[pos].loc_chain;
7760 for (node = var->var_part[pos].loc_chain; node; node = next)
7762 next = node->next;
7763 if ((REG_P (node->loc) && REG_P (loc)
7764 && REGNO (node->loc) == REGNO (loc))
7765 || rtx_equal_p (node->loc, loc))
7767 /* Save these values, to assign to the new node, before
7768 deleting this one. */
7769 if (node->init > initialized)
7770 initialized = node->init;
7771 if (node->set_src != NULL && set_src == NULL)
7772 set_src = node->set_src;
7773 if (var->var_part[pos].cur_loc == node->loc)
7774 var->var_part[pos].cur_loc = NULL;
7775 delete node;
7776 *nextp = next;
7777 break;
7779 else
7780 nextp = &node->next;
7783 nextp = &var->var_part[pos].loc_chain;
7786 /* Add the location to the beginning. */
7787 node = new location_chain_def;
7788 node->loc = loc;
7789 node->init = initialized;
7790 node->set_src = set_src;
7791 node->next = *nextp;
7792 *nextp = node;
7794 /* If no location has been emitted for this variable part yet, do so now. */
7795 if (var->var_part[pos].cur_loc == NULL)
7796 variable_was_changed (var, set);
7798 return slot;
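/* Editor's sketch (illustrative only): set_slot_part walks location
   chains through a pointer-to-pointer NEXTP, so splicing a node in at
   the chosen spot needs no special case for the list head.  On a
   minimal hypothetical list kept sorted by payload:

   struct vt_sketch_node { struct vt_sketch_node *next; int payload; };

   static void
   vt_sketch_sorted_insert (struct vt_sketch_node **headp,
			    struct vt_sketch_node *n)
   {
     struct vt_sketch_node **nextp = headp;
     while (*nextp && (*nextp)->payload < n->payload)
       nextp = &(*nextp)->next;	// Advance to the insertion point.
     n->next = *nextp;		// Splice N in front of *NEXTP,
     *nextp = n;		// whether that is the head or not.
   }  */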
7801 /* Set the part of variable's location in the dataflow set SET. The
7802 variable part is specified by variable's declaration in DV and
7803 offset OFFSET and the part's location by LOC. IOPT should be
7804 NO_INSERT if the variable is known to be in SET already and the
7805 variable hash table must not be resized, and INSERT otherwise. */
7807 static void
7808 set_variable_part (dataflow_set *set, rtx loc,
7809 decl_or_value dv, HOST_WIDE_INT offset,
7810 enum var_init_status initialized, rtx set_src,
7811 enum insert_option iopt)
7813 variable_def **slot;
7815 if (iopt == NO_INSERT)
7816 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7817 else
7819 slot = shared_hash_find_slot (set->vars, dv);
7820 if (!slot)
7821 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7823 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7826 /* Remove all recorded register locations for the given variable part
7827 from dataflow set SET, except for those that are identical to LOC.
7828 The variable part is specified by its SET->vars slot SLOT and
7829 offset OFFSET. */
7831 static variable_def **
7832 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7833 HOST_WIDE_INT offset, rtx set_src)
7835 variable var = *slot;
7836 int pos = find_variable_location_part (var, offset, NULL);
7838 if (pos >= 0)
7840 location_chain node, next;
7842 /* Remove the register locations from the dataflow set. */
7843 next = var->var_part[pos].loc_chain;
7844 for (node = next; node; node = next)
7846 next = node->next;
7847 if (node->loc != loc
7848 && (!flag_var_tracking_uninit
7849 || !set_src
7850 || MEM_P (set_src)
7851 || !rtx_equal_p (set_src, node->set_src)))
7853 if (REG_P (node->loc))
7855 attrs anode, anext;
7856 attrs *anextp;
7858 /* Remove the variable part from the register's
7859 list, but preserve any other variable parts
7860 that might be regarded as live in that same
7861 register. */
7862 anextp = &set->regs[REGNO (node->loc)];
7863 for (anode = *anextp; anode; anode = anext)
7865 anext = anode->next;
7866 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7867 && anode->offset == offset)
7869 delete anode;
7870 *anextp = anext;
7872 else
7873 anextp = &anode->next;
7877 slot = delete_slot_part (set, node->loc, slot, offset);
7882 return slot;
7885 /* Remove all recorded register locations for the given variable part
7886 from dataflow set SET, except for those that are identical to LOC.
7887 The variable part is specified by the variable's declaration or
7888 value DV and offset OFFSET. */
7890 static void
7891 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7892 HOST_WIDE_INT offset, rtx set_src)
7894 variable_def **slot;
7896 if (!dv_as_opaque (dv)
7897 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7898 return;
7900 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7901 if (!slot)
7902 return;
7904 clobber_slot_part (set, loc, slot, offset, set_src);
7907 /* Delete the part of variable's location from dataflow set SET. The
7908 variable part is specified by its SET->vars slot SLOT and offset
7909 OFFSET and the part's location by LOC. */
7911 static variable_def **
7912 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7913 HOST_WIDE_INT offset)
7915 variable var = *slot;
7916 int pos = find_variable_location_part (var, offset, NULL);
7918 if (pos >= 0)
7920 location_chain node, next;
7921 location_chain *nextp;
7922 bool changed;
7923 rtx cur_loc;
7925 if (shared_var_p (var, set->vars))
7927 /* If the variable contains the location part, we have to
7928 make a copy of the variable. */
7929 for (node = var->var_part[pos].loc_chain; node;
7930 node = node->next)
7932 if ((REG_P (node->loc) && REG_P (loc)
7933 && REGNO (node->loc) == REGNO (loc))
7934 || rtx_equal_p (node->loc, loc))
7936 slot = unshare_variable (set, slot, var,
7937 VAR_INIT_STATUS_UNKNOWN);
7938 var = *slot;
7939 break;
7944 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7945 cur_loc = VAR_LOC_FROM (var);
7946 else
7947 cur_loc = var->var_part[pos].cur_loc;
7949 /* Delete the location part. */
7950 changed = false;
7951 nextp = &var->var_part[pos].loc_chain;
7952 for (node = *nextp; node; node = next)
7954 next = node->next;
7955 if ((REG_P (node->loc) && REG_P (loc)
7956 && REGNO (node->loc) == REGNO (loc))
7957 || rtx_equal_p (node->loc, loc))
7959 /* If we have deleted the location which was last emitted,
7960 we have to emit a new location, so add the variable to the
7961 set of changed variables. */
7962 if (cur_loc == node->loc)
7964 changed = true;
7965 var->var_part[pos].cur_loc = NULL;
7966 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7967 VAR_LOC_FROM (var) = NULL;
7969 delete node;
7970 *nextp = next;
7971 break;
7973 else
7974 nextp = &node->next;
7977 if (var->var_part[pos].loc_chain == NULL)
7979 changed = true;
7980 var->n_var_parts--;
7981 while (pos < var->n_var_parts)
7983 var->var_part[pos] = var->var_part[pos + 1];
7984 pos++;
7987 if (changed)
7988 variable_was_changed (var, set);
7991 return slot;
7994 /* Delete the part of variable's location from dataflow set SET. The
7995 variable part is specified by variable's declaration or value DV
7996 and offset OFFSET and the part's location by LOC. */
7998 static void
7999 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8000 HOST_WIDE_INT offset)
8002 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8003 if (!slot)
8004 return;
8006 delete_slot_part (set, loc, slot, offset);
8010 /* Structure for passing some other parameters to function
8011 vt_expand_loc_callback. */
8012 struct expand_loc_callback_data
8014 /* The variables and values active at this point. */
8015 variable_table_type *vars;
8017 /* Stack of values and debug_exprs under expansion, and their
8018 children. */
8019 auto_vec<rtx, 4> expanding;
8021 /* Stack of values and debug_exprs whose expansion hit recursion
8022 cycles. They will have VALUE_RECURSED_INTO marked when added to
8023 this list. This flag will be cleared if any of its dependencies
8024 resolves to a valid location. So, if the flag remains set at the
8025 end of the search, we know no valid location for this one can
8026 possibly exist. */
8027 auto_vec<rtx, 4> pending;
8029 /* The maximum depth among the sub-expressions under expansion.
8030 Zero indicates no expansion so far. */
8031 expand_depth depth;
8034 /* Allocate the one-part auxiliary data structure for VAR, with enough
8035 room for COUNT dependencies. */
8037 static void
8038 loc_exp_dep_alloc (variable var, int count)
8040 size_t allocsize;
8042 gcc_checking_assert (var->onepart);
8044 /* We can be called with COUNT == 0 to allocate the data structure
8045 without any dependencies, e.g. for the backlinks only. However,
8046 if we are specifying a COUNT, then the dependency list must have
8047 been emptied before. It would be possible to adjust pointers or
8048 force it empty here, but this is better done at an earlier point
8049 in the algorithm, so we instead leave an assertion to catch
8050 errors. */
8051 gcc_checking_assert (!count
8052 || VAR_LOC_DEP_VEC (var) == NULL
8053 || VAR_LOC_DEP_VEC (var)->is_empty ());
8055 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8056 return;
8058 allocsize = offsetof (struct onepart_aux, deps)
8059 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8061 if (VAR_LOC_1PAUX (var))
8063 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8064 VAR_LOC_1PAUX (var), allocsize);
8065 /* If the reallocation moves the onepaux structure, the
8066 back-pointer to BACKLINKS in the first list member will still
8067 point to its old location. Adjust it. */
8068 if (VAR_LOC_DEP_LST (var))
8069 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8071 else
8073 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8074 *VAR_LOC_DEP_LSTP (var) = NULL;
8075 VAR_LOC_FROM (var) = NULL;
8076 VAR_LOC_DEPTH (var).complexity = 0;
8077 VAR_LOC_DEPTH (var).entryvals = 0;
8079 VAR_LOC_DEP_VEC (var)->embedded_init (count);
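/* Editor's sketch (illustrative only): the ALLOCSIZE computation above
   is the usual trailing-array pattern: the dependency vector is
   embedded at the end of the onepart_aux block, so header and payload
   come from a single allocation.  With a hypothetical plain struct and
   a C99-style flexible array member, the same idea reads:

   struct vt_sketch_aux
   {
     int ndeps;
     int deps[];		// Flexible array member, stored inline.
   };

   static struct vt_sketch_aux *
   vt_sketch_aux_alloc (int count)
   {
     size_t sz = offsetof (struct vt_sketch_aux, deps)
		 + count * sizeof (int);
     struct vt_sketch_aux *aux = XNEWVAR (struct vt_sketch_aux, sz);
     aux->ndeps = count;
     return aux;
   }

   Note the caveat handled above: anything holding a pointer into the
   embedded storage (here, the back-link PPREV pointers) must be fixed
   up after XRESIZEVAR moves the block.  */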
8082 /* Remove all entries from the vector of active dependencies of VAR,
8083 removing them from the back-link lists too. */
8085 static void
8086 loc_exp_dep_clear (variable var)
8088 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8090 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8091 if (led->next)
8092 led->next->pprev = led->pprev;
8093 if (led->pprev)
8094 *led->pprev = led->next;
8095 VAR_LOC_DEP_VEC (var)->pop ();
8099 /* Insert an active dependency from VAR on X to the vector of
8100 dependencies, and add the corresponding back-link to X's list of
8101 back-links in VARS. */
8103 static void
8104 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8106 decl_or_value dv;
8107 variable xvar;
8108 loc_exp_dep *led;
8110 dv = dv_from_rtx (x);
8112 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8113 an additional lookup? */
8114 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8116 if (!xvar)
8118 xvar = variable_from_dropped (dv, NO_INSERT);
8119 gcc_checking_assert (xvar);
8122 /* No point in adding the same backlink more than once. This may
8123 arise if say the same value appears in two complex expressions in
8124 the same loc_list, or even more than once in a single
8125 expression. */
8126 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8127 return;
8129 if (var->onepart == NOT_ONEPART)
8130 led = new loc_exp_dep;
8131 else
8133 loc_exp_dep empty;
8134 memset (&empty, 0, sizeof (empty));
8135 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8136 led = &VAR_LOC_DEP_VEC (var)->last ();
8138 led->dv = var->dv;
8139 led->value = x;
8141 loc_exp_dep_alloc (xvar, 0);
8142 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8143 led->next = *led->pprev;
8144 if (led->next)
8145 led->next->pprev = &led->next;
8146 *led->pprev = led;
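/* Editor's sketch (illustrative only): the back-link lists maintained
   above use the PPREV idiom: each node stores the address of the
   pointer that points at it (either the list head or the previous
   node's NEXT field), so a node can unlink itself in O(1) without
   knowing which list it is on.  A minimal hypothetical version:

   struct vt_sketch_link
   {
     struct vt_sketch_link *next;
     struct vt_sketch_link **pprev;	// &head or &prev->next.
   };

   static void
   vt_sketch_link_insert (struct vt_sketch_link **headp,
			  struct vt_sketch_link *l)
   {
     l->pprev = headp;
     l->next = *headp;
     if (l->next)
       l->next->pprev = &l->next;
     *headp = l;
   }

   static void
   vt_sketch_link_remove (struct vt_sketch_link *l)
   {
     if (l->next)
       l->next->pprev = l->pprev;
     *l->pprev = l->next;
   }

   This is essentially the unlinking performed in loc_exp_dep_clear
   and notify_dependents_of_resolved_value below.  */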
8149 /* Create active dependencies of VAR on COUNT values starting at
8150 VALUE, and corresponding back-links to the entries in VARS. Return
8151 true if we found any pending-recursion results. */
8153 static bool
8154 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8155 variable_table_type *vars)
8157 bool pending_recursion = false;
8159 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8160 || VAR_LOC_DEP_VEC (var)->is_empty ());
8162 /* Set up all dependencies from last_child (as set up at the end of
8163 the loop above) to the end. */
8164 loc_exp_dep_alloc (var, count);
8166 while (count--)
8168 rtx x = *value++;
8170 if (!pending_recursion)
8171 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8173 loc_exp_insert_dep (var, x, vars);
8176 return pending_recursion;
8179 /* Notify the back-links of IVAR that are pending recursion that we
8180 have found a non-NIL value for it, so they are cleared for another
8181 attempt to compute a current location. */
8183 static void
8184 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8186 loc_exp_dep *led, *next;
8188 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8190 decl_or_value dv = led->dv;
8191 variable var;
8193 next = led->next;
8195 if (dv_is_value_p (dv))
8197 rtx value = dv_as_value (dv);
8199 /* If we have already resolved it, leave it alone. */
8200 if (!VALUE_RECURSED_INTO (value))
8201 continue;
8203 /* Check that VALUE_RECURSED_INTO, true from the test above,
8204 implies NO_LOC_P. */
8205 gcc_checking_assert (NO_LOC_P (value));
8207 /* We won't notify variables that are being expanded,
8208 because their dependency list is cleared before
8209 recursing. */
8210 NO_LOC_P (value) = false;
8211 VALUE_RECURSED_INTO (value) = false;
8213 gcc_checking_assert (dv_changed_p (dv));
8215 else
8217 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8218 if (!dv_changed_p (dv))
8219 continue;
8222 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8224 if (!var)
8225 var = variable_from_dropped (dv, NO_INSERT);
8227 if (var)
8228 notify_dependents_of_resolved_value (var, vars);
8230 if (next)
8231 next->pprev = led->pprev;
8232 if (led->pprev)
8233 *led->pprev = next;
8234 led->next = NULL;
8235 led->pprev = NULL;
8239 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8240 int max_depth, void *data);
8242 /* Return the combined depth, when one sub-expression evaluated to
8243 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8245 static inline expand_depth
8246 update_depth (expand_depth saved_depth, expand_depth best_depth)
8248 /* If we didn't find anything, stick with what we had. */
8249 if (!best_depth.complexity)
8250 return saved_depth;
8252 /* If we hadn't found anything before, use the depth of the current
8253 expression.  Do NOT add one extra level; we want to compute the
8254 maximum depth among sub-expressions. We'll increment it later,
8255 if appropriate. */
8256 if (!saved_depth.complexity)
8257 return best_depth;
8259 /* Combine the entryval count so that regardless of which one we
8260 return, the entryval count is accurate. */
8261 best_depth.entryvals = saved_depth.entryvals
8262 = best_depth.entryvals + saved_depth.entryvals;
8264 if (saved_depth.complexity < best_depth.complexity)
8265 return best_depth;
8266 else
8267 return saved_depth;
8270 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8271 DATA for cselib expand callback. If PENDRECP is given, indicate in
8272 it whether any sub-expression couldn't be fully evaluated because
8273 it is pending recursion resolution. */
8275 static inline rtx
8276 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8278 struct expand_loc_callback_data *elcd
8279 = (struct expand_loc_callback_data *) data;
8280 location_chain loc, next;
8281 rtx result = NULL;
8282 int first_child, result_first_child, last_child;
8283 bool pending_recursion;
8284 rtx loc_from = NULL;
8285 struct elt_loc_list *cloc = NULL;
8286 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8287 int wanted_entryvals, found_entryvals = 0;
8289 /* Clear all backlinks pointing at this, so that we're not notified
8290 while we're active. */
8291 loc_exp_dep_clear (var);
8293 retry:
8294 if (var->onepart == ONEPART_VALUE)
8296 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8298 gcc_checking_assert (cselib_preserved_value_p (val));
8300 cloc = val->locs;
8303 first_child = result_first_child = last_child
8304 = elcd->expanding.length ();
8306 wanted_entryvals = found_entryvals;
8308 /* Attempt to expand each available location in turn. */
8309 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8310 loc || cloc; loc = next)
8312 result_first_child = last_child;
8314 if (!loc)
8316 loc_from = cloc->loc;
8317 next = loc;
8318 cloc = cloc->next;
8319 if (unsuitable_loc (loc_from))
8320 continue;
8322 else
8324 loc_from = loc->loc;
8325 next = loc->next;
8328 gcc_checking_assert (!unsuitable_loc (loc_from));
8330 elcd->depth.complexity = elcd->depth.entryvals = 0;
8331 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8332 vt_expand_loc_callback, data);
8333 last_child = elcd->expanding.length ();
8335 if (result)
8337 depth = elcd->depth;
8339 gcc_checking_assert (depth.complexity
8340 || result_first_child == last_child);
8342 if (last_child - result_first_child != 1)
8344 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8345 depth.entryvals++;
8346 depth.complexity++;
8349 if (depth.complexity <= EXPR_USE_DEPTH)
8351 if (depth.entryvals <= wanted_entryvals)
8352 break;
8353 else if (!found_entryvals || depth.entryvals < found_entryvals)
8354 found_entryvals = depth.entryvals;
8357 result = NULL;
8360 /* Set it up in case we leave the loop. */
8361 depth.complexity = depth.entryvals = 0;
8362 loc_from = NULL;
8363 result_first_child = first_child;
8366 if (!loc_from && wanted_entryvals < found_entryvals)
8368 /* We found entries with ENTRY_VALUEs and skipped them. Since
8369 we could not find any expansions without ENTRY_VALUEs, but we
8370 found at least one with them, go back and get an entry with
8371 the minimum ENTRY_VALUE count that we found. We could
8372 avoid looping, but since each sub-loc is already resolved,
8373 the re-expansion should be trivial. ??? Should we record all
8374 attempted locs as dependencies, so that we retry the
8375 expansion should any of them change, in the hope it can give
8376 us a new entry without an ENTRY_VALUE? */
8377 elcd->expanding.truncate (first_child);
8378 goto retry;
8381 /* Register all encountered dependencies as active. */
8382 pending_recursion = loc_exp_dep_set
8383 (var, result, elcd->expanding.address () + result_first_child,
8384 last_child - result_first_child, elcd->vars);
8386 elcd->expanding.truncate (first_child);
8388 /* Record where the expansion came from. */
8389 gcc_checking_assert (!result || !pending_recursion);
8390 VAR_LOC_FROM (var) = loc_from;
8391 VAR_LOC_DEPTH (var) = depth;
8393 gcc_checking_assert (!depth.complexity == !result);
8395 elcd->depth = update_depth (saved_depth, depth);
8397 /* Indicate whether any of the dependencies are pending recursion
8398 resolution. */
8399 if (pendrecp)
8400 *pendrecp = pending_recursion;
8402 if (!pendrecp || !pending_recursion)
8403 var->var_part[0].cur_loc = result;
8405 return result;
8408 /* Callback for cselib_expand_value, that looks for expressions
8409 holding the value in the var-tracking hash tables. Return X for
8410 standard processing, anything else is to be used as-is. */
8412 static rtx
8413 vt_expand_loc_callback (rtx x, bitmap regs,
8414 int max_depth ATTRIBUTE_UNUSED,
8415 void *data)
8417 struct expand_loc_callback_data *elcd
8418 = (struct expand_loc_callback_data *) data;
8419 decl_or_value dv;
8420 variable var;
8421 rtx result, subreg;
8422 bool pending_recursion = false;
8423 bool from_empty = false;
8425 switch (GET_CODE (x))
8427 case SUBREG:
8428 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8429 EXPR_DEPTH,
8430 vt_expand_loc_callback, data);
8432 if (!subreg)
8433 return NULL;
8435 result = simplify_gen_subreg (GET_MODE (x), subreg,
8436 GET_MODE (SUBREG_REG (x)),
8437 SUBREG_BYTE (x));
8439 /* Invalid SUBREGs are ok in debug info. ??? We could try
8440 alternate expansions for the VALUE as well. */
8441 if (!result)
8442 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8444 return result;
8446 case DEBUG_EXPR:
8447 case VALUE:
8448 dv = dv_from_rtx (x);
8449 break;
8451 default:
8452 return x;
8455 elcd->expanding.safe_push (x);
8457 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8458 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8460 if (NO_LOC_P (x))
8462 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8463 return NULL;
8466 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8468 if (!var)
8470 from_empty = true;
8471 var = variable_from_dropped (dv, INSERT);
8474 gcc_checking_assert (var);
8476 if (!dv_changed_p (dv))
8478 gcc_checking_assert (!NO_LOC_P (x));
8479 gcc_checking_assert (var->var_part[0].cur_loc);
8480 gcc_checking_assert (VAR_LOC_1PAUX (var));
8481 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8483 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8485 return var->var_part[0].cur_loc;
8488 VALUE_RECURSED_INTO (x) = true;
8489 /* This is tentative, but it makes some tests simpler. */
8490 NO_LOC_P (x) = true;
8492 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8494 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8496 if (pending_recursion)
8498 gcc_checking_assert (!result);
8499 elcd->pending.safe_push (x);
8501 else
8503 NO_LOC_P (x) = !result;
8504 VALUE_RECURSED_INTO (x) = false;
8505 set_dv_changed (dv, false);
8507 if (result)
8508 notify_dependents_of_resolved_value (var, elcd->vars);
8511 return result;
8514 /* While expanding variables, we may encounter recursion cycles
8515 because of mutual (possibly indirect) dependencies between two
8516 particular variables (or values), say A and B. If we're trying to
8517 expand A when we get to B, which in turn attempts to expand A, if
8518 we can't find any other expansion for B, we'll add B to this
8519 pending-recursion stack, and tentatively return NULL for its
8520 location. This tentative value will be used for any other
8521 occurrences of B, unless A gets some other location, in which case
8522 it will notify B that it is worth another try at computing a
8523 location for it, and it will use the location computed for A then.
8524 At the end of the expansion, the tentative NULL locations become
8525 final for all members of PENDING that didn't get a notification.
8526 This function performs this finalization of NULL locations. */
8528 static void
8529 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8531 while (!pending->is_empty ())
8533 rtx x = pending->pop ();
8534 decl_or_value dv;
8536 if (!VALUE_RECURSED_INTO (x))
8537 continue;
8539 gcc_checking_assert (NO_LOC_P (x));
8540 VALUE_RECURSED_INTO (x) = false;
8541 dv = dv_from_rtx (x);
8542 gcc_checking_assert (dv_changed_p (dv));
8543 set_dv_changed (dv, false);
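/* A minimal cycle to make the scheme concrete (the VALUEs and their
   locations are hypothetical): suppose VALUE A's only location is
   (plus B (const_int 4)) and VALUE B's only location is
   (plus A (const_int -4)).  Expanding A marks it VALUE_RECURSED_INTO and
   recurses into B; B recurses back into A, finds it marked, and so gets a
   tentative NULL location and is pushed on PENDING.  If nothing later
   gives A a concrete location, resolve_expansions_pending_recursion above
   makes B's NULL location final.  */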
8547 /* Initialize expand_loc_callback_data D with variable hash table V.
8548 It must be a macro because of alloca (vec stack). */
8549 #define INIT_ELCD(d, v) \
8550 do \
8552 (d).vars = (v); \
8553 (d).depth.complexity = (d).depth.entryvals = 0; \
8555 while (0)
8556 /* Finalize expand_loc_callback_data D, resolved to location L. */
8557 #define FINI_ELCD(d, l) \
8558 do \
8560 resolve_expansions_pending_recursion (&(d).pending); \
8561 (d).pending.release (); \
8562 (d).expanding.release (); \
8564 if ((l) && MEM_P (l)) \
8565 (l) = targetm.delegitimize_address (l); \
8567 while (0)
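/* Both helpers use the usual do { ... } while (0) wrapper so that each
   invocation parses as a single statement, e.g. as the sole statement of
   an if arm; this is a general C idiom rather than anything specific to
   variable tracking.  */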
8569 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8570 equivalences in VARS, updating their CUR_LOCs in the process. */
8572 static rtx
8573 vt_expand_loc (rtx loc, variable_table_type *vars)
8575 struct expand_loc_callback_data data;
8576 rtx result;
8578 if (!MAY_HAVE_DEBUG_INSNS)
8579 return loc;
8581 INIT_ELCD (data, vars);
8583 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8584 vt_expand_loc_callback, &data);
8586 FINI_ELCD (data, result);
8588 return result;
8591 /* Expand the one-part VARiable to a location, using the equivalences
8592 in VARS, updating their CUR_LOCs in the process. */
8594 static rtx
8595 vt_expand_1pvar (variable var, variable_table_type *vars)
8597 struct expand_loc_callback_data data;
8598 rtx loc;
8600 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8602 if (!dv_changed_p (var->dv))
8603 return var->var_part[0].cur_loc;
8605 INIT_ELCD (data, vars);
8607 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8609 gcc_checking_assert (data.expanding.is_empty ());
8611 FINI_ELCD (data, loc);
8613 return loc;
8616 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8617 additional parameters: WHERE specifies whether the note shall be emitted
8618 before or after instruction INSN. */
8620 static int
8621 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8623 variable var = *varp;
8624 rtx_insn *insn = data->insn;
8625 enum emit_note_where where = data->where;
8626 variable_table_type *vars = data->vars;
8627 rtx_note *note;
8628 rtx note_vl;
8629 int i, j, n_var_parts;
8630 bool complete;
8631 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8632 HOST_WIDE_INT last_limit;
8633 tree type_size_unit;
8634 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8635 rtx loc[MAX_VAR_PARTS];
8636 tree decl;
8637 location_chain lc;
8639 gcc_checking_assert (var->onepart == NOT_ONEPART
8640 || var->onepart == ONEPART_VDECL);
8642 decl = dv_as_decl (var->dv);
8644 complete = true;
8645 last_limit = 0;
8646 n_var_parts = 0;
8647 if (!var->onepart)
8648 for (i = 0; i < var->n_var_parts; i++)
8649 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8650 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8651 for (i = 0; i < var->n_var_parts; i++)
8653 machine_mode mode, wider_mode;
8654 rtx loc2;
8655 HOST_WIDE_INT offset;
8657 if (i == 0 && var->onepart)
8659 gcc_checking_assert (var->n_var_parts == 1);
8660 offset = 0;
8661 initialized = VAR_INIT_STATUS_INITIALIZED;
8662 loc2 = vt_expand_1pvar (var, vars);
8664 else
8666 if (last_limit < VAR_PART_OFFSET (var, i))
8668 complete = false;
8669 break;
8671 else if (last_limit > VAR_PART_OFFSET (var, i))
8672 continue;
8673 offset = VAR_PART_OFFSET (var, i);
8674 loc2 = var->var_part[i].cur_loc;
8675 if (loc2 && GET_CODE (loc2) == MEM
8676 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8678 rtx depval = XEXP (loc2, 0);
8680 loc2 = vt_expand_loc (loc2, vars);
8682 if (loc2)
8683 loc_exp_insert_dep (var, depval, vars);
8685 if (!loc2)
8687 complete = false;
8688 continue;
8690 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8691 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8692 if (var->var_part[i].cur_loc == lc->loc)
8694 initialized = lc->init;
8695 break;
8697 gcc_assert (lc);
8700 offsets[n_var_parts] = offset;
8701 if (!loc2)
8703 complete = false;
8704 continue;
8706 loc[n_var_parts] = loc2;
8707 mode = GET_MODE (var->var_part[i].cur_loc);
8708 if (mode == VOIDmode && var->onepart)
8709 mode = DECL_MODE (decl);
8710 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8712 /* Attempt to merge adjacent registers or memory. */
8713 wider_mode = GET_MODE_WIDER_MODE (mode);
8714 for (j = i + 1; j < var->n_var_parts; j++)
8715 if (last_limit <= VAR_PART_OFFSET (var, j))
8716 break;
8717 if (j < var->n_var_parts
8718 && wider_mode != VOIDmode
8719 && var->var_part[j].cur_loc
8720 && mode == GET_MODE (var->var_part[j].cur_loc)
8721 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8722 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8723 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8724 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8726 rtx new_loc = NULL;
8728 if (REG_P (loc[n_var_parts])
8729 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8730 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8731 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8732 == REGNO (loc2))
8734 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8735 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8736 mode, 0);
8737 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8738 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8739 if (new_loc)
8741 if (!REG_P (new_loc)
8742 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8743 new_loc = NULL;
8744 else
8745 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8748 else if (MEM_P (loc[n_var_parts])
8749 && GET_CODE (XEXP (loc2, 0)) == PLUS
8750 && REG_P (XEXP (XEXP (loc2, 0), 0))
8751 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8753 if ((REG_P (XEXP (loc[n_var_parts], 0))
8754 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8755 XEXP (XEXP (loc2, 0), 0))
8756 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8757 == GET_MODE_SIZE (mode))
8758 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8759 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8760 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8761 XEXP (XEXP (loc2, 0), 0))
8762 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8763 + GET_MODE_SIZE (mode)
8764 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8765 new_loc = adjust_address_nv (loc[n_var_parts],
8766 wider_mode, 0);
8769 if (new_loc)
8771 loc[n_var_parts] = new_loc;
8772 mode = wider_mode;
8773 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8774 i = j;
8777 ++n_var_parts;
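/* A sketch of the merging above, assuming a little-endian target with
   4-byte SImode and 8-byte DImode (register numbers are illustrative):
   if part I lives in (reg:SI 0) at offset 0 and part J in (reg:SI 1) at
   offset 4, and hard register 1 is the second half of (reg:DI 0), the
   two entries collapse into a single (reg:DI 0).  Similarly
   (mem:SI (reg X)) at offset 0 followed by
   (mem:SI (plus (reg X) (const_int 4))) at offset 4 becomes
   (mem:DI (reg X)).  */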
8779 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8780 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8781 complete = false;
8783 if (! flag_var_tracking_uninit)
8784 initialized = VAR_INIT_STATUS_INITIALIZED;
8786 note_vl = NULL_RTX;
8787 if (!complete)
8788 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8789 else if (n_var_parts == 1)
8791 rtx expr_list;
8793 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8794 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8795 else
8796 expr_list = loc[0];
8798 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8800 else if (n_var_parts)
8802 rtx parallel;
8804 for (i = 0; i < n_var_parts; i++)
8805 loc[i]
8806 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8808 parallel = gen_rtx_PARALLEL (VOIDmode,
8809 gen_rtvec_v (n_var_parts, loc));
8810 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8811 parallel, initialized);
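/* By way of example (the decl and registers are invented), the three
   payload shapes built above for NOTE_VAR_LOCATION are:
     (var_location d (nil))                   for incomplete info,
     (var_location d (reg:SI 0))              for one part at offset 0,
     (var_location d (parallel
                       [(expr_list (reg:SI 0) (const_int 0))
                        (expr_list (reg:SI 1) (const_int 4))]))
   for multiple parts.  DWARF2 location lists are generated from these
   notes later on.  */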
8814 if (where != EMIT_NOTE_BEFORE_INSN)
8816 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8817 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8818 NOTE_DURING_CALL_P (note) = true;
8820 else
8822 /* Make sure that the call-related notes come first. */
8823 while (NEXT_INSN (insn)
8824 && NOTE_P (insn)
8825 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8826 && NOTE_DURING_CALL_P (insn))
8827 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8828 insn = NEXT_INSN (insn);
8829 if (NOTE_P (insn)
8830 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8831 && NOTE_DURING_CALL_P (insn))
8832 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8833 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8834 else
8835 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8837 NOTE_VAR_LOCATION (note) = note_vl;
8839 set_dv_changed (var->dv, false);
8840 gcc_assert (var->in_changed_variables);
8841 var->in_changed_variables = false;
8842 changed_variables->clear_slot (varp);
8844 /* Continue traversing the hash table. */
8845 return 1;
8848 /* While traversing changed_variables, push onto DATA (a stack of RTX
8849 values) entries that aren't user variables. */
8851 static int
8852 var_track_values_to_stack (variable_def **slot,
8853 vec<rtx, va_heap> *changed_values_stack)
8855 variable var = *slot;
8857 if (var->onepart == ONEPART_VALUE)
8858 changed_values_stack->safe_push (dv_as_value (var->dv));
8859 else if (var->onepart == ONEPART_DEXPR)
8860 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8862 return 1;
8865 /* Remove from changed_variables the entry whose DV corresponds to
8866 value or debug_expr VAL. */
8867 static void
8868 remove_value_from_changed_variables (rtx val)
8870 decl_or_value dv = dv_from_rtx (val);
8871 variable_def **slot;
8872 variable var;
8874 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8875 NO_INSERT);
8876 var = *slot;
8877 var->in_changed_variables = false;
8878 changed_variables->clear_slot (slot);
8881 /* If VAL (a value or debug_expr) has backlinks to variables actively
8882 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8883 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8884 have dependencies of their own to notify. */
8886 static void
8887 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8888 vec<rtx, va_heap> *changed_values_stack)
8890 variable_def **slot;
8891 variable var;
8892 loc_exp_dep *led;
8893 decl_or_value dv = dv_from_rtx (val);
8895 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8896 NO_INSERT);
8897 if (!slot)
8898 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8899 if (!slot)
8900 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8901 NO_INSERT);
8902 var = *slot;
8904 while ((led = VAR_LOC_DEP_LST (var)))
8906 decl_or_value ldv = led->dv;
8907 variable ivar;
8909 /* Deactivate and remove the backlink, as it was "used up". It
8910 makes no sense to attempt to notify the same entity again:
8911 either it will be recomputed and re-register an active
8912 dependency, or it will still have the changed mark. */
8913 if (led->next)
8914 led->next->pprev = led->pprev;
8915 if (led->pprev)
8916 *led->pprev = led->next;
8917 led->next = NULL;
8918 led->pprev = NULL;
8920 if (dv_changed_p (ldv))
8921 continue;
8923 switch (dv_onepart_p (ldv))
8925 case ONEPART_VALUE:
8926 case ONEPART_DEXPR:
8927 set_dv_changed (ldv, true);
8928 changed_values_stack->safe_push (dv_as_rtx (ldv));
8929 break;
8931 case ONEPART_VDECL:
8932 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8933 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8934 variable_was_changed (ivar, NULL);
8935 break;
8937 case NOT_ONEPART:
8938 delete led;
8939 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8940 if (ivar)
8942 int i = ivar->n_var_parts;
8943 while (i--)
8945 rtx loc = ivar->var_part[i].cur_loc;
8947 if (loc && GET_CODE (loc) == MEM
8948 && XEXP (loc, 0) == val)
8950 variable_was_changed (ivar, NULL);
8951 break;
8955 break;
8957 default:
8958 gcc_unreachable ();
8963 /* Take out of changed_variables any entries that don't refer to use
8964 variables. Back-propagate change notifications from values and
8965 debug_exprs to their active dependencies in HTAB or in
8966 CHANGED_VARIABLES. */
8968 static void
8969 process_changed_values (variable_table_type *htab)
8971 int i, n;
8972 rtx val;
8973 auto_vec<rtx, 20> changed_values_stack;
8975 /* Move values from changed_variables to changed_values_stack. */
8976 changed_variables
8977 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8978 (&changed_values_stack);
8980 /* Back-propagate change notifications in values while popping
8981 them from the stack. */
8982 for (n = i = changed_values_stack.length ();
8983 i > 0; i = changed_values_stack.length ())
8985 val = changed_values_stack.pop ();
8986 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8988 /* This condition will hold when visiting each of the entries
8989 originally in changed_variables. We can't remove them
8990 earlier because this could drop the backlinks before we got a
8991 chance to use them. */
8992 if (i == n)
8994 remove_value_from_changed_variables (val);
8995 n--;
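/* A worked example of the notification above, with hypothetical
   entities: let user variable X's current location mention VALUE V1, and
   V1's location mention VALUE V2.  When V2 is popped from the stack, its
   backlinks reach V1, which is marked changed and pushed; popping V1 in
   turn reaches X, a ONEPART_VDECL, so X goes through
   variable_was_changed and will get a fresh note.  Only after a value
   originally in changed_variables has notified its dependents is it
   removed from that table, so the backlinks stay usable.  */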
9000 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9001 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9002 the notes shall be emitted before or after instruction INSN. */
9004 static void
9005 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9006 shared_hash vars)
9008 emit_note_data data;
9009 variable_table_type *htab = shared_hash_htab (vars);
9011 if (!changed_variables->elements ())
9012 return;
9014 if (MAY_HAVE_DEBUG_INSNS)
9015 process_changed_values (htab);
9017 data.insn = insn;
9018 data.where = where;
9019 data.vars = htab;
9021 changed_variables
9022 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9025 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9026 same variable in hash table DATA or is not there at all. */
9028 static int
9029 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9031 variable old_var, new_var;
9033 old_var = *slot;
9034 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9036 if (!new_var)
9038 /* Variable has disappeared. */
9039 variable empty_var = NULL;
9041 if (old_var->onepart == ONEPART_VALUE
9042 || old_var->onepart == ONEPART_DEXPR)
9044 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9045 if (empty_var)
9047 gcc_checking_assert (!empty_var->in_changed_variables);
9048 if (!VAR_LOC_1PAUX (old_var))
9050 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9051 VAR_LOC_1PAUX (empty_var) = NULL;
9053 else
9054 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9058 if (!empty_var)
9060 empty_var = onepart_pool (old_var->onepart).allocate ();
9061 empty_var->dv = old_var->dv;
9062 empty_var->refcount = 0;
9063 empty_var->n_var_parts = 0;
9064 empty_var->onepart = old_var->onepart;
9065 empty_var->in_changed_variables = false;
9068 if (empty_var->onepart)
9070 /* Propagate the auxiliary data to (ultimately)
9071 changed_variables. */
9072 empty_var->var_part[0].loc_chain = NULL;
9073 empty_var->var_part[0].cur_loc = NULL;
9074 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9075 VAR_LOC_1PAUX (old_var) = NULL;
9077 variable_was_changed (empty_var, NULL);
9078 /* Continue traversing the hash table. */
9079 return 1;
9081 /* Update cur_loc and one-part auxiliary data, before new_var goes
9082 through variable_was_changed. */
9083 if (old_var != new_var && new_var->onepart)
9085 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9086 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9087 VAR_LOC_1PAUX (old_var) = NULL;
9088 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9090 if (variable_different_p (old_var, new_var))
9091 variable_was_changed (new_var, NULL);
9093 /* Continue traversing the hash table. */
9094 return 1;
9097 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9098 table DATA. */
9100 static int
9101 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9103 variable old_var, new_var;
9105 new_var = *slot;
9106 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9107 if (!old_var)
9109 int i;
9110 for (i = 0; i < new_var->n_var_parts; i++)
9111 new_var->var_part[i].cur_loc = NULL;
9112 variable_was_changed (new_var, NULL);
9115 /* Continue traversing the hash table. */
9116 return 1;
9119 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9120 NEW_SET. */
9122 static void
9123 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9124 dataflow_set *new_set)
9126 shared_hash_htab (old_set->vars)
9127 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9128 (shared_hash_htab (new_set->vars));
9129 shared_hash_htab (new_set->vars)
9130 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9131 (shared_hash_htab (old_set->vars));
9132 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9135 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9137 static rtx_insn *
9138 next_non_note_insn_var_location (rtx_insn *insn)
9140 while (insn)
9142 insn = NEXT_INSN (insn);
9143 if (insn == 0
9144 || !NOTE_P (insn)
9145 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9146 break;
9149 return insn;
9152 /* Emit the notes for changes of location parts in the basic block BB. */
9154 static void
9155 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9157 unsigned int i;
9158 micro_operation *mo;
9160 dataflow_set_clear (set);
9161 dataflow_set_copy (set, &VTI (bb)->in);
9163 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9165 rtx_insn *insn = mo->insn;
9166 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9168 switch (mo->type)
9170 case MO_CALL:
9171 dataflow_set_clear_at_call (set);
9172 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9174 rtx arguments = mo->u.loc, *p = &arguments;
9175 rtx_note *note;
9176 while (*p)
9178 XEXP (XEXP (*p, 0), 1)
9179 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9180 shared_hash_htab (set->vars));
9181 /* If expansion is successful, keep it in the list. */
9182 if (XEXP (XEXP (*p, 0), 1))
9183 p = &XEXP (*p, 1);
9184 /* Otherwise, if the following item is the data value for it,
9185 drop it too. */
9186 else if (XEXP (*p, 1)
9187 && REG_P (XEXP (XEXP (*p, 0), 0))
9188 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9189 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9190 0))
9191 && REGNO (XEXP (XEXP (*p, 0), 0))
9192 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9193 0), 0)))
9194 *p = XEXP (XEXP (*p, 1), 1);
9195 /* Just drop this item. */
9196 else
9197 *p = XEXP (*p, 1);
9199 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9200 NOTE_VAR_LOCATION (note) = arguments;
9202 break;
9204 case MO_USE:
9206 rtx loc = mo->u.loc;
9208 if (REG_P (loc))
9209 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9210 else
9211 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9213 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9215 break;
9217 case MO_VAL_LOC:
9219 rtx loc = mo->u.loc;
9220 rtx val, vloc;
9221 tree var;
9223 if (GET_CODE (loc) == CONCAT)
9225 val = XEXP (loc, 0);
9226 vloc = XEXP (loc, 1);
9228 else
9230 val = NULL_RTX;
9231 vloc = loc;
9234 var = PAT_VAR_LOCATION_DECL (vloc);
9236 clobber_variable_part (set, NULL_RTX,
9237 dv_from_decl (var), 0, NULL_RTX);
9238 if (val)
9240 if (VAL_NEEDS_RESOLUTION (loc))
9241 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9242 set_variable_part (set, val, dv_from_decl (var), 0,
9243 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9244 INSERT);
9246 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9247 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9248 dv_from_decl (var), 0,
9249 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9250 INSERT);
9252 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9254 break;
9256 case MO_VAL_USE:
9258 rtx loc = mo->u.loc;
9259 rtx val, vloc, uloc;
9261 vloc = uloc = XEXP (loc, 1);
9262 val = XEXP (loc, 0);
9264 if (GET_CODE (val) == CONCAT)
9266 uloc = XEXP (val, 1);
9267 val = XEXP (val, 0);
9270 if (VAL_NEEDS_RESOLUTION (loc))
9271 val_resolve (set, val, vloc, insn);
9272 else
9273 val_store (set, val, uloc, insn, false);
9275 if (VAL_HOLDS_TRACK_EXPR (loc))
9277 if (GET_CODE (uloc) == REG)
9278 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9279 NULL);
9280 else if (GET_CODE (uloc) == MEM)
9281 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9282 NULL);
9285 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9287 break;
9289 case MO_VAL_SET:
9291 rtx loc = mo->u.loc;
9292 rtx val, vloc, uloc;
9293 rtx dstv, srcv;
9295 vloc = loc;
9296 uloc = XEXP (vloc, 1);
9297 val = XEXP (vloc, 0);
9298 vloc = uloc;
9300 if (GET_CODE (uloc) == SET)
9302 dstv = SET_DEST (uloc);
9303 srcv = SET_SRC (uloc);
9305 else
9307 dstv = uloc;
9308 srcv = NULL;
9311 if (GET_CODE (val) == CONCAT)
9313 dstv = vloc = XEXP (val, 1);
9314 val = XEXP (val, 0);
9317 if (GET_CODE (vloc) == SET)
9319 srcv = SET_SRC (vloc);
9321 gcc_assert (val != srcv);
9322 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9324 dstv = vloc = SET_DEST (vloc);
9326 if (VAL_NEEDS_RESOLUTION (loc))
9327 val_resolve (set, val, srcv, insn);
9329 else if (VAL_NEEDS_RESOLUTION (loc))
9331 gcc_assert (GET_CODE (uloc) == SET
9332 && GET_CODE (SET_SRC (uloc)) == REG);
9333 val_resolve (set, val, SET_SRC (uloc), insn);
9336 if (VAL_HOLDS_TRACK_EXPR (loc))
9338 if (VAL_EXPR_IS_CLOBBERED (loc))
9340 if (REG_P (uloc))
9341 var_reg_delete (set, uloc, true);
9342 else if (MEM_P (uloc))
9344 gcc_assert (MEM_P (dstv));
9345 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9346 var_mem_delete (set, dstv, true);
9349 else
9351 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9352 rtx src = NULL, dst = uloc;
9353 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9355 if (GET_CODE (uloc) == SET)
9357 src = SET_SRC (uloc);
9358 dst = SET_DEST (uloc);
9361 if (copied_p)
9363 status = find_src_status (set, src);
9365 src = find_src_set_src (set, src);
9368 if (REG_P (dst))
9369 var_reg_delete_and_set (set, dst, !copied_p,
9370 status, srcv);
9371 else if (MEM_P (dst))
9373 gcc_assert (MEM_P (dstv));
9374 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9375 var_mem_delete_and_set (set, dstv, !copied_p,
9376 status, srcv);
9380 else if (REG_P (uloc))
9381 var_regno_delete (set, REGNO (uloc));
9382 else if (MEM_P (uloc))
9384 gcc_checking_assert (GET_CODE (vloc) == MEM);
9385 gcc_checking_assert (vloc == dstv);
9386 if (vloc != dstv)
9387 clobber_overlapping_mems (set, vloc);
9390 val_store (set, val, dstv, insn, true);
9392 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9393 set->vars);
9395 break;
9397 case MO_SET:
9399 rtx loc = mo->u.loc;
9400 rtx set_src = NULL;
9402 if (GET_CODE (loc) == SET)
9404 set_src = SET_SRC (loc);
9405 loc = SET_DEST (loc);
9408 if (REG_P (loc))
9409 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9410 set_src);
9411 else
9412 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9413 set_src);
9415 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9416 set->vars);
9418 break;
9420 case MO_COPY:
9422 rtx loc = mo->u.loc;
9423 enum var_init_status src_status;
9424 rtx set_src = NULL;
9426 if (GET_CODE (loc) == SET)
9428 set_src = SET_SRC (loc);
9429 loc = SET_DEST (loc);
9432 src_status = find_src_status (set, set_src);
9433 set_src = find_src_set_src (set, set_src);
9435 if (REG_P (loc))
9436 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9437 else
9438 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9440 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9441 set->vars);
9443 break;
9445 case MO_USE_NO_VAR:
9447 rtx loc = mo->u.loc;
9449 if (REG_P (loc))
9450 var_reg_delete (set, loc, false);
9451 else
9452 var_mem_delete (set, loc, false);
9454 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9456 break;
9458 case MO_CLOBBER:
9460 rtx loc = mo->u.loc;
9462 if (REG_P (loc))
9463 var_reg_delete (set, loc, true);
9464 else
9465 var_mem_delete (set, loc, true);
9467 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9468 set->vars);
9470 break;
9472 case MO_ADJUST:
9473 set->stack_adjust += mo->u.adjust;
9474 break;
9479 /* Emit notes for the whole function. */
9481 static void
9482 vt_emit_notes (void)
9484 basic_block bb;
9485 dataflow_set cur;
9487 gcc_assert (!changed_variables->elements ());
9489 /* Free memory occupied by the out hash tables, as they aren't used
9490 anymore. */
9491 FOR_EACH_BB_FN (bb, cfun)
9492 dataflow_set_clear (&VTI (bb)->out);
9494 /* Enable emitting notes by functions (mainly by set_variable_part and
9495 delete_variable_part). */
9496 emit_notes = true;
9498 if (MAY_HAVE_DEBUG_INSNS)
9500 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9503 dataflow_set_init (&cur);
9505 FOR_EACH_BB_FN (bb, cfun)
9507 /* Emit the notes for changes of variable locations between two
9508 subsequent basic blocks. */
9509 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9511 if (MAY_HAVE_DEBUG_INSNS)
9512 local_get_addr_cache = new hash_map<rtx, rtx>;
9514 /* Emit the notes for the changes in the basic block itself. */
9515 emit_notes_in_bb (bb, &cur);
9517 if (MAY_HAVE_DEBUG_INSNS)
9518 delete local_get_addr_cache;
9519 local_get_addr_cache = NULL;
9521 /* Free memory occupied by the in hash table, we won't need it
9522 again. */
9523 dataflow_set_clear (&VTI (bb)->in);
9525 #ifdef ENABLE_CHECKING
9526 shared_hash_htab (cur.vars)
9527 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9528 (shared_hash_htab (empty_shared_hash));
9529 #endif
9530 dataflow_set_destroy (&cur);
9532 if (MAY_HAVE_DEBUG_INSNS)
9533 delete dropped_values;
9534 dropped_values = NULL;
9536 emit_notes = false;
9539 /* If there is a declaration and offset associated with register/memory RTL
9540 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9542 static bool
9543 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9545 if (REG_P (rtl))
9547 if (REG_ATTRS (rtl))
9549 *declp = REG_EXPR (rtl);
9550 *offsetp = REG_OFFSET (rtl);
9551 return true;
9554 else if (GET_CODE (rtl) == PARALLEL)
9556 tree decl = NULL_TREE;
9557 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9558 int len = XVECLEN (rtl, 0), i;
9560 for (i = 0; i < len; i++)
9562 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9563 if (!REG_P (reg) || !REG_ATTRS (reg))
9564 break;
9565 if (!decl)
9566 decl = REG_EXPR (reg);
9567 if (REG_EXPR (reg) != decl)
9568 break;
9569 if (REG_OFFSET (reg) < offset)
9570 offset = REG_OFFSET (reg);
9573 if (i == len)
9575 *declp = decl;
9576 *offsetp = offset;
9577 return true;
9580 else if (MEM_P (rtl))
9582 if (MEM_ATTRS (rtl))
9584 *declp = MEM_EXPR (rtl);
9585 *offsetp = INT_MEM_OFFSET (rtl);
9586 return true;
9589 return false;
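/* For instance (register numbers invented), given
     (parallel [(expr_list (reg:SI 0) (const_int 0))
                (expr_list (reg:SI 1) (const_int 4))])
   where both registers have REG_EXPR equal to the same decl D, the
   function above sets *DECLP to D and *OFFSETP to the smallest
   REG_OFFSET among the registers.  */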
9592 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9593 of VAL. */
9595 static void
9596 record_entry_value (cselib_val *val, rtx rtl)
9598 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9600 ENTRY_VALUE_EXP (ev) = rtl;
9602 cselib_add_permanent_equiv (val, ev, get_insns ());
9605 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9607 static void
9608 vt_add_function_parameter (tree parm)
9610 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9611 rtx incoming = DECL_INCOMING_RTL (parm);
9612 tree decl;
9613 machine_mode mode;
9614 HOST_WIDE_INT offset;
9615 dataflow_set *out;
9616 decl_or_value dv;
9618 if (TREE_CODE (parm) != PARM_DECL)
9619 return;
9621 if (!decl_rtl || !incoming)
9622 return;
9624 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9625 return;
9627 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9628 rewrite the incoming location of parameters passed on the stack
9629 into MEMs based on the argument pointer, so that incoming doesn't
9630 depend on a pseudo. */
9631 if (MEM_P (incoming)
9632 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9633 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9634 && XEXP (XEXP (incoming, 0), 0)
9635 == crtl->args.internal_arg_pointer
9636 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9638 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9639 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9640 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9641 incoming
9642 = replace_equiv_address_nv (incoming,
9643 plus_constant (Pmode,
9644 arg_pointer_rtx, off));
9647 #ifdef HAVE_window_save
9648 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9649 If the target machine has an explicit window save instruction, the
9650 actual entry value is the corresponding OUTGOING_REGNO instead. */
9651 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9653 if (REG_P (incoming)
9654 && HARD_REGISTER_P (incoming)
9655 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9657 parm_reg_t p;
9658 p.incoming = incoming;
9659 incoming
9660 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9661 OUTGOING_REGNO (REGNO (incoming)), 0);
9662 p.outgoing = incoming;
9663 vec_safe_push (windowed_parm_regs, p);
9665 else if (GET_CODE (incoming) == PARALLEL)
9667 rtx outgoing
9668 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9669 int i;
9671 for (i = 0; i < XVECLEN (incoming, 0); i++)
9673 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9674 parm_reg_t p;
9675 p.incoming = reg;
9676 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9677 OUTGOING_REGNO (REGNO (reg)), 0);
9678 p.outgoing = reg;
9679 XVECEXP (outgoing, 0, i)
9680 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9681 XEXP (XVECEXP (incoming, 0, i), 1));
9682 vec_safe_push (windowed_parm_regs, p);
9685 incoming = outgoing;
9687 else if (MEM_P (incoming)
9688 && REG_P (XEXP (incoming, 0))
9689 && HARD_REGISTER_P (XEXP (incoming, 0)))
9691 rtx reg = XEXP (incoming, 0);
9692 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9694 parm_reg_t p;
9695 p.incoming = reg;
9696 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9697 p.outgoing = reg;
9698 vec_safe_push (windowed_parm_regs, p);
9699 incoming = replace_equiv_address_nv (incoming, reg);
9703 #endif
9705 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9707 if (MEM_P (incoming))
9709 /* This means the argument is passed by invisible reference. */
9710 offset = 0;
9711 decl = parm;
9713 else
9715 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9716 return;
9717 offset += byte_lowpart_offset (GET_MODE (incoming),
9718 GET_MODE (decl_rtl));
9722 if (!decl)
9723 return;
9725 if (parm != decl)
9727 /* If that DECL_RTL wasn't a pseudo that got spilled to
9728 memory, bail out. Otherwise, the spill slot sharing code
9729 will force the memory to reference spill_slot_decl (%sfp),
9730 so we don't match above. That's ok, the pseudo must have
9731 referenced the entire parameter, so just reset OFFSET. */
9732 if (decl != get_spill_slot_decl (false))
9733 return;
9734 offset = 0;
9737 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9738 return;
9740 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9742 dv = dv_from_decl (parm);
9744 if (target_for_debug_bind (parm)
9745 /* We can't deal with these right now, because this kind of
9746 variable is single-part. ??? We could handle parallels
9747 that describe multiple locations for the same single
9748 value, but ATM we don't. */
9749 && GET_CODE (incoming) != PARALLEL)
9751 cselib_val *val;
9752 rtx lowpart;
9754 /* ??? We shouldn't ever hit this, but it may happen because
9755 arguments passed by invisible reference aren't dealt with
9756 above: incoming-rtl will have Pmode rather than the
9757 expected mode for the type. */
9758 if (offset)
9759 return;
9761 lowpart = var_lowpart (mode, incoming);
9762 if (!lowpart)
9763 return;
9765 val = cselib_lookup_from_insn (lowpart, mode, true,
9766 VOIDmode, get_insns ());
9768 /* ??? Float-typed values in memory are not handled by
9769 cselib. */
9770 if (val)
9772 preserve_value (val);
9773 set_variable_part (out, val->val_rtx, dv, offset,
9774 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9775 dv = dv_from_value (val->val_rtx);
9778 if (MEM_P (incoming))
9780 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9781 VOIDmode, get_insns ());
9782 if (val)
9784 preserve_value (val);
9785 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9790 if (REG_P (incoming))
9792 incoming = var_lowpart (mode, incoming);
9793 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9794 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9795 incoming);
9796 set_variable_part (out, incoming, dv, offset,
9797 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9798 if (dv_is_value_p (dv))
9800 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9801 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9802 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9804 machine_mode indmode
9805 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9806 rtx mem = gen_rtx_MEM (indmode, incoming);
9807 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9808 VOIDmode,
9809 get_insns ());
9810 if (val)
9812 preserve_value (val);
9813 record_entry_value (val, mem);
9814 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9815 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9820 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9822 int i;
9824 for (i = 0; i < XVECLEN (incoming, 0); i++)
9826 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9827 offset = REG_OFFSET (reg);
9828 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9829 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9830 set_variable_part (out, reg, dv, offset,
9831 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9834 else if (MEM_P (incoming))
9836 incoming = var_lowpart (mode, incoming);
9837 set_variable_part (out, incoming, dv, offset,
9838 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9842 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9844 static void
9845 vt_add_function_parameters (void)
9847 tree parm;
9849 for (parm = DECL_ARGUMENTS (current_function_decl);
9850 parm; parm = DECL_CHAIN (parm))
9851 if (!POINTER_BOUNDS_P (parm))
9852 vt_add_function_parameter (parm);
9854 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9856 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9858 if (TREE_CODE (vexpr) == INDIRECT_REF)
9859 vexpr = TREE_OPERAND (vexpr, 0);
9861 if (TREE_CODE (vexpr) == PARM_DECL
9862 && DECL_ARTIFICIAL (vexpr)
9863 && !DECL_IGNORED_P (vexpr)
9864 && DECL_NAMELESS (vexpr))
9865 vt_add_function_parameter (vexpr);
9869 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9870 ensure it isn't flushed during cselib_reset_table.
9871 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9872 has been eliminated. */
9874 static void
9875 vt_init_cfa_base (void)
9877 cselib_val *val;
9879 #ifdef FRAME_POINTER_CFA_OFFSET
9880 cfa_base_rtx = frame_pointer_rtx;
9881 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9882 #else
9883 cfa_base_rtx = arg_pointer_rtx;
9884 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9885 #endif
9886 if (cfa_base_rtx == hard_frame_pointer_rtx
9887 || !fixed_regs[REGNO (cfa_base_rtx)])
9889 cfa_base_rtx = NULL_RTX;
9890 return;
9892 if (!MAY_HAVE_DEBUG_INSNS)
9893 return;
9895 /* Tell alias analysis that cfa_base_rtx should share
9896 find_base_term value with stack pointer or hard frame pointer. */
9897 if (!frame_pointer_needed)
9898 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9899 else if (!crtl->stack_realign_tried)
9900 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9902 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9903 VOIDmode, get_insns ());
9904 preserve_value (val);
9905 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9908 /* Allocate and initialize the data structures for variable tracking
9909 and parse the RTL to get the micro operations. */
9911 static bool
9912 vt_initialize (void)
9914 basic_block bb;
9915 HOST_WIDE_INT fp_cfa_offset = -1;
9917 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9919 empty_shared_hash = new shared_hash_def;
9920 empty_shared_hash->refcount = 1;
9921 empty_shared_hash->htab = new variable_table_type (1);
9922 changed_variables = new variable_table_type (10);
9924 /* Init the IN and OUT sets. */
9925 FOR_ALL_BB_FN (bb, cfun)
9927 VTI (bb)->visited = false;
9928 VTI (bb)->flooded = false;
9929 dataflow_set_init (&VTI (bb)->in);
9930 dataflow_set_init (&VTI (bb)->out);
9931 VTI (bb)->permp = NULL;
9934 if (MAY_HAVE_DEBUG_INSNS)
9936 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9937 scratch_regs = BITMAP_ALLOC (NULL);
9938 preserved_values.create (256);
9939 global_get_addr_cache = new hash_map<rtx, rtx>;
9941 else
9943 scratch_regs = NULL;
9944 global_get_addr_cache = NULL;
9947 if (MAY_HAVE_DEBUG_INSNS)
9949 rtx reg, expr;
9950 int ofst;
9951 cselib_val *val;
9953 #ifdef FRAME_POINTER_CFA_OFFSET
9954 reg = frame_pointer_rtx;
9955 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9956 #else
9957 reg = arg_pointer_rtx;
9958 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9959 #endif
9961 ofst -= INCOMING_FRAME_SP_OFFSET;
9963 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9964 VOIDmode, get_insns ());
9965 preserve_value (val);
9966 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9967 cselib_preserve_cfa_base_value (val, REGNO (reg));
9968 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9969 stack_pointer_rtx, -ofst);
9970 cselib_add_permanent_equiv (val, expr, get_insns ());
9972 if (ofst)
9974 val = cselib_lookup_from_insn (stack_pointer_rtx,
9975 GET_MODE (stack_pointer_rtx), 1,
9976 VOIDmode, get_insns ());
9977 preserve_value (val);
9978 expr = plus_constant (GET_MODE (reg), reg, ofst);
9979 cselib_add_permanent_equiv (val, expr, get_insns ());
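/* In effect, the lookups above record REG == SP - OFST as a permanent
   cselib equivalence and, when OFST is nonzero, SP == REG + OFST as
   well, where REG is the CFA base register chosen above.  These hold
   for the whole function, which is what later allows MEMs to be
   rewritten against the CFA.  */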
9983 /* In order to factor out the adjustments made to the stack pointer or to
9984 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9985 instead of individual location lists, we're going to rewrite MEMs based
9986 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9987 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9988 resp. arg_pointer_rtx. We can do this either when there is no frame
9989 pointer in the function and stack adjustments are consistent for all
9990 basic blocks or when there is a frame pointer and no stack realignment.
9991 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9992 has been eliminated. */
9993 if (!frame_pointer_needed)
9995 rtx reg, elim;
9997 if (!vt_stack_adjustments ())
9998 return false;
10000 #ifdef FRAME_POINTER_CFA_OFFSET
10001 reg = frame_pointer_rtx;
10002 #else
10003 reg = arg_pointer_rtx;
10004 #endif
10005 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10006 if (elim != reg)
10008 if (GET_CODE (elim) == PLUS)
10009 elim = XEXP (elim, 0);
10010 if (elim == stack_pointer_rtx)
10011 vt_init_cfa_base ();
10014 else if (!crtl->stack_realign_tried)
10016 rtx reg, elim;
10018 #ifdef FRAME_POINTER_CFA_OFFSET
10019 reg = frame_pointer_rtx;
10020 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10021 #else
10022 reg = arg_pointer_rtx;
10023 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10024 #endif
10025 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10026 if (elim != reg)
10028 if (GET_CODE (elim) == PLUS)
10030 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10031 elim = XEXP (elim, 0);
10033 if (elim != hard_frame_pointer_rtx)
10034 fp_cfa_offset = -1;
10036 else
10037 fp_cfa_offset = -1;
10040 /* If the stack is realigned and a DRAP register is used, we're going to
10041 rewrite MEMs based on it representing incoming locations of parameters
10042 passed on the stack into MEMs based on the argument pointer. Although
10043 we aren't going to rewrite other MEMs, we still need to initialize the
10044 virtual CFA pointer in order to ensure that the argument pointer will
10045 be seen as a constant throughout the function.
10047 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10048 else if (stack_realign_drap)
10050 rtx reg, elim;
10052 #ifdef FRAME_POINTER_CFA_OFFSET
10053 reg = frame_pointer_rtx;
10054 #else
10055 reg = arg_pointer_rtx;
10056 #endif
10057 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10058 if (elim != reg)
10060 if (GET_CODE (elim) == PLUS)
10061 elim = XEXP (elim, 0);
10062 if (elim == hard_frame_pointer_rtx)
10063 vt_init_cfa_base ();
10067 hard_frame_pointer_adjustment = -1;
10069 vt_add_function_parameters ();
10071 FOR_EACH_BB_FN (bb, cfun)
10073 rtx_insn *insn;
10074 HOST_WIDE_INT pre, post = 0;
10075 basic_block first_bb, last_bb;
10077 if (MAY_HAVE_DEBUG_INSNS)
10079 cselib_record_sets_hook = add_with_sets;
10080 if (dump_file && (dump_flags & TDF_DETAILS))
10081 fprintf (dump_file, "first value: %i\n",
10082 cselib_get_next_uid ());
10085 first_bb = bb;
10086 for (;;)
10088 edge e;
10089 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10090 || ! single_pred_p (bb->next_bb))
10091 break;
10092 e = find_edge (bb, bb->next_bb);
10093 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10094 break;
10095 bb = bb->next_bb;
10097 last_bb = bb;
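/* The loop above greedily extends the region from FIRST_BB across
   fallthrough-only edges into single-predecessor successors, so the
   blocks from FIRST_BB through LAST_BB form one extended basic block
   that cselib can process with a single shared value table before it
   is reset below.  */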
10099 /* Add the micro-operations to the vector. */
10100 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10102 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10103 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10104 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10105 insn = NEXT_INSN (insn))
10107 if (INSN_P (insn))
10109 if (!frame_pointer_needed)
10111 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10112 if (pre)
10114 micro_operation mo;
10115 mo.type = MO_ADJUST;
10116 mo.u.adjust = pre;
10117 mo.insn = insn;
10118 if (dump_file && (dump_flags & TDF_DETAILS))
10119 log_op_type (PATTERN (insn), bb, insn,
10120 MO_ADJUST, dump_file);
10121 VTI (bb)->mos.safe_push (mo);
10122 VTI (bb)->out.stack_adjust += pre;
10126 cselib_hook_called = false;
10127 adjust_insn (bb, insn);
10128 if (MAY_HAVE_DEBUG_INSNS)
10130 if (CALL_P (insn))
10131 prepare_call_arguments (bb, insn);
10132 cselib_process_insn (insn);
10133 if (dump_file && (dump_flags & TDF_DETAILS))
10135 print_rtl_single (dump_file, insn);
10136 dump_cselib_table (dump_file);
10139 if (!cselib_hook_called)
10140 add_with_sets (insn, 0, 0);
10141 cancel_changes (0);
10143 if (!frame_pointer_needed && post)
10145 micro_operation mo;
10146 mo.type = MO_ADJUST;
10147 mo.u.adjust = post;
10148 mo.insn = insn;
10149 if (dump_file && (dump_flags & TDF_DETAILS))
10150 log_op_type (PATTERN (insn), bb, insn,
10151 MO_ADJUST, dump_file);
10152 VTI (bb)->mos.safe_push (mo);
10153 VTI (bb)->out.stack_adjust += post;
10156 if (fp_cfa_offset != -1
10157 && hard_frame_pointer_adjustment == -1
10158 && fp_setter_insn (insn))
10160 vt_init_cfa_base ();
10161 hard_frame_pointer_adjustment = fp_cfa_offset;
10162 /* Disassociate sp from fp now. */
10163 if (MAY_HAVE_DEBUG_INSNS)
10165 cselib_val *v;
10166 cselib_invalidate_rtx (stack_pointer_rtx);
10167 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10168 VOIDmode);
10169 if (v && !cselib_preserved_value_p (v))
10171 cselib_set_value_sp_based (v);
10172 preserve_value (v);
10178 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10181 bb = last_bb;
10183 if (MAY_HAVE_DEBUG_INSNS)
10185 cselib_preserve_only_values ();
10186 cselib_reset_table (cselib_get_next_uid ());
10187 cselib_record_sets_hook = NULL;
10191 hard_frame_pointer_adjustment = -1;
10192 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10193 cfa_base_rtx = NULL_RTX;
10194 return true;
10197 /* This is *not* reset after each function. It gives each
10198 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10199 a unique label number. */
10201 static int debug_label_num = 1;
10203 /* Get rid of all debug insns from the insn stream. */
10205 static void
10206 delete_debug_insns (void)
10208 basic_block bb;
10209 rtx_insn *insn, *next;
10211 if (!MAY_HAVE_DEBUG_INSNS)
10212 return;
10214 FOR_EACH_BB_FN (bb, cfun)
10216 FOR_BB_INSNS_SAFE (bb, insn, next)
10217 if (DEBUG_INSN_P (insn))
10219 tree decl = INSN_VAR_LOCATION_DECL (insn);
10220 if (TREE_CODE (decl) == LABEL_DECL
10221 && DECL_NAME (decl)
10222 && !DECL_RTL_SET_P (decl))
10224 PUT_CODE (insn, NOTE);
10225 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10226 NOTE_DELETED_LABEL_NAME (insn)
10227 = IDENTIFIER_POINTER (DECL_NAME (decl));
10228 SET_DECL_RTL (decl, insn);
10229 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10231 else
10232 delete_insn (insn);
10237 /* Run a fast, BB-local-only version of var tracking, to take care of
10238 information that we don't do global analysis on, so that not all such
10239 information is lost. If SKIPPED holds, we're skipping the global
10240 pass entirely, so we should try to use the information it would have
10241 handled as well. */
10243 static void
10244 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10246 /* ??? Just skip it all for now. */
10247 delete_debug_insns ();
10250 /* Free the data structures needed for variable tracking. */
10252 static void
10253 vt_finalize (void)
10255 basic_block bb;
10257 FOR_EACH_BB_FN (bb, cfun)
10259 VTI (bb)->mos.release ();
10262 FOR_ALL_BB_FN (bb, cfun)
10264 dataflow_set_destroy (&VTI (bb)->in);
10265 dataflow_set_destroy (&VTI (bb)->out);
10266 if (VTI (bb)->permp)
10268 dataflow_set_destroy (VTI (bb)->permp);
10269 XDELETE (VTI (bb)->permp);
10272 free_aux_for_blocks ();
10273 delete empty_shared_hash->htab;
10274 empty_shared_hash->htab = NULL;
10275 delete changed_variables;
10276 changed_variables = NULL;
10277 attrs_def::pool.release ();
10278 var_pool.release ();
10279 location_chain_def::pool.release ();
10280 shared_hash_def::pool.release ();
10282 if (MAY_HAVE_DEBUG_INSNS)
10284 if (global_get_addr_cache)
10285 delete global_get_addr_cache;
10286 global_get_addr_cache = NULL;
10287 loc_exp_dep::pool.release ();
10288 valvar_pool.release ();
10289 preserved_values.release ();
10290 cselib_finish ();
10291 BITMAP_FREE (scratch_regs);
10292 scratch_regs = NULL;
10295 #ifdef HAVE_window_save
10296 vec_free (windowed_parm_regs);
10297 #endif
10299 if (vui_vec)
10300 XDELETEVEC (vui_vec);
10301 vui_vec = NULL;
10302 vui_allocated = 0;
10305 /* The entry point to variable tracking pass. */
10307 static inline unsigned int
10308 variable_tracking_main_1 (void)
10310 bool success;
10312 if (flag_var_tracking_assignments < 0
10313 /* Var-tracking right now assumes the IR doesn't contain
10314 any pseudos at this point. */
10315 || targetm.no_register_allocation)
10317 delete_debug_insns ();
10318 return 0;
10321 if (n_basic_blocks_for_fn (cfun) > 500
10322 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10324 vt_debug_insns_local (true);
10325 return 0;
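/* As a concrete reading of the heuristic above: a function with 600
   basic blocks and 12,000 or more edges averages at least 20 edges per
   block, and is considered too dense for the global dataflow pass, so
   only the cheap BB-local handling is attempted.  */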
10328 mark_dfs_back_edges ();
10329 if (!vt_initialize ())
10331 vt_finalize ();
10332 vt_debug_insns_local (true);
10333 return 0;
10336 success = vt_find_locations ();
10338 if (!success && flag_var_tracking_assignments > 0)
10340 vt_finalize ();
10342 delete_debug_insns ();
10344 /* This is later restored by our caller. */
10345 flag_var_tracking_assignments = 0;
10347 success = vt_initialize ();
10348 gcc_assert (success);
10350 success = vt_find_locations ();
10353 if (!success)
10355 vt_finalize ();
10356 vt_debug_insns_local (false);
10357 return 0;
10360 if (dump_file && (dump_flags & TDF_DETAILS))
10362 dump_dataflow_sets ();
10363 dump_reg_info (dump_file);
10364 dump_flow_info (dump_file, dump_flags);
10367 timevar_push (TV_VAR_TRACKING_EMIT);
10368 vt_emit_notes ();
10369 timevar_pop (TV_VAR_TRACKING_EMIT);
10371 vt_finalize ();
10372 vt_debug_insns_local (false);
10373 return 0;
10376 unsigned int
10377 variable_tracking_main (void)
10379 unsigned int ret;
10380 int save = flag_var_tracking_assignments;
10382 ret = variable_tracking_main_1 ();
10384 flag_var_tracking_assignments = save;
10386 return ret;
10389 namespace {
10391 const pass_data pass_data_variable_tracking =
10393 RTL_PASS, /* type */
10394 "vartrack", /* name */
10395 OPTGROUP_NONE, /* optinfo_flags */
10396 TV_VAR_TRACKING, /* tv_id */
10397 0, /* properties_required */
10398 0, /* properties_provided */
10399 0, /* properties_destroyed */
10400 0, /* todo_flags_start */
10401 0, /* todo_flags_finish */
10404 class pass_variable_tracking : public rtl_opt_pass
10406 public:
10407 pass_variable_tracking (gcc::context *ctxt)
10408 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10411 /* opt_pass methods: */
10412 virtual bool gate (function *)
10414 return (flag_var_tracking && !targetm.delay_vartrack);
10417 virtual unsigned int execute (function *)
10419 return variable_tracking_main ();
10422 }; // class pass_variable_tracking
10424 } // anon namespace
10426 rtl_opt_pass *
10427 make_pass_variable_tracking (gcc::context *ctxt)
10429 return new pass_variable_tracking (ctxt);