/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (in which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
   < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack
   adjustment (used for adjusting offsets of variables addressed using the
   stack pointer), the table of structures describing the locations of
   parts of a variable, and, for each physical register, a linked list of
   the variable parts stored in that register.
   A linked list entry is a triplet (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked
   lists should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable
   locations are emitted to appropriate positions in the RTL code.  Each
   such note describes the location of one variable at the point in the
   instruction stream where the note is.  There is no need to emit a note
   for each variable before each instruction; we emit these notes only
   where the location of a variable changes (this means that we also emit
   notes for changes between the OUT set of the previous block and the IN
   set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "hash-set.h"
94 #include "machmode.h"
95 #include "vec.h"
96 #include "double-int.h"
97 #include "input.h"
98 #include "alias.h"
99 #include "symtab.h"
100 #include "wide-int.h"
101 #include "inchash.h"
102 #include "tree.h"
103 #include "varasm.h"
104 #include "stor-layout.h"
105 #include "hash-map.h"
106 #include "hash-table.h"
107 #include "predict.h"
108 #include "hard-reg-set.h"
109 #include "function.h"
110 #include "dominance.h"
111 #include "cfg.h"
112 #include "cfgrtl.h"
113 #include "cfganal.h"
114 #include "basic-block.h"
115 #include "tm_p.h"
116 #include "flags.h"
117 #include "insn-config.h"
118 #include "reload.h"
119 #include "sbitmap.h"
120 #include "alloc-pool.h"
121 #include "regs.h"
122 #include "hashtab.h"
123 #include "statistics.h"
124 #include "real.h"
125 #include "fixed-value.h"
126 #include "expmed.h"
127 #include "dojump.h"
128 #include "explow.h"
129 #include "calls.h"
130 #include "emit-rtl.h"
131 #include "stmt.h"
132 #include "expr.h"
133 #include "tree-pass.h"
134 #include "bitmap.h"
135 #include "tree-dfa.h"
136 #include "tree-ssa.h"
137 #include "cselib.h"
138 #include "target.h"
139 #include "params.h"
140 #include "diagnostic.h"
141 #include "tree-pretty-print.h"
142 #include "recog.h"
143 #include "rtl-iter.h"
144 #include "fibonacci_heap.h"
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
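/* Illustrative decomposition (an assumed example, not verbatim pass
   output): a push insn such as

     (insn ... (set (mem:SI (pre_dec (reg sp))) (reg 0)))

   would typically yield an MO_ADJUST for the pre-modifying stack
   adjustment, then an MO_USE (or MO_USE_NO_VAR) for the read of
   register 0, then an MO_SET for the store, in the order given in the
   overview comment at the top of this file.  */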
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
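/* Usage sketch for the decl_or_value encoding (illustrative): the same
   pointer payload carries either a tree or an rtx, and the
   check_value_val assertion near the top of this file is what makes the
   TREE_CODE comparison in dv_is_decl_p a safe discriminator.  E.g.,
   with dv_from_value defined later in this file:

     decl_or_value dv = dv_from_value (val);
     gcc_checking_assert (dv_is_value_p (dv));
     rtx v = dv_as_value (dv);   recovering VAL.  */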
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((attrs_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<attrs_def> pool;
} *attrs;
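/* Sketch of the shape of such a chain (illustrative only): if hard
   register 3 holds bytes 0..3 of variable A and bytes 4..7 of variable
   B, set->regs[3] would be a two-element list conceptually like

     { loc = (reg 3), dv = A, offset = 0 }
       -> { loc = (reg 3), dv = B, offset = 4 }
       -> NULL  */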
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((location_chain_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<location_chain_def> pool;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((loc_exp_dep_s *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<loc_exp_dep_s> pool;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
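/* Note for callers (illustrative): since INT_MEM_OFFSET evaluates MEM
   twice, it should be given a side-effect-free expression; e.g.
   INT_MEM_OFFSET (next_mem ()) would call the hypothetical next_mem
   function twice.  */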
#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)	(VAR_LOC_1PAUX (var)		  \
				 ? VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_DEP_LSTP(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_FROM(var)	(VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)	(VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)	(VAR_LOC_1PAUX (var)	    \
				 ? &VAR_LOC_1PAUX (var)->deps \
				 : NULL)
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher
{
  typedef variable_def *value_type;
  typedef void *compare_type;
  static inline hashval_t hash (const variable_def *);
  static inline bool equal (const variable_def *, const void *);
  static inline void remove (variable_def *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const variable_def *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const variable_def *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (variable_def *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn).  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before it is modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((shared_hash_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<shared_hash_def> pool;
} *shared_hash;
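/* Sketch of the copy-on-write discipline implied above (illustrative,
   using the shared_hash_copy / shared_hash_unshare helpers defined
   later in this file): cheap copies just bump the refcount, and the
   table is duplicated only when a shared instance is about to be
   written to.

     shared_hash in = shared_hash_copy (out);    refcount++, O(1)
     ...
     if (shared_hash_shared (in))                refcount > 1?
       in = shared_hash_unshare (in);            real copy made here  */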
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */

static pool_allocator<variable_def> var_pool
  ("variable_def pool", 64,
   (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));

/* Alloc pool for struct variable_def with a single var_part entry.  */
static pool_allocator<variable_def> valvar_pool
  ("small variable_def pool", 256);

/* Alloc pool for struct location_chain_def.  */
pool_allocator<location_chain_def> location_chain_def::pool
  ("location_chain_def pool", 1024);

/* Alloc pool for struct shared_hash_def.  */
pool_allocator<shared_hash_def> shared_hash_def::pool
  ("shared_hash_def pool", 256);

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
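/* Worked example (illustrative): for a 16-byte stack allocation

     (set (reg sp) (plus (reg sp) (const_int -16)))

   the PLUS branch above executes *post -= (-16), i.e. it records a
   post-adjustment of +16.  The sign convention matches the callback,
   where for_each_inc_dec supplies a negative SRCOFF for PRE_DEC and
   POST_DEC and the callback likewise subtracts it.  */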
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
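/* Illustrative example of the consistency check above: in a diamond CFG

       A
      / \
     B   C
      \ /
       D

   D is first reached via (say) B, which sets D's in.stack_adjust from
   B's out.stack_adjust; when the walk later reaches the already-visited
   D via the edge C->D, it only verifies that C's out.stack_adjust
   agrees.  If B and C left the stack pointer at different offsets, the
   function returns false to signal the inconsistency to its caller.  */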
/* The arg_pointer_rtx or frame_pointer_rtx that stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to, plus the offset to use
   with it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
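/* For instance (illustrative): with SUBREG = (subreg:SI (plus:DI
   (reg:DI r) (const_int 4)) 0), the walk above accepts the PLUS and the
   inner REG (provided the register has no cselib value in DImode and
   the lowpart subreg of it is valid), so the whole expression may be
   rewritten in SImode by use_narrower_mode below.  */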
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
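/* Continuing the example above (a sketch): use_narrower_mode applied to
   (plus:DI (reg:DI r) (const_int 4)) with MODE = SImode recurses into
   both operands, takes the SImode lowpart of each, and rebuilds roughly

     (plus:SI (subreg:SI (reg:DI r) 0) (const_int 4))

   (subreg byte 0 assuming a little-endian target) via
   simplify_gen_binary, which may fold the result further.  */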
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* Fall through.  */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* Fall through.  */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
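/* Example of the auto-increment rewrite above (a sketch): a use like

     (mem:SI (pre_dec:P (reg sp)))

   is returned as (mem:SI (plus:P (reg sp) (const_int -4))), while

     (set (reg sp) (plus:P (reg sp) (const_int -4)))

   is queued on amd->side_effects; adjust_insn later attaches the queued
   sets to the insn as extra members of a PARALLEL, so the adjusted insn
   no longer contains embedded side effects.  */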
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be performed before that
	 (otherwise it would fail) and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
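/* Putting adjust_mems and adjust_insn together (an assumed end-to-end
   example): a push such as

     (insn ... (set (mem:SI (pre_dec:P (reg sp))) (reg 0)))

   would be rewritten, for this pass's purposes, into a PARALLEL roughly
   like

     (parallel [(set (mem:SI (plus:P (reg sp) (const_int -4))) (reg 0))
		(set (reg sp) (plus:P (reg sp) (const_int -4)))])

   so that later scanning sees a plain store plus an explicit stack
   adjustment.  */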
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator <variable_def> &
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}
/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}

/* Return the entry of LIST with DV and OFFSET, or NULL if there is none.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list = new attrs_def;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs n = new attrs_def;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = new shared_hash_def;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}

static bool dst_can_be_shared;
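/* To illustrate the star topology mentioned above (a sketch): if VALUEs
   v1, v5 and v9 are found equivalent, canon_value_cmp prefers the
   lowest-numbered one, v1, as canonical, giving

       v5 -> v1 <- v9

   rather than an arbitrary chain v9 -> v5 -> v1, so any member reaches
   the canonical value in one step.  */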
1794 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1796 static variable_def **
1797 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1798 enum var_init_status initialized)
1800 variable new_var;
1801 int i;
1803 new_var = onepart_pool (var->onepart).allocate ();
1804 new_var->dv = var->dv;
1805 new_var->refcount = 1;
1806 var->refcount--;
1807 new_var->n_var_parts = var->n_var_parts;
1808 new_var->onepart = var->onepart;
1809 new_var->in_changed_variables = false;
1811 if (! flag_var_tracking_uninit)
1812 initialized = VAR_INIT_STATUS_INITIALIZED;
1814 for (i = 0; i < var->n_var_parts; i++)
1816 location_chain node;
1817 location_chain *nextp;
1819 if (i == 0 && var->onepart)
1821 /* One-part auxiliary data is only used while emitting
1822 notes, so propagate it to the new variable in the active
1823 dataflow set. If we're not emitting notes, this will be
1824 a no-op. */
1825 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1826 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1827 VAR_LOC_1PAUX (var) = NULL;
1829 else
1830 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1831 nextp = &new_var->var_part[i].loc_chain;
1832 for (node = var->var_part[i].loc_chain; node; node = node->next)
1834 location_chain new_lc;
1836 new_lc = new location_chain_def;
1837 new_lc->next = NULL;
1838 if (node->init > initialized)
1839 new_lc->init = node->init;
1840 else
1841 new_lc->init = initialized;
1842 if (node->set_src && !(MEM_P (node->set_src)))
1843 new_lc->set_src = node->set_src;
1844 else
1845 new_lc->set_src = NULL;
1846 new_lc->loc = node->loc;
1848 *nextp = new_lc;
1849 nextp = &new_lc->next;
1852 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1855 dst_can_be_shared = false;
1856 if (shared_hash_shared (set->vars))
1857 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1858 else if (set->traversed_vars && set->vars != set->traversed_vars)
1859 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1860 *slot = new_var;
1861 if (var->in_changed_variables)
1863 variable_def **cslot
1864 = changed_variables->find_slot_with_hash (var->dv,
1865 dv_htab_hash (var->dv),
1866 NO_INSERT);
1867 gcc_assert (*cslot == (void *) var);
1868 var->in_changed_variables = false;
1869 variable_htab_free (var);
1870 *cslot = new_var;
1871 new_var->in_changed_variables = true;
1873 return slot;
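/* A minimal sketch (hypothetical types and names, assuming only
   libiberty's XNEW) of the refcounted copy-on-write discipline that
   unshare_variable implements: readers share one object, and a writer
   clones first unless it is the sole owner.  */
#if 0
struct toy_obj { int refcount; int payload; };

static struct toy_obj *
toy_unshare (struct toy_obj *obj)
{
  if (obj->refcount == 1)
    return obj;			/* Sole owner: modify in place.  */
  struct toy_obj *copy = XNEW (struct toy_obj);
  copy->refcount = 1;		/* The caller owns the new copy.  */
  copy->payload = obj->payload;
  obj->refcount--;		/* Drop our share of the original.  */
  return copy;
}
#endif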
1876 /* Copy all variables from hash table SRC to hash table DST. */
1878 static void
1879 vars_copy (variable_table_type *dst, variable_table_type *src)
1881 variable_iterator_type hi;
1882 variable var;
1884 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1886 variable_def **dstp;
1887 var->refcount++;
1888 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1889 INSERT);
1890 *dstp = var;
1894 /* Map a decl to its main debug decl. */
1896 static inline tree
1897 var_debug_decl (tree decl)
1899 if (decl && TREE_CODE (decl) == VAR_DECL
1900 && DECL_HAS_DEBUG_EXPR_P (decl))
1902 tree debugdecl = DECL_DEBUG_EXPR (decl);
1903 if (DECL_P (debugdecl))
1904 decl = debugdecl;
1907 return decl;
1910 /* Set the register LOC to contain DV, OFFSET. */
1912 static void
1913 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1914 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1915 enum insert_option iopt)
1917 attrs node;
1918 bool decl_p = dv_is_decl_p (dv);
1920 if (decl_p)
1921 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1923 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1924 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1925 && node->offset == offset)
1926 break;
1927 if (!node)
1928 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1929 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1932 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1934 static void
1935 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1936 rtx set_src)
1938 tree decl = REG_EXPR (loc);
1939 HOST_WIDE_INT offset = REG_OFFSET (loc);
1941 var_reg_decl_set (set, loc, initialized,
1942 dv_from_decl (decl), offset, set_src, INSERT);
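/* Return the initialization status recorded for location LOC of DV in
   SET, or VAR_INIT_STATUS_UNKNOWN if LOC is not found there.  When
   uninitialized-use tracking is disabled, everything is considered
   VAR_INIT_STATUS_INITIALIZED.  */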
1945 static enum var_init_status
1946 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1948 variable var;
1949 int i;
1950 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1952 if (! flag_var_tracking_uninit)
1953 return VAR_INIT_STATUS_INITIALIZED;
1955 var = shared_hash_find (set->vars, dv);
1956 if (var)
1958 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1960 location_chain nextp;
1961 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1962 if (rtx_equal_p (nextp->loc, loc))
1964 ret_val = nextp->init;
1965 break;
1970 return ret_val;
1973 /* Delete current content of register LOC in dataflow set SET and set
1974 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1975 MODIFY is true, any other live copies of the same variable part are
1976 also deleted from the dataflow set, otherwise the variable part is
1977 assumed to be copied from another location holding the same
1978 part. */
1980 static void
1981 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1982 enum var_init_status initialized, rtx set_src)
1984 tree decl = REG_EXPR (loc);
1985 HOST_WIDE_INT offset = REG_OFFSET (loc);
1986 attrs node, next;
1987 attrs *nextp;
1989 decl = var_debug_decl (decl);
1991 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1992 initialized = get_init_value (set, loc, dv_from_decl (decl));
1994 nextp = &set->regs[REGNO (loc)];
1995 for (node = *nextp; node; node = next)
1997 next = node->next;
1998 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
2000 delete_variable_part (set, node->loc, node->dv, node->offset);
2001 delete node;
2002 *nextp = next;
2004 else
2006 node->loc = loc;
2007 nextp = &node->next;
2010 if (modify)
2011 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
2012 var_reg_set (set, loc, initialized, set_src);
2015 /* Delete the association of register LOC in dataflow set SET with any
2016 variables that aren't onepart. If CLOBBER is true, also delete any
2017 other live copies of the same variable part, and delete the
2018 association with onepart dvs too. */
2020 static void
2021 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
2023 attrs *nextp = &set->regs[REGNO (loc)];
2024 attrs node, next;
2026 if (clobber)
2028 tree decl = REG_EXPR (loc);
2029 HOST_WIDE_INT offset = REG_OFFSET (loc);
2031 decl = var_debug_decl (decl);
2033 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2036 for (node = *nextp; node; node = next)
2038 next = node->next;
2039 if (clobber || !dv_onepart_p (node->dv))
2041 delete_variable_part (set, node->loc, node->dv, node->offset);
2042 delete node;
2043 *nextp = next;
2045 else
2046 nextp = &node->next;
2050 /* Delete content of register with number REGNO in dataflow set SET. */
2052 static void
2053 var_regno_delete (dataflow_set *set, int regno)
2055 attrs *reg = &set->regs[regno];
2056 attrs node, next;
2058 for (node = *reg; node; node = next)
2060 next = node->next;
2061 delete_variable_part (set, node->loc, node->dv, node->offset);
2062 delete node;
2064 *reg = NULL;
2067 /* Return true if I is the negated value of a power of two. */
2068 static bool
2069 negative_power_of_two_p (HOST_WIDE_INT i)
2071 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2072 return x == (x & -x);
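/* Worked example (illustrative): alignment masks are negated powers of
   two, e.g. -8 == ~7.  For i == -8, x == 8 and (x & -x) == 8, so the
   predicate holds; for i == -6, x == 6 but (x & -x) == 2, so it does
   not.  A hypothetical self-check, not in the real file:  */
#if 0
gcc_assert (negative_power_of_two_p (-8));
gcc_assert (!negative_power_of_two_p (-6));
#endif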
2075 /* Strip constant offsets and alignments off of LOC. Return the base
2076 expression. */
2078 static rtx
2079 vt_get_canonicalize_base (rtx loc)
2081 while ((GET_CODE (loc) == PLUS
2082 || GET_CODE (loc) == AND)
2083 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2084 && (GET_CODE (loc) != AND
2085 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2086 loc = XEXP (loc, 0);
2088 return loc;
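/* Worked example (illustrative RTL): for an address like
   (and (plus (value V) (const_int 28)) (const_int -16))
   the loop strips the alignment AND (the mask -16 is a negated power
   of two) and the constant PLUS, returning the bare base (value V).
   A PLUS whose second operand is not a CONST_INT is left intact.  */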
2091 /* This caches canonicalized addresses for VALUEs, computed using
2092 information in the global cselib table. */
2093 static hash_map<rtx, rtx> *global_get_addr_cache;
2095 /* This caches canonicalized addresses for VALUEs, computed using
2096 information from the global cache and information pertaining to a
2097 basic block being analyzed. */
2098 static hash_map<rtx, rtx> *local_get_addr_cache;
2100 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2102 /* Return the canonical address for LOC, which must be a VALUE, using a
2103 cached global equivalence or computing it and storing it in the
2104 global cache. */
2106 static rtx
2107 get_addr_from_global_cache (rtx const loc)
2109 rtx x;
2111 gcc_checking_assert (GET_CODE (loc) == VALUE);
2113 bool existed;
2114 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2115 if (existed)
2116 return *slot;
2118 x = canon_rtx (get_addr (loc));
2120 /* Tentative, avoiding infinite recursion. */
2121 *slot = x;
2123 if (x != loc)
2125 rtx nx = vt_canonicalize_addr (NULL, x);
2126 if (nx != x)
2128 /* The table may have moved during recursion, recompute
2129 SLOT. */
2130 *global_get_addr_cache->get (loc) = x = nx;
2134 return x;
2137 /* Return the canonical address for LOC, which must be a VALUE, using a
2138 cached local equivalence or computing it and storing it in the
2139 local cache. */
2141 static rtx
2142 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2144 rtx x;
2145 decl_or_value dv;
2146 variable var;
2147 location_chain l;
2149 gcc_checking_assert (GET_CODE (loc) == VALUE);
2151 bool existed;
2152 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2153 if (existed)
2154 return *slot;
2156 x = get_addr_from_global_cache (loc);
2158 /* Tentative, avoiding infinite recursion. */
2159 *slot = x;
2161 /* Recurse to cache the local expansion of X, or to search for a
2162 VALUE in the expansion. */
2163 if (x != loc)
2165 rtx nx = vt_canonicalize_addr (set, x);
2166 if (nx != x)
2168 slot = local_get_addr_cache->get (loc);
2169 *slot = x = nx;
2171 return x;
2174 dv = dv_from_rtx (x);
2175 var = shared_hash_find (set->vars, dv);
2176 if (!var)
2177 return x;
2179 /* Look for an improved equivalent expression. */
2180 for (l = var->var_part[0].loc_chain; l; l = l->next)
2182 rtx base = vt_get_canonicalize_base (l->loc);
2183 if (GET_CODE (base) == VALUE
2184 && canon_value_cmp (base, loc))
2186 rtx nx = vt_canonicalize_addr (set, l->loc);
2187 if (x != nx)
2189 slot = local_get_addr_cache->get (loc);
2190 *slot = x = nx;
2192 break;
2196 return x;
2199 /* Canonicalize LOC using equivalences from SET in addition to those
2200 in the cselib static table. It expects a VALUE-based expression,
2201 and it will only substitute VALUEs with other VALUEs or
2202 function-global equivalences, so that, if two addresses have base
2203 VALUEs that are locally or globally related in ways that
2204 memrefs_conflict_p cares about, they will both canonicalize to
2205 expressions that have the same base VALUE.
2207 The use of VALUEs as canonical base addresses enables the canonical
2208 RTXs to remain unchanged globally, if they resolve to a constant,
2209 or throughout a basic block otherwise, so that they can be cached
2210 and the cache need not be invalidated when REGs, MEMs or such
2211 change. */
2213 static rtx
2214 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2216 HOST_WIDE_INT ofst = 0;
2217 machine_mode mode = GET_MODE (oloc);
2218 rtx loc = oloc;
2219 rtx x;
2220 bool retry = true;
2222 while (retry)
2224 while (GET_CODE (loc) == PLUS
2225 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2227 ofst += INTVAL (XEXP (loc, 1));
2228 loc = XEXP (loc, 0);
2231 /* Alignment operations can't normally be combined, so just
2232 canonicalize the base and we're done. We'll normally have
2233 only one stack alignment anyway. */
2234 if (GET_CODE (loc) == AND
2235 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2236 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2238 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2239 if (x != XEXP (loc, 0))
2240 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2241 retry = false;
2244 if (GET_CODE (loc) == VALUE)
2246 if (set)
2247 loc = get_addr_from_local_cache (set, loc);
2248 else
2249 loc = get_addr_from_global_cache (loc);
2251 /* Consolidate plus_constants. */
2252 while (ofst && GET_CODE (loc) == PLUS
2253 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2255 ofst += INTVAL (XEXP (loc, 1));
2256 loc = XEXP (loc, 0);
2259 retry = false;
2261 else
2263 x = canon_rtx (loc);
2264 if (retry)
2265 retry = (x != loc);
2266 loc = x;
2270 /* Add OFST back in. */
2271 if (ofst)
2273 /* Don't build new RTL if we can help it. */
2274 if (GET_CODE (oloc) == PLUS
2275 && XEXP (oloc, 0) == loc
2276 && INTVAL (XEXP (oloc, 1)) == ofst)
2277 return oloc;
2279 loc = plus_constant (mode, loc, ofst);
2282 return loc;
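/* Worked example (illustrative): starting from
   (plus (plus (value V1) (const_int 8)) (const_int 4)), the loop
   folds the constants into ofst == 12 and reduces the base to V1.
   If the address cache then maps V1 to
   (plus (value V0) (const_int 16)), the base becomes V0 with
   ofst == 28, and the result is (plus (value V0) (const_int 28)).
   Any other address based on V0 now canonicalizes to the same base
   VALUE, which is what the overlap checks below rely on.  */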
2285 /* Return true iff there's a true dependence between MLOC and LOC.
2286 MADDR must be a canonicalized version of MLOC's address. */
2288 static inline bool
2289 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2291 if (GET_CODE (loc) != MEM)
2292 return false;
2294 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2295 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2296 return false;
2298 return true;
2301 /* Hold parameters for the hashtab traversal function
2302 drop_overlapping_mem_locs, see below. */
2304 struct overlapping_mems
2306 dataflow_set *set;
2307 rtx loc, addr;
2310 /* Remove all MEMs that overlap with COMS->LOC from the location list
2311 of a hash table entry for a value. COMS->ADDR must be a
2312 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2313 canonicalized itself. */
2315 int
2316 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2318 dataflow_set *set = coms->set;
2319 rtx mloc = coms->loc, addr = coms->addr;
2320 variable var = *slot;
2322 if (var->onepart == ONEPART_VALUE)
2324 location_chain loc, *locp;
2325 bool changed = false;
2326 rtx cur_loc;
2328 gcc_assert (var->n_var_parts == 1);
2330 if (shared_var_p (var, set->vars))
2332 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2333 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2334 break;
2336 if (!loc)
2337 return 1;
2339 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2340 var = *slot;
2341 gcc_assert (var->n_var_parts == 1);
2344 if (VAR_LOC_1PAUX (var))
2345 cur_loc = VAR_LOC_FROM (var);
2346 else
2347 cur_loc = var->var_part[0].cur_loc;
2349 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2350 loc; loc = *locp)
2352 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2354 locp = &loc->next;
2355 continue;
2358 *locp = loc->next;
2359 /* If we have deleted the location which was last emitted,
2360 we have to emit a new location, so add the variable to the
2361 set of changed variables. */
2362 if (cur_loc == loc->loc)
2364 changed = true;
2365 var->var_part[0].cur_loc = NULL;
2366 if (VAR_LOC_1PAUX (var))
2367 VAR_LOC_FROM (var) = NULL;
2369 delete loc;
2372 if (!var->var_part[0].loc_chain)
2374 var->n_var_parts--;
2375 changed = true;
2377 if (changed)
2378 variable_was_changed (var, set);
2381 return 1;
2384 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2386 static void
2387 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2389 struct overlapping_mems coms;
2391 gcc_checking_assert (GET_CODE (loc) == MEM);
2393 coms.set = set;
2394 coms.loc = canon_rtx (loc);
2395 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2397 set->traversed_vars = set->vars;
2398 shared_hash_htab (set->vars)
2399 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2400 set->traversed_vars = NULL;
2403 /* Set the location of DV, OFFSET as the MEM LOC. */
2405 static void
2406 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2407 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2408 enum insert_option iopt)
2410 if (dv_is_decl_p (dv))
2411 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2413 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2416 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2417 SET to LOC.
2418 Adjust the address first if it is stack pointer based. */
2420 static void
2421 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2422 rtx set_src)
2424 tree decl = MEM_EXPR (loc);
2425 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2427 var_mem_decl_set (set, loc, initialized,
2428 dv_from_decl (decl), offset, set_src, INSERT);
2431 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2432 dataflow set SET to LOC. If MODIFY is true, any other live copies
2433 of the same variable part are also deleted from the dataflow set,
2434 otherwise the variable part is assumed to be copied from another
2435 location holding the same part.
2436 Adjust the address first if it is stack pointer based. */
2438 static void
2439 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2440 enum var_init_status initialized, rtx set_src)
2442 tree decl = MEM_EXPR (loc);
2443 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2445 clobber_overlapping_mems (set, loc);
2446 decl = var_debug_decl (decl);
2448 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2449 initialized = get_init_value (set, loc, dv_from_decl (decl));
2451 if (modify)
2452 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2453 var_mem_set (set, loc, initialized, set_src);
2456 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2457 true, also delete any other live copies of the same variable part.
2458 Adjust the address first if it is stack pointer based. */
2460 static void
2461 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2463 tree decl = MEM_EXPR (loc);
2464 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2466 clobber_overlapping_mems (set, loc);
2467 decl = var_debug_decl (decl);
2468 if (clobber)
2469 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2470 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2473 /* Return true if LOC should not be expanded for location expressions,
2474 or used in them. */
2476 static inline bool
2477 unsuitable_loc (rtx loc)
2479 switch (GET_CODE (loc))
2481 case PC:
2482 case SCRATCH:
2483 case CC0:
2484 case ASM_INPUT:
2485 case ASM_OPERANDS:
2486 return true;
2488 default:
2489 return false;
2493 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2494 bound to it. */
2496 static inline void
2497 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2499 if (REG_P (loc))
2501 if (modified)
2502 var_regno_delete (set, REGNO (loc));
2503 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2504 dv_from_value (val), 0, NULL_RTX, INSERT);
2506 else if (MEM_P (loc))
2508 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2510 if (modified)
2511 clobber_overlapping_mems (set, loc);
2513 if (l && GET_CODE (l->loc) == VALUE)
2514 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2516 /* If this MEM is a global constant, we don't need it in the
2517 dynamic tables. ??? We should test this before emitting the
2518 micro-op in the first place. */
2519 while (l)
2520 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2521 break;
2522 else
2523 l = l->next;
2525 if (!l)
2526 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2527 dv_from_value (val), 0, NULL_RTX, INSERT);
2529 else
2531 /* Other kinds of equivalences are necessarily static, at least
2532 so long as we do not perform substitutions while merging
2533 expressions. */
2534 gcc_unreachable ();
2535 set_variable_part (set, loc, dv_from_value (val), 0,
2536 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2540 /* Bind a value to a location it was just stored in. If MODIFIED
2541 holds, assume the location was modified, detaching it from any
2542 values bound to it. */
2544 static void
2545 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2546 bool modified)
2548 cselib_val *v = CSELIB_VAL_PTR (val);
2550 gcc_assert (cselib_preserved_value_p (v));
2552 if (dump_file)
2554 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2555 print_inline_rtx (dump_file, loc, 0);
2556 fprintf (dump_file, " evaluates to ");
2557 print_inline_rtx (dump_file, val, 0);
2558 if (v->locs)
2560 struct elt_loc_list *l;
2561 for (l = v->locs; l; l = l->next)
2563 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2564 print_inline_rtx (dump_file, l->loc, 0);
2567 fprintf (dump_file, "\n");
2570 gcc_checking_assert (!unsuitable_loc (loc));
2572 val_bind (set, val, loc, modified);
2575 /* Clear (canonical address) slots that reference X. */
2577 bool
2578 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2580 if (vt_get_canonicalize_base (*slot) == x)
2581 *slot = NULL;
2582 return true;
2585 /* Reset this node, detaching all its equivalences. Return the slot
2586 in the variable hash table that holds dv, if there is one. */
2588 static void
2589 val_reset (dataflow_set *set, decl_or_value dv)
2591 variable var = shared_hash_find (set->vars, dv) ;
2592 location_chain node;
2593 rtx cval;
2595 if (!var || !var->n_var_parts)
2596 return;
2598 gcc_assert (var->n_var_parts == 1);
2600 if (var->onepart == ONEPART_VALUE)
2602 rtx x = dv_as_value (dv);
2604 /* Relationships in the global cache don't change, so reset the
2605 local cache entry only. */
2606 rtx *slot = local_get_addr_cache->get (x);
2607 if (slot)
2609 /* If the value resolved back to itself, odds are that other
2610 values may have cached it too. These entries now refer
2611 to the old X, so detach them too. Entries that used the
2612 old X but resolved to something else remain ok as long as
2613 that something else isn't also reset. */
2614 if (*slot == x)
2615 local_get_addr_cache
2616 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2617 *slot = NULL;
2621 cval = NULL;
2622 for (node = var->var_part[0].loc_chain; node; node = node->next)
2623 if (GET_CODE (node->loc) == VALUE
2624 && canon_value_cmp (node->loc, cval))
2625 cval = node->loc;
2627 for (node = var->var_part[0].loc_chain; node; node = node->next)
2628 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2630 /* Redirect the equivalence link to the new canonical
2631 value, or simply remove it if it would point at
2632 itself. */
2633 if (cval)
2634 set_variable_part (set, cval, dv_from_value (node->loc),
2635 0, node->init, node->set_src, NO_INSERT);
2636 delete_variable_part (set, dv_as_value (dv),
2637 dv_from_value (node->loc), 0);
2640 if (cval)
2642 decl_or_value cdv = dv_from_value (cval);
2644 /* Keep the remaining values connected, accumulating links
2645 in the canonical value. */
2646 for (node = var->var_part[0].loc_chain; node; node = node->next)
2648 if (node->loc == cval)
2649 continue;
2650 else if (GET_CODE (node->loc) == REG)
2651 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2652 node->set_src, NO_INSERT);
2653 else if (GET_CODE (node->loc) == MEM)
2654 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2655 node->set_src, NO_INSERT);
2656 else
2657 set_variable_part (set, node->loc, cdv, 0,
2658 node->init, node->set_src, NO_INSERT);
2662 /* We remove this last, to make sure that the canonical value is not
2663 removed to the point of requiring reinsertion. */
2664 if (cval)
2665 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2667 clobber_variable_part (set, NULL, dv, 0, NULL);
2670 /* Find the values in a given location and map the val to another
2671 value, if it is unique, or add the location as one holding the
2672 value. */
2674 static void
2675 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2677 decl_or_value dv = dv_from_value (val);
2679 if (dump_file && (dump_flags & TDF_DETAILS))
2681 if (insn)
2682 fprintf (dump_file, "%i: ", INSN_UID (insn));
2683 else
2684 fprintf (dump_file, "head: ");
2685 print_inline_rtx (dump_file, val, 0);
2686 fputs (" is at ", dump_file);
2687 print_inline_rtx (dump_file, loc, 0);
2688 fputc ('\n', dump_file);
2691 val_reset (set, dv);
2693 gcc_checking_assert (!unsuitable_loc (loc));
2695 if (REG_P (loc))
2697 attrs node, found = NULL;
2699 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2700 if (dv_is_value_p (node->dv)
2701 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2703 found = node;
2705 /* Map incoming equivalences. ??? Wouldn't it be nice if
2706 we just started sharing the location lists? Maybe a
2707 circular list ending at the value itself or some
2708 such. */
2709 set_variable_part (set, dv_as_value (node->dv),
2710 dv_from_value (val), node->offset,
2711 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2712 set_variable_part (set, val, node->dv, node->offset,
2713 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2716 /* If we didn't find any equivalence, we need to remember that
2717 this value is held in the named register. */
2718 if (found)
2719 return;
2721 /* ??? Attempt to find and merge equivalent MEMs or other
2722 expressions too. */
2724 val_bind (set, val, loc, false);
2727 /* Initialize dataflow set SET to be empty.
2728 VARS_SIZE is the initial size of hash table VARS. */
2730 static void
2731 dataflow_set_init (dataflow_set *set)
2733 init_attrs_list_set (set->regs);
2734 set->vars = shared_hash_copy (empty_shared_hash);
2735 set->stack_adjust = 0;
2736 set->traversed_vars = NULL;
2739 /* Delete the contents of dataflow set SET. */
2741 static void
2742 dataflow_set_clear (dataflow_set *set)
2744 int i;
2746 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2747 attrs_list_clear (&set->regs[i]);
2749 shared_hash_destroy (set->vars);
2750 set->vars = shared_hash_copy (empty_shared_hash);
2753 /* Copy the contents of dataflow set SRC to DST. */
2755 static void
2756 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2758 int i;
2760 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2761 attrs_list_copy (&dst->regs[i], src->regs[i]);
2763 shared_hash_destroy (dst->vars);
2764 dst->vars = shared_hash_copy (src->vars);
2765 dst->stack_adjust = src->stack_adjust;
2768 /* Information for merging lists of locations for a given offset of a variable. */
2770 struct variable_union_info
2772 /* Node of the location chain. */
2773 location_chain lc;
2775 /* The sum of positions in the input chains. */
2776 int pos;
2778 /* The position in the chain of DST dataflow set. */
2779 int pos_dst;
2782 /* Buffer for location list sorting and its allocated size. */
2783 static struct variable_union_info *vui_vec;
2784 static int vui_allocated;
2786 /* Compare function for qsort, order the structures by POS element. */
2788 static int
2789 variable_union_info_cmp_pos (const void *n1, const void *n2)
2791 const struct variable_union_info *const i1 =
2792 (const struct variable_union_info *) n1;
2793 const struct variable_union_info *const i2 =
2794 ( const struct variable_union_info *) n2;
2796 if (i1->pos != i2->pos)
2797 return i1->pos - i2->pos;
2799 return (i1->pos_dst - i2->pos_dst);
2802 /* Compute union of location parts of variable *SLOT and the same variable
2803 from hash table DATA. Compute "sorted" union of the location chains
2804 for common offsets, i.e. the locations of a variable part are sorted by
2805 a priority where the priority is the sum of the positions in the 2 chains
2806 (if a location is only in one list the position in the second list is
2807 defined to be larger than the length of the chains).
2808 When we are updating the location parts, the newest location is at the
2809 beginning of the chain, so when we do the described "sorted" union
2810 we keep the newest locations at the beginning. */
2812 static int
2813 variable_union (variable src, dataflow_set *set)
2815 variable dst;
2816 variable_def **dstp;
2817 int i, j, k;
2819 dstp = shared_hash_find_slot (set->vars, src->dv);
2820 if (!dstp || !*dstp)
2822 src->refcount++;
2824 dst_can_be_shared = false;
2825 if (!dstp)
2826 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2828 *dstp = src;
2830 /* Continue traversing the hash table. */
2831 return 1;
2833 else
2834 dst = *dstp;
2836 gcc_assert (src->n_var_parts);
2837 gcc_checking_assert (src->onepart == dst->onepart);
2839 /* We can combine one-part variables very efficiently, because their
2840 entries are in canonical order. */
2841 if (src->onepart)
2843 location_chain *nodep, dnode, snode;
2845 gcc_assert (src->n_var_parts == 1
2846 && dst->n_var_parts == 1);
2848 snode = src->var_part[0].loc_chain;
2849 gcc_assert (snode);
2851 restart_onepart_unshared:
2852 nodep = &dst->var_part[0].loc_chain;
2853 dnode = *nodep;
2854 gcc_assert (dnode);
2856 while (snode)
2858 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2860 if (r > 0)
2862 location_chain nnode;
2864 if (shared_var_p (dst, set->vars))
2866 dstp = unshare_variable (set, dstp, dst,
2867 VAR_INIT_STATUS_INITIALIZED);
2868 dst = *dstp;
2869 goto restart_onepart_unshared;
2872 *nodep = nnode = new location_chain_def;
2873 nnode->loc = snode->loc;
2874 nnode->init = snode->init;
2875 if (!snode->set_src || MEM_P (snode->set_src))
2876 nnode->set_src = NULL;
2877 else
2878 nnode->set_src = snode->set_src;
2879 nnode->next = dnode;
2880 dnode = nnode;
2882 else if (r == 0)
2883 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2885 if (r >= 0)
2886 snode = snode->next;
2888 nodep = &dnode->next;
2889 dnode = *nodep;
2892 return 1;
2895 gcc_checking_assert (!src->onepart);
2897 /* Count the number of location parts, result is K. */
2898 for (i = 0, j = 0, k = 0;
2899 i < src->n_var_parts && j < dst->n_var_parts; k++)
2901 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2903 i++;
2904 j++;
2906 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2907 i++;
2908 else
2909 j++;
2911 k += src->n_var_parts - i;
2912 k += dst->n_var_parts - j;
2914 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2915 thus there are at most MAX_VAR_PARTS different offsets. */
2916 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2918 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2920 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2921 dst = *dstp;
2924 i = src->n_var_parts - 1;
2925 j = dst->n_var_parts - 1;
2926 dst->n_var_parts = k;
2928 for (k--; k >= 0; k--)
2930 location_chain node, node2;
2932 if (i >= 0 && j >= 0
2933 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2935 /* Compute the "sorted" union of the chains, i.e. the locations which
2936 are in both chains go first, they are sorted by the sum of
2937 positions in the chains. */
2938 int dst_l, src_l;
2939 int ii, jj, n;
2940 struct variable_union_info *vui;
2942 /* If DST is shared compare the location chains.
2943 If they are different we will modify the chain in DST with
2944 high probability so make a copy of DST. */
2945 if (shared_var_p (dst, set->vars))
2947 for (node = src->var_part[i].loc_chain,
2948 node2 = dst->var_part[j].loc_chain; node && node2;
2949 node = node->next, node2 = node2->next)
2951 if (!((REG_P (node2->loc)
2952 && REG_P (node->loc)
2953 && REGNO (node2->loc) == REGNO (node->loc))
2954 || rtx_equal_p (node2->loc, node->loc)))
2956 if (node2->init < node->init)
2957 node2->init = node->init;
2958 break;
2961 if (node || node2)
2963 dstp = unshare_variable (set, dstp, dst,
2964 VAR_INIT_STATUS_UNKNOWN);
2965 dst = (variable)*dstp;
2969 src_l = 0;
2970 for (node = src->var_part[i].loc_chain; node; node = node->next)
2971 src_l++;
2972 dst_l = 0;
2973 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2974 dst_l++;
2976 if (dst_l == 1)
2978 /* The most common case, much simpler, no qsort is needed. */
2979 location_chain dstnode = dst->var_part[j].loc_chain;
2980 dst->var_part[k].loc_chain = dstnode;
2981 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2982 node2 = dstnode;
2983 for (node = src->var_part[i].loc_chain; node; node = node->next)
2984 if (!((REG_P (dstnode->loc)
2985 && REG_P (node->loc)
2986 && REGNO (dstnode->loc) == REGNO (node->loc))
2987 || rtx_equal_p (dstnode->loc, node->loc)))
2989 location_chain new_node;
2991 /* Copy the location from SRC. */
2992 new_node = new location_chain_def;
2993 new_node->loc = node->loc;
2994 new_node->init = node->init;
2995 if (!node->set_src || MEM_P (node->set_src))
2996 new_node->set_src = NULL;
2997 else
2998 new_node->set_src = node->set_src;
2999 node2->next = new_node;
3000 node2 = new_node;
3002 node2->next = NULL;
3004 else
3006 if (src_l + dst_l > vui_allocated)
3008 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
3009 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
3010 vui_allocated);
3012 vui = vui_vec;
3014 /* Fill in the locations from DST. */
3015 for (node = dst->var_part[j].loc_chain, jj = 0; node;
3016 node = node->next, jj++)
3018 vui[jj].lc = node;
3019 vui[jj].pos_dst = jj;
3021 /* Start with a position larger than any sum of two valid positions. */
3022 vui[jj].pos = jj + src_l + dst_l;
3025 /* Fill in the locations from SRC. */
3026 n = dst_l;
3027 for (node = src->var_part[i].loc_chain, ii = 0; node;
3028 node = node->next, ii++)
3030 /* Find location from NODE. */
3031 for (jj = 0; jj < dst_l; jj++)
3033 if ((REG_P (vui[jj].lc->loc)
3034 && REG_P (node->loc)
3035 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3036 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3038 vui[jj].pos = jj + ii;
3039 break;
3042 if (jj >= dst_l) /* The location has not been found. */
3044 location_chain new_node;
3046 /* Copy the location from SRC. */
3047 new_node = new location_chain_def;
3048 new_node->loc = node->loc;
3049 new_node->init = node->init;
3050 if (!node->set_src || MEM_P (node->set_src))
3051 new_node->set_src = NULL;
3052 else
3053 new_node->set_src = node->set_src;
3054 vui[n].lc = new_node;
3055 vui[n].pos_dst = src_l + dst_l;
3056 vui[n].pos = ii + src_l + dst_l;
3057 n++;
3061 if (dst_l == 2)
3063 /* Special case of a still very common case. For dst_l == 2,
3064 all entries dst_l ... n-1 are sorted, with vui[i].pos
3065 == i + src_l + dst_l for i >= dst_l. */
3066 if (vui[0].pos > vui[1].pos)
3068 /* Order should be 1, 0, 2... */
3069 dst->var_part[k].loc_chain = vui[1].lc;
3070 vui[1].lc->next = vui[0].lc;
3071 if (n >= 3)
3073 vui[0].lc->next = vui[2].lc;
3074 vui[n - 1].lc->next = NULL;
3076 else
3077 vui[0].lc->next = NULL;
3078 ii = 3;
3080 else
3082 dst->var_part[k].loc_chain = vui[0].lc;
3083 if (n >= 3 && vui[2].pos < vui[1].pos)
3085 /* Order should be 0, 2, 1, 3... */
3086 vui[0].lc->next = vui[2].lc;
3087 vui[2].lc->next = vui[1].lc;
3088 if (n >= 4)
3090 vui[1].lc->next = vui[3].lc;
3091 vui[n - 1].lc->next = NULL;
3093 else
3094 vui[1].lc->next = NULL;
3095 ii = 4;
3097 else
3099 /* Order should be 0, 1, 2... */
3100 ii = 1;
3101 vui[n - 1].lc->next = NULL;
3104 for (; ii < n; ii++)
3105 vui[ii - 1].lc->next = vui[ii].lc;
3107 else
3109 qsort (vui, n, sizeof (struct variable_union_info),
3110 variable_union_info_cmp_pos);
3112 /* Reconnect the nodes in sorted order. */
3113 for (ii = 1; ii < n; ii++)
3114 vui[ii - 1].lc->next = vui[ii].lc;
3115 vui[n - 1].lc->next = NULL;
3116 dst->var_part[k].loc_chain = vui[0].lc;
3119 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3121 i--;
3122 j--;
3124 else if ((i >= 0 && j >= 0
3125 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3126 || i < 0)
3128 dst->var_part[k] = dst->var_part[j];
3129 j--;
3131 else if ((i >= 0 && j >= 0
3132 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3133 || j < 0)
3135 location_chain *nextp;
3137 /* Copy the chain from SRC. */
3138 nextp = &dst->var_part[k].loc_chain;
3139 for (node = src->var_part[i].loc_chain; node; node = node->next)
3141 location_chain new_lc;
3143 new_lc = new location_chain_def;
3144 new_lc->next = NULL;
3145 new_lc->init = node->init;
3146 if (!node->set_src || MEM_P (node->set_src))
3147 new_lc->set_src = NULL;
3148 else
3149 new_lc->set_src = node->set_src;
3150 new_lc->loc = node->loc;
3152 *nextp = new_lc;
3153 nextp = &new_lc->next;
3156 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3157 i--;
3159 dst->var_part[k].cur_loc = NULL;
3162 if (flag_var_tracking_uninit)
3163 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3165 location_chain node, node2;
3166 for (node = src->var_part[i].loc_chain; node; node = node->next)
3167 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3168 if (rtx_equal_p (node->loc, node2->loc))
3170 if (node->init > node2->init)
3171 node2->init = node->init;
3175 /* Continue traversing the hash table. */
3176 return 1;
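/* Worked example (illustrative) of the "sorted" union above: for one
   offset, let the DST chain be (A B) and the SRC chain be (B C), so
   dst_l == src_l == 2.  Filling in the entries gives
   A: pos_dst 0, pos 0 + src_l + dst_l == 4 (never found in SRC);
   B: pos_dst 1, found at position 0 in SRC, so pos 1 + 0 == 1;
   C: only in SRC at position 1, so pos_dst 4 and pos 1 + 4 == 5.
   Sorting by pos yields (B A C): locations present in both chains come
   first, ordered by the sum of their positions, so the newest common
   locations stay near the head.  */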
3179 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3181 static void
3182 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3184 int i;
3186 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3187 attrs_list_union (&dst->regs[i], src->regs[i]);
3189 if (dst->vars == empty_shared_hash)
3191 shared_hash_destroy (dst->vars);
3192 dst->vars = shared_hash_copy (src->vars);
3194 else
3196 variable_iterator_type hi;
3197 variable var;
3199 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3200 var, variable, hi)
3201 variable_union (var, dst);
3205 /* Whether the value is currently being expanded. */
3206 #define VALUE_RECURSED_INTO(x) \
3207 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3209 /* Whether no expansion was found, saving useless lookups.
3210 It must only be set when VALUE_CHANGED is clear. */
3211 #define NO_LOC_P(x) \
3212 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3214 /* Whether cur_loc in the value needs to be (re)computed. */
3215 #define VALUE_CHANGED(x) \
3216 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3217 /* Whether cur_loc in the decl needs to be (re)computed. */
3218 #define DECL_CHANGED(x) TREE_VISITED (x)
3220 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3221 user DECLs, this means they're in changed_variables. Values and
3222 debug exprs may be left with this flag set if no user variable
3223 requires them to be evaluated. */
3225 static inline void
3226 set_dv_changed (decl_or_value dv, bool newv)
3228 switch (dv_onepart_p (dv))
3230 case ONEPART_VALUE:
3231 if (newv)
3232 NO_LOC_P (dv_as_value (dv)) = false;
3233 VALUE_CHANGED (dv_as_value (dv)) = newv;
3234 break;
3236 case ONEPART_DEXPR:
3237 if (newv)
3238 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3239 /* Fall through... */
3241 default:
3242 DECL_CHANGED (dv_as_decl (dv)) = newv;
3243 break;
3247 /* Return true if DV needs to have its cur_loc recomputed. */
3249 static inline bool
3250 dv_changed_p (decl_or_value dv)
3252 return (dv_is_value_p (dv)
3253 ? VALUE_CHANGED (dv_as_value (dv))
3254 : DECL_CHANGED (dv_as_decl (dv)));
3257 /* Return a location list node whose loc is rtx_equal to LOC, in the
3258 location list of a one-part variable or value VAR, or in that of
3259 any values recursively mentioned in the location lists. VARS must
3260 be in star-canonical form. */
3262 static location_chain
3263 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3265 location_chain node;
3266 enum rtx_code loc_code;
3268 if (!var)
3269 return NULL;
3271 gcc_checking_assert (var->onepart);
3273 if (!var->n_var_parts)
3274 return NULL;
3276 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3278 loc_code = GET_CODE (loc);
3279 for (node = var->var_part[0].loc_chain; node; node = node->next)
3281 decl_or_value dv;
3282 variable rvar;
3284 if (GET_CODE (node->loc) != loc_code)
3286 if (GET_CODE (node->loc) != VALUE)
3287 continue;
3289 else if (loc == node->loc)
3290 return node;
3291 else if (loc_code != VALUE)
3293 if (rtx_equal_p (loc, node->loc))
3294 return node;
3295 continue;
3298 /* Since we're in star-canonical form, we don't need to visit
3299 non-canonical nodes: one-part variables and non-canonical
3300 values would only point back to the canonical node. */
3301 if (dv_is_value_p (var->dv)
3302 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3304 /* Skip all subsequent VALUEs. */
3305 while (node->next && GET_CODE (node->next->loc) == VALUE)
3307 node = node->next;
3308 gcc_checking_assert (!canon_value_cmp (node->loc,
3309 dv_as_value (var->dv)));
3310 if (loc == node->loc)
3311 return node;
3313 continue;
3316 gcc_checking_assert (node == var->var_part[0].loc_chain);
3317 gcc_checking_assert (!node->next);
3319 dv = dv_from_value (node->loc);
3320 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3321 return find_loc_in_1pdv (loc, rvar, vars);
3324 /* ??? Gotta look in cselib_val locations too. */
3326 return NULL;
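/* Illustrative example: in star-canonical form, if V1 is canonical
   for {V1, V2}, then V2's chain contains just (V1) while V1's chain
   lists V2 and all the real locations.  A lookup of, say, (reg 3)
   starting from V2 therefore follows exactly one VALUE link, to V1,
   and finds the location there; no deeper recursion is needed.  */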
3329 /* Hash table iteration argument passed to variable_merge. */
3330 struct dfset_merge
3332 /* The set in which the merge is to be inserted. */
3333 dataflow_set *dst;
3334 /* The set that we're iterating in. */
3335 dataflow_set *cur;
3336 /* The set that may contain the other dv we are to merge with. */
3337 dataflow_set *src;
3338 /* Number of onepart dvs in src. */
3339 int src_onepart_cnt;
3342 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3343 loc_cmp order, and it is maintained as such. */
3345 static void
3346 insert_into_intersection (location_chain *nodep, rtx loc,
3347 enum var_init_status status)
3349 location_chain node;
3350 int r;
3352 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3353 if ((r = loc_cmp (node->loc, loc)) == 0)
3355 node->init = MIN (node->init, status);
3356 return;
3358 else if (r > 0)
3359 break;
3361 node = new location_chain_def;
3363 node->loc = loc;
3364 node->set_src = NULL;
3365 node->init = status;
3366 node->next = *nodep;
3367 *nodep = node;
3370 /* Insert in DEST the intersection of the locations present in both
3371 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3372 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3373 DSM->dst. */
3375 static void
3376 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3377 location_chain s1node, variable s2var)
3379 dataflow_set *s1set = dsm->cur;
3380 dataflow_set *s2set = dsm->src;
3381 location_chain found;
3383 if (s2var)
3385 location_chain s2node;
3387 gcc_checking_assert (s2var->onepart);
3389 if (s2var->n_var_parts)
3391 s2node = s2var->var_part[0].loc_chain;
3393 for (; s1node && s2node;
3394 s1node = s1node->next, s2node = s2node->next)
3395 if (s1node->loc != s2node->loc)
3396 break;
3397 else if (s1node->loc == val)
3398 continue;
3399 else
3400 insert_into_intersection (dest, s1node->loc,
3401 MIN (s1node->init, s2node->init));
3405 for (; s1node; s1node = s1node->next)
3407 if (s1node->loc == val)
3408 continue;
3410 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3411 shared_hash_htab (s2set->vars))))
3413 insert_into_intersection (dest, s1node->loc,
3414 MIN (s1node->init, found->init));
3415 continue;
3418 if (GET_CODE (s1node->loc) == VALUE
3419 && !VALUE_RECURSED_INTO (s1node->loc))
3421 decl_or_value dv = dv_from_value (s1node->loc);
3422 variable svar = shared_hash_find (s1set->vars, dv);
3423 if (svar)
3425 if (svar->n_var_parts == 1)
3427 VALUE_RECURSED_INTO (s1node->loc) = true;
3428 intersect_loc_chains (val, dest, dsm,
3429 svar->var_part[0].loc_chain,
3430 s2var);
3431 VALUE_RECURSED_INTO (s1node->loc) = false;
3436 /* ??? gotta look in cselib_val locations too. */
3438 /* ??? if the location is equivalent to any location in src,
3439 searched recursively
3441 add to dst the values needed to represent the equivalence
3443 telling whether location S is equivalent to another dv's
3444 location list:
3446 for each location D in the list
3448 if S and D satisfy rtx_equal_p, then it is present
3450 else if D is a value, recurse without cycles
3452 else if S and D have the same CODE and MODE
3454 for each operand oS and the corresponding oD
3456 if oS and oD are not equivalent, then S and D are not equivalent
3458 else if they are RTX vectors
3460 if any vector oS element is not equivalent to its respective oD,
3461 then S and D are not equivalent
3469 /* Return -1 if X should be before Y in a location list for a 1-part
3470 variable, 1 if Y should be before X, and 0 if they're equivalent
3471 and should not appear in the list. */
3473 static int
3474 loc_cmp (rtx x, rtx y)
3476 int i, j, r;
3477 RTX_CODE code = GET_CODE (x);
3478 const char *fmt;
3480 if (x == y)
3481 return 0;
3483 if (REG_P (x))
3485 if (!REG_P (y))
3486 return -1;
3487 gcc_assert (GET_MODE (x) == GET_MODE (y));
3488 if (REGNO (x) == REGNO (y))
3489 return 0;
3490 else if (REGNO (x) < REGNO (y))
3491 return -1;
3492 else
3493 return 1;
3496 if (REG_P (y))
3497 return 1;
3499 if (MEM_P (x))
3501 if (!MEM_P (y))
3502 return -1;
3503 gcc_assert (GET_MODE (x) == GET_MODE (y));
3504 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3507 if (MEM_P (y))
3508 return 1;
3510 if (GET_CODE (x) == VALUE)
3512 if (GET_CODE (y) != VALUE)
3513 return -1;
3514 /* Don't assert the modes are the same, that is true only
3515 when not recursing. (subreg:QI (value:SI 1:1) 0)
3516 and (subreg:QI (value:DI 2:2) 0) can be compared,
3517 even when the modes are different. */
3518 if (canon_value_cmp (x, y))
3519 return -1;
3520 else
3521 return 1;
3524 if (GET_CODE (y) == VALUE)
3525 return 1;
3527 /* Entry value is the least preferable kind of expression. */
3528 if (GET_CODE (x) == ENTRY_VALUE)
3530 if (GET_CODE (y) != ENTRY_VALUE)
3531 return 1;
3532 gcc_assert (GET_MODE (x) == GET_MODE (y));
3533 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3536 if (GET_CODE (y) == ENTRY_VALUE)
3537 return -1;
3539 if (GET_CODE (x) == GET_CODE (y))
3540 /* Compare operands below. */;
3541 else if (GET_CODE (x) < GET_CODE (y))
3542 return -1;
3543 else
3544 return 1;
3546 gcc_assert (GET_MODE (x) == GET_MODE (y));
3548 if (GET_CODE (x) == DEBUG_EXPR)
3550 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3551 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3552 return -1;
3553 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3554 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3555 return 1;
3558 fmt = GET_RTX_FORMAT (code);
3559 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3560 switch (fmt[i])
3562 case 'w':
3563 if (XWINT (x, i) == XWINT (y, i))
3564 break;
3565 else if (XWINT (x, i) < XWINT (y, i))
3566 return -1;
3567 else
3568 return 1;
3570 case 'n':
3571 case 'i':
3572 if (XINT (x, i) == XINT (y, i))
3573 break;
3574 else if (XINT (x, i) < XINT (y, i))
3575 return -1;
3576 else
3577 return 1;
3579 case 'V':
3580 case 'E':
3581 /* Compare the vector length first. */
3582 if (XVECLEN (x, i) == XVECLEN (y, i))
3583 /* Compare the vector's elements. */;
3584 else if (XVECLEN (x, i) < XVECLEN (y, i))
3585 return -1;
3586 else
3587 return 1;
3589 for (j = 0; j < XVECLEN (x, i); j++)
3590 if ((r = loc_cmp (XVECEXP (x, i, j),
3591 XVECEXP (y, i, j))))
3592 return r;
3593 break;
3595 case 'e':
3596 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3597 return r;
3598 break;
3600 case 'S':
3601 case 's':
3602 if (XSTR (x, i) == XSTR (y, i))
3603 break;
3604 if (!XSTR (x, i))
3605 return -1;
3606 if (!XSTR (y, i))
3607 return 1;
3608 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3609 break;
3610 else if (r < 0)
3611 return -1;
3612 else
3613 return 1;
3615 case 'u':
3616 /* These are just backpointers, so they don't matter. */
3617 break;
3619 case '0':
3620 case 't':
3621 break;
3623 /* It is believed that rtx's at this level will never
3624 contain anything but integers and other rtx's,
3625 except for within LABEL_REFs and SYMBOL_REFs. */
3626 default:
3627 gcc_unreachable ();
3629 if (CONST_WIDE_INT_P (x))
3631 /* Compare the vector length first. */
3632 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3633 return 1;
3634 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3635 return -1;
3637 /* Compare the vector's elements. */;
3638 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3640 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3641 return -1;
3642 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3643 return 1;
3647 return 0;
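/* Illustrative consequence of the ordering above: within a one-part
   location chain, REGs sort first (by REGNO), then MEMs, then VALUEs
   (more canonical, i.e. lower-uid, first), and ENTRY_VALUEs sort
   after everything else.  So e.g. (reg 1), (reg 4), (mem (value V)),
   (value V) already appear in loc_cmp order, which is what lets
   variable_union merge two canonically ordered chains in a single
   linear pass.  */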
3650 #if ENABLE_CHECKING
3651 /* Check the order of entries in one-part variables. */
3653 int
3654 canonicalize_loc_order_check (variable_def **slot,
3655 dataflow_set *data ATTRIBUTE_UNUSED)
3657 variable var = *slot;
3658 location_chain node, next;
3660 #ifdef ENABLE_RTL_CHECKING
3661 int i;
3662 for (i = 0; i < var->n_var_parts; i++)
3663 gcc_assert (var->var_part[i].cur_loc == NULL);
3664 gcc_assert (!var->in_changed_variables);
3665 #endif
3667 if (!var->onepart)
3668 return 1;
3670 gcc_assert (var->n_var_parts == 1);
3671 node = var->var_part[0].loc_chain;
3672 gcc_assert (node);
3674 while ((next = node->next))
3676 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3677 node = next;
3680 return 1;
3682 #endif
3684 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3685 more likely to be chosen as canonical for an equivalence set.
3686 Ensure less likely values can reach more likely neighbors, making
3687 the connections bidirectional. */
3689 int
3690 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3692 variable var = *slot;
3693 decl_or_value dv = var->dv;
3694 rtx val;
3695 location_chain node;
3697 if (!dv_is_value_p (dv))
3698 return 1;
3700 gcc_checking_assert (var->n_var_parts == 1);
3702 val = dv_as_value (dv);
3704 for (node = var->var_part[0].loc_chain; node; node = node->next)
3705 if (GET_CODE (node->loc) == VALUE)
3707 if (canon_value_cmp (node->loc, val))
3708 VALUE_RECURSED_INTO (val) = true;
3709 else
3711 decl_or_value odv = dv_from_value (node->loc);
3712 variable_def **oslot;
3713 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3715 set_slot_part (set, val, oslot, odv, 0,
3716 node->init, NULL_RTX);
3718 VALUE_RECURSED_INTO (node->loc) = true;
3722 return 1;
3725 /* Remove redundant entries from equivalence lists in onepart
3726 variables, canonicalizing equivalence sets into star shapes. */
3728 int
3729 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3731 variable var = *slot;
3732 decl_or_value dv = var->dv;
3733 location_chain node;
3734 decl_or_value cdv;
3735 rtx val, cval;
3736 variable_def **cslot;
3737 bool has_value;
3738 bool has_marks;
3740 if (!var->onepart)
3741 return 1;
3743 gcc_checking_assert (var->n_var_parts == 1);
3745 if (dv_is_value_p (dv))
3747 cval = dv_as_value (dv);
3748 if (!VALUE_RECURSED_INTO (cval))
3749 return 1;
3750 VALUE_RECURSED_INTO (cval) = false;
3752 else
3753 cval = NULL_RTX;
3755 restart:
3756 val = cval;
3757 has_value = false;
3758 has_marks = false;
3760 gcc_assert (var->n_var_parts == 1);
3762 for (node = var->var_part[0].loc_chain; node; node = node->next)
3763 if (GET_CODE (node->loc) == VALUE)
3765 has_value = true;
3766 if (VALUE_RECURSED_INTO (node->loc))
3767 has_marks = true;
3768 if (canon_value_cmp (node->loc, cval))
3769 cval = node->loc;
3772 if (!has_value)
3773 return 1;
3775 if (cval == val)
3777 if (!has_marks || dv_is_decl_p (dv))
3778 return 1;
3780 /* Keep it marked so that we revisit it, either after visiting a
3781 child node, or after visiting a new parent that might be
3782 found out. */
3783 VALUE_RECURSED_INTO (val) = true;
3785 for (node = var->var_part[0].loc_chain; node; node = node->next)
3786 if (GET_CODE (node->loc) == VALUE
3787 && VALUE_RECURSED_INTO (node->loc))
3789 cval = node->loc;
3790 restart_with_cval:
3791 VALUE_RECURSED_INTO (cval) = false;
3792 dv = dv_from_value (cval);
3793 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3794 if (!slot)
3796 gcc_assert (dv_is_decl_p (var->dv));
3797 /* The canonical value was reset and dropped.
3798 Remove it. */
3799 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3800 return 1;
3802 var = *slot;
3803 gcc_assert (dv_is_value_p (var->dv));
3804 if (var->n_var_parts == 0)
3805 return 1;
3806 gcc_assert (var->n_var_parts == 1);
3807 goto restart;
3810 VALUE_RECURSED_INTO (val) = false;
3812 return 1;
3815 /* Push values to the canonical one. */
3816 cdv = dv_from_value (cval);
3817 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3819 for (node = var->var_part[0].loc_chain; node; node = node->next)
3820 if (node->loc != cval)
3822 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3823 node->init, NULL_RTX);
3824 if (GET_CODE (node->loc) == VALUE)
3826 decl_or_value ndv = dv_from_value (node->loc);
3828 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3829 NO_INSERT);
3831 if (canon_value_cmp (node->loc, val))
3833 /* If it could have been a local minimum, it's not any more,
3834 since it's now a neighbor of cval, so it may have to push
3835 to it. Conversely, if it wouldn't have prevailed over
3836 val, then whatever mark it has is fine: if it was to
3837 push, it will now push to a more canonical node, but if
3838 it wasn't, then it has already pushed any values it might
3839 have to. */
3840 VALUE_RECURSED_INTO (node->loc) = true;
3841 /* Make sure we visit node->loc by ensuring that cval is
3842 visited too. */
3843 VALUE_RECURSED_INTO (cval) = true;
3845 else if (!VALUE_RECURSED_INTO (node->loc))
3846 /* If we have no need to "recurse" into this node, it's
3847 already "canonicalized", so drop the link to the old
3848 parent. */
3849 clobber_variable_part (set, cval, ndv, 0, NULL);
3851 else if (GET_CODE (node->loc) == REG)
3853 attrs list = set->regs[REGNO (node->loc)], *listp;
3855 /* Change an existing attribute referring to dv so that it
3856 refers to cdv, removing any duplicate this might
3857 introduce, and checking that no previous duplicates
3858 existed, all in a single pass. */
3860 while (list)
3862 if (list->offset == 0
3863 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3864 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3865 break;
3867 list = list->next;
3870 gcc_assert (list);
3871 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3873 list->dv = cdv;
3874 for (listp = &list->next; (list = *listp); listp = &list->next)
3876 if (list->offset)
3877 continue;
3879 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3881 *listp = list->next;
3882 delete list;
3883 list = *listp;
3884 break;
3887 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3890 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3892 for (listp = &list->next; (list = *listp); listp = &list->next)
3894 if (list->offset)
3895 continue;
3897 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3899 *listp = list->next;
3900 delete list;
3901 list = *listp;
3902 break;
3905 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3908 else
3909 gcc_unreachable ();
3911 #if ENABLE_CHECKING
3912 while (list)
3914 if (list->offset == 0
3915 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3916 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3917 gcc_unreachable ();
3919 list = list->next;
3921 #endif
3925 if (val)
3926 set_slot_part (set, val, cslot, cdv, 0,
3927 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3929 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3931 /* Variable may have been unshared. */
3932 var = *slot;
3933 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3934 && var->var_part[0].loc_chain->next == NULL);
3936 if (VALUE_RECURSED_INTO (cval))
3937 goto restart_with_cval;
3939 return 1;
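/* Illustrative before/after (hypothetical uids, V1 lowest): suppose
   the chains record V2 = {V1, (reg 3)} and V3 = {V2, (mem M)}.  After
   the mark and star passes, V1 is the hub: its chain holds V2, V3,
   (reg 3) and (mem M), while V2 and V3 each keep only their link back
   to V1, so every member of the set is one step from the canonical
   value.  */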
3942 /* Bind one-part variables to the canonical value in an equivalence
3943 set. Not doing this causes dataflow convergence failure in rare
3944 circumstances, see PR42873. Unfortunately we can't do this
3945 efficiently as part of canonicalize_values_star, since we may not
3946 have determined or even seen the canonical value of a set when we
3947 get to a variable that references another member of the set. */
3949 int
3950 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3952 variable var = *slot;
3953 decl_or_value dv = var->dv;
3954 location_chain node;
3955 rtx cval;
3956 decl_or_value cdv;
3957 variable_def **cslot;
3958 variable cvar;
3959 location_chain cnode;
3961 if (!var->onepart || var->onepart == ONEPART_VALUE)
3962 return 1;
3964 gcc_assert (var->n_var_parts == 1);
3966 node = var->var_part[0].loc_chain;
3968 if (GET_CODE (node->loc) != VALUE)
3969 return 1;
3971 gcc_assert (!node->next);
3972 cval = node->loc;
3974 /* Push values to the canonical one. */
3975 cdv = dv_from_value (cval);
3976 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3977 if (!cslot)
3978 return 1;
3979 cvar = *cslot;
3980 gcc_assert (cvar->n_var_parts == 1);
3982 cnode = cvar->var_part[0].loc_chain;
3984 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3985 that are not "more canonical" than it. */
3986 if (GET_CODE (cnode->loc) != VALUE
3987 || !canon_value_cmp (cnode->loc, cval))
3988 return 1;
3990 /* CVAL was found to be non-canonical. Change the variable to point
3991 to the canonical VALUE. */
3992 gcc_assert (!cnode->next);
3993 cval = cnode->loc;
3995 slot = set_slot_part (set, cval, slot, dv, 0,
3996 node->init, node->set_src);
3997 clobber_slot_part (set, cval, slot, 0, node->set_src);
3999 return 1;
4002 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
4003 corresponding entry in DSM->src. Multi-part variables are combined
4004 with variable_union, whereas onepart dvs are combined with
4005 intersection. */
4007 static int
4008 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
4010 dataflow_set *dst = dsm->dst;
4011 variable_def **dstslot;
4012 variable s2var, dvar = NULL;
4013 decl_or_value dv = s1var->dv;
4014 onepart_enum_t onepart = s1var->onepart;
4015 rtx val;
4016 hashval_t dvhash;
4017 location_chain node, *nodep;
4019 /* If the incoming onepart variable has an empty location list, then
4020 the intersection will be just as empty. For other variables,
4021 it's always union. */
4022 gcc_checking_assert (s1var->n_var_parts
4023 && s1var->var_part[0].loc_chain);
4025 if (!onepart)
4026 return variable_union (s1var, dst);
4028 gcc_checking_assert (s1var->n_var_parts == 1);
4030 dvhash = dv_htab_hash (dv);
4031 if (dv_is_value_p (dv))
4032 val = dv_as_value (dv);
4033 else
4034 val = NULL;
4036 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4037 if (!s2var)
4039 dst_can_be_shared = false;
4040 return 1;
4043 dsm->src_onepart_cnt--;
4044 gcc_assert (s2var->var_part[0].loc_chain
4045 && s2var->onepart == onepart
4046 && s2var->n_var_parts == 1);
4048 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4049 if (dstslot)
4051 dvar = *dstslot;
4052 gcc_assert (dvar->refcount == 1
4053 && dvar->onepart == onepart
4054 && dvar->n_var_parts == 1);
4055 nodep = &dvar->var_part[0].loc_chain;
4057 else
4059 nodep = &node;
4060 node = NULL;
4063 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4065 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4066 dvhash, INSERT);
4067 *dstslot = dvar = s2var;
4068 dvar->refcount++;
4070 else
4072 dst_can_be_shared = false;
4074 intersect_loc_chains (val, nodep, dsm,
4075 s1var->var_part[0].loc_chain, s2var);
4077 if (!dstslot)
4079 if (node)
4081 dvar = onepart_pool (onepart).allocate ();
4082 dvar->dv = dv;
4083 dvar->refcount = 1;
4084 dvar->n_var_parts = 1;
4085 dvar->onepart = onepart;
4086 dvar->in_changed_variables = false;
4087 dvar->var_part[0].loc_chain = node;
4088 dvar->var_part[0].cur_loc = NULL;
4089 if (onepart)
4090 VAR_LOC_1PAUX (dvar) = NULL;
4091 else
4092 VAR_PART_OFFSET (dvar, 0) = 0;
4094 dstslot
4095 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4096 INSERT);
4097 gcc_assert (!*dstslot);
4098 *dstslot = dvar;
4100 else
4101 return 1;
4105 nodep = &dvar->var_part[0].loc_chain;
4106 while ((node = *nodep))
4108 location_chain *nextp = &node->next;
4110 if (GET_CODE (node->loc) == REG)
4112 attrs list;
4114 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4115 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4116 && dv_is_value_p (list->dv))
4117 break;
4119 if (!list)
4120 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4121 dv, 0, node->loc);
4122 /* If this value became canonical for another value that had
4123 this register, we want to leave it alone. */
4124 else if (dv_as_value (list->dv) != val)
4126 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4127 dstslot, dv, 0,
4128 node->init, NULL_RTX);
4129 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4131 /* Since nextp points into the removed node, we can't
4132 use it. The pointer to the next node moved to nodep.
4133 However, if the variable we're walking is unshared
4134 during our walk, we'll keep walking the location list
4135 of the previously-shared variable, in which case the
4136 node won't have been removed, and we'll want to skip
4137 it. That's why we test *nodep here. */
4138 if (*nodep != node)
4139 nextp = nodep;
4142 else
4143 /* Canonicalization puts registers first, so we don't have to
4144 walk it all. */
4145 break;
4146 nodep = nextp;
4149 if (dvar != *dstslot)
4150 dvar = *dstslot;
4151 nodep = &dvar->var_part[0].loc_chain;
4153 if (val)
4155 /* Mark all referenced nodes for canonicalization, and make sure
4156 we have mutual equivalence links. */
4157 VALUE_RECURSED_INTO (val) = true;
4158 for (node = *nodep; node; node = node->next)
4159 if (GET_CODE (node->loc) == VALUE)
4161 VALUE_RECURSED_INTO (node->loc) = true;
4162 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4163 node->init, NULL, INSERT);
4166 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4167 gcc_assert (*dstslot == dvar);
4168 canonicalize_values_star (dstslot, dst);
4169 gcc_checking_assert (dstslot
4170 == shared_hash_find_slot_noinsert_1 (dst->vars,
4171 dv, dvhash));
4172 dvar = *dstslot;
4174 else
4176 bool has_value = false, has_other = false;
4178 /* If we have one value and anything else, we're going to
4179 canonicalize this, so make sure all values have an entry in
4180 the table and are marked for canonicalization. */
4181 for (node = *nodep; node; node = node->next)
4183 if (GET_CODE (node->loc) == VALUE)
4185 /* If this was marked during register canonicalization,
4186 we know we have to canonicalize values. */
4187 if (has_value)
4188 has_other = true;
4189 has_value = true;
4190 if (has_other)
4191 break;
4193 else
4195 has_other = true;
4196 if (has_value)
4197 break;
4201 if (has_value && has_other)
4203 for (node = *nodep; node; node = node->next)
4205 if (GET_CODE (node->loc) == VALUE)
4207 decl_or_value dv = dv_from_value (node->loc);
4208 variable_def **slot = NULL;
4210 if (shared_hash_shared (dst->vars))
4211 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4212 if (!slot)
4213 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4214 INSERT);
4215 if (!*slot)
4217 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4218 var->dv = dv;
4219 var->refcount = 1;
4220 var->n_var_parts = 1;
4221 var->onepart = ONEPART_VALUE;
4222 var->in_changed_variables = false;
4223 var->var_part[0].loc_chain = NULL;
4224 var->var_part[0].cur_loc = NULL;
4225 VAR_LOC_1PAUX (var) = NULL;
4226 *slot = var;
4229 VALUE_RECURSED_INTO (node->loc) = true;
4233 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4234 gcc_assert (*dstslot == dvar);
4235 canonicalize_values_star (dstslot, dst);
4236 gcc_checking_assert (dstslot
4237 == shared_hash_find_slot_noinsert_1 (dst->vars,
4238 dv, dvhash));
4239 dvar = *dstslot;
4243 if (!onepart_variable_different_p (dvar, s2var))
4245 variable_htab_free (dvar);
4246 *dstslot = dvar = s2var;
4247 dvar->refcount++;
4249 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4251 variable_htab_free (dvar);
4252 *dstslot = dvar = s1var;
4253 dvar->refcount++;
4254 dst_can_be_shared = false;
4256 else
4257 dst_can_be_shared = false;
4259 return 1;
4262 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4263 multi-part variable. Unions of multi-part variables and
4264 intersections of one-part ones will be handled in
4265 variable_merge_over_cur(). */
4267 static int
4268 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4270 dataflow_set *dst = dsm->dst;
4271 decl_or_value dv = s2var->dv;
4273 if (!s2var->onepart)
4275 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4276 *dstp = s2var;
4277 s2var->refcount++;
4278 return 1;
4281 dsm->src_onepart_cnt++;
4282 return 1;
4285 /* Combine dataflow set information from SRC2 into DST; DST's
4286 previous contents serve as the other input of the merge. */
4288 static void
4289 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4291 dataflow_set cur = *dst;
4292 dataflow_set *src1 = &cur;
4293 struct dfset_merge dsm;
4294 int i;
4295 size_t src1_elems, src2_elems;
4296 variable_iterator_type hi;
4297 variable var;
4299 src1_elems = shared_hash_htab (src1->vars)->elements ();
4300 src2_elems = shared_hash_htab (src2->vars)->elements ();
4301 dataflow_set_init (dst);
4302 dst->stack_adjust = cur.stack_adjust;
4303 shared_hash_destroy (dst->vars);
4304 dst->vars = new shared_hash_def;
4305 dst->vars->refcount = 1;
4306 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4309 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4311 dsm.dst = dst;
4312 dsm.src = src2;
4313 dsm.cur = src1;
4314 dsm.src_onepart_cnt = 0;
4316 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4317 var, variable, hi)
4318 variable_merge_over_src (var, &dsm);
4319 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4320 var, variable, hi)
4321 variable_merge_over_cur (var, &dsm);
4323 if (dsm.src_onepart_cnt)
4324 dst_can_be_shared = false;
4326 dataflow_set_destroy (src1);
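/* A rough sketch of the merge semantics implemented above, with
   made-up locations, for a multi-part variable X and a one-part
   variable y:

     DST:  X in (reg:SI 3);  y in {V1, (reg:SI 4)}
     SRC2: X in (mem:SI A);  y in {V1}

   afterwards X holds the union of the two location lists, while y
   keeps only what is common to both (here just V1), since one-part
   entries are intersected by variable_merge_over_cur.  */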
4329 /* Mark register equivalences. */
4331 static void
4332 dataflow_set_equiv_regs (dataflow_set *set)
4334 int i;
4335 attrs list, *listp;
4337 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4339 rtx canon[NUM_MACHINE_MODES];
4341 /* If the list is empty or has a single entry, there is nothing
4342 to canonicalize. */
4343 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4344 continue;
4346 memset (canon, 0, sizeof (canon));
4348 for (list = set->regs[i]; list; list = list->next)
4349 if (list->offset == 0 && dv_is_value_p (list->dv))
4351 rtx val = dv_as_value (list->dv);
4352 rtx *cvalp = &canon[(int)GET_MODE (val)];
4353 rtx cval = *cvalp;
4355 if (canon_value_cmp (val, cval))
4356 *cvalp = val;
4359 for (list = set->regs[i]; list; list = list->next)
4360 if (list->offset == 0 && dv_onepart_p (list->dv))
4362 rtx cval = canon[(int)GET_MODE (list->loc)];
4364 if (!cval)
4365 continue;
4367 if (dv_is_value_p (list->dv))
4369 rtx val = dv_as_value (list->dv);
4371 if (val == cval)
4372 continue;
4374 VALUE_RECURSED_INTO (val) = true;
4375 set_variable_part (set, val, dv_from_value (cval), 0,
4376 VAR_INIT_STATUS_INITIALIZED,
4377 NULL, NO_INSERT);
4380 VALUE_RECURSED_INTO (cval) = true;
4381 set_variable_part (set, cval, list->dv, 0,
4382 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4385 for (listp = &set->regs[i]; (list = *listp);
4386 listp = list ? &list->next : listp)
4387 if (list->offset == 0 && dv_onepart_p (list->dv))
4389 rtx cval = canon[(int)GET_MODE (list->loc)];
4390 variable_def **slot;
4392 if (!cval)
4393 continue;
4395 if (dv_is_value_p (list->dv))
4397 rtx val = dv_as_value (list->dv);
4398 if (!VALUE_RECURSED_INTO (val))
4399 continue;
4402 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4403 canonicalize_values_star (slot, set);
4404 if (*listp != list)
4405 list = NULL;
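/* A short example of the equivalence marking above: if values V1
   and V2 both live at offset 0 of the same register in the same
   mode, the one preferred by canon_value_cmp (say V1) becomes the
   canonical value for that mode, mutual location links are added
   between V1 and V2, and canonicalize_values_star then collapses
   the equivalence class onto V1.  */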
4410 /* Remove any redundant values in the location list of VAR, which must
4411 be unshared and 1-part. */
4413 static void
4414 remove_duplicate_values (variable var)
4416 location_chain node, *nodep;
4418 gcc_assert (var->onepart);
4419 gcc_assert (var->n_var_parts == 1);
4420 gcc_assert (var->refcount == 1);
4422 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4424 if (GET_CODE (node->loc) == VALUE)
4426 if (VALUE_RECURSED_INTO (node->loc))
4428 /* Remove duplicate value node. */
4429 *nodep = node->next;
4430 delete node;
4431 continue;
4433 else
4434 VALUE_RECURSED_INTO (node->loc) = true;
4436 nodep = &node->next;
4439 for (node = var->var_part[0].loc_chain; node; node = node->next)
4440 if (GET_CODE (node->loc) == VALUE)
4442 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4443 VALUE_RECURSED_INTO (node->loc) = false;
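/* The two walks above use VALUE_RECURSED_INTO as a transient "seen"
   mark: the first deletes any VALUE node whose mark is already set,
   the second clears the marks again so the bit can be reused by
   later canonicalization passes.  */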
4448 /* Hash table iteration argument passed to variable_post_merge. */
4449 struct dfset_post_merge
4451 /* The new input set for the current block. */
4452 dataflow_set *set;
4453 /* Pointer to the permanent input set for the current block, or
4454 NULL. */
4455 dataflow_set **permp;
4458 /* Create values for incoming expressions associated with one-part
4459 variables that don't have value numbers for them. */
4461 int
4462 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4464 dataflow_set *set = dfpm->set;
4465 variable var = *slot;
4466 location_chain node;
4468 if (!var->onepart || !var->n_var_parts)
4469 return 1;
4471 gcc_assert (var->n_var_parts == 1);
4473 if (dv_is_decl_p (var->dv))
4475 bool check_dupes = false;
4477 restart:
4478 for (node = var->var_part[0].loc_chain; node; node = node->next)
4480 if (GET_CODE (node->loc) == VALUE)
4481 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4482 else if (GET_CODE (node->loc) == REG)
4484 attrs att, *attp, *curp = NULL;
4486 if (var->refcount != 1)
4488 slot = unshare_variable (set, slot, var,
4489 VAR_INIT_STATUS_INITIALIZED);
4490 var = *slot;
4491 goto restart;
4494 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4495 attp = &att->next)
4496 if (att->offset == 0
4497 && GET_MODE (att->loc) == GET_MODE (node->loc))
4499 if (dv_is_value_p (att->dv))
4501 rtx cval = dv_as_value (att->dv);
4502 node->loc = cval;
4503 check_dupes = true;
4504 break;
4506 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4507 curp = attp;
4510 if (!curp)
4512 curp = attp;
4513 while (*curp)
4514 if ((*curp)->offset == 0
4515 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4516 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4517 break;
4518 else
4519 curp = &(*curp)->next;
4520 gcc_assert (*curp);
4523 if (!att)
4525 decl_or_value cdv;
4526 rtx cval;
4528 if (!*dfpm->permp)
4530 *dfpm->permp = XNEW (dataflow_set);
4531 dataflow_set_init (*dfpm->permp);
4534 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4535 att; att = att->next)
4536 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4538 gcc_assert (att->offset == 0
4539 && dv_is_value_p (att->dv));
4540 val_reset (set, att->dv);
4541 break;
4544 if (att)
4546 cdv = att->dv;
4547 cval = dv_as_value (cdv);
4549 else
4551 /* Create a unique value to hold this register,
4552 which ought to be found and reused in
4553 subsequent rounds. */
4554 cselib_val *v;
4555 gcc_assert (!cselib_lookup (node->loc,
4556 GET_MODE (node->loc), 0,
4557 VOIDmode));
4558 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4559 VOIDmode);
4560 cselib_preserve_value (v);
4561 cselib_invalidate_rtx (node->loc);
4562 cval = v->val_rtx;
4563 cdv = dv_from_value (cval);
4564 if (dump_file)
4565 fprintf (dump_file,
4566 "Created new value %u:%u for reg %i\n",
4567 v->uid, v->hash, REGNO (node->loc));
4570 var_reg_decl_set (*dfpm->permp, node->loc,
4571 VAR_INIT_STATUS_INITIALIZED,
4572 cdv, 0, NULL, INSERT);
4574 node->loc = cval;
4575 check_dupes = true;
4578 /* Remove attribute referring to the decl, which now
4579 uses the value for the register, already existing or
4580 to be added when we bring perm in. */
4581 att = *curp;
4582 *curp = att->next;
4583 delete att;
4587 if (check_dupes)
4588 remove_duplicate_values (var);
4591 return 1;
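/* An example of the value creation above, with made-up numbers: if
   an incoming PARM_DECL P is known only to live in (reg:SI 4) and
   no VALUE is associated with that register yet, a fresh preserved
   value, say 23:2345, is created for (reg:SI 4), recorded in the
   permanent set, and P's location is rewritten to refer to that
   VALUE instead of the bare register.  */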
4594 /* Reset values in the permanent set that are not associated with the
4595 chosen expression. */
4597 int
4598 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4600 dataflow_set *set = dfpm->set;
4601 variable pvar = *pslot, var;
4602 location_chain pnode;
4603 decl_or_value dv;
4604 attrs att;
4606 gcc_assert (dv_is_value_p (pvar->dv)
4607 && pvar->n_var_parts == 1);
4608 pnode = pvar->var_part[0].loc_chain;
4609 gcc_assert (pnode
4610 && !pnode->next
4611 && REG_P (pnode->loc));
4613 dv = pvar->dv;
4615 var = shared_hash_find (set->vars, dv);
4616 if (var)
4618 /* Although variable_post_merge_new_vals may have made decls
4619 non-star-canonical, values that pre-existed in canonical form
4620 remain canonical, and newly-created values reference a single
4621 REG, so they are canonical as well. Since VAR has the
4622 location list for a VALUE, using find_loc_in_1pdv for it is
4623 fine, since VALUEs don't map back to DECLs. */
4624 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4625 return 1;
4626 val_reset (set, dv);
4629 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4630 if (att->offset == 0
4631 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4632 && dv_is_value_p (att->dv))
4633 break;
4635 /* If there is a value associated with this register already, create
4636 an equivalence. */
4637 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4639 rtx cval = dv_as_value (att->dv);
4640 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4641 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4642 NULL, INSERT);
4644 else if (!att)
4646 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4647 dv, 0, pnode->loc);
4648 variable_union (pvar, set);
4651 return 1;
4654 /* After a merge, create values for incoming expressions, adjust the
4655 permanent set, and canonicalize SET's values and variables. */
4657 static void
4658 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4660 struct dfset_post_merge dfpm;
4662 dfpm.set = set;
4663 dfpm.permp = permp;
4665 shared_hash_htab (set->vars)
4666 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4667 if (*permp)
4668 shared_hash_htab ((*permp)->vars)
4669 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4670 shared_hash_htab (set->vars)
4671 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4672 shared_hash_htab (set->vars)
4673 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4676 /* Return a node whose loc is a MEM that refers to EXPR in the
4677 location list of the one-part variable or value VAL, or in that of
4678 any values recursively mentioned in the location lists. */
4680 static location_chain
4681 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4683 location_chain node;
4684 decl_or_value dv;
4685 variable var;
4686 location_chain where = NULL;
4688 if (!val)
4689 return NULL;
4691 gcc_assert (GET_CODE (val) == VALUE
4692 && !VALUE_RECURSED_INTO (val));
4694 dv = dv_from_value (val);
4695 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4697 if (!var)
4698 return NULL;
4700 gcc_assert (var->onepart);
4702 if (!var->n_var_parts)
4703 return NULL;
4705 VALUE_RECURSED_INTO (val) = true;
4707 for (node = var->var_part[0].loc_chain; node; node = node->next)
4708 if (MEM_P (node->loc)
4709 && MEM_EXPR (node->loc) == expr
4710 && INT_MEM_OFFSET (node->loc) == 0)
4712 where = node;
4713 break;
4715 else if (GET_CODE (node->loc) == VALUE
4716 && !VALUE_RECURSED_INTO (node->loc)
4717 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4718 break;
4720 VALUE_RECURSED_INTO (val) = false;
4722 return where;
4725 /* Return TRUE if the value of MEM may vary across a call. */
4727 static bool
4728 mem_dies_at_call (rtx mem)
4730 tree expr = MEM_EXPR (mem);
4731 tree decl;
4733 if (!expr)
4734 return true;
4736 decl = get_base_address (expr);
4738 if (!decl)
4739 return true;
4741 if (!DECL_P (decl))
4742 return true;
4744 return (may_be_aliased (decl)
4745 || (!TREE_READONLY (decl) && is_global_var (decl)));
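/* For example, a MEM whose MEM_EXPR is a local automatic variable
   that is never aliased is presumed to survive the call, whereas a
   MEM for a writable global, or one with no MEM_EXPR at all, must
   conservatively be assumed to die.  */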
4748 /* Remove all MEMs from the location list of a hash table entry for a
4749 one-part variable, except those whose MEM attributes map back to
4750 the variable itself, directly or within a VALUE. */
4752 int
4753 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4755 variable var = *slot;
4757 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4759 tree decl = dv_as_decl (var->dv);
4760 location_chain loc, *locp;
4761 bool changed = false;
4763 if (!var->n_var_parts)
4764 return 1;
4766 gcc_assert (var->n_var_parts == 1);
4768 if (shared_var_p (var, set->vars))
4770 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4772 /* We want to remove dying MEMs that don't refer to DECL. */
4773 if (GET_CODE (loc->loc) == MEM
4774 && (MEM_EXPR (loc->loc) != decl
4775 || INT_MEM_OFFSET (loc->loc) != 0)
4776 && !mem_dies_at_call (loc->loc))
4777 break;
4778 /* We want to move MEMs that do refer to DECL here. */
4779 else if (GET_CODE (loc->loc) == VALUE
4780 && find_mem_expr_in_1pdv (decl, loc->loc,
4781 shared_hash_htab (set->vars)))
4782 break;
4785 if (!loc)
4786 return 1;
4788 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4789 var = *slot;
4790 gcc_assert (var->n_var_parts == 1);
4793 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4794 loc; loc = *locp)
4796 rtx old_loc = loc->loc;
4797 if (GET_CODE (old_loc) == VALUE)
4799 location_chain mem_node
4800 = find_mem_expr_in_1pdv (decl, loc->loc,
4801 shared_hash_htab (set->vars));
4803 /* ??? This picks up only one out of multiple MEMs that
4804 refer to the same variable. Do we ever need to be
4805 concerned about dealing with more than one, or, given
4806 that they should all map to the same variable
4807 location, their addresses will have been merged and
4808 they will be regarded as equivalent? */
4809 if (mem_node)
4811 loc->loc = mem_node->loc;
4812 loc->set_src = mem_node->set_src;
4813 loc->init = MIN (loc->init, mem_node->init);
4817 if (GET_CODE (loc->loc) != MEM
4818 || (MEM_EXPR (loc->loc) == decl
4819 && INT_MEM_OFFSET (loc->loc) == 0)
4820 || !mem_dies_at_call (loc->loc))
4822 if (old_loc != loc->loc && emit_notes)
4824 if (old_loc == var->var_part[0].cur_loc)
4826 changed = true;
4827 var->var_part[0].cur_loc = NULL;
4830 locp = &loc->next;
4831 continue;
4834 if (emit_notes)
4836 if (old_loc == var->var_part[0].cur_loc)
4838 changed = true;
4839 var->var_part[0].cur_loc = NULL;
4842 *locp = loc->next;
4843 delete loc;
4846 if (!var->var_part[0].loc_chain)
4848 var->n_var_parts--;
4849 changed = true;
4851 if (changed)
4852 variable_was_changed (var, set);
4855 return 1;
4858 /* Remove all MEMs from the location list of a hash table entry for a
4859 value. */
4861 int
4862 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4864 variable var = *slot;
4866 if (var->onepart == ONEPART_VALUE)
4868 location_chain loc, *locp;
4869 bool changed = false;
4870 rtx cur_loc;
4872 gcc_assert (var->n_var_parts == 1);
4874 if (shared_var_p (var, set->vars))
4876 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4877 if (GET_CODE (loc->loc) == MEM
4878 && mem_dies_at_call (loc->loc))
4879 break;
4881 if (!loc)
4882 return 1;
4884 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4885 var = *slot;
4886 gcc_assert (var->n_var_parts == 1);
4889 if (VAR_LOC_1PAUX (var))
4890 cur_loc = VAR_LOC_FROM (var);
4891 else
4892 cur_loc = var->var_part[0].cur_loc;
4894 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4895 loc; loc = *locp)
4897 if (GET_CODE (loc->loc) != MEM
4898 || !mem_dies_at_call (loc->loc))
4900 locp = &loc->next;
4901 continue;
4904 *locp = loc->next;
4905 /* If we have deleted the location that was last emitted,
4906 we have to emit a new location, so add the variable to the
4907 set of changed variables. */
4908 if (cur_loc == loc->loc)
4910 changed = true;
4911 var->var_part[0].cur_loc = NULL;
4912 if (VAR_LOC_1PAUX (var))
4913 VAR_LOC_FROM (var) = NULL;
4915 delete loc;
4918 if (!var->var_part[0].loc_chain)
4920 var->n_var_parts--;
4921 changed = true;
4923 if (changed)
4924 variable_was_changed (var, set);
4927 return 1;
4930 /* Remove all variable-location information about call-clobbered
4931 registers, as well as associations between MEMs and VALUEs. */
4933 static void
4934 dataflow_set_clear_at_call (dataflow_set *set)
4936 unsigned int r;
4937 hard_reg_set_iterator hrsi;
4939 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4940 var_regno_delete (set, r);
4942 if (MAY_HAVE_DEBUG_INSNS)
4944 set->traversed_vars = set->vars;
4945 shared_hash_htab (set->vars)
4946 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4947 set->traversed_vars = set->vars;
4948 shared_hash_htab (set->vars)
4949 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4950 set->traversed_vars = NULL;
4954 static bool
4955 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4957 location_chain lc1, lc2;
4959 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4961 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4963 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4965 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4966 break;
4968 if (rtx_equal_p (lc1->loc, lc2->loc))
4969 break;
4971 if (!lc2)
4972 return true;
4974 return false;
4977 /* Return true if one-part variables VAR1 and VAR2 are different.
4978 They must be in canonical order. */
4980 static bool
4981 onepart_variable_different_p (variable var1, variable var2)
4983 location_chain lc1, lc2;
4985 if (var1 == var2)
4986 return false;
4988 gcc_assert (var1->n_var_parts == 1
4989 && var2->n_var_parts == 1);
4991 lc1 = var1->var_part[0].loc_chain;
4992 lc2 = var2->var_part[0].loc_chain;
4994 gcc_assert (lc1 && lc2);
4996 while (lc1 && lc2)
4998 if (loc_cmp (lc1->loc, lc2->loc))
4999 return true;
5000 lc1 = lc1->next;
5001 lc2 = lc2->next;
5004 return lc1 != lc2;
5007 /* Return true if variables VAR1 and VAR2 are different. */
5009 static bool
5010 variable_different_p (variable var1, variable var2)
5012 int i;
5014 if (var1 == var2)
5015 return false;
5017 if (var1->onepart != var2->onepart)
5018 return true;
5020 if (var1->n_var_parts != var2->n_var_parts)
5021 return true;
5023 if (var1->onepart && var1->n_var_parts)
5025 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5026 && var1->n_var_parts == 1);
5027 /* One-part values have locations in a canonical order. */
5028 return onepart_variable_different_p (var1, var2);
5031 for (i = 0; i < var1->n_var_parts; i++)
5033 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5034 return true;
5035 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5036 return true;
5037 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5038 return true;
5040 return false;
5043 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5045 static bool
5046 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5048 variable_iterator_type hi;
5049 variable var1;
5051 if (old_set->vars == new_set->vars)
5052 return false;
5054 if (shared_hash_htab (old_set->vars)->elements ()
5055 != shared_hash_htab (new_set->vars)->elements ())
5056 return true;
5058 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5059 var1, variable, hi)
5061 variable_table_type *htab = shared_hash_htab (new_set->vars);
5062 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5063 if (!var2)
5065 if (dump_file && (dump_flags & TDF_DETAILS))
5067 fprintf (dump_file, "dataflow difference found: removal of:\n");
5068 dump_var (var1);
5070 return true;
5073 if (variable_different_p (var1, var2))
5075 if (dump_file && (dump_flags & TDF_DETAILS))
5077 fprintf (dump_file, "dataflow difference found: "
5078 "old and new follow:\n");
5079 dump_var (var1);
5080 dump_var (var2);
5082 return true;
5086 /* No need to traverse the second hashtab: if both have the same number
5087 of elements and every entry of the first was found in the second,
5088 the second cannot have any extra entries. */
5089 return false;
5092 /* Free the contents of dataflow set SET. */
5094 static void
5095 dataflow_set_destroy (dataflow_set *set)
5097 int i;
5099 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5100 attrs_list_clear (&set->regs[i]);
5102 shared_hash_destroy (set->vars);
5103 set->vars = NULL;
5106 /* Return true if RTL X contains a SYMBOL_REF. */
5108 static bool
5109 contains_symbol_ref (rtx x)
5111 const char *fmt;
5112 RTX_CODE code;
5113 int i;
5115 if (!x)
5116 return false;
5118 code = GET_CODE (x);
5119 if (code == SYMBOL_REF)
5120 return true;
5122 fmt = GET_RTX_FORMAT (code);
5123 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5125 if (fmt[i] == 'e')
5127 if (contains_symbol_ref (XEXP (x, i)))
5128 return true;
5130 else if (fmt[i] == 'E')
5132 int j;
5133 for (j = 0; j < XVECLEN (x, i); j++)
5134 if (contains_symbol_ref (XVECEXP (x, i, j)))
5135 return true;
5139 return false;
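/* This is used by track_expr_p below to reject declarations whose
   DECL_RTL mentions a SYMBOL_REF, e.g. a MEM like
   (mem (symbol_ref ("_dl_argv"))) created for an alias declaration,
   for which the TREE_STATIC flag is not reliable.  */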
5142 /* Shall EXPR be tracked? */
5144 static bool
5145 track_expr_p (tree expr, bool need_rtl)
5147 rtx decl_rtl;
5148 tree realdecl;
5150 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5151 return DECL_RTL_SET_P (expr);
5153 /* If EXPR is not a parameter or a variable do not track it. */
5154 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5155 return 0;
5157 /* It also must have a name... */
5158 if (!DECL_NAME (expr) && need_rtl)
5159 return 0;
5161 /* ... and an RTL assigned to it. */
5162 decl_rtl = DECL_RTL_IF_SET (expr);
5163 if (!decl_rtl && need_rtl)
5164 return 0;
5166 /* If this expression is really a debug alias of some other declaration, we
5167 don't need to track this expression if the ultimate declaration is
5168 ignored. */
5169 realdecl = expr;
5170 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5172 realdecl = DECL_DEBUG_EXPR (realdecl);
5173 if (!DECL_P (realdecl))
5175 if (handled_component_p (realdecl)
5176 || (TREE_CODE (realdecl) == MEM_REF
5177 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5179 HOST_WIDE_INT bitsize, bitpos, maxsize;
5180 tree innerdecl
5181 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5182 &maxsize);
5183 if (!DECL_P (innerdecl)
5184 || DECL_IGNORED_P (innerdecl)
5185 /* Do not track declarations for parts of tracked parameters
5186 since we want to track them as a whole instead. */
5187 || (TREE_CODE (innerdecl) == PARM_DECL
5188 && DECL_MODE (innerdecl) != BLKmode
5189 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5190 || TREE_STATIC (innerdecl)
5191 || bitsize <= 0
5192 || bitpos + bitsize > 256
5193 || bitsize != maxsize)
5194 return 0;
5195 else
5196 realdecl = expr;
5198 else
5199 return 0;
5203 /* Do not track EXPR if REALDECL, the declaration it ultimately
5204 refers to, should be ignored for debugging purposes. */
5205 if (DECL_IGNORED_P (realdecl))
5206 return 0;
5208 /* Do not track global variables until we are able to emit correct location
5209 list for them. */
5210 if (TREE_STATIC (realdecl))
5211 return 0;
5213 /* When the EXPR is a DECL for alias of some variable (see example)
5214 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5215 DECL_RTL contains SYMBOL_REF.
5217 Example:
5218 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5219 char **_dl_argv;
5221 if (decl_rtl && MEM_P (decl_rtl)
5222 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5223 return 0;
5225 /* If the RTL is a memory, it should not be very large (that would
5226 indicate an array or a struct). */
5227 if (decl_rtl && MEM_P (decl_rtl))
5229 /* Do not track structures and arrays. */
5230 if (GET_MODE (decl_rtl) == BLKmode
5231 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5232 return 0;
5233 if (MEM_SIZE_KNOWN_P (decl_rtl)
5234 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5235 return 0;
5238 DECL_CHANGED (expr) = 0;
5239 DECL_CHANGED (realdecl) = 0;
5240 return 1;
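/* To summarize the checks above with a few examples: a named local
   int would be tracked, whereas a DECL_IGNORED_P temporary, a
   TREE_STATIC global, an alias whose DECL_RTL contains a
   SYMBOL_REF, and a BLKmode or aggregate-typed memory (a local
   struct or array, say) are all rejected.  */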
5243 /* Determine whether a given LOC refers to the same variable part as
5244 EXPR+OFFSET. */
5246 static bool
5247 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5249 tree expr2;
5250 HOST_WIDE_INT offset2;
5252 if (! DECL_P (expr))
5253 return false;
5255 if (REG_P (loc))
5257 expr2 = REG_EXPR (loc);
5258 offset2 = REG_OFFSET (loc);
5260 else if (MEM_P (loc))
5262 expr2 = MEM_EXPR (loc);
5263 offset2 = INT_MEM_OFFSET (loc);
5265 else
5266 return false;
5268 if (! expr2 || ! DECL_P (expr2))
5269 return false;
5271 expr = var_debug_decl (expr);
5272 expr2 = var_debug_decl (expr2);
5274 return (expr == expr2 && offset == offset2);
5277 /* LOC is a REG or MEM that we would like to track if possible.
5278 If EXPR is null, we don't know what expression LOC refers to,
5279 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5280 LOC is an lvalue register.
5282 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5283 is something we can track. When returning true, store the mode of
5284 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5285 from EXPR in *OFFSET_OUT (if nonnull). */
5287 static bool
5288 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5289 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5291 machine_mode mode;
5293 if (expr == NULL || !track_expr_p (expr, true))
5294 return false;
5296 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5297 whole subreg, but only the old inner part is really relevant. */
5298 mode = GET_MODE (loc);
5299 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5301 machine_mode pseudo_mode;
5303 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5304 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5306 offset += byte_lowpart_offset (pseudo_mode, mode);
5307 mode = pseudo_mode;
5311 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5312 Do the same if we are storing to a register and EXPR occupies
5313 the whole of register LOC; in that case, the whole of EXPR is
5314 being changed. We exclude complex modes from the second case
5315 because the real and imaginary parts are represented as separate
5316 pseudo registers, even if the whole complex value fits into one
5317 hard register. */
5318 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5319 || (store_reg_p
5320 && !COMPLEX_MODE_P (DECL_MODE (expr))
5321 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5322 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5324 mode = DECL_MODE (expr);
5325 offset = 0;
5328 if (offset < 0 || offset >= MAX_VAR_PARTS)
5329 return false;
5331 if (mode_out)
5332 *mode_out = mode;
5333 if (offset_out)
5334 *offset_out = offset;
5335 return true;
5338 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5339 want to track. When returning nonnull, make sure that the attributes
5340 on the returned value are updated. */
5342 static rtx
5343 var_lowpart (machine_mode mode, rtx loc)
5345 unsigned int offset, reg_offset, regno;
5347 if (GET_MODE (loc) == mode)
5348 return loc;
5350 if (!REG_P (loc) && !MEM_P (loc))
5351 return NULL;
5353 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5355 if (MEM_P (loc))
5356 return adjust_address_nv (loc, mode, offset);
5358 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5359 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5360 reg_offset, mode);
5361 return gen_rtx_REG_offset (loc, mode, regno, offset);
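/* A small example, with a hypothetical register: the QImode lowpart
   of (reg:SI 1) is returned as a QImode REG for the appropriate
   hard register (with a nonzero subreg offset on big-endian
   targets), its attributes recording the byte offset into the
   original SImode value.  */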
5364 /* Carry information about uses and stores while walking rtx. */
5366 struct count_use_info
5368 /* The insn where the RTX is. */
5369 rtx_insn *insn;
5371 /* The basic block where insn is. */
5372 basic_block bb;
5374 /* The array of n_sets sets in the insn, as determined by cselib. */
5375 struct cselib_set *sets;
5376 int n_sets;
5378 /* True if we're counting stores, false otherwise. */
5379 bool store_p;
5382 /* Find a VALUE corresponding to X. */
5384 static inline cselib_val *
5385 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5387 int i;
5389 if (cui->sets)
5391 /* This is called after uses are set up and before stores are
5392 processed by cselib, so it's safe to look up srcs, but not
5393 dsts. So we look up expressions that appear in srcs or in
5394 dest expressions, but we search the sets array for dests of
5395 stores. */
5396 if (cui->store_p)
5398 /* Some targets represent memset and memcpy patterns
5399 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5400 (set (mem:BLK ...) (const_int ...)) or
5401 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5402 in that case, otherwise we end up with mode mismatches. */
5403 if (mode == BLKmode && MEM_P (x))
5404 return NULL;
5405 for (i = 0; i < cui->n_sets; i++)
5406 if (cui->sets[i].dest == x)
5407 return cui->sets[i].src_elt;
5409 else
5410 return cselib_lookup (x, mode, 0, VOIDmode);
5413 return NULL;
5416 /* Replace all registers and addresses in an expression with VALUE
5417 expressions that map back to them, unless the expression is a
5418 register. If no mapping is or can be performed, returns NULL. */
5420 static rtx
5421 replace_expr_with_values (rtx loc)
5423 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5424 return NULL;
5425 else if (MEM_P (loc))
5427 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5428 get_address_mode (loc), 0,
5429 GET_MODE (loc));
5430 if (addr)
5431 return replace_equiv_address_nv (loc, addr->val_rtx);
5432 else
5433 return NULL;
5435 else
5436 return cselib_subst_to_values (loc, VOIDmode);
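/* For instance, a MEM such as (mem:SI (reg:P 7)) is rewritten to
   (mem:SI (value:P V)) when cselib knows the address as value V;
   a bare REG or ENTRY_VALUE yields NULL, as only composite
   expressions are substituted into.  */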
5439 /* Return true if X contains a DEBUG_EXPR. */
5441 static bool
5442 rtx_debug_expr_p (const_rtx x)
5444 subrtx_iterator::array_type array;
5445 FOR_EACH_SUBRTX (iter, array, x, ALL)
5446 if (GET_CODE (*iter) == DEBUG_EXPR)
5447 return true;
5448 return false;
5451 /* Determine what kind of micro operation to choose for a USE. Return
5452 MO_CLOBBER if no micro operation is to be generated. */
5454 static enum micro_operation_type
5455 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5457 tree expr;
5459 if (cui && cui->sets)
5461 if (GET_CODE (loc) == VAR_LOCATION)
5463 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5465 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5466 if (! VAR_LOC_UNKNOWN_P (ploc))
5468 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5469 VOIDmode);
5471 /* ??? flag_float_store and volatile mems are never
5472 given values, but we could in theory use them for
5473 locations. */
5474 gcc_assert (val || 1);
5476 return MO_VAL_LOC;
5478 else
5479 return MO_CLOBBER;
5482 if (REG_P (loc) || MEM_P (loc))
5484 if (modep)
5485 *modep = GET_MODE (loc);
5486 if (cui->store_p)
5488 if (REG_P (loc)
5489 || (find_use_val (loc, GET_MODE (loc), cui)
5490 && cselib_lookup (XEXP (loc, 0),
5491 get_address_mode (loc), 0,
5492 GET_MODE (loc))))
5493 return MO_VAL_SET;
5495 else
5497 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5499 if (val && !cselib_preserved_value_p (val))
5500 return MO_VAL_USE;
5505 if (REG_P (loc))
5507 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5509 if (loc == cfa_base_rtx)
5510 return MO_CLOBBER;
5511 expr = REG_EXPR (loc);
5513 if (!expr)
5514 return MO_USE_NO_VAR;
5515 else if (target_for_debug_bind (var_debug_decl (expr)))
5516 return MO_CLOBBER;
5517 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5518 false, modep, NULL))
5519 return MO_USE;
5520 else
5521 return MO_USE_NO_VAR;
5523 else if (MEM_P (loc))
5525 expr = MEM_EXPR (loc);
5527 if (!expr)
5528 return MO_CLOBBER;
5529 else if (target_for_debug_bind (var_debug_decl (expr)))
5530 return MO_CLOBBER;
5531 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5532 false, modep, NULL)
5533 /* Multi-part variables shouldn't refer to one-part
5534 variable names such as VALUEs (never happens) or
5535 DEBUG_EXPRs (only happens in the presence of debug
5536 insns). */
5537 && (!MAY_HAVE_DEBUG_INSNS
5538 || !rtx_debug_expr_p (XEXP (loc, 0))))
5539 return MO_USE;
5540 else
5541 return MO_CLOBBER;
5544 return MO_CLOBBER;
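/* In short, when cselib sets are available: a trackable VAR_LOCATION
   yields MO_VAL_LOC; a REG or MEM being stored to yields MO_VAL_SET;
   one being read whose value is not yet preserved yields MO_VAL_USE.
   Otherwise a trackable REG or MEM is a plain MO_USE, a register
   without a trackable expression is MO_USE_NO_VAR, and everything
   else (including cfa_base_rtx) is MO_CLOBBER.  */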
5547 /* Log to OUT information about micro-operation MOPT involving X in
5548 INSN of BB. */
5550 static inline void
5551 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5552 enum micro_operation_type mopt, FILE *out)
5554 fprintf (out, "bb %i op %i insn %i %s ",
5555 bb->index, VTI (bb)->mos.length (),
5556 INSN_UID (insn), micro_operation_type_name[mopt]);
5557 print_inline_rtx (out, x, 2);
5558 fputc ('\n', out);
5561 /* Tell whether the CONCAT used to hold a VALUE and its location
5562 needs value resolution, i.e., an attempt at mapping the location
5563 back to other incoming values. */
5564 #define VAL_NEEDS_RESOLUTION(x) \
5565 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5566 /* Whether the location in the CONCAT is a tracked expression, that
5567 should also be handled like a MO_USE. */
5568 #define VAL_HOLDS_TRACK_EXPR(x) \
5569 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5570 /* Whether the location in the CONCAT should be handled like a MO_COPY
5571 as well. */
5572 #define VAL_EXPR_IS_COPIED(x) \
5573 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5574 /* Whether the location in the CONCAT should be handled like a
5575 MO_CLOBBER as well. */
5576 #define VAL_EXPR_IS_CLOBBERED(x) \
5577 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
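/* These four accessors reuse existing RTL flag bits (volatil, used,
   jump and unchanging, respectively) on CONCAT rtxes; the
   RTL_FLAG_CHECK1 wrappers ensure they are only ever applied to
   CONCATs, where those bits carry no other meaning.  */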
5579 /* All preserved VALUEs. */
5580 static vec<rtx> preserved_values;
5582 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5584 static void
5585 preserve_value (cselib_val *val)
5587 cselib_preserve_value (val);
5588 preserved_values.safe_push (val->val_rtx);
5591 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5592 any rtxes are discovered that are not suitable for CONST use
5593 and would not be replaced by VALUEs. */
5595 static bool
5596 non_suitable_const (const_rtx x)
5598 subrtx_iterator::array_type array;
5599 FOR_EACH_SUBRTX (iter, array, x, ALL)
5601 const_rtx x = *iter;
5602 switch (GET_CODE (x))
5604 case REG:
5605 case DEBUG_EXPR:
5606 case PC:
5607 case SCRATCH:
5608 case CC0:
5609 case ASM_INPUT:
5610 case ASM_OPERANDS:
5611 return true;
5612 case MEM:
5613 if (!MEM_READONLY_P (x))
5614 return true;
5615 break;
5616 default:
5617 break;
5620 return false;
5623 /* Add a micro operation to VTI (bb)->mos for the use LOC (a register
5624 or memory reference) if it is to be tracked. */
5626 static void
5627 add_uses (rtx loc, struct count_use_info *cui)
5629 machine_mode mode = VOIDmode;
5630 enum micro_operation_type type = use_type (loc, cui, &mode);
5632 if (type != MO_CLOBBER)
5634 basic_block bb = cui->bb;
5635 micro_operation mo;
5637 mo.type = type;
5638 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5639 mo.insn = cui->insn;
5641 if (type == MO_VAL_LOC)
5643 rtx oloc = loc;
5644 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5645 cselib_val *val;
5647 gcc_assert (cui->sets);
5649 if (MEM_P (vloc)
5650 && !REG_P (XEXP (vloc, 0))
5651 && !MEM_P (XEXP (vloc, 0)))
5653 rtx mloc = vloc;
5654 machine_mode address_mode = get_address_mode (mloc);
5655 cselib_val *val
5656 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5657 GET_MODE (mloc));
5659 if (val && !cselib_preserved_value_p (val))
5660 preserve_value (val);
5663 if (CONSTANT_P (vloc)
5664 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5665 /* For constants don't look up any value. */;
5666 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5667 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5669 machine_mode mode2;
5670 enum micro_operation_type type2;
5671 rtx nloc = NULL;
5672 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5674 if (resolvable)
5675 nloc = replace_expr_with_values (vloc);
5677 if (nloc)
5679 oloc = shallow_copy_rtx (oloc);
5680 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5683 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5685 type2 = use_type (vloc, 0, &mode2);
5687 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5688 || type2 == MO_CLOBBER);
5690 if (type2 == MO_CLOBBER
5691 && !cselib_preserved_value_p (val))
5693 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5694 preserve_value (val);
5697 else if (!VAR_LOC_UNKNOWN_P (vloc))
5699 oloc = shallow_copy_rtx (oloc);
5700 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5703 mo.u.loc = oloc;
5705 else if (type == MO_VAL_USE)
5707 machine_mode mode2 = VOIDmode;
5708 enum micro_operation_type type2;
5709 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5710 rtx vloc, oloc = loc, nloc;
5712 gcc_assert (cui->sets);
5714 if (MEM_P (oloc)
5715 && !REG_P (XEXP (oloc, 0))
5716 && !MEM_P (XEXP (oloc, 0)))
5718 rtx mloc = oloc;
5719 machine_mode address_mode = get_address_mode (mloc);
5720 cselib_val *val
5721 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5722 GET_MODE (mloc));
5724 if (val && !cselib_preserved_value_p (val))
5725 preserve_value (val);
5728 type2 = use_type (loc, 0, &mode2);
5730 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5731 || type2 == MO_CLOBBER);
5733 if (type2 == MO_USE)
5734 vloc = var_lowpart (mode2, loc);
5735 else
5736 vloc = oloc;
5738 /* The loc of a MO_VAL_USE may have two forms:
5740 (concat val src): val is at src, a value-based
5741 representation.
5743 (concat (concat val use) src): same as above, with use as
5744 the MO_USE tracked value, if it differs from src.
5748 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5749 nloc = replace_expr_with_values (loc);
5750 if (!nloc)
5751 nloc = oloc;
5753 if (vloc != nloc)
5754 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5755 else
5756 oloc = val->val_rtx;
5758 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5760 if (type2 == MO_USE)
5761 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5762 if (!cselib_preserved_value_p (val))
5764 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5765 preserve_value (val);
5768 else
5769 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5771 if (dump_file && (dump_flags & TDF_DETAILS))
5772 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5773 VTI (bb)->mos.safe_push (mo);
5777 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5779 static void
5780 add_uses_1 (rtx *x, void *cui)
5782 subrtx_var_iterator::array_type array;
5783 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5784 add_uses (*iter, (struct count_use_info *) cui);
5787 /* This is the value used during expansion of locations. We want it
5788 to be unbounded, so that variables expanded deep in a recursion
5789 nest are fully evaluated, so that their values are cached
5790 correctly. We avoid recursion cycles through other means, and we
5791 don't unshare RTL, so excess complexity is not a problem. */
5792 #define EXPR_DEPTH (INT_MAX)
5793 /* We use this to keep too-complex expressions from being emitted as
5794 location notes and, from there, into debug information. Users can trade
5795 compile time for ridiculously complex expressions, although they're
5796 seldom useful, and they may often have to be discarded as not
5797 representable anyway. */
5798 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5800 /* Attempt to reverse the EXPR operation in the debug info and record
5801 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5802 no longer live we can express its value as VAL - 6. */
5804 static void
5805 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5807 rtx src, arg, ret;
5808 cselib_val *v;
5809 struct elt_loc_list *l;
5810 enum rtx_code code;
5811 int count;
5813 if (GET_CODE (expr) != SET)
5814 return;
5816 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5817 return;
5819 src = SET_SRC (expr);
5820 switch (GET_CODE (src))
5822 case PLUS:
5823 case MINUS:
5824 case XOR:
5825 case NOT:
5826 case NEG:
5827 if (!REG_P (XEXP (src, 0)))
5828 return;
5829 break;
5830 case SIGN_EXTEND:
5831 case ZERO_EXTEND:
5832 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5833 return;
5834 break;
5835 default:
5836 return;
5839 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5840 return;
5842 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5843 if (!v || !cselib_preserved_value_p (v))
5844 return;
5846 /* Use canonical V to avoid creating multiple redundant expressions
5847 for different VALUES equivalent to V. */
5848 v = canonical_cselib_val (v);
5850 /* Adding a reverse op isn't useful if V already has an always valid
5851 location. Ignore ENTRY_VALUE: while it is always constant, we should
5852 prefer non-ENTRY_VALUE locations whenever possible. */
5853 for (l = v->locs, count = 0; l; l = l->next, count++)
5854 if (CONSTANT_P (l->loc)
5855 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5856 return;
5857 /* Avoid creating too large locs lists. */
5858 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5859 return;
5861 switch (GET_CODE (src))
5863 case NOT:
5864 case NEG:
5865 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5866 return;
5867 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5868 break;
5869 case SIGN_EXTEND:
5870 case ZERO_EXTEND:
5871 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5872 break;
5873 case XOR:
5874 code = XOR;
5875 goto binary;
5876 case PLUS:
5877 code = MINUS;
5878 goto binary;
5879 case MINUS:
5880 code = PLUS;
5881 goto binary;
5882 binary:
5883 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5884 return;
5885 arg = XEXP (src, 1);
5886 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5888 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5889 if (arg == NULL_RTX)
5890 return;
5891 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5892 return;
5894 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5895 if (ret == val)
5896 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5897 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5898 breaks a lot of routines during var-tracking. */
5899 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5900 break;
5901 default:
5902 gcc_unreachable ();
5905 cselib_add_permanent_equiv (v, ret, insn);
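/* Another example of the above, for the extension case: given
   (set (reg:DI 1) (zero_extend:DI (reg:SI 2))) where the value V of
   (reg:SI 2) is preserved, the equivalence recorded for V is a
   lowpart SUBREG of reg 1's DImode VALUE, so the SImode value stays
   expressible even after reg 2 dies.  */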
5908 /* Add a micro operation to VTI (bb)->mos for the store LOC (a register
5909 or memory reference) if it is to be tracked. EXPR is the RTL expression
5910 containing the store. CUIP->insn is the instruction that LOC is part of. */
5912 static void
5913 add_stores (rtx loc, const_rtx expr, void *cuip)
5915 machine_mode mode = VOIDmode, mode2;
5916 struct count_use_info *cui = (struct count_use_info *)cuip;
5917 basic_block bb = cui->bb;
5918 micro_operation mo;
5919 rtx oloc = loc, nloc, src = NULL;
5920 enum micro_operation_type type = use_type (loc, cui, &mode);
5921 bool track_p = false;
5922 cselib_val *v;
5923 bool resolve, preserve;
5925 if (type == MO_CLOBBER)
5926 return;
5928 mode2 = mode;
5930 if (REG_P (loc))
5932 gcc_assert (loc != cfa_base_rtx);
5933 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5934 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5935 || GET_CODE (expr) == CLOBBER)
5937 mo.type = MO_CLOBBER;
5938 mo.u.loc = loc;
5939 if (GET_CODE (expr) == SET
5940 && SET_DEST (expr) == loc
5941 && !unsuitable_loc (SET_SRC (expr))
5942 && find_use_val (loc, mode, cui))
5944 gcc_checking_assert (type == MO_VAL_SET);
5945 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5948 else
5950 if (GET_CODE (expr) == SET
5951 && SET_DEST (expr) == loc
5952 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5953 src = var_lowpart (mode2, SET_SRC (expr));
5954 loc = var_lowpart (mode2, loc);
5956 if (src == NULL)
5958 mo.type = MO_SET;
5959 mo.u.loc = loc;
5961 else
5963 rtx xexpr = gen_rtx_SET (loc, src);
5964 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5966 /* If this is an instruction copying (part of) a parameter
5967 passed by invisible reference to its register location,
5968 pretend it's a SET so that the initial memory location
5969 is discarded, as the parameter register can be reused
5970 for other purposes and we do not track locations based
5971 on generic registers. */
5972 if (MEM_P (src)
5973 && REG_EXPR (loc)
5974 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5975 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5976 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5977 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5978 != arg_pointer_rtx)
5979 mo.type = MO_SET;
5980 else
5981 mo.type = MO_COPY;
5983 else
5984 mo.type = MO_SET;
5985 mo.u.loc = xexpr;
5988 mo.insn = cui->insn;
5990 else if (MEM_P (loc)
5991 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5992 || cui->sets))
5994 if (MEM_P (loc) && type == MO_VAL_SET
5995 && !REG_P (XEXP (loc, 0))
5996 && !MEM_P (XEXP (loc, 0)))
5998 rtx mloc = loc;
5999 machine_mode address_mode = get_address_mode (mloc);
6000 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6001 address_mode, 0,
6002 GET_MODE (mloc));
6004 if (val && !cselib_preserved_value_p (val))
6005 preserve_value (val);
6008 if (GET_CODE (expr) == CLOBBER || !track_p)
6010 mo.type = MO_CLOBBER;
6011 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6013 else
6015 if (GET_CODE (expr) == SET
6016 && SET_DEST (expr) == loc
6017 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6018 src = var_lowpart (mode2, SET_SRC (expr));
6019 loc = var_lowpart (mode2, loc);
6021 if (src == NULL)
6023 mo.type = MO_SET;
6024 mo.u.loc = loc;
6026 else
6028 rtx xexpr = gen_rtx_SET (loc, src);
6029 if (same_variable_part_p (SET_SRC (xexpr),
6030 MEM_EXPR (loc),
6031 INT_MEM_OFFSET (loc)))
6032 mo.type = MO_COPY;
6033 else
6034 mo.type = MO_SET;
6035 mo.u.loc = xexpr;
6038 mo.insn = cui->insn;
6040 else
6041 return;
6043 if (type != MO_VAL_SET)
6044 goto log_and_return;
6046 v = find_use_val (oloc, mode, cui);
6048 if (!v)
6049 goto log_and_return;
6051 resolve = preserve = !cselib_preserved_value_p (v);
6053 /* We cannot track values for multiple-part variables, so we track only
6054 locations for tracked parameters passed either by invisible reference
6055 or directly in multiple locations. */
6056 if (track_p
6057 && REG_P (loc)
6058 && REG_EXPR (loc)
6059 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6060 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6061 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6062 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6063 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6064 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6065 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6067 /* Although we don't use the value here, it could be used later by the
6068 mere virtue of its existence as the operand of the reverse operation
6069 that gave rise to it (typically extension/truncation). Make sure it
6070 is preserved as required by vt_expand_var_loc_chain. */
6071 if (preserve)
6072 preserve_value (v);
6073 goto log_and_return;
6076 if (loc == stack_pointer_rtx
6077 && hard_frame_pointer_adjustment != -1
6078 && preserve)
6079 cselib_set_value_sp_based (v);
6081 nloc = replace_expr_with_values (oloc);
6082 if (nloc)
6083 oloc = nloc;
6085 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6087 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6089 if (oval == v)
6090 return;
6091 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6093 if (oval && !cselib_preserved_value_p (oval))
6095 micro_operation moa;
6097 preserve_value (oval);
6099 moa.type = MO_VAL_USE;
6100 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6101 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6102 moa.insn = cui->insn;
6104 if (dump_file && (dump_flags & TDF_DETAILS))
6105 log_op_type (moa.u.loc, cui->bb, cui->insn,
6106 moa.type, dump_file);
6107 VTI (bb)->mos.safe_push (moa);
6110 resolve = false;
6112 else if (resolve && GET_CODE (mo.u.loc) == SET)
6114 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6115 nloc = replace_expr_with_values (SET_SRC (expr));
6116 else
6117 nloc = NULL_RTX;
6119 /* Avoid the mode mismatch between oexpr and expr. */
6120 if (!nloc && mode != mode2)
6122 nloc = SET_SRC (expr);
6123 gcc_assert (oloc == SET_DEST (expr));
6126 if (nloc && nloc != SET_SRC (mo.u.loc))
6127 oloc = gen_rtx_SET (oloc, nloc);
6128 else
6130 if (oloc == SET_DEST (mo.u.loc))
6131 /* No point in duplicating. */
6132 oloc = mo.u.loc;
6133 if (!REG_P (SET_SRC (mo.u.loc)))
6134 resolve = false;
6137 else if (!resolve)
6139 if (GET_CODE (mo.u.loc) == SET
6140 && oloc == SET_DEST (mo.u.loc))
6141 /* No point in duplicating. */
6142 oloc = mo.u.loc;
6144 else
6145 resolve = false;
6147 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6149 if (mo.u.loc != oloc)
6150 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6152 /* The loc of a MO_VAL_SET may have various forms:
6154 (concat val dst): dst now holds val
6156 (concat val (set dst src)): dst now holds val, copied from src
6158 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6159 after replacing mems and non-top-level regs with values.
6161 (concat (concat val dstv) (set dst src)): dst now holds val,
6162 copied from src. dstv is a value-based representation of dst, if
6163 it differs from dst. If resolution is needed, src is a REG, and
6164 its mode is the same as that of val.
6166 (concat (concat val (set dstv srcv)) (set dst src)): src
6167 copied to dst, holding val. dstv and srcv are value-based
6168 representations of dst and src, respectively.
6172 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6173 reverse_op (v->val_rtx, expr, cui->insn);
6175 mo.u.loc = loc;
6177 if (track_p)
6178 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6179 if (preserve)
6181 VAL_NEEDS_RESOLUTION (loc) = resolve;
6182 preserve_value (v);
6184 if (mo.type == MO_CLOBBER)
6185 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6186 if (mo.type == MO_COPY)
6187 VAL_EXPR_IS_COPIED (loc) = 1;
6189 mo.type = MO_VAL_SET;
6191 log_and_return:
6192 if (dump_file && (dump_flags & TDF_DETAILS))
6193 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6194 VTI (bb)->mos.safe_push (mo);
6197 /* Arguments to the call. */
6198 static rtx call_arguments;
6200 /* Compute call_arguments. */
6202 static void
6203 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6205 rtx link, x, call;
6206 rtx prev, cur, next;
6207 rtx this_arg = NULL_RTX;
6208 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6209 tree obj_type_ref = NULL_TREE;
6210 CUMULATIVE_ARGS args_so_far_v;
6211 cumulative_args_t args_so_far;
6213 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6214 args_so_far = pack_cumulative_args (&args_so_far_v);
6215 call = get_call_rtx_from (insn);
6216 if (call)
6218 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6220 rtx symbol = XEXP (XEXP (call, 0), 0);
6221 if (SYMBOL_REF_DECL (symbol))
6222 fndecl = SYMBOL_REF_DECL (symbol);
6224 if (fndecl == NULL_TREE)
6225 fndecl = MEM_EXPR (XEXP (call, 0));
6226 if (fndecl
6227 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6228 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6229 fndecl = NULL_TREE;
6230 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6231 type = TREE_TYPE (fndecl);
6232 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6234 if (TREE_CODE (fndecl) == INDIRECT_REF
6235 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6236 obj_type_ref = TREE_OPERAND (fndecl, 0);
6237 fndecl = NULL_TREE;
6239 if (type)
6241 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6242 t = TREE_CHAIN (t))
6243 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6244 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6245 break;
6246 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6247 type = NULL;
6248 else
6250 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6251 link = CALL_INSN_FUNCTION_USAGE (insn);
6252 #ifndef PCC_STATIC_STRUCT_RETURN
6253 if (aggregate_value_p (TREE_TYPE (type), type)
6254 && targetm.calls.struct_value_rtx (type, 0) == 0)
6256 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6257 machine_mode mode = TYPE_MODE (struct_addr);
6258 rtx reg;
6259 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6260 nargs + 1);
6261 reg = targetm.calls.function_arg (args_so_far, mode,
6262 struct_addr, true);
6263 targetm.calls.function_arg_advance (args_so_far, mode,
6264 struct_addr, true);
6265 if (reg == NULL_RTX)
6267 for (; link; link = XEXP (link, 1))
6268 if (GET_CODE (XEXP (link, 0)) == USE
6269 && MEM_P (XEXP (XEXP (link, 0), 0)))
6271 link = XEXP (link, 1);
6272 break;
6276 else
6277 #endif
6278 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6279 nargs);
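/* For a virtual call through an OBJ_TYPE_REF, remember where the
   first argument (the 'this' pointer) is passed, so the vtable
   slot used by the call can be recorded further below.  */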
6280 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6282 machine_mode mode;
6283 t = TYPE_ARG_TYPES (type);
6284 mode = TYPE_MODE (TREE_VALUE (t));
6285 this_arg = targetm.calls.function_arg (args_so_far, mode,
6286 TREE_VALUE (t), true);
6287 if (this_arg && !REG_P (this_arg))
6288 this_arg = NULL_RTX;
6289 else if (this_arg == NULL_RTX)
6291 for (; link; link = XEXP (link, 1))
6292 if (GET_CODE (XEXP (link, 0)) == USE
6293 && MEM_P (XEXP (XEXP (link, 0), 0)))
6295 this_arg = XEXP (XEXP (link, 0), 0);
6296 break;
6303 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6305 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6306 if (GET_CODE (XEXP (link, 0)) == USE)
6308 rtx item = NULL_RTX;
6309 x = XEXP (XEXP (link, 0), 0);
6310 if (GET_MODE (link) == VOIDmode
6311 || GET_MODE (link) == BLKmode
6312 || (GET_MODE (link) != GET_MODE (x)
6313 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6314 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6315 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6316 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6317 /* Can't do anything for these, if the original type mode
6318 isn't known or can't be converted. */;
6319 else if (REG_P (x))
6321 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6322 if (val && cselib_preserved_value_p (val))
6323 item = val->val_rtx;
6324 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6325 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6327 machine_mode mode = GET_MODE (x);
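/* The register was not preserved in its own mode; try
   progressively wider integer modes, up to a word, in case the
   value is available as the lowpart of a wider register.  */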
6329 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6330 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6332 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6334 if (reg == NULL_RTX || !REG_P (reg))
6335 continue;
6336 val = cselib_lookup (reg, mode, 0, VOIDmode);
6337 if (val && cselib_preserved_value_p (val))
6339 item = val->val_rtx;
6340 break;
6345 else if (MEM_P (x))
6347 rtx mem = x;
6348 cselib_val *val;
6350 if (!frame_pointer_needed)
6352 struct adjust_mem_data amd;
6353 amd.mem_mode = VOIDmode;
6354 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6355 amd.side_effects = NULL;
6356 amd.store = true;
6357 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6358 &amd);
6359 gcc_assert (amd.side_effects == NULL_RTX);
6361 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6362 if (val && cselib_preserved_value_p (val))
6363 item = val->val_rtx;
6364 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6365 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
/* For a non-integer stack argument, also check whether it wasn't
   initialized by an integer.  */
6369 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6370 if (imode != GET_MODE (mem) && imode != BLKmode)
6372 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6373 imode, 0, VOIDmode);
6374 if (val && cselib_preserved_value_p (val))
6375 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6376 imode);
6380 if (item)
6382 rtx x2 = x;
6383 if (GET_MODE (item) != GET_MODE (link))
6384 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6385 if (GET_MODE (x2) != GET_MODE (link))
6386 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6387 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6388 call_arguments
6389 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6391 if (t && t != void_list_node)
6393 tree argtype = TREE_VALUE (t);
6394 machine_mode mode = TYPE_MODE (argtype);
6395 rtx reg;
6396 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6398 argtype = build_pointer_type (argtype);
6399 mode = TYPE_MODE (argtype);
6401 reg = targetm.calls.function_arg (args_so_far, mode,
6402 argtype, true);
6403 if (TREE_CODE (argtype) == REFERENCE_TYPE
6404 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6405 && reg
6406 && REG_P (reg)
6407 && GET_MODE (reg) == mode
6408 && (GET_MODE_CLASS (mode) == MODE_INT
6409 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6410 && REG_P (x)
6411 && REGNO (x) == REGNO (reg)
6412 && GET_MODE (x) == mode
6413 && item)
6415 machine_mode indmode
6416 = TYPE_MODE (TREE_TYPE (argtype));
6417 rtx mem = gen_rtx_MEM (indmode, x);
6418 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6419 if (val && cselib_preserved_value_p (val))
6421 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6422 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6423 call_arguments);
6425 else
6427 struct elt_loc_list *l;
6428 tree initial;
/* Try harder: when the address of a constant-pool integer is
   passed, its value can easily be read back from the pool.  */
6432 item = XEXP (item, 1);
6433 if (GET_CODE (item) == SUBREG)
6434 item = SUBREG_REG (item);
6435 gcc_assert (GET_CODE (item) == VALUE);
6436 val = CSELIB_VAL_PTR (item);
6437 for (l = val->locs; l; l = l->next)
6438 if (GET_CODE (l->loc) == SYMBOL_REF
6439 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6440 && SYMBOL_REF_DECL (l->loc)
6441 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6443 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6444 if (tree_fits_shwi_p (initial))
6446 item = GEN_INT (tree_to_shwi (initial));
6447 item = gen_rtx_CONCAT (indmode, mem, item);
6448 call_arguments
6449 = gen_rtx_EXPR_LIST (VOIDmode, item,
6450 call_arguments);
6452 break;
6456 targetm.calls.function_arg_advance (args_so_far, mode,
6457 argtype, true);
6458 t = TREE_CHAIN (t);
6462 /* Add debug arguments. */
6463 if (fndecl
6464 && TREE_CODE (fndecl) == FUNCTION_DECL
6465 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6467 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6468 if (debug_args)
6470 unsigned int ix;
6471 tree param;
6472 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6474 rtx item;
6475 tree dtemp = (**debug_args)[ix + 1];
6476 machine_mode mode = DECL_MODE (dtemp);
6477 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6478 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6479 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6480 call_arguments);
6485 /* Reverse call_arguments chain. */
6486 prev = NULL_RTX;
6487 for (cur = call_arguments; cur; cur = next)
6489 next = XEXP (cur, 1);
6490 XEXP (cur, 1) = prev;
6491 prev = cur;
6493 call_arguments = prev;
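/* Record the callee as well: nothing for a direct SYMBOL_REF call,
   otherwise the callee address or its tracked value, paired with
   pc_rtx so the entry can be told apart from the arguments.  */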
6495 x = get_call_rtx_from (insn);
6496 if (x)
6498 x = XEXP (XEXP (x, 0), 0);
6499 if (GET_CODE (x) == SYMBOL_REF)
6500 /* Don't record anything. */;
6501 else if (CONSTANT_P (x))
6503 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6504 pc_rtx, x);
6505 call_arguments
6506 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6508 else
6510 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6511 if (val && cselib_preserved_value_p (val))
6513 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6514 call_arguments
6515 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
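/* For a virtual call, record the vtable slot the call goes
   through: THIS_ARG points to the object, whose vtable pointer is
   dereferenced and indexed by the OBJ_TYPE_REF token; the CONCAT
   with a CLOBBER marks the entry.  */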
6519 if (this_arg)
6521 machine_mode mode
6522 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6523 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6524 HOST_WIDE_INT token
6525 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6526 if (token)
6527 clobbered = plus_constant (mode, clobbered,
6528 token * GET_MODE_SIZE (mode));
6529 clobbered = gen_rtx_MEM (mode, clobbered);
6530 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6531 call_arguments
6532 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
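/* A minimal illustrative sketch (not compiled, hence the #if 0) of
   the in-place chain reversal performed on call_arguments above,
   shown on a hypothetical singly-linked node type.  */
#if 0
struct chain_node { struct chain_node *next; };

static struct chain_node *
reverse_chain (struct chain_node *head)
{
  struct chain_node *prev = NULL, *next;
  for (; head; head = next)
    {
      next = head->next;	/* Remember the rest of the chain.  */
      head->next = prev;	/* Relink the current node backwards.  */
      prev = head;		/* The reversed prefix grows by one.  */
    }
  return prev;			/* New head of the reversed chain.  */
}
#endif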
/* Callback for cselib_record_sets_hook.  It records the uses and
   stores in an insn as micro operations, after cselib_record_sets
   has analyzed the sets in the insn but before it modifies the
   stored values in its internal tables.  If cselib is not used at
   all, cselib_record_sets never runs and this function is called
   directly instead, with SETS and N_SETS both 0.  */
6543 static void
6544 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6546 basic_block bb = BLOCK_FOR_INSN (insn);
6547 int n1, n2;
6548 struct count_use_info cui;
6549 micro_operation *mos;
6551 cselib_hook_called = true;
6553 cui.insn = insn;
6554 cui.bb = bb;
6555 cui.sets = sets;
6556 cui.n_sets = n_sets;
6558 n1 = VTI (bb)->mos.length ();
6559 cui.store_p = false;
6560 note_uses (&PATTERN (insn), add_uses_1, &cui);
6561 n2 = VTI (bb)->mos.length () - 1;
6562 mos = VTI (bb)->mos.address ();
6564 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6565 MO_VAL_LOC last. */
6566 while (n1 < n2)
6568 while (n1 < n2 && mos[n1].type == MO_USE)
6569 n1++;
6570 while (n1 < n2 && mos[n2].type != MO_USE)
6571 n2--;
6572 if (n1 < n2)
6573 std::swap (mos[n1], mos[n2]);
6576 n2 = VTI (bb)->mos.length () - 1;
6577 while (n1 < n2)
6579 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6580 n1++;
6581 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6582 n2--;
6583 if (n1 < n2)
6584 std::swap (mos[n1], mos[n2]);
6587 if (CALL_P (insn))
6589 micro_operation mo;
6591 mo.type = MO_CALL;
6592 mo.insn = insn;
6593 mo.u.loc = call_arguments;
6594 call_arguments = NULL_RTX;
6596 if (dump_file && (dump_flags & TDF_DETAILS))
6597 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6598 VTI (bb)->mos.safe_push (mo);
6601 n1 = VTI (bb)->mos.length ();
6602 /* This will record NEXT_INSN (insn), such that we can
6603 insert notes before it without worrying about any
6604 notes that MO_USEs might emit after the insn. */
6605 cui.store_p = true;
6606 note_stores (PATTERN (insn), add_stores, &cui);
6607 n2 = VTI (bb)->mos.length () - 1;
6608 mos = VTI (bb)->mos.address ();
6610 /* Order the MO_VAL_USEs first (note_stores does nothing
6611 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6612 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6613 while (n1 < n2)
6615 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6616 n1++;
6617 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6618 n2--;
6619 if (n1 < n2)
6620 std::swap (mos[n1], mos[n2]);
6623 n2 = VTI (bb)->mos.length () - 1;
6624 while (n1 < n2)
6626 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6627 n1++;
6628 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6629 n2--;
6630 if (n1 < n2)
6631 std::swap (mos[n1], mos[n2]);
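/* A minimal illustrative sketch (not compiled, hence the #if 0) of
   the two-index partition loops used in add_with_sets above; PRED
   is a hypothetical stand-in for the tests on mos[].type.  Elements
   satisfying PRED end up before those that do not; order within
   each class is not preserved.  */
#if 0
static void
partition_mos (micro_operation *mos, int n1, int n2,
	       bool (*pred) (const micro_operation *))
{
  while (n1 < n2)
    {
      while (n1 < n2 && pred (&mos[n1]))
	n1++;			/* Prefix already in place.  */
      while (n1 < n2 && !pred (&mos[n2]))
	n2--;			/* Suffix already in place.  */
      if (n1 < n2)
	std::swap (mos[n1], mos[n2]);
    }
}
#endif

/* Find the initialization status of the variable that SRC (a REG or
   MEM) is a location of, looked up in dataflow set IN.  Everything
   is considered initialized when -fvar-tracking-uninit is not in
   use.  */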
6635 static enum var_init_status
6636 find_src_status (dataflow_set *in, rtx src)
6638 tree decl = NULL_TREE;
6639 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6641 if (! flag_var_tracking_uninit)
6642 status = VAR_INIT_STATUS_INITIALIZED;
6644 if (src && REG_P (src))
6645 decl = var_debug_decl (REG_EXPR (src));
6646 else if (src && MEM_P (src))
6647 decl = var_debug_decl (MEM_EXPR (src));
6649 if (src && decl)
6650 status = get_init_value (in, src, dv_from_decl (decl));
6652 return status;
6655 /* SRC is the source of an assignment. Use SET to try to find what
6656 was ultimately assigned to SRC. Return that value if known,
6657 otherwise return SRC itself. */
6659 static rtx
6660 find_src_set_src (dataflow_set *set, rtx src)
6662 tree decl = NULL_TREE; /* The variable being copied around. */
6663 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6664 variable var;
6665 location_chain nextp;
6666 int i;
6667 bool found;
6669 if (src && REG_P (src))
6670 decl = var_debug_decl (REG_EXPR (src));
6671 else if (src && MEM_P (src))
6672 decl = var_debug_decl (MEM_EXPR (src));
6674 if (src && decl)
6676 decl_or_value dv = dv_from_decl (decl);
6678 var = shared_hash_find (set->vars, dv);
6679 if (var)
6681 found = false;
6682 for (i = 0; i < var->n_var_parts && !found; i++)
6683 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6684 nextp = nextp->next)
6685 if (rtx_equal_p (nextp->loc, src))
6687 set_src = nextp->set_src;
6688 found = true;
6694 return set_src;
6697 /* Compute the changes of variable locations in the basic block BB. */
6699 static bool
6700 compute_bb_dataflow (basic_block bb)
6702 unsigned int i;
6703 micro_operation *mo;
6704 bool changed;
6705 dataflow_set old_out;
6706 dataflow_set *in = &VTI (bb)->in;
6707 dataflow_set *out = &VTI (bb)->out;
6709 dataflow_set_init (&old_out);
6710 dataflow_set_copy (&old_out, out);
6711 dataflow_set_copy (out, in);
6713 if (MAY_HAVE_DEBUG_INSNS)
6714 local_get_addr_cache = new hash_map<rtx, rtx>;
6716 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6718 rtx_insn *insn = mo->insn;
6720 switch (mo->type)
6722 case MO_CALL:
6723 dataflow_set_clear_at_call (out);
6724 break;
6726 case MO_USE:
6728 rtx loc = mo->u.loc;
6730 if (REG_P (loc))
6731 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6732 else if (MEM_P (loc))
6733 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6735 break;
6737 case MO_VAL_LOC:
6739 rtx loc = mo->u.loc;
6740 rtx val, vloc;
6741 tree var;
6743 if (GET_CODE (loc) == CONCAT)
6745 val = XEXP (loc, 0);
6746 vloc = XEXP (loc, 1);
6748 else
6750 val = NULL_RTX;
6751 vloc = loc;
6754 var = PAT_VAR_LOCATION_DECL (vloc);
6756 clobber_variable_part (out, NULL_RTX,
6757 dv_from_decl (var), 0, NULL_RTX);
6758 if (val)
6760 if (VAL_NEEDS_RESOLUTION (loc))
6761 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6762 set_variable_part (out, val, dv_from_decl (var), 0,
6763 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6764 INSERT);
6766 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6767 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6768 dv_from_decl (var), 0,
6769 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6770 INSERT);
6772 break;
6774 case MO_VAL_USE:
6776 rtx loc = mo->u.loc;
6777 rtx val, vloc, uloc;
6779 vloc = uloc = XEXP (loc, 1);
6780 val = XEXP (loc, 0);
6782 if (GET_CODE (val) == CONCAT)
6784 uloc = XEXP (val, 1);
6785 val = XEXP (val, 0);
6788 if (VAL_NEEDS_RESOLUTION (loc))
6789 val_resolve (out, val, vloc, insn);
6790 else
6791 val_store (out, val, uloc, insn, false);
6793 if (VAL_HOLDS_TRACK_EXPR (loc))
6795 if (GET_CODE (uloc) == REG)
6796 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6797 NULL);
6798 else if (GET_CODE (uloc) == MEM)
6799 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6800 NULL);
6803 break;
6805 case MO_VAL_SET:
6807 rtx loc = mo->u.loc;
6808 rtx val, vloc, uloc;
6809 rtx dstv, srcv;
6811 vloc = loc;
6812 uloc = XEXP (vloc, 1);
6813 val = XEXP (vloc, 0);
6814 vloc = uloc;
6816 if (GET_CODE (uloc) == SET)
6818 dstv = SET_DEST (uloc);
6819 srcv = SET_SRC (uloc);
6821 else
6823 dstv = uloc;
6824 srcv = NULL;
6827 if (GET_CODE (val) == CONCAT)
6829 dstv = vloc = XEXP (val, 1);
6830 val = XEXP (val, 0);
6833 if (GET_CODE (vloc) == SET)
6835 srcv = SET_SRC (vloc);
6837 gcc_assert (val != srcv);
6838 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6840 dstv = vloc = SET_DEST (vloc);
6842 if (VAL_NEEDS_RESOLUTION (loc))
6843 val_resolve (out, val, srcv, insn);
6845 else if (VAL_NEEDS_RESOLUTION (loc))
6847 gcc_assert (GET_CODE (uloc) == SET
6848 && GET_CODE (SET_SRC (uloc)) == REG);
6849 val_resolve (out, val, SET_SRC (uloc), insn);
6852 if (VAL_HOLDS_TRACK_EXPR (loc))
6854 if (VAL_EXPR_IS_CLOBBERED (loc))
6856 if (REG_P (uloc))
6857 var_reg_delete (out, uloc, true);
6858 else if (MEM_P (uloc))
6860 gcc_assert (MEM_P (dstv));
6861 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6862 var_mem_delete (out, dstv, true);
6865 else
6867 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6868 rtx src = NULL, dst = uloc;
6869 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6871 if (GET_CODE (uloc) == SET)
6873 src = SET_SRC (uloc);
6874 dst = SET_DEST (uloc);
6877 if (copied_p)
6879 if (flag_var_tracking_uninit)
6881 status = find_src_status (in, src);
6883 if (status == VAR_INIT_STATUS_UNKNOWN)
6884 status = find_src_status (out, src);
6887 src = find_src_set_src (in, src);
6890 if (REG_P (dst))
6891 var_reg_delete_and_set (out, dst, !copied_p,
6892 status, srcv);
6893 else if (MEM_P (dst))
6895 gcc_assert (MEM_P (dstv));
6896 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6897 var_mem_delete_and_set (out, dstv, !copied_p,
6898 status, srcv);
6902 else if (REG_P (uloc))
6903 var_regno_delete (out, REGNO (uloc));
6904 else if (MEM_P (uloc))
6906 gcc_checking_assert (GET_CODE (vloc) == MEM);
6907 gcc_checking_assert (dstv == vloc);
6908 if (dstv != vloc)
6909 clobber_overlapping_mems (out, vloc);
6912 val_store (out, val, dstv, insn, true);
6914 break;
6916 case MO_SET:
6918 rtx loc = mo->u.loc;
6919 rtx set_src = NULL;
6921 if (GET_CODE (loc) == SET)
6923 set_src = SET_SRC (loc);
6924 loc = SET_DEST (loc);
6927 if (REG_P (loc))
6928 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6929 set_src);
6930 else if (MEM_P (loc))
6931 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6932 set_src);
6934 break;
6936 case MO_COPY:
6938 rtx loc = mo->u.loc;
6939 enum var_init_status src_status;
6940 rtx set_src = NULL;
6942 if (GET_CODE (loc) == SET)
6944 set_src = SET_SRC (loc);
6945 loc = SET_DEST (loc);
6948 if (! flag_var_tracking_uninit)
6949 src_status = VAR_INIT_STATUS_INITIALIZED;
6950 else
6952 src_status = find_src_status (in, set_src);
6954 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6955 src_status = find_src_status (out, set_src);
6958 set_src = find_src_set_src (in, set_src);
6960 if (REG_P (loc))
6961 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6962 else if (MEM_P (loc))
6963 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6965 break;
6967 case MO_USE_NO_VAR:
6969 rtx loc = mo->u.loc;
6971 if (REG_P (loc))
6972 var_reg_delete (out, loc, false);
6973 else if (MEM_P (loc))
6974 var_mem_delete (out, loc, false);
6976 break;
6978 case MO_CLOBBER:
6980 rtx loc = mo->u.loc;
6982 if (REG_P (loc))
6983 var_reg_delete (out, loc, true);
6984 else if (MEM_P (loc))
6985 var_mem_delete (out, loc, true);
6987 break;
6989 case MO_ADJUST:
6990 out->stack_adjust += mo->u.adjust;
6991 break;
6995 if (MAY_HAVE_DEBUG_INSNS)
6997 delete local_get_addr_cache;
6998 local_get_addr_cache = NULL;
7000 dataflow_set_equiv_regs (out);
7001 shared_hash_htab (out->vars)
7002 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
7003 shared_hash_htab (out->vars)
7004 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7005 #if ENABLE_CHECKING
7006 shared_hash_htab (out->vars)
7007 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7008 #endif
7010 changed = dataflow_set_different (&old_out, out);
7011 dataflow_set_destroy (&old_out);
7012 return changed;
7015 /* Find the locations of variables in the whole function. */
7017 static bool
7018 vt_find_locations (void)
7020 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7021 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7022 sbitmap visited, in_worklist, in_pending;
7023 basic_block bb;
7024 edge e;
7025 int *bb_order;
7026 int *rc_order;
7027 int i;
7028 int htabsz = 0;
7029 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7030 bool success = true;
7032 timevar_push (TV_VAR_TRACKING_DATAFLOW);
/* Compute the reverse completion order of a depth-first search of
   the CFG so that the data-flow analysis runs faster.  */
7035 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7036 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7037 pre_and_rev_post_order_compute (NULL, rc_order, false);
7038 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7039 bb_order[rc_order[i]] = i;
7040 free (rc_order);
7042 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7043 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7044 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7045 bitmap_clear (in_worklist);
7047 FOR_EACH_BB_FN (bb, cfun)
7048 pending->insert (bb_order[bb->index], bb);
7049 bitmap_ones (in_pending);
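/* Iterate in rounds: WORKLIST holds the current round's blocks and
   PENDING the next round's, both keyed by bb_order.  When a block's
   OUT set changes, each successor is requeued: on PENDING for the
   next round if it was already visited in this round, otherwise on
   WORKLIST for the current one.  */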
7051 while (success && !pending->empty ())
7053 std::swap (worklist, pending);
7054 std::swap (in_worklist, in_pending);
7056 bitmap_clear (visited);
7058 while (!worklist->empty ())
7060 bb = worklist->extract_min ();
7061 bitmap_clear_bit (in_worklist, bb->index);
7062 gcc_assert (!bitmap_bit_p (visited, bb->index));
7063 if (!bitmap_bit_p (visited, bb->index))
7065 bool changed;
7066 edge_iterator ei;
7067 int oldinsz, oldoutsz;
7069 bitmap_set_bit (visited, bb->index);
7071 if (VTI (bb)->in.vars)
7073 htabsz
7074 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7075 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7076 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7077 oldoutsz
7078 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7080 else
7081 oldinsz = oldoutsz = 0;
7083 if (MAY_HAVE_DEBUG_INSNS)
7085 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7086 bool first = true, adjust = false;
7088 /* Calculate the IN set as the intersection of
7089 predecessor OUT sets. */
7091 dataflow_set_clear (in);
7092 dst_can_be_shared = true;
7094 FOR_EACH_EDGE (e, ei, bb->preds)
7095 if (!VTI (e->src)->flooded)
7096 gcc_assert (bb_order[bb->index]
7097 <= bb_order[e->src->index]);
7098 else if (first)
7100 dataflow_set_copy (in, &VTI (e->src)->out);
7101 first_out = &VTI (e->src)->out;
7102 first = false;
7104 else
7106 dataflow_set_merge (in, &VTI (e->src)->out);
7107 adjust = true;
7110 if (adjust)
7112 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7113 #if ENABLE_CHECKING
7114 /* Merge and merge_adjust should keep entries in
7115 canonical order. */
7116 shared_hash_htab (in->vars)
7117 ->traverse <dataflow_set *,
7118 canonicalize_loc_order_check> (in);
7119 #endif
7120 if (dst_can_be_shared)
7122 shared_hash_destroy (in->vars);
7123 in->vars = shared_hash_copy (first_out->vars);
7127 VTI (bb)->flooded = true;
7129 else
7131 /* Calculate the IN set as union of predecessor OUT sets. */
7132 dataflow_set_clear (&VTI (bb)->in);
7133 FOR_EACH_EDGE (e, ei, bb->preds)
7134 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7137 changed = compute_bb_dataflow (bb);
7138 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7139 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7141 if (htabmax && htabsz > htabmax)
7143 if (MAY_HAVE_DEBUG_INSNS)
7144 inform (DECL_SOURCE_LOCATION (cfun->decl),
7145 "variable tracking size limit exceeded with "
7146 "-fvar-tracking-assignments, retrying without");
7147 else
7148 inform (DECL_SOURCE_LOCATION (cfun->decl),
7149 "variable tracking size limit exceeded");
7150 success = false;
7151 break;
7154 if (changed)
7156 FOR_EACH_EDGE (e, ei, bb->succs)
7158 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7159 continue;
7161 if (bitmap_bit_p (visited, e->dest->index))
7163 if (!bitmap_bit_p (in_pending, e->dest->index))
7165 /* Send E->DEST to next round. */
7166 bitmap_set_bit (in_pending, e->dest->index);
7167 pending->insert (bb_order[e->dest->index],
7168 e->dest);
7171 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7173 /* Add E->DEST to current round. */
7174 bitmap_set_bit (in_worklist, e->dest->index);
7175 worklist->insert (bb_order[e->dest->index],
7176 e->dest);
7181 if (dump_file)
7182 fprintf (dump_file,
7183 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7184 bb->index,
7185 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7186 oldinsz,
7187 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7188 oldoutsz,
7189 (int)worklist->nodes (), (int)pending->nodes (),
7190 htabsz);
7192 if (dump_file && (dump_flags & TDF_DETAILS))
7194 fprintf (dump_file, "BB %i IN:\n", bb->index);
7195 dump_dataflow_set (&VTI (bb)->in);
7196 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7197 dump_dataflow_set (&VTI (bb)->out);
7203 if (success && MAY_HAVE_DEBUG_INSNS)
7204 FOR_EACH_BB_FN (bb, cfun)
7205 gcc_assert (VTI (bb)->flooded);
7207 free (bb_order);
7208 delete worklist;
7209 delete pending;
7210 sbitmap_free (visited);
7211 sbitmap_free (in_worklist);
7212 sbitmap_free (in_pending);
7214 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7215 return success;
7218 /* Print the content of the LIST to dump file. */
7220 static void
7221 dump_attrs_list (attrs list)
7223 for (; list; list = list->next)
7225 if (dv_is_decl_p (list->dv))
7226 print_mem_expr (dump_file, dv_as_decl (list->dv));
7227 else
7228 print_rtl_single (dump_file, dv_as_value (list->dv));
7229 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7231 fprintf (dump_file, "\n");
7234 /* Print the information about variable *SLOT to dump file. */
static int
dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7239 variable var = *slot;
7241 dump_var (var);
7243 /* Continue traversing the hash table. */
7244 return 1;
7247 /* Print the information about variable VAR to dump file. */
7249 static void
7250 dump_var (variable var)
7252 int i;
7253 location_chain node;
7255 if (dv_is_decl_p (var->dv))
7257 const_tree decl = dv_as_decl (var->dv);
7259 if (DECL_NAME (decl))
7261 fprintf (dump_file, " name: %s",
7262 IDENTIFIER_POINTER (DECL_NAME (decl)));
7263 if (dump_flags & TDF_UID)
7264 fprintf (dump_file, "D.%u", DECL_UID (decl));
7266 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7267 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7268 else
7269 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7270 fprintf (dump_file, "\n");
7272 else
7274 fputc (' ', dump_file);
7275 print_rtl_single (dump_file, dv_as_value (var->dv));
7278 for (i = 0; i < var->n_var_parts; i++)
7280 fprintf (dump_file, " offset %ld\n",
7281 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7282 for (node = var->var_part[i].loc_chain; node; node = node->next)
7284 fprintf (dump_file, " ");
7285 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7286 fprintf (dump_file, "[uninit]");
7287 print_rtl_single (dump_file, node->loc);
7292 /* Print the information about variables from hash table VARS to dump file. */
7294 static void
7295 dump_vars (variable_table_type *vars)
7297 if (vars->elements () > 0)
7299 fprintf (dump_file, "Variables:\n");
7300 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7304 /* Print the dataflow set SET to dump file. */
7306 static void
7307 dump_dataflow_set (dataflow_set *set)
7309 int i;
7311 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7312 set->stack_adjust);
7313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7315 if (set->regs[i])
7317 fprintf (dump_file, "Reg %d:", i);
7318 dump_attrs_list (set->regs[i]);
7321 dump_vars (shared_hash_htab (set->vars));
7322 fprintf (dump_file, "\n");
7325 /* Print the IN and OUT sets for each basic block to dump file. */
7327 static void
7328 dump_dataflow_sets (void)
7330 basic_block bb;
7332 FOR_EACH_BB_FN (bb, cfun)
7334 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7335 fprintf (dump_file, "IN:\n");
7336 dump_dataflow_set (&VTI (bb)->in);
7337 fprintf (dump_file, "OUT:\n");
7338 dump_dataflow_set (&VTI (bb)->out);
7342 /* Return the variable for DV in dropped_values, inserting one if
7343 requested with INSERT. */
7345 static inline variable
7346 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7348 variable_def **slot;
7349 variable empty_var;
7350 onepart_enum_t onepart;
7352 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7354 if (!slot)
7355 return NULL;
7357 if (*slot)
7358 return *slot;
7360 gcc_checking_assert (insert == INSERT);
7362 onepart = dv_onepart_p (dv);
7364 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7366 empty_var = onepart_pool (onepart).allocate ();
7367 empty_var->dv = dv;
7368 empty_var->refcount = 1;
7369 empty_var->n_var_parts = 0;
7370 empty_var->onepart = onepart;
7371 empty_var->in_changed_variables = false;
7372 empty_var->var_part[0].loc_chain = NULL;
7373 empty_var->var_part[0].cur_loc = NULL;
7374 VAR_LOC_1PAUX (empty_var) = NULL;
7375 set_dv_changed (dv, true);
7377 *slot = empty_var;
7379 return empty_var;
7382 /* Recover the one-part aux from dropped_values. */
7384 static struct onepart_aux *
7385 recover_dropped_1paux (variable var)
7387 variable dvar;
7389 gcc_checking_assert (var->onepart);
7391 if (VAR_LOC_1PAUX (var))
7392 return VAR_LOC_1PAUX (var);
7394 if (var->onepart == ONEPART_VDECL)
7395 return NULL;
7397 dvar = variable_from_dropped (var->dv, NO_INSERT);
7399 if (!dvar)
7400 return NULL;
7402 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7403 VAR_LOC_1PAUX (dvar) = NULL;
7405 return VAR_LOC_1PAUX (var);
/* Add variable VAR to the hash table of changed variables and, if
   it has no locations, delete it from SET's hash table.  */
7411 static void
7412 variable_was_changed (variable var, dataflow_set *set)
7414 hashval_t hash = dv_htab_hash (var->dv);
7416 if (emit_notes)
7418 variable_def **slot;
7420 /* Remember this decl or VALUE has been added to changed_variables. */
7421 set_dv_changed (var->dv, true);
7423 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7425 if (*slot)
7427 variable old_var = *slot;
7428 gcc_assert (old_var->in_changed_variables);
7429 old_var->in_changed_variables = false;
7430 if (var != old_var && var->onepart)
7432 /* Restore the auxiliary info from an empty variable
7433 previously created for changed_variables, so it is
7434 not lost. */
7435 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7436 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7437 VAR_LOC_1PAUX (old_var) = NULL;
7439 variable_htab_free (*slot);
7442 if (set && var->n_var_parts == 0)
7444 onepart_enum_t onepart = var->onepart;
7445 variable empty_var = NULL;
7446 variable_def **dslot = NULL;
7448 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7450 dslot = dropped_values->find_slot_with_hash (var->dv,
7451 dv_htab_hash (var->dv),
7452 INSERT);
7453 empty_var = *dslot;
7455 if (empty_var)
7457 gcc_checking_assert (!empty_var->in_changed_variables);
7458 if (!VAR_LOC_1PAUX (var))
7460 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7461 VAR_LOC_1PAUX (empty_var) = NULL;
7463 else
7464 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7468 if (!empty_var)
7470 empty_var = onepart_pool (onepart).allocate ();
7471 empty_var->dv = var->dv;
7472 empty_var->refcount = 1;
7473 empty_var->n_var_parts = 0;
7474 empty_var->onepart = onepart;
7475 if (dslot)
7477 empty_var->refcount++;
7478 *dslot = empty_var;
7481 else
7482 empty_var->refcount++;
7483 empty_var->in_changed_variables = true;
7484 *slot = empty_var;
7485 if (onepart)
7487 empty_var->var_part[0].loc_chain = NULL;
7488 empty_var->var_part[0].cur_loc = NULL;
7489 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7490 VAR_LOC_1PAUX (var) = NULL;
7492 goto drop_var;
7494 else
7496 if (var->onepart && !VAR_LOC_1PAUX (var))
7497 recover_dropped_1paux (var);
7498 var->refcount++;
7499 var->in_changed_variables = true;
7500 *slot = var;
7503 else
7505 gcc_assert (set);
7506 if (var->n_var_parts == 0)
7508 variable_def **slot;
7510 drop_var:
7511 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7512 if (slot)
7514 if (shared_hash_shared (set->vars))
7515 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7516 NO_INSERT);
7517 shared_hash_htab (set->vars)->clear_slot (slot);
7523 /* Look for the index in VAR->var_part corresponding to OFFSET.
7524 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7525 referenced int will be set to the index that the part has or should
7526 have, if it should be inserted. */
7528 static inline int
7529 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7530 int *insertion_point)
7532 int pos, low, high;
7534 if (var->onepart)
7536 if (offset != 0)
7537 return -1;
7539 if (insertion_point)
7540 *insertion_point = 0;
7542 return var->n_var_parts - 1;
7545 /* Find the location part. */
7546 low = 0;
7547 high = var->n_var_parts;
7548 while (low != high)
7550 pos = (low + high) / 2;
7551 if (VAR_PART_OFFSET (var, pos) < offset)
7552 low = pos + 1;
7553 else
7554 high = pos;
7556 pos = low;
7558 if (insertion_point)
7559 *insertion_point = pos;
7561 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7562 return pos;
7564 return -1;
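/* Set the location part of the variable for DV, held in SLOT of
   SET's hash table, at offset OFFSET to LOC, with initialization
   status INITIALIZED and source expression SET_SRC, unsharing the
   variable and keeping its location chain ordered as needed.
   Return SLOT, which may change if the variable is unshared.  */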
7567 static variable_def **
7568 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7569 decl_or_value dv, HOST_WIDE_INT offset,
7570 enum var_init_status initialized, rtx set_src)
7572 int pos;
7573 location_chain node, next;
7574 location_chain *nextp;
7575 variable var;
7576 onepart_enum_t onepart;
7578 var = *slot;
7580 if (var)
7581 onepart = var->onepart;
7582 else
7583 onepart = dv_onepart_p (dv);
7585 gcc_checking_assert (offset == 0 || !onepart);
7586 gcc_checking_assert (loc != dv_as_opaque (dv));
7588 if (! flag_var_tracking_uninit)
7589 initialized = VAR_INIT_STATUS_INITIALIZED;
7591 if (!var)
7593 /* Create new variable information. */
7594 var = onepart_pool (onepart).allocate ();
7595 var->dv = dv;
7596 var->refcount = 1;
7597 var->n_var_parts = 1;
7598 var->onepart = onepart;
7599 var->in_changed_variables = false;
7600 if (var->onepart)
7601 VAR_LOC_1PAUX (var) = NULL;
7602 else
7603 VAR_PART_OFFSET (var, 0) = offset;
7604 var->var_part[0].loc_chain = NULL;
7605 var->var_part[0].cur_loc = NULL;
7606 *slot = var;
7607 pos = 0;
7608 nextp = &var->var_part[0].loc_chain;
7610 else if (onepart)
7612 int r = -1, c = 0;
7614 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7616 pos = 0;
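/* One-part location chains are kept in a canonical order: REGs
   first (by REGNO), then MEMs (by address), then VALUEs (in
   canon_value_cmp order), then everything else in loc_cmp order.
   Below, R is 0 if LOC is found in the chain and 1 once a node
   sorting after LOC is reached, while C counts the nodes that
   precede LOC's position.  */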
7618 if (GET_CODE (loc) == VALUE)
7620 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7621 nextp = &node->next)
7622 if (GET_CODE (node->loc) == VALUE)
7624 if (node->loc == loc)
7626 r = 0;
7627 break;
7629 if (canon_value_cmp (node->loc, loc))
7630 c++;
7631 else
7633 r = 1;
7634 break;
7637 else if (REG_P (node->loc) || MEM_P (node->loc))
7638 c++;
7639 else
7641 r = 1;
7642 break;
7645 else if (REG_P (loc))
7647 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7648 nextp = &node->next)
7649 if (REG_P (node->loc))
7651 if (REGNO (node->loc) < REGNO (loc))
7652 c++;
7653 else
7655 if (REGNO (node->loc) == REGNO (loc))
7656 r = 0;
7657 else
7658 r = 1;
7659 break;
7662 else
7664 r = 1;
7665 break;
7668 else if (MEM_P (loc))
7670 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7671 nextp = &node->next)
7672 if (REG_P (node->loc))
7673 c++;
7674 else if (MEM_P (node->loc))
7676 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7677 break;
7678 else
7679 c++;
7681 else
7683 r = 1;
7684 break;
7687 else
7688 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7689 nextp = &node->next)
7690 if ((r = loc_cmp (node->loc, loc)) >= 0)
7691 break;
7692 else
7693 c++;
7695 if (r == 0)
7696 return slot;
7698 if (shared_var_p (var, set->vars))
7700 slot = unshare_variable (set, slot, var, initialized);
7701 var = *slot;
7702 for (nextp = &var->var_part[0].loc_chain; c;
7703 nextp = &(*nextp)->next)
7704 c--;
7705 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7708 else
7710 int inspos = 0;
7712 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7714 pos = find_variable_location_part (var, offset, &inspos);
7716 if (pos >= 0)
7718 node = var->var_part[pos].loc_chain;
7720 if (node
7721 && ((REG_P (node->loc) && REG_P (loc)
7722 && REGNO (node->loc) == REGNO (loc))
7723 || rtx_equal_p (node->loc, loc)))
/* LOC is at the beginning of the chain, so we have nothing
   to do.  */
7727 if (node->init < initialized)
7728 node->init = initialized;
7729 if (set_src != NULL)
7730 node->set_src = set_src;
7732 return slot;
7734 else
7736 /* We have to make a copy of a shared variable. */
7737 if (shared_var_p (var, set->vars))
7739 slot = unshare_variable (set, slot, var, initialized);
7740 var = *slot;
7744 else
/* We have not found the location part; a new one will be
   created.  */
7748 /* We have to make a copy of the shared variable. */
7749 if (shared_var_p (var, set->vars))
7751 slot = unshare_variable (set, slot, var, initialized);
7752 var = *slot;
/* We track only variables whose size is <= MAX_VAR_PARTS bytes;
   thus there are at most MAX_VAR_PARTS different offsets.  */
7757 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7758 && (!var->n_var_parts || !onepart));
/* We have to move the elements of the array starting at index
   inspos to the next position.  */
7762 for (pos = var->n_var_parts; pos > inspos; pos--)
7763 var->var_part[pos] = var->var_part[pos - 1];
7765 var->n_var_parts++;
7766 gcc_checking_assert (!onepart);
7767 VAR_PART_OFFSET (var, pos) = offset;
7768 var->var_part[pos].loc_chain = NULL;
7769 var->var_part[pos].cur_loc = NULL;
7772 /* Delete the location from the list. */
7773 nextp = &var->var_part[pos].loc_chain;
7774 for (node = var->var_part[pos].loc_chain; node; node = next)
7776 next = node->next;
7777 if ((REG_P (node->loc) && REG_P (loc)
7778 && REGNO (node->loc) == REGNO (loc))
7779 || rtx_equal_p (node->loc, loc))
7781 /* Save these values, to assign to the new node, before
7782 deleting this one. */
7783 if (node->init > initialized)
7784 initialized = node->init;
7785 if (node->set_src != NULL && set_src == NULL)
7786 set_src = node->set_src;
7787 if (var->var_part[pos].cur_loc == node->loc)
7788 var->var_part[pos].cur_loc = NULL;
7789 delete node;
7790 *nextp = next;
7791 break;
7793 else
7794 nextp = &node->next;
7797 nextp = &var->var_part[pos].loc_chain;
7800 /* Add the location to the beginning. */
7801 node = new location_chain_def;
7802 node->loc = loc;
7803 node->init = initialized;
7804 node->set_src = set_src;
7805 node->next = *nextp;
7806 *nextp = node;
/* If no location has been emitted for this variable part yet,
   register the change so that a note will be emitted.  */
7809 if (var->var_part[pos].cur_loc == NULL)
7810 variable_was_changed (var, set);
7812 return slot;
7815 /* Set the part of variable's location in the dataflow set SET. The
7816 variable part is specified by variable's declaration in DV and
7817 offset OFFSET and the part's location by LOC. IOPT should be
7818 NO_INSERT if the variable is known to be in SET already and the
7819 variable hash table must not be resized, and INSERT otherwise. */
7821 static void
7822 set_variable_part (dataflow_set *set, rtx loc,
7823 decl_or_value dv, HOST_WIDE_INT offset,
7824 enum var_init_status initialized, rtx set_src,
7825 enum insert_option iopt)
7827 variable_def **slot;
7829 if (iopt == NO_INSERT)
7830 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7831 else
7833 slot = shared_hash_find_slot (set->vars, dv);
7834 if (!slot)
7835 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7837 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
/* Remove all recorded register locations for the variable part in
   SLOT at offset OFFSET from dataflow set SET, except for those
   identical to LOC or (with -fvar-tracking-uninit) those whose
   set_src is the same non-MEM rtx as SET_SRC.  */
7845 static variable_def **
7846 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7847 HOST_WIDE_INT offset, rtx set_src)
7849 variable var = *slot;
7850 int pos = find_variable_location_part (var, offset, NULL);
7852 if (pos >= 0)
7854 location_chain node, next;
7856 /* Remove the register locations from the dataflow set. */
7857 next = var->var_part[pos].loc_chain;
7858 for (node = next; node; node = next)
7860 next = node->next;
7861 if (node->loc != loc
7862 && (!flag_var_tracking_uninit
7863 || !set_src
7864 || MEM_P (set_src)
7865 || !rtx_equal_p (set_src, node->set_src)))
7867 if (REG_P (node->loc))
7869 attrs anode, anext;
7870 attrs *anextp;
7872 /* Remove the variable part from the register's
7873 list, but preserve any other variable parts
7874 that might be regarded as live in that same
7875 register. */
7876 anextp = &set->regs[REGNO (node->loc)];
7877 for (anode = *anextp; anode; anode = anext)
7879 anext = anode->next;
7880 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7881 && anode->offset == offset)
7883 delete anode;
7884 *anextp = anext;
7886 else
7887 anextp = &anode->next;
7891 slot = delete_slot_part (set, node->loc, slot, offset);
7896 return slot;
7899 /* Remove all recorded register locations for the given variable part
7900 from dataflow set SET, except for those that are identical to loc.
7901 The variable part is specified by variable's declaration or value
7902 DV and offset OFFSET. */
7904 static void
7905 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7906 HOST_WIDE_INT offset, rtx set_src)
7908 variable_def **slot;
7910 if (!dv_as_opaque (dv)
7911 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7912 return;
7914 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7915 if (!slot)
7916 return;
7918 clobber_slot_part (set, loc, slot, offset, set_src);
7921 /* Delete the part of variable's location from dataflow set SET. The
7922 variable part is specified by its SET->vars slot SLOT and offset
7923 OFFSET and the part's location by LOC. */
7925 static variable_def **
7926 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7927 HOST_WIDE_INT offset)
7929 variable var = *slot;
7930 int pos = find_variable_location_part (var, offset, NULL);
7932 if (pos >= 0)
7934 location_chain node, next;
7935 location_chain *nextp;
7936 bool changed;
7937 rtx cur_loc;
7939 if (shared_var_p (var, set->vars))
/* If the shared variable contains the location we are about to
   delete, we have to make a copy of the variable first.  */
7943 for (node = var->var_part[pos].loc_chain; node;
7944 node = node->next)
7946 if ((REG_P (node->loc) && REG_P (loc)
7947 && REGNO (node->loc) == REGNO (loc))
7948 || rtx_equal_p (node->loc, loc))
7950 slot = unshare_variable (set, slot, var,
7951 VAR_INIT_STATUS_UNKNOWN);
7952 var = *slot;
7953 break;
7958 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7959 cur_loc = VAR_LOC_FROM (var);
7960 else
7961 cur_loc = var->var_part[pos].cur_loc;
7963 /* Delete the location part. */
7964 changed = false;
7965 nextp = &var->var_part[pos].loc_chain;
7966 for (node = *nextp; node; node = next)
7968 next = node->next;
7969 if ((REG_P (node->loc) && REG_P (loc)
7970 && REGNO (node->loc) == REGNO (loc))
7971 || rtx_equal_p (node->loc, loc))
/* If we have deleted the location which was last emitted, we have
   to emit a new location, so add the variable to the set of
   changed variables.  */
7976 if (cur_loc == node->loc)
7978 changed = true;
7979 var->var_part[pos].cur_loc = NULL;
7980 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7981 VAR_LOC_FROM (var) = NULL;
7983 delete node;
7984 *nextp = next;
7985 break;
7987 else
7988 nextp = &node->next;
7991 if (var->var_part[pos].loc_chain == NULL)
7993 changed = true;
7994 var->n_var_parts--;
7995 while (pos < var->n_var_parts)
7997 var->var_part[pos] = var->var_part[pos + 1];
7998 pos++;
8001 if (changed)
8002 variable_was_changed (var, set);
8005 return slot;
8008 /* Delete the part of variable's location from dataflow set SET. The
8009 variable part is specified by variable's declaration or value DV
8010 and offset OFFSET and the part's location by LOC. */
8012 static void
8013 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8014 HOST_WIDE_INT offset)
8016 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8017 if (!slot)
8018 return;
8020 delete_slot_part (set, loc, slot, offset);
8024 /* Structure for passing some other parameters to function
8025 vt_expand_loc_callback. */
8026 struct expand_loc_callback_data
8028 /* The variables and values active at this point. */
8029 variable_table_type *vars;
8031 /* Stack of values and debug_exprs under expansion, and their
8032 children. */
8033 auto_vec<rtx, 4> expanding;
8035 /* Stack of values and debug_exprs whose expansion hit recursion
8036 cycles. They will have VALUE_RECURSED_INTO marked when added to
8037 this list. This flag will be cleared if any of its dependencies
8038 resolves to a valid location. So, if the flag remains set at the
8039 end of the search, we know no valid location for this one can
8040 possibly exist. */
8041 auto_vec<rtx, 4> pending;
8043 /* The maximum depth among the sub-expressions under expansion.
8044 Zero indicates no expansion so far. */
8045 expand_depth depth;
8048 /* Allocate the one-part auxiliary data structure for VAR, with enough
8049 room for COUNT dependencies. */
8051 static void
8052 loc_exp_dep_alloc (variable var, int count)
8054 size_t allocsize;
8056 gcc_checking_assert (var->onepart);
8058 /* We can be called with COUNT == 0 to allocate the data structure
8059 without any dependencies, e.g. for the backlinks only. However,
8060 if we are specifying a COUNT, then the dependency list must have
8061 been emptied before. It would be possible to adjust pointers or
8062 force it empty here, but this is better done at an earlier point
8063 in the algorithm, so we instead leave an assertion to catch
8064 errors. */
8065 gcc_checking_assert (!count
8066 || VAR_LOC_DEP_VEC (var) == NULL
8067 || VAR_LOC_DEP_VEC (var)->is_empty ());
8069 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8070 return;
8072 allocsize = offsetof (struct onepart_aux, deps)
8073 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8075 if (VAR_LOC_1PAUX (var))
8077 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8078 VAR_LOC_1PAUX (var), allocsize);
8079 /* If the reallocation moves the onepaux structure, the
8080 back-pointer to BACKLINKS in the first list member will still
8081 point to its old location. Adjust it. */
8082 if (VAR_LOC_DEP_LST (var))
8083 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8085 else
8087 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8088 *VAR_LOC_DEP_LSTP (var) = NULL;
8089 VAR_LOC_FROM (var) = NULL;
8090 VAR_LOC_DEPTH (var).complexity = 0;
8091 VAR_LOC_DEPTH (var).entryvals = 0;
8093 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8096 /* Remove all entries from the vector of active dependencies of VAR,
8097 removing them from the back-links lists too. */
8099 static void
8100 loc_exp_dep_clear (variable var)
8102 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8104 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8105 if (led->next)
8106 led->next->pprev = led->pprev;
8107 if (led->pprev)
8108 *led->pprev = led->next;
8109 VAR_LOC_DEP_VEC (var)->pop ();
8113 /* Insert an active dependency from VAR on X to the vector of
8114 dependencies, and add the corresponding back-link to X's list of
8115 back-links in VARS. */
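/* The back-links form an intrusive doubly-linked list threaded
   through NEXT and PPREV, where PPREV points at the previous node's
   NEXT field (or at the list head), so a node can unlink itself in
   constant time without knowing which variable owns the list.  */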
8117 static void
8118 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8120 decl_or_value dv;
8121 variable xvar;
8122 loc_exp_dep *led;
8124 dv = dv_from_rtx (x);
8126 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8127 an additional look up? */
8128 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8130 if (!xvar)
8132 xvar = variable_from_dropped (dv, NO_INSERT);
8133 gcc_checking_assert (xvar);
/* No point in adding the same backlink more than once.  This may
   arise if, say, the same value appears in two complex expressions
   in the same loc_list, or even more than once in a single
   expression.  */
8140 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8141 return;
8143 if (var->onepart == NOT_ONEPART)
8144 led = new loc_exp_dep;
8145 else
8147 loc_exp_dep empty;
8148 memset (&empty, 0, sizeof (empty));
8149 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8150 led = &VAR_LOC_DEP_VEC (var)->last ();
8152 led->dv = var->dv;
8153 led->value = x;
8155 loc_exp_dep_alloc (xvar, 0);
8156 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8157 led->next = *led->pprev;
8158 if (led->next)
8159 led->next->pprev = &led->next;
8160 *led->pprev = led;
8163 /* Create active dependencies of VAR on COUNT values starting at
8164 VALUE, and corresponding back-links to the entries in VARS. Return
8165 true if we found any pending-recursion results. */
8167 static bool
8168 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8169 variable_table_type *vars)
8171 bool pending_recursion = false;
8173 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8174 || VAR_LOC_DEP_VEC (var)->is_empty ());
8176 /* Set up all dependencies from last_child (as set up at the end of
8177 the loop above) to the end. */
8178 loc_exp_dep_alloc (var, count);
8180 while (count--)
8182 rtx x = *value++;
8184 if (!pending_recursion)
8185 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8187 loc_exp_insert_dep (var, x, vars);
8190 return pending_recursion;
8193 /* Notify the back-links of IVAR that are pending recursion that we
8194 have found a non-NIL value for it, so they are cleared for another
8195 attempt to compute a current location. */
8197 static void
8198 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8200 loc_exp_dep *led, *next;
8202 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8204 decl_or_value dv = led->dv;
8205 variable var;
8207 next = led->next;
8209 if (dv_is_value_p (dv))
8211 rtx value = dv_as_value (dv);
8213 /* If we have already resolved it, leave it alone. */
8214 if (!VALUE_RECURSED_INTO (value))
8215 continue;
8217 /* Check that VALUE_RECURSED_INTO, true from the test above,
8218 implies NO_LOC_P. */
8219 gcc_checking_assert (NO_LOC_P (value));
8221 /* We won't notify variables that are being expanded,
8222 because their dependency list is cleared before
8223 recursing. */
8224 NO_LOC_P (value) = false;
8225 VALUE_RECURSED_INTO (value) = false;
8227 gcc_checking_assert (dv_changed_p (dv));
8229 else
8231 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8232 if (!dv_changed_p (dv))
8233 continue;
8236 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8238 if (!var)
8239 var = variable_from_dropped (dv, NO_INSERT);
8241 if (var)
8242 notify_dependents_of_resolved_value (var, vars);
8244 if (next)
8245 next->pprev = led->pprev;
8246 if (led->pprev)
8247 *led->pprev = next;
8248 led->next = NULL;
8249 led->pprev = NULL;
8253 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8254 int max_depth, void *data);
8256 /* Return the combined depth, when one sub-expression evaluated to
8257 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8259 static inline expand_depth
8260 update_depth (expand_depth saved_depth, expand_depth best_depth)
8262 /* If we didn't find anything, stick with what we had. */
8263 if (!best_depth.complexity)
8264 return saved_depth;
/* If we hadn't found anything before, use the depth of the current
   expression.  Do NOT add one extra level; we want to compute the
   maximum depth among sub-expressions.  We'll increment it later,
   if appropriate.  */
8270 if (!saved_depth.complexity)
8271 return best_depth;
8273 /* Combine the entryval count so that regardless of which one we
8274 return, the entryval count is accurate. */
8275 best_depth.entryvals = saved_depth.entryvals
8276 = best_depth.entryvals + saved_depth.entryvals;
8278 if (saved_depth.complexity < best_depth.complexity)
8279 return best_depth;
8280 else
8281 return saved_depth;
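/* For example, if SAVED_DEPTH is { complexity 2, entryvals 1 } and
   BEST_DEPTH is { complexity 3, entryvals 0 }, update_depth yields
   { complexity 3, entryvals 1 }: the entryval counts are summed,
   while the resulting complexity is the larger of the two.  */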
8284 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8285 DATA for cselib expand callback. If PENDRECP is given, indicate in
8286 it whether any sub-expression couldn't be fully evaluated because
8287 it is pending recursion resolution. */
8289 static inline rtx
8290 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8292 struct expand_loc_callback_data *elcd
8293 = (struct expand_loc_callback_data *) data;
8294 location_chain loc, next;
8295 rtx result = NULL;
8296 int first_child, result_first_child, last_child;
8297 bool pending_recursion;
8298 rtx loc_from = NULL;
8299 struct elt_loc_list *cloc = NULL;
8300 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8301 int wanted_entryvals, found_entryvals = 0;
8303 /* Clear all backlinks pointing at this, so that we're not notified
8304 while we're active. */
8305 loc_exp_dep_clear (var);
8307 retry:
8308 if (var->onepart == ONEPART_VALUE)
8310 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8312 gcc_checking_assert (cselib_preserved_value_p (val));
8314 cloc = val->locs;
8317 first_child = result_first_child = last_child
8318 = elcd->expanding.length ();
8320 wanted_entryvals = found_entryvals;
8322 /* Attempt to expand each available location in turn. */
8323 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8324 loc || cloc; loc = next)
8326 result_first_child = last_child;
8328 if (!loc)
8330 loc_from = cloc->loc;
8331 next = loc;
8332 cloc = cloc->next;
8333 if (unsuitable_loc (loc_from))
8334 continue;
8336 else
8338 loc_from = loc->loc;
8339 next = loc->next;
8342 gcc_checking_assert (!unsuitable_loc (loc_from));
8344 elcd->depth.complexity = elcd->depth.entryvals = 0;
8345 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8346 vt_expand_loc_callback, data);
8347 last_child = elcd->expanding.length ();
8349 if (result)
8351 depth = elcd->depth;
8353 gcc_checking_assert (depth.complexity
8354 || result_first_child == last_child);
8356 if (last_child - result_first_child != 1)
8358 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8359 depth.entryvals++;
8360 depth.complexity++;
8363 if (depth.complexity <= EXPR_USE_DEPTH)
8365 if (depth.entryvals <= wanted_entryvals)
8366 break;
8367 else if (!found_entryvals || depth.entryvals < found_entryvals)
8368 found_entryvals = depth.entryvals;
8371 result = NULL;
8374 /* Set it up in case we leave the loop. */
8375 depth.complexity = depth.entryvals = 0;
8376 loc_from = NULL;
8377 result_first_child = first_child;
8380 if (!loc_from && wanted_entryvals < found_entryvals)
/* We found entries with ENTRY_VALUEs and skipped them.  Since we
   could not find any expansion without ENTRY_VALUEs, but we found
   at least one with them, go back and pick the entry with the
   minimum ENTRY_VALUE count that we found.  We could avoid
   looping, but since each sub-loc is already resolved, the
   re-expansion should be trivial.  ??? Should we record all
   attempted locs as dependencies, so that we retry the expansion
   should any of them change, in the hope it can give us a new
   entry without an ENTRY_VALUE?  */
8391 elcd->expanding.truncate (first_child);
8392 goto retry;
8395 /* Register all encountered dependencies as active. */
8396 pending_recursion = loc_exp_dep_set
8397 (var, result, elcd->expanding.address () + result_first_child,
8398 last_child - result_first_child, elcd->vars);
8400 elcd->expanding.truncate (first_child);
8402 /* Record where the expansion came from. */
8403 gcc_checking_assert (!result || !pending_recursion);
8404 VAR_LOC_FROM (var) = loc_from;
8405 VAR_LOC_DEPTH (var) = depth;
8407 gcc_checking_assert (!depth.complexity == !result);
8409 elcd->depth = update_depth (saved_depth, depth);
8411 /* Indicate whether any of the dependencies are pending recursion
8412 resolution. */
8413 if (pendrecp)
8414 *pendrecp = pending_recursion;
8416 if (!pendrecp || !pending_recursion)
8417 var->var_part[0].cur_loc = result;
8419 return result;
8422 /* Callback for cselib_expand_value, that looks for expressions
8423 holding the value in the var-tracking hash tables. Return X for
8424 standard processing, anything else is to be used as-is. */
8426 static rtx
8427 vt_expand_loc_callback (rtx x, bitmap regs,
8428 int max_depth ATTRIBUTE_UNUSED,
8429 void *data)
8431 struct expand_loc_callback_data *elcd
8432 = (struct expand_loc_callback_data *) data;
8433 decl_or_value dv;
8434 variable var;
8435 rtx result, subreg;
8436 bool pending_recursion = false;
8437 bool from_empty = false;
8439 switch (GET_CODE (x))
8441 case SUBREG:
8442 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8443 EXPR_DEPTH,
8444 vt_expand_loc_callback, data);
8446 if (!subreg)
8447 return NULL;
8449 result = simplify_gen_subreg (GET_MODE (x), subreg,
8450 GET_MODE (SUBREG_REG (x)),
8451 SUBREG_BYTE (x));
8453 /* Invalid SUBREGs are ok in debug info. ??? We could try
8454 alternate expansions for the VALUE as well. */
8455 if (!result)
8456 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8458 return result;
8460 case DEBUG_EXPR:
8461 case VALUE:
8462 dv = dv_from_rtx (x);
8463 break;
8465 default:
8466 return x;
8469 elcd->expanding.safe_push (x);
8471 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8472 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8474 if (NO_LOC_P (x))
8476 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8477 return NULL;
8480 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8482 if (!var)
8484 from_empty = true;
8485 var = variable_from_dropped (dv, INSERT);
8488 gcc_checking_assert (var);
8490 if (!dv_changed_p (dv))
8492 gcc_checking_assert (!NO_LOC_P (x));
8493 gcc_checking_assert (var->var_part[0].cur_loc);
8494 gcc_checking_assert (VAR_LOC_1PAUX (var));
8495 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8497 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8499 return var->var_part[0].cur_loc;
8502 VALUE_RECURSED_INTO (x) = true;
8503 /* This is tentative, but it makes some tests simpler. */
8504 NO_LOC_P (x) = true;
8506 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8508 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8510 if (pending_recursion)
8512 gcc_checking_assert (!result);
8513 elcd->pending.safe_push (x);
8515 else
8517 NO_LOC_P (x) = !result;
8518 VALUE_RECURSED_INTO (x) = false;
8519 set_dv_changed (dv, false);
8521 if (result)
8522 notify_dependents_of_resolved_value (var, elcd->vars);
8525 return result;
8528 /* While expanding variables, we may encounter recursion cycles
8529 because of mutual (possibly indirect) dependencies between two
8530 particular variables (or values), say A and B. If we're trying to
8531 expand A when we get to B, which in turn attempts to expand A, if
8532 we can't find any other expansion for B, we'll add B to this
8533 pending-recursion stack, and tentatively return NULL for its
8534 location. This tentative value will be used for any other
8535 occurrences of B, unless A gets some other location, in which case
8536 it will notify B that it is worth another try at computing a
8537 location for it, and it will use the location computed for A then.
8538 At the end of the expansion, the tentative NULL locations become
8539 final for all members of PENDING that didn't get a notification.
8540 This function performs this finalization of NULL locations. */
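/* For illustration only (hypothetical VALUEs, not from a real dump):
   suppose V1's sole location is (plus V2 (const_int 4)) while V2's is
   (plus V1 (const_int -4)). Expanding V1 recurses into V2, which finds
   V1 marked VALUE_RECURSED_INTO, obtains no expansion, and is pushed on
   PENDING with a tentative NULL location. If V1 later resolves through
   some other equivalence, V2 is notified and re-expanded; otherwise the
   function below makes V2's NULL location final. */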
8542 static void
8543 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8545 while (!pending->is_empty ())
8547 rtx x = pending->pop ();
8548 decl_or_value dv;
8550 if (!VALUE_RECURSED_INTO (x))
8551 continue;
8553 gcc_checking_assert (NO_LOC_P (x));
8554 VALUE_RECURSED_INTO (x) = false;
8555 dv = dv_from_rtx (x);
8556 gcc_checking_assert (dv_changed_p (dv));
8557 set_dv_changed (dv, false);
8561 /* Initialize expand_loc_callback_data D with variable hash table V.
8562 It must be a macro because the vecs live on the caller's stack (alloca). */
8563 #define INIT_ELCD(d, v) \
8564 do \
8566 (d).vars = (v); \
8567 (d).depth.complexity = (d).depth.entryvals = 0; \
8569 while (0)
8570 /* Finalize expand_loc_callback_data D, resolved to location L. */
8571 #define FINI_ELCD(d, l) \
8572 do \
8574 resolve_expansions_pending_recursion (&(d).pending); \
8575 (d).pending.release (); \
8576 (d).expanding.release (); \
8578 if ((l) && MEM_P (l)) \
8579 (l) = targetm.delegitimize_address (l); \
8581 while (0)
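/* Note that INIT_ELCD and FINI_ELCD always bracket a single top-level
   expansion; vt_expand_loc and vt_expand_1pvar below are the only
   users of the pair. */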
8583 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8584 equivalences in VARS, updating their CUR_LOCs in the process. */
8586 static rtx
8587 vt_expand_loc (rtx loc, variable_table_type *vars)
8589 struct expand_loc_callback_data data;
8590 rtx result;
8592 if (!MAY_HAVE_DEBUG_INSNS)
8593 return loc;
8595 INIT_ELCD (data, vars);
8597 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8598 vt_expand_loc_callback, &data);
8600 FINI_ELCD (data, result);
8602 return result;
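/* As a rough, invented example: if LOC is (mem:SI (value:SI 7)) and
   VARS records that VALUE 7 currently expands to
   (plus:SI (reg/f:SI 6 bp) (const_int -4)), the result is
   (mem:SI (plus:SI (reg/f:SI 6 bp) (const_int -4))), which FINI_ELCD
   may further clean up via targetm.delegitimize_address. */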
8605 /* Expand the one-part VARiable to a location, using the equivalences
8606 in VARS, updating their CUR_LOCs in the process. */
8608 static rtx
8609 vt_expand_1pvar (variable var, variable_table_type *vars)
8611 struct expand_loc_callback_data data;
8612 rtx loc;
8614 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8616 if (!dv_changed_p (var->dv))
8617 return var->var_part[0].cur_loc;
8619 INIT_ELCD (data, vars);
8621 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8623 gcc_checking_assert (data.expanding.is_empty ());
8625 FINI_ELCD (data, loc);
8627 return loc;
8630 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8631 additional parameters: WHERE specifies whether the note shall be emitted
8632 before or after instruction INSN. */
8634 static int
8635 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8637 variable var = *varp;
8638 rtx_insn *insn = data->insn;
8639 enum emit_note_where where = data->where;
8640 variable_table_type *vars = data->vars;
8641 rtx_note *note;
8642 rtx note_vl;
8643 int i, j, n_var_parts;
8644 bool complete;
8645 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8646 HOST_WIDE_INT last_limit;
8647 tree type_size_unit;
8648 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8649 rtx loc[MAX_VAR_PARTS];
8650 tree decl;
8651 location_chain lc;
8653 gcc_checking_assert (var->onepart == NOT_ONEPART
8654 || var->onepart == ONEPART_VDECL);
8656 decl = dv_as_decl (var->dv);
8658 complete = true;
8659 last_limit = 0;
8660 n_var_parts = 0;
8661 if (!var->onepart)
8662 for (i = 0; i < var->n_var_parts; i++)
8663 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8664 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8665 for (i = 0; i < var->n_var_parts; i++)
8667 machine_mode mode, wider_mode;
8668 rtx loc2;
8669 HOST_WIDE_INT offset;
8671 if (i == 0 && var->onepart)
8673 gcc_checking_assert (var->n_var_parts == 1);
8674 offset = 0;
8675 initialized = VAR_INIT_STATUS_INITIALIZED;
8676 loc2 = vt_expand_1pvar (var, vars);
8678 else
8680 if (last_limit < VAR_PART_OFFSET (var, i))
8682 complete = false;
8683 break;
8685 else if (last_limit > VAR_PART_OFFSET (var, i))
8686 continue;
8687 offset = VAR_PART_OFFSET (var, i);
8688 loc2 = var->var_part[i].cur_loc;
8689 if (loc2 && GET_CODE (loc2) == MEM
8690 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8692 rtx depval = XEXP (loc2, 0);
8694 loc2 = vt_expand_loc (loc2, vars);
8696 if (loc2)
8697 loc_exp_insert_dep (var, depval, vars);
8699 if (!loc2)
8701 complete = false;
8702 continue;
8704 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8705 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8706 if (var->var_part[i].cur_loc == lc->loc)
8708 initialized = lc->init;
8709 break;
8711 gcc_assert (lc);
8714 offsets[n_var_parts] = offset;
8715 if (!loc2)
8717 complete = false;
8718 continue;
8720 loc[n_var_parts] = loc2;
8721 mode = GET_MODE (var->var_part[i].cur_loc);
8722 if (mode == VOIDmode && var->onepart)
8723 mode = DECL_MODE (decl);
8724 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8726 /* Attempt to merge adjacent registers or memory. */
8727 wider_mode = GET_MODE_WIDER_MODE (mode);
8728 for (j = i + 1; j < var->n_var_parts; j++)
8729 if (last_limit <= VAR_PART_OFFSET (var, j))
8730 break;
8731 if (j < var->n_var_parts
8732 && wider_mode != VOIDmode
8733 && var->var_part[j].cur_loc
8734 && mode == GET_MODE (var->var_part[j].cur_loc)
8735 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8736 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8737 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8738 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8740 rtx new_loc = NULL;
8742 if (REG_P (loc[n_var_parts])
8743 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8744 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8745 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8746 == REGNO (loc2))
8748 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8749 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8750 mode, 0);
8751 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8752 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8753 if (new_loc)
8755 if (!REG_P (new_loc)
8756 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8757 new_loc = NULL;
8758 else
8759 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8762 else if (MEM_P (loc[n_var_parts])
8763 && GET_CODE (XEXP (loc2, 0)) == PLUS
8764 && REG_P (XEXP (XEXP (loc2, 0), 0))
8765 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8767 if ((REG_P (XEXP (loc[n_var_parts], 0))
8768 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8769 XEXP (XEXP (loc2, 0), 0))
8770 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8771 == GET_MODE_SIZE (mode))
8772 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8773 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8774 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8775 XEXP (XEXP (loc2, 0), 0))
8776 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8777 + GET_MODE_SIZE (mode)
8778 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8779 new_loc = adjust_address_nv (loc[n_var_parts],
8780 wider_mode, 0);
8783 if (new_loc)
8785 loc[n_var_parts] = new_loc;
8786 mode = wider_mode;
8787 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8788 i = j;
8791 ++n_var_parts;
8793 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8794 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8795 complete = false;
8797 if (! flag_var_tracking_uninit)
8798 initialized = VAR_INIT_STATUS_INITIALIZED;
8800 note_vl = NULL_RTX;
8801 if (!complete)
8802 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8803 else if (n_var_parts == 1)
8805 rtx expr_list;
8807 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8808 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8809 else
8810 expr_list = loc[0];
8812 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8814 else if (n_var_parts)
8816 rtx parallel;
8818 for (i = 0; i < n_var_parts; i++)
8819 loc[i]
8820 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8822 parallel = gen_rtx_PARALLEL (VOIDmode,
8823 gen_rtvec_v (n_var_parts, loc));
8824 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8825 parallel, initialized);
8828 if (where != EMIT_NOTE_BEFORE_INSN)
8830 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8831 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8832 NOTE_DURING_CALL_P (note) = true;
8834 else
8836 /* Make sure that the call-related notes come first. */
8837 while (NEXT_INSN (insn)
8838 && NOTE_P (insn)
8839 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8840 && NOTE_DURING_CALL_P (insn))
8841 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8842 insn = NEXT_INSN (insn);
8843 if (NOTE_P (insn)
8844 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8845 && NOTE_DURING_CALL_P (insn))
8846 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8847 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8848 else
8849 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8851 NOTE_VAR_LOCATION (note) = note_vl;
8853 set_dv_changed (var->dv, false);
8854 gcc_assert (var->in_changed_variables);
8855 var->in_changed_variables = false;
8856 changed_variables->clear_slot (varp);
8858 /* Continue traversing the hash table. */
8859 return 1;
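/* To illustrate the note payloads built above (shapes only, registers
   invented): a complete single-part variable D yields
       (var_location D (reg:SI 3))
   one with a nonzero offset yields
       (var_location D (expr_list (reg:SI 3) (const_int 4)))
   and a multi-part variable yields
       (var_location D (parallel [(expr_list (reg:SI 3) (const_int 0))
                                  (expr_list (reg:SI 4) (const_int 4))]))
   while an incomplete variable gets a NULL location. */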
8862 /* While traversing changed_variables, push onto DATA (a stack of RTX
8863 values) entries that aren't user variables. */
8865 static int
8866 var_track_values_to_stack (variable_def **slot,
8867 vec<rtx, va_heap> *changed_values_stack)
8869 variable var = *slot;
8871 if (var->onepart == ONEPART_VALUE)
8872 changed_values_stack->safe_push (dv_as_value (var->dv));
8873 else if (var->onepart == ONEPART_DEXPR)
8874 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8876 return 1;
8879 /* Remove from changed_variables the entry whose DV corresponds to
8880 value or debug_expr VAL. */
8881 static void
8882 remove_value_from_changed_variables (rtx val)
8884 decl_or_value dv = dv_from_rtx (val);
8885 variable_def **slot;
8886 variable var;
8888 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8889 NO_INSERT);
8890 var = *slot;
8891 var->in_changed_variables = false;
8892 changed_variables->clear_slot (slot);
8895 /* If VAL (a value or debug_expr) has backlinks to variables actively
8896 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8897 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8898 have dependencies of their own to notify. */
8900 static void
8901 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8902 vec<rtx, va_heap> *changed_values_stack)
8904 variable_def **slot;
8905 variable var;
8906 loc_exp_dep *led;
8907 decl_or_value dv = dv_from_rtx (val);
8909 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8910 NO_INSERT);
8911 if (!slot)
8912 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8913 if (!slot)
8914 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8915 NO_INSERT);
8916 var = *slot;
8918 while ((led = VAR_LOC_DEP_LST (var)))
8920 decl_or_value ldv = led->dv;
8921 variable ivar;
8923 /* Deactivate and remove the backlink, as it was "used up". It
8924 makes no sense to attempt to notify the same entity again:
8925 either it will be recomputed and re-register an active
8926 dependency, or it will still have the changed mark. */
8927 if (led->next)
8928 led->next->pprev = led->pprev;
8929 if (led->pprev)
8930 *led->pprev = led->next;
8931 led->next = NULL;
8932 led->pprev = NULL;
8934 if (dv_changed_p (ldv))
8935 continue;
8937 switch (dv_onepart_p (ldv))
8939 case ONEPART_VALUE:
8940 case ONEPART_DEXPR:
8941 set_dv_changed (ldv, true);
8942 changed_values_stack->safe_push (dv_as_rtx (ldv));
8943 break;
8945 case ONEPART_VDECL:
8946 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8947 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8948 variable_was_changed (ivar, NULL);
8949 break;
8951 case NOT_ONEPART:
8952 delete led;
8953 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8954 if (ivar)
8956 int i = ivar->n_var_parts;
8957 while (i--)
8959 rtx loc = ivar->var_part[i].cur_loc;
8961 if (loc && GET_CODE (loc) == MEM
8962 && XEXP (loc, 0) == val)
8964 variable_was_changed (ivar, NULL);
8965 break;
8969 break;
8971 default:
8972 gcc_unreachable ();
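/* In other words: if some entity V2's expansion used VALUE V1's
   location, a backlink from V1 to V2 was registered; when V1 changes,
   V2 is marked changed here and pushed so that its own dependents are
   notified in turn, until the whole affected subgraph is flagged. */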
8977 /* Take out of changed_variables any entries that don't refer to user
8978 variables. Back-propagate change notifications from values and
8979 debug_exprs to their active dependencies in HTAB or in
8980 CHANGED_VARIABLES. */
8982 static void
8983 process_changed_values (variable_table_type *htab)
8985 int i, n;
8986 rtx val;
8987 auto_vec<rtx, 20> changed_values_stack;
8989 /* Move values from changed_variables to changed_values_stack. */
8990 changed_variables
8991 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8992 (&changed_values_stack);
8994 /* Back-propagate change notifications in values while popping
8995 them from the stack. */
8996 for (n = i = changed_values_stack.length ();
8997 i > 0; i = changed_values_stack.length ())
8999 val = changed_values_stack.pop ();
9000 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9002 /* This condition will hold when visiting each of the entries
9003 originally in changed_variables. We can't remove them
9004 earlier because this could drop the backlinks before we got a
9005 chance to use them. */
9006 if (i == n)
9008 remove_value_from_changed_variables (val);
9009 n--;
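/* Worked micro-example: with two original entries {V1, V2} we start
   with n == 2. Popping V2 (i == 2 == n) notifies its dependents,
   removes V2 from changed_variables, and drops n to 1; any dependents
   pushed in the process are popped with i > n, so they stay in
   changed_variables until the notes for them are finally emitted. */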
9014 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9015 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9016 the notes shall be emitted before or after instruction INSN. */
9018 static void
9019 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9020 shared_hash vars)
9022 emit_note_data data;
9023 variable_table_type *htab = shared_hash_htab (vars);
9025 if (!changed_variables->elements ())
9026 return;
9028 if (MAY_HAVE_DEBUG_INSNS)
9029 process_changed_values (htab);
9031 data.insn = insn;
9032 data.where = where;
9033 data.vars = htab;
9035 changed_variables
9036 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9039 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9040 same variable in hash table DATA or is not there at all. */
9042 static int
9043 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9045 variable old_var, new_var;
9047 old_var = *slot;
9048 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9050 if (!new_var)
9052 /* Variable has disappeared. */
9053 variable empty_var = NULL;
9055 if (old_var->onepart == ONEPART_VALUE
9056 || old_var->onepart == ONEPART_DEXPR)
9058 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9059 if (empty_var)
9061 gcc_checking_assert (!empty_var->in_changed_variables);
9062 if (!VAR_LOC_1PAUX (old_var))
9064 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9065 VAR_LOC_1PAUX (empty_var) = NULL;
9067 else
9068 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9072 if (!empty_var)
9074 empty_var = onepart_pool (old_var->onepart).allocate ();
9075 empty_var->dv = old_var->dv;
9076 empty_var->refcount = 0;
9077 empty_var->n_var_parts = 0;
9078 empty_var->onepart = old_var->onepart;
9079 empty_var->in_changed_variables = false;
9082 if (empty_var->onepart)
9084 /* Propagate the auxiliary data to (ultimately)
9085 changed_variables. */
9086 empty_var->var_part[0].loc_chain = NULL;
9087 empty_var->var_part[0].cur_loc = NULL;
9088 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9089 VAR_LOC_1PAUX (old_var) = NULL;
9091 variable_was_changed (empty_var, NULL);
9092 /* Continue traversing the hash table. */
9093 return 1;
9095 /* Update cur_loc and one-part auxiliary data, before new_var goes
9096 through variable_was_changed. */
9097 if (old_var != new_var && new_var->onepart)
9099 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9100 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9101 VAR_LOC_1PAUX (old_var) = NULL;
9102 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9104 if (variable_different_p (old_var, new_var))
9105 variable_was_changed (new_var, NULL);
9107 /* Continue traversing the hash table. */
9108 return 1;
9111 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9112 table DATA. */
9114 static int
9115 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9117 variable old_var, new_var;
9119 new_var = *slot;
9120 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9121 if (!old_var)
9123 int i;
9124 for (i = 0; i < new_var->n_var_parts; i++)
9125 new_var->var_part[i].cur_loc = NULL;
9126 variable_was_changed (new_var, NULL);
9129 /* Continue traversing the hash table. */
9130 return 1;
9133 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9134 NEW_SET. */
9136 static void
9137 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9138 dataflow_set *new_set)
9140 shared_hash_htab (old_set->vars)
9141 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9142 (shared_hash_htab (new_set->vars));
9143 shared_hash_htab (new_set->vars)
9144 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9145 (shared_hash_htab (old_set->vars));
9146 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
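/* The two traversals are complementary: the pass over OLD_SET flags
   variables that disappeared from or changed in NEW_SET, while the
   pass over NEW_SET flags variables with no counterpart in OLD_SET
   at all. */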
9149 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9151 static rtx_insn *
9152 next_non_note_insn_var_location (rtx_insn *insn)
9154 while (insn)
9156 insn = NEXT_INSN (insn);
9157 if (insn == 0
9158 || !NOTE_P (insn)
9159 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9160 break;
9163 return insn;
9166 /* Emit the notes for changes of location parts in the basic block BB. */
9168 static void
9169 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9171 unsigned int i;
9172 micro_operation *mo;
9174 dataflow_set_clear (set);
9175 dataflow_set_copy (set, &VTI (bb)->in);
9177 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9179 rtx_insn *insn = mo->insn;
9180 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9182 switch (mo->type)
9184 case MO_CALL:
9185 dataflow_set_clear_at_call (set);
9186 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9188 rtx arguments = mo->u.loc, *p = &arguments;
9189 rtx_note *note;
9190 while (*p)
9192 XEXP (XEXP (*p, 0), 1)
9193 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9194 shared_hash_htab (set->vars));
9195 /* If expansion is successful, keep it in the list. */
9196 if (XEXP (XEXP (*p, 0), 1))
9197 p = &XEXP (*p, 1);
9198 /* Otherwise, if the following item is the data_value for it,
9199 drop it too. */
9200 else if (XEXP (*p, 1)
9201 && REG_P (XEXP (XEXP (*p, 0), 0))
9202 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9203 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9205 && REGNO (XEXP (XEXP (*p, 0), 0))
9206 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9207 0), 0)))
9208 *p = XEXP (XEXP (*p, 1), 1);
9209 /* Just drop this item. */
9210 else
9211 *p = XEXP (*p, 1);
9213 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9214 NOTE_VAR_LOCATION (note) = arguments;
9216 break;
9218 case MO_USE:
9220 rtx loc = mo->u.loc;
9222 if (REG_P (loc))
9223 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9224 else
9225 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9227 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9229 break;
9231 case MO_VAL_LOC:
9233 rtx loc = mo->u.loc;
9234 rtx val, vloc;
9235 tree var;
9237 if (GET_CODE (loc) == CONCAT)
9239 val = XEXP (loc, 0);
9240 vloc = XEXP (loc, 1);
9242 else
9244 val = NULL_RTX;
9245 vloc = loc;
9248 var = PAT_VAR_LOCATION_DECL (vloc);
9250 clobber_variable_part (set, NULL_RTX,
9251 dv_from_decl (var), 0, NULL_RTX);
9252 if (val)
9254 if (VAL_NEEDS_RESOLUTION (loc))
9255 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9256 set_variable_part (set, val, dv_from_decl (var), 0,
9257 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9258 INSERT);
9260 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9261 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9262 dv_from_decl (var), 0,
9263 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9264 INSERT);
9266 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9268 break;
9270 case MO_VAL_USE:
9272 rtx loc = mo->u.loc;
9273 rtx val, vloc, uloc;
9275 vloc = uloc = XEXP (loc, 1);
9276 val = XEXP (loc, 0);
9278 if (GET_CODE (val) == CONCAT)
9280 uloc = XEXP (val, 1);
9281 val = XEXP (val, 0);
9284 if (VAL_NEEDS_RESOLUTION (loc))
9285 val_resolve (set, val, vloc, insn);
9286 else
9287 val_store (set, val, uloc, insn, false);
9289 if (VAL_HOLDS_TRACK_EXPR (loc))
9291 if (GET_CODE (uloc) == REG)
9292 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9293 NULL);
9294 else if (GET_CODE (uloc) == MEM)
9295 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9296 NULL);
9299 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9301 break;
9303 case MO_VAL_SET:
9305 rtx loc = mo->u.loc;
9306 rtx val, vloc, uloc;
9307 rtx dstv, srcv;
9309 vloc = loc;
9310 uloc = XEXP (vloc, 1);
9311 val = XEXP (vloc, 0);
9312 vloc = uloc;
9314 if (GET_CODE (uloc) == SET)
9316 dstv = SET_DEST (uloc);
9317 srcv = SET_SRC (uloc);
9319 else
9321 dstv = uloc;
9322 srcv = NULL;
9325 if (GET_CODE (val) == CONCAT)
9327 dstv = vloc = XEXP (val, 1);
9328 val = XEXP (val, 0);
9331 if (GET_CODE (vloc) == SET)
9333 srcv = SET_SRC (vloc);
9335 gcc_assert (val != srcv);
9336 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9338 dstv = vloc = SET_DEST (vloc);
9340 if (VAL_NEEDS_RESOLUTION (loc))
9341 val_resolve (set, val, srcv, insn);
9343 else if (VAL_NEEDS_RESOLUTION (loc))
9345 gcc_assert (GET_CODE (uloc) == SET
9346 && GET_CODE (SET_SRC (uloc)) == REG);
9347 val_resolve (set, val, SET_SRC (uloc), insn);
9350 if (VAL_HOLDS_TRACK_EXPR (loc))
9352 if (VAL_EXPR_IS_CLOBBERED (loc))
9354 if (REG_P (uloc))
9355 var_reg_delete (set, uloc, true);
9356 else if (MEM_P (uloc))
9358 gcc_assert (MEM_P (dstv));
9359 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9360 var_mem_delete (set, dstv, true);
9363 else
9365 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9366 rtx src = NULL, dst = uloc;
9367 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9369 if (GET_CODE (uloc) == SET)
9371 src = SET_SRC (uloc);
9372 dst = SET_DEST (uloc);
9375 if (copied_p)
9377 status = find_src_status (set, src);
9379 src = find_src_set_src (set, src);
9382 if (REG_P (dst))
9383 var_reg_delete_and_set (set, dst, !copied_p,
9384 status, srcv);
9385 else if (MEM_P (dst))
9387 gcc_assert (MEM_P (dstv));
9388 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9389 var_mem_delete_and_set (set, dstv, !copied_p,
9390 status, srcv);
9394 else if (REG_P (uloc))
9395 var_regno_delete (set, REGNO (uloc));
9396 else if (MEM_P (uloc))
9398 gcc_checking_assert (GET_CODE (vloc) == MEM);
9399 gcc_checking_assert (vloc == dstv);
9400 if (vloc != dstv)
9401 clobber_overlapping_mems (set, vloc);
9404 val_store (set, val, dstv, insn, true);
9406 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9407 set->vars);
9409 break;
9411 case MO_SET:
9413 rtx loc = mo->u.loc;
9414 rtx set_src = NULL;
9416 if (GET_CODE (loc) == SET)
9418 set_src = SET_SRC (loc);
9419 loc = SET_DEST (loc);
9422 if (REG_P (loc))
9423 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9424 set_src);
9425 else
9426 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9427 set_src);
9429 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9430 set->vars);
9432 break;
9434 case MO_COPY:
9436 rtx loc = mo->u.loc;
9437 enum var_init_status src_status;
9438 rtx set_src = NULL;
9440 if (GET_CODE (loc) == SET)
9442 set_src = SET_SRC (loc);
9443 loc = SET_DEST (loc);
9446 src_status = find_src_status (set, set_src);
9447 set_src = find_src_set_src (set, set_src);
9449 if (REG_P (loc))
9450 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9451 else
9452 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9454 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9455 set->vars);
9457 break;
9459 case MO_USE_NO_VAR:
9461 rtx loc = mo->u.loc;
9463 if (REG_P (loc))
9464 var_reg_delete (set, loc, false);
9465 else
9466 var_mem_delete (set, loc, false);
9468 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9470 break;
9472 case MO_CLOBBER:
9474 rtx loc = mo->u.loc;
9476 if (REG_P (loc))
9477 var_reg_delete (set, loc, true);
9478 else
9479 var_mem_delete (set, loc, true);
9481 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9482 set->vars);
9484 break;
9486 case MO_ADJUST:
9487 set->stack_adjust += mo->u.adjust;
9488 break;
9493 /* Emit notes for the whole function. */
9495 static void
9496 vt_emit_notes (void)
9498 basic_block bb;
9499 dataflow_set cur;
9501 gcc_assert (!changed_variables->elements ());
9503 /* Free memory occupied by the out hash tables, as they aren't used
9504 anymore. */
9505 FOR_EACH_BB_FN (bb, cfun)
9506 dataflow_set_clear (&VTI (bb)->out);
9508 /* Enable emitting notes by functions (mainly by set_variable_part and
9509 delete_variable_part). */
9510 emit_notes = true;
9512 if (MAY_HAVE_DEBUG_INSNS)
9514 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9517 dataflow_set_init (&cur);
9519 FOR_EACH_BB_FN (bb, cfun)
9521 /* Emit the notes for changes of variable locations between two
9522 consecutive basic blocks. */
9523 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9525 if (MAY_HAVE_DEBUG_INSNS)
9526 local_get_addr_cache = new hash_map<rtx, rtx>;
9528 /* Emit the notes for the changes in the basic block itself. */
9529 emit_notes_in_bb (bb, &cur);
9531 if (MAY_HAVE_DEBUG_INSNS)
9532 delete local_get_addr_cache;
9533 local_get_addr_cache = NULL;
9535 /* Free memory occupied by the in hash table; we won't need it
9536 again. */
9537 dataflow_set_clear (&VTI (bb)->in);
9539 #ifdef ENABLE_CHECKING
9540 shared_hash_htab (cur.vars)
9541 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9542 (shared_hash_htab (empty_shared_hash));
9543 #endif
9544 dataflow_set_destroy (&cur);
9546 if (MAY_HAVE_DEBUG_INSNS)
9547 delete dropped_values;
9548 dropped_values = NULL;
9550 emit_notes = false;
9553 /* If there is a declaration and offset associated with register/memory RTL
9554 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9556 static bool
9557 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9559 if (REG_P (rtl))
9561 if (REG_ATTRS (rtl))
9563 *declp = REG_EXPR (rtl);
9564 *offsetp = REG_OFFSET (rtl);
9565 return true;
9568 else if (GET_CODE (rtl) == PARALLEL)
9570 tree decl = NULL_TREE;
9571 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9572 int len = XVECLEN (rtl, 0), i;
9574 for (i = 0; i < len; i++)
9576 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9577 if (!REG_P (reg) || !REG_ATTRS (reg))
9578 break;
9579 if (!decl)
9580 decl = REG_EXPR (reg);
9581 if (REG_EXPR (reg) != decl)
9582 break;
9583 if (REG_OFFSET (reg) < offset)
9584 offset = REG_OFFSET (reg);
9587 if (i == len)
9589 *declp = decl;
9590 *offsetp = offset;
9591 return true;
9594 else if (MEM_P (rtl))
9596 if (MEM_ATTRS (rtl))
9598 *declp = MEM_EXPR (rtl);
9599 *offsetp = INT_MEM_OFFSET (rtl);
9600 return true;
9603 return false;
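/* E.g. (illustrative values): for (reg:SI 3 [ x+4 ]), i.e. a REG whose
   REG_EXPR is x and whose REG_OFFSET is 4, this sets *DECLP = x and
   *OFFSETP = 4; for a MEM the same information comes from MEM_EXPR and
   INT_MEM_OFFSET. */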
9606 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9607 of VAL. */
9609 static void
9610 record_entry_value (cselib_val *val, rtx rtl)
9612 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9614 ENTRY_VALUE_EXP (ev) = rtl;
9616 cselib_add_permanent_equiv (val, ev, get_insns ());
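/* For instance, for a parameter arriving in (reg:DI 5 di) this records
   (entry_value:DI (reg:DI 5 di)) as a permanent equivalence of VAL,
   from which the DWARF back end can later emit
   DW_OP_GNU_entry_value-based locations. */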
9619 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9621 static void
9622 vt_add_function_parameter (tree parm)
9624 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9625 rtx incoming = DECL_INCOMING_RTL (parm);
9626 tree decl;
9627 machine_mode mode;
9628 HOST_WIDE_INT offset;
9629 dataflow_set *out;
9630 decl_or_value dv;
9632 if (TREE_CODE (parm) != PARM_DECL)
9633 return;
9635 if (!decl_rtl || !incoming)
9636 return;
9638 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9639 return;
9641 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9642 rewrite the incoming location of parameters passed on the stack
9643 into MEMs based on the argument pointer, so that incoming doesn't
9644 depend on a pseudo. */
9645 if (MEM_P (incoming)
9646 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9647 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9648 && XEXP (XEXP (incoming, 0), 0)
9649 == crtl->args.internal_arg_pointer
9650 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9652 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9653 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9654 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9655 incoming
9656 = replace_equiv_address_nv (incoming,
9657 plus_constant (Pmode,
9658 arg_pointer_rtx, off));
9661 #ifdef HAVE_window_save
9662 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9663 If the target machine has an explicit window save instruction, the
9664 actual entry value is the corresponding OUTGOING_REGNO instead. */
9665 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9667 if (REG_P (incoming)
9668 && HARD_REGISTER_P (incoming)
9669 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9671 parm_reg_t p;
9672 p.incoming = incoming;
9673 incoming
9674 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9675 OUTGOING_REGNO (REGNO (incoming)), 0);
9676 p.outgoing = incoming;
9677 vec_safe_push (windowed_parm_regs, p);
9679 else if (GET_CODE (incoming) == PARALLEL)
9681 rtx outgoing
9682 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9683 int i;
9685 for (i = 0; i < XVECLEN (incoming, 0); i++)
9687 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9688 parm_reg_t p;
9689 p.incoming = reg;
9690 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9691 OUTGOING_REGNO (REGNO (reg)), 0);
9692 p.outgoing = reg;
9693 XVECEXP (outgoing, 0, i)
9694 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9695 XEXP (XVECEXP (incoming, 0, i), 1));
9696 vec_safe_push (windowed_parm_regs, p);
9699 incoming = outgoing;
9701 else if (MEM_P (incoming)
9702 && REG_P (XEXP (incoming, 0))
9703 && HARD_REGISTER_P (XEXP (incoming, 0)))
9705 rtx reg = XEXP (incoming, 0);
9706 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9708 parm_reg_t p;
9709 p.incoming = reg;
9710 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9711 p.outgoing = reg;
9712 vec_safe_push (windowed_parm_regs, p);
9713 incoming = replace_equiv_address_nv (incoming, reg);
9717 #endif
9719 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9721 if (MEM_P (incoming))
9723 /* This means argument is passed by invisible reference. */
9724 offset = 0;
9725 decl = parm;
9727 else
9729 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9730 return;
9731 offset += byte_lowpart_offset (GET_MODE (incoming),
9732 GET_MODE (decl_rtl));
9736 if (!decl)
9737 return;
9739 if (parm != decl)
9741 /* If that DECL_RTL wasn't a pseudo that got spilled to
9742 memory, bail out. Otherwise, the spill slot sharing code
9743 will force the memory to reference spill_slot_decl (%sfp),
9744 so we don't match above. That's ok, the pseudo must have
9745 referenced the entire parameter, so just reset OFFSET. */
9746 if (decl != get_spill_slot_decl (false))
9747 return;
9748 offset = 0;
9751 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9752 return;
9754 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9756 dv = dv_from_decl (parm);
9758 if (target_for_debug_bind (parm)
9759 /* We can't deal with these right now, because this kind of
9760 variable is single-part. ??? We could handle parallels
9761 that describe multiple locations for the same single
9762 value, but ATM we don't. */
9763 && GET_CODE (incoming) != PARALLEL)
9765 cselib_val *val;
9766 rtx lowpart;
9768 /* ??? We shouldn't ever hit this, but it may happen because
9769 arguments passed by invisible reference aren't dealt with
9770 above: incoming-rtl will have Pmode rather than the
9771 expected mode for the type. */
9772 if (offset)
9773 return;
9775 lowpart = var_lowpart (mode, incoming);
9776 if (!lowpart)
9777 return;
9779 val = cselib_lookup_from_insn (lowpart, mode, true,
9780 VOIDmode, get_insns ());
9782 /* ??? Float-typed values in memory are not handled by
9783 cselib. */
9784 if (val)
9786 preserve_value (val);
9787 set_variable_part (out, val->val_rtx, dv, offset,
9788 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9789 dv = dv_from_value (val->val_rtx);
9792 if (MEM_P (incoming))
9794 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9795 VOIDmode, get_insns ());
9796 if (val)
9798 preserve_value (val);
9799 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9804 if (REG_P (incoming))
9806 incoming = var_lowpart (mode, incoming);
9807 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9808 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9809 incoming);
9810 set_variable_part (out, incoming, dv, offset,
9811 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9812 if (dv_is_value_p (dv))
9814 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9815 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9816 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9818 machine_mode indmode
9819 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9820 rtx mem = gen_rtx_MEM (indmode, incoming);
9821 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9822 VOIDmode,
9823 get_insns ());
9824 if (val)
9826 preserve_value (val);
9827 record_entry_value (val, mem);
9828 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9829 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9834 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9836 int i;
9838 for (i = 0; i < XVECLEN (incoming, 0); i++)
9840 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9841 offset = REG_OFFSET (reg);
9842 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9843 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9844 set_variable_part (out, reg, dv, offset,
9845 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9848 else if (MEM_P (incoming))
9850 incoming = var_lowpart (mode, incoming);
9851 set_variable_part (out, incoming, dv, offset,
9852 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9856 /* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9858 static void
9859 vt_add_function_parameters (void)
9861 tree parm;
9863 for (parm = DECL_ARGUMENTS (current_function_decl);
9864 parm; parm = DECL_CHAIN (parm))
9865 if (!POINTER_BOUNDS_P (parm))
9866 vt_add_function_parameter (parm);
9868 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9870 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9872 if (TREE_CODE (vexpr) == INDIRECT_REF)
9873 vexpr = TREE_OPERAND (vexpr, 0);
9875 if (TREE_CODE (vexpr) == PARM_DECL
9876 && DECL_ARTIFICIAL (vexpr)
9877 && !DECL_IGNORED_P (vexpr)
9878 && DECL_NAMELESS (vexpr))
9879 vt_add_function_parameter (vexpr);
9883 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9884 ensure it isn't flushed during cselib_reset_table.
9885 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9886 has been eliminated. */
9888 static void
9889 vt_init_cfa_base (void)
9891 cselib_val *val;
9893 #ifdef FRAME_POINTER_CFA_OFFSET
9894 cfa_base_rtx = frame_pointer_rtx;
9895 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9896 #else
9897 cfa_base_rtx = arg_pointer_rtx;
9898 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9899 #endif
9900 if (cfa_base_rtx == hard_frame_pointer_rtx
9901 || !fixed_regs[REGNO (cfa_base_rtx)])
9903 cfa_base_rtx = NULL_RTX;
9904 return;
9906 if (!MAY_HAVE_DEBUG_INSNS)
9907 return;
9909 /* Tell alias analysis that cfa_base_rtx should share
9910 find_base_term value with stack pointer or hard frame pointer. */
9911 if (!frame_pointer_needed)
9912 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9913 else if (!crtl->stack_realign_tried)
9914 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9916 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9917 VOIDmode, get_insns ());
9918 preserve_value (val);
9919 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9922 /* Allocate and initialize the data structures for variable tracking
9923 and parse the RTL to get the micro operations. */
9925 static bool
9926 vt_initialize (void)
9928 basic_block bb;
9929 HOST_WIDE_INT fp_cfa_offset = -1;
9931 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9933 empty_shared_hash = new shared_hash_def;
9934 empty_shared_hash->refcount = 1;
9935 empty_shared_hash->htab = new variable_table_type (1);
9936 changed_variables = new variable_table_type (10);
9938 /* Init the IN and OUT sets. */
9939 FOR_ALL_BB_FN (bb, cfun)
9941 VTI (bb)->visited = false;
9942 VTI (bb)->flooded = false;
9943 dataflow_set_init (&VTI (bb)->in);
9944 dataflow_set_init (&VTI (bb)->out);
9945 VTI (bb)->permp = NULL;
9948 if (MAY_HAVE_DEBUG_INSNS)
9950 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9951 scratch_regs = BITMAP_ALLOC (NULL);
9952 preserved_values.create (256);
9953 global_get_addr_cache = new hash_map<rtx, rtx>;
9955 else
9957 scratch_regs = NULL;
9958 global_get_addr_cache = NULL;
9961 if (MAY_HAVE_DEBUG_INSNS)
9963 rtx reg, expr;
9964 int ofst;
9965 cselib_val *val;
9967 #ifdef FRAME_POINTER_CFA_OFFSET
9968 reg = frame_pointer_rtx;
9969 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9970 #else
9971 reg = arg_pointer_rtx;
9972 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9973 #endif
9975 ofst -= INCOMING_FRAME_SP_OFFSET;
9977 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9978 VOIDmode, get_insns ());
9979 preserve_value (val);
9980 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9981 cselib_preserve_cfa_base_value (val, REGNO (reg));
9982 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9983 stack_pointer_rtx, -ofst);
9984 cselib_add_permanent_equiv (val, expr, get_insns ());
9986 if (ofst)
9988 val = cselib_lookup_from_insn (stack_pointer_rtx,
9989 GET_MODE (stack_pointer_rtx), 1,
9990 VOIDmode, get_insns ());
9991 preserve_value (val);
9992 expr = plus_constant (GET_MODE (reg), reg, ofst);
9993 cselib_add_permanent_equiv (val, expr, get_insns ());
9997 /* In order to factor out the adjustments made to the stack pointer or to
9998 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9999 instead of individual location lists, we're going to rewrite MEMs based
10000 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10001 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10002 resp. arg_pointer_rtx. We can do this either when there is no frame
10003 pointer in the function and stack adjustments are consistent for all
10004 basic blocks or when there is a frame pointer and no stack realignment.
10005 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10006 has been eliminated. */
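/* As a rough sketch of the rewrite described above (offsets invented):
   a stack slot accessed as (mem:SI (plus:SI (reg:SI sp) (const_int 12)))
   would be tracked as (mem:SI (plus:SI (reg:SI argp) (const_int N)))
   for the N implied by the current stack adjustment, so that one
   DW_OP_fbreg-based location can describe it for the whole function. */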
10007 if (!frame_pointer_needed)
10009 rtx reg, elim;
10011 if (!vt_stack_adjustments ())
10012 return false;
10014 #ifdef FRAME_POINTER_CFA_OFFSET
10015 reg = frame_pointer_rtx;
10016 #else
10017 reg = arg_pointer_rtx;
10018 #endif
10019 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10020 if (elim != reg)
10022 if (GET_CODE (elim) == PLUS)
10023 elim = XEXP (elim, 0);
10024 if (elim == stack_pointer_rtx)
10025 vt_init_cfa_base ();
10028 else if (!crtl->stack_realign_tried)
10030 rtx reg, elim;
10032 #ifdef FRAME_POINTER_CFA_OFFSET
10033 reg = frame_pointer_rtx;
10034 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10035 #else
10036 reg = arg_pointer_rtx;
10037 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10038 #endif
10039 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10040 if (elim != reg)
10042 if (GET_CODE (elim) == PLUS)
10044 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10045 elim = XEXP (elim, 0);
10047 if (elim != hard_frame_pointer_rtx)
10048 fp_cfa_offset = -1;
10050 else
10051 fp_cfa_offset = -1;
10054 /* If the stack is realigned and a DRAP register is used, we're going to
10055 rewrite MEMs based on it representing incoming locations of parameters
10056 passed on the stack into MEMs based on the argument pointer. Although
10057 we aren't going to rewrite other MEMs, we still need to initialize the
10058 virtual CFA pointer in order to ensure that the argument pointer will
10059 be seen as a constant throughout the function.
10061 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10062 else if (stack_realign_drap)
10064 rtx reg, elim;
10066 #ifdef FRAME_POINTER_CFA_OFFSET
10067 reg = frame_pointer_rtx;
10068 #else
10069 reg = arg_pointer_rtx;
10070 #endif
10071 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10072 if (elim != reg)
10074 if (GET_CODE (elim) == PLUS)
10075 elim = XEXP (elim, 0);
10076 if (elim == hard_frame_pointer_rtx)
10077 vt_init_cfa_base ();
10081 hard_frame_pointer_adjustment = -1;
10083 vt_add_function_parameters ();
10085 FOR_EACH_BB_FN (bb, cfun)
10087 rtx_insn *insn;
10088 HOST_WIDE_INT pre, post = 0;
10089 basic_block first_bb, last_bb;
10091 if (MAY_HAVE_DEBUG_INSNS)
10093 cselib_record_sets_hook = add_with_sets;
10094 if (dump_file && (dump_flags & TDF_DETAILS))
10095 fprintf (dump_file, "first value: %i\n",
10096 cselib_get_next_uid ());
10099 first_bb = bb;
10100 for (;;)
10102 edge e;
10103 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10104 || ! single_pred_p (bb->next_bb))
10105 break;
10106 e = find_edge (bb, bb->next_bb);
10107 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10108 break;
10109 bb = bb->next_bb;
10111 last_bb = bb;
10113 /* Add the micro-operations to the vector. */
10114 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10116 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10117 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10118 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10119 insn = NEXT_INSN (insn))
10121 if (INSN_P (insn))
10123 if (!frame_pointer_needed)
10125 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10126 if (pre)
10128 micro_operation mo;
10129 mo.type = MO_ADJUST;
10130 mo.u.adjust = pre;
10131 mo.insn = insn;
10132 if (dump_file && (dump_flags & TDF_DETAILS))
10133 log_op_type (PATTERN (insn), bb, insn,
10134 MO_ADJUST, dump_file);
10135 VTI (bb)->mos.safe_push (mo);
10136 VTI (bb)->out.stack_adjust += pre;
10140 cselib_hook_called = false;
10141 adjust_insn (bb, insn);
10142 if (MAY_HAVE_DEBUG_INSNS)
10144 if (CALL_P (insn))
10145 prepare_call_arguments (bb, insn);
10146 cselib_process_insn (insn);
10147 if (dump_file && (dump_flags & TDF_DETAILS))
10149 print_rtl_single (dump_file, insn);
10150 dump_cselib_table (dump_file);
10153 if (!cselib_hook_called)
10154 add_with_sets (insn, 0, 0);
10155 cancel_changes (0);
10157 if (!frame_pointer_needed && post)
10159 micro_operation mo;
10160 mo.type = MO_ADJUST;
10161 mo.u.adjust = post;
10162 mo.insn = insn;
10163 if (dump_file && (dump_flags & TDF_DETAILS))
10164 log_op_type (PATTERN (insn), bb, insn,
10165 MO_ADJUST, dump_file);
10166 VTI (bb)->mos.safe_push (mo);
10167 VTI (bb)->out.stack_adjust += post;
10170 if (fp_cfa_offset != -1
10171 && hard_frame_pointer_adjustment == -1
10172 && fp_setter_insn (insn))
10174 vt_init_cfa_base ();
10175 hard_frame_pointer_adjustment = fp_cfa_offset;
10176 /* Disassociate sp from fp now. */
10177 if (MAY_HAVE_DEBUG_INSNS)
10179 cselib_val *v;
10180 cselib_invalidate_rtx (stack_pointer_rtx);
10181 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10182 VOIDmode);
10183 if (v && !cselib_preserved_value_p (v))
10185 cselib_set_value_sp_based (v);
10186 preserve_value (v);
10192 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10195 bb = last_bb;
10197 if (MAY_HAVE_DEBUG_INSNS)
10199 cselib_preserve_only_values ();
10200 cselib_reset_table (cselib_get_next_uid ());
10201 cselib_record_sets_hook = NULL;
10205 hard_frame_pointer_adjustment = -1;
10206 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10207 cfa_base_rtx = NULL_RTX;
10208 return true;
10211 /* This is *not* reset after each function. It gives each
10212 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10213 a unique label number. */
10215 static int debug_label_num = 1;
10217 /* Get rid of all debug insns from the insn stream. */
10219 static void
10220 delete_debug_insns (void)
10222 basic_block bb;
10223 rtx_insn *insn, *next;
10225 if (!MAY_HAVE_DEBUG_INSNS)
10226 return;
10228 FOR_EACH_BB_FN (bb, cfun)
10230 FOR_BB_INSNS_SAFE (bb, insn, next)
10231 if (DEBUG_INSN_P (insn))
10233 tree decl = INSN_VAR_LOCATION_DECL (insn);
10234 if (TREE_CODE (decl) == LABEL_DECL
10235 && DECL_NAME (decl)
10236 && !DECL_RTL_SET_P (decl))
10238 PUT_CODE (insn, NOTE);
10239 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10240 NOTE_DELETED_LABEL_NAME (insn)
10241 = IDENTIFIER_POINTER (DECL_NAME (decl));
10242 SET_DECL_RTL (decl, insn);
10243 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10245 else
10246 delete_insn (insn);
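/* Note that a debug bind of a named label with no DECL_RTL is not
   simply deleted above: it becomes a NOTE_INSN_DELETED_DEBUG_LABEL so
   the debug info can still refer to the label by name and number. */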
10251 /* Run a fast, BB-local only version of var tracking, to take care of
10252 information that we don't do global analysis on, so that not all
10253 information is lost. If SKIPPED holds, we're skipping the global
10254 pass entirely, so we should try to use information it would have
10255 handled as well. */
10257 static void
10258 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10260 /* ??? Just skip it all for now. */
10261 delete_debug_insns ();
10264 /* Free the data structures needed for variable tracking. */
10266 static void
10267 vt_finalize (void)
10269 basic_block bb;
10271 FOR_EACH_BB_FN (bb, cfun)
10273 VTI (bb)->mos.release ();
10276 FOR_ALL_BB_FN (bb, cfun)
10278 dataflow_set_destroy (&VTI (bb)->in);
10279 dataflow_set_destroy (&VTI (bb)->out);
10280 if (VTI (bb)->permp)
10282 dataflow_set_destroy (VTI (bb)->permp);
10283 XDELETE (VTI (bb)->permp);
10286 free_aux_for_blocks ();
10287 delete empty_shared_hash->htab;
10288 empty_shared_hash->htab = NULL;
10289 delete changed_variables;
10290 changed_variables = NULL;
10291 attrs_def::pool.release ();
10292 var_pool.release ();
10293 location_chain_def::pool.release ();
10294 shared_hash_def::pool.release ();
10296 if (MAY_HAVE_DEBUG_INSNS)
10298 if (global_get_addr_cache)
10299 delete global_get_addr_cache;
10300 global_get_addr_cache = NULL;
10301 loc_exp_dep::pool.release ();
10302 valvar_pool.release ();
10303 preserved_values.release ();
10304 cselib_finish ();
10305 BITMAP_FREE (scratch_regs);
10306 scratch_regs = NULL;
10309 #ifdef HAVE_window_save
10310 vec_free (windowed_parm_regs);
10311 #endif
10313 if (vui_vec)
10314 XDELETEVEC (vui_vec);
10315 vui_vec = NULL;
10316 vui_allocated = 0;
10319 /* The entry point to variable tracking pass. */
10321 static inline unsigned int
10322 variable_tracking_main_1 (void)
10324 bool success;
10326 if (flag_var_tracking_assignments < 0
10327 /* Var-tracking right now assumes the IR doesn't contain
10328 any pseudos at this point. */
10329 || targetm.no_register_allocation)
10331 delete_debug_insns ();
10332 return 0;
10335 if (n_basic_blocks_for_fn (cfun) > 500
10336 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10338 vt_debug_insns_local (true);
10339 return 0;
10342 mark_dfs_back_edges ();
10343 if (!vt_initialize ())
10345 vt_finalize ();
10346 vt_debug_insns_local (true);
10347 return 0;
10350 success = vt_find_locations ();
10352 if (!success && flag_var_tracking_assignments > 0)
10354 vt_finalize ();
10356 delete_debug_insns ();
10358 /* This is later restored by our caller. */
10359 flag_var_tracking_assignments = 0;
10361 success = vt_initialize ();
10362 gcc_assert (success);
10364 success = vt_find_locations ();
10367 if (!success)
10369 vt_finalize ();
10370 vt_debug_insns_local (false);
10371 return 0;
10374 if (dump_file && (dump_flags & TDF_DETAILS))
10376 dump_dataflow_sets ();
10377 dump_reg_info (dump_file);
10378 dump_flow_info (dump_file, dump_flags);
10381 timevar_push (TV_VAR_TRACKING_EMIT);
10382 vt_emit_notes ();
10383 timevar_pop (TV_VAR_TRACKING_EMIT);
10385 vt_finalize ();
10386 vt_debug_insns_local (false);
10387 return 0;
10390 unsigned int
10391 variable_tracking_main (void)
10393 unsigned int ret;
10394 int save = flag_var_tracking_assignments;
10396 ret = variable_tracking_main_1 ();
10398 flag_var_tracking_assignments = save;
10400 return ret;
10403 namespace {
10405 const pass_data pass_data_variable_tracking =
10407 RTL_PASS, /* type */
10408 "vartrack", /* name */
10409 OPTGROUP_NONE, /* optinfo_flags */
10410 TV_VAR_TRACKING, /* tv_id */
10411 0, /* properties_required */
10412 0, /* properties_provided */
10413 0, /* properties_destroyed */
10414 0, /* todo_flags_start */
10415 0, /* todo_flags_finish */
10418 class pass_variable_tracking : public rtl_opt_pass
10420 public:
10421 pass_variable_tracking (gcc::context *ctxt)
10422 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10425 /* opt_pass methods: */
10426 virtual bool gate (function *)
10428 return (flag_var_tracking && !targetm.delay_vartrack);
10431 virtual unsigned int execute (function *)
10433 return variable_tracking_main ();
10436 }; // class pass_variable_tracking
10438 } // anon namespace
10440 rtl_opt_pass *
10441 make_pass_variable_tracking (gcc::context *ctxt)
10443 return new pass_variable_tracking (ctxt);