/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
     clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and a linked list for each physical register.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked
   lists should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign
   the same register to variables A and B, and both of them are stored in
   the same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable
   locations are emitted to appropriate positions in the RTL code.  Each
   such note describes the location of one variable at the point in the
   instruction stream where the note is.  There is no need to emit a note
   for each variable before each instruction; we only emit these notes
   where the location of a variable changes (this means that we also emit
   notes for changes between the OUT set of the previous block and the IN
   set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
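
/* Editor's illustration (shape only, not from the original source): for
   a variable x living in a register, the pass emits something like
     (note ... NOTE_INSN_VAR_LOCATION (var_location x (reg:SI ...)))
   and dwarf2out later turns the stream of such notes into the DWARF
   location list for x.  */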

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "hash-map.h"
#include "hash-table.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "regs.h"
#include "expr.h"
#include "tree-pass.h"
#include "bitmap.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"

typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];

/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,        /* Use location (REG or MEM).  */
  MO_USE_NO_VAR, /* Use location which is not associated with a variable
                    or the variable is not trackable.  */
  MO_VAL_USE,    /* Use location which is associated with a value.  */
  MO_VAL_LOC,    /* Use location which appears in a debug insn.  */
  MO_VAL_SET,    /* Set location associated with a value.  */
  MO_SET,        /* Set location.  */
  MO_COPY,       /* Copy the same portion of a variable from one
                    location to another.  */
  MO_CLOBBER,    /* Clobber location.  */
  MO_CALL,       /* Call insn.  */
  MO_ADJUST      /* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;

/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
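
/* Editor's note (illustrative): a decl_or_value is a single untyped
   pointer that is either a tree (decl) or a VALUE rtx.  Both kinds of
   object start with a code field, so dv_is_decl_p can discriminate them
   by checking whether that code equals (int) VALUE, which the
   compile-time assertion near the top of this file guarantees no real
   tree code can do.  */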

/* Description of location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus a chain
   is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;

/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized? */
  enum var_init_status init;
} *location_chain;

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;

/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};

/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		\
			      ? VAR_LOC_1PAUX (var)->backlinks	\
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		\
			      ? &VAR_LOC_1PAUX (var)->deps	\
			      : NULL)

typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher
{
  typedef variable_def value_type;
  typedef void compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const value_type *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const value_type *v, const compare_type *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (value_type *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;

/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}

/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
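
/* Editor's example: on a target whose stack grows downwards, a 16-byte
   allocation expressed as
     (set (reg sp) (plus (reg sp) (const_int -16)))
   takes the PLUS branch above, so *post -= -16, i.e. a post-adjustment
   of +16 is recorded.  */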

/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx
   or hard_frame_pointer_rtx is being mapped to it, plus the offset used
   for the mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider-mode arithmetic to a narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
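
/* Editor's example: narrowing (plus:DI (reg:DI A) (reg:DI B)) to SImode
   replaces each operand by its SImode lowpart subreg and rebuilds the
   operation as (plus:SI ...), with simplify_gen_binary folding whatever
   it can.  */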

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be done before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}

/* Return the node of LIST matching DV and OFFSET, or NULL if there is
   no such member.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      pool_free (shared_hash_pool, vars);
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
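
/* Editor's note (illustrative): if cselib finds that VALUEs with uids 5
   and 9 are equivalent, canon_value_cmp picks uid 5 as canonical; uid 9
   then keeps a single link to uid 5, while uid 5's location list carries
   backlinks to uid 9 and any other equivalent VALUEs, forming the star
   topology described above.  */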

static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}

/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && TREE_CODE (decl) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}

/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}

static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
	{
	  location_chain nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}
1900 /* Delete current content of register LOC in dataflow set SET and set
1901 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1902 MODIFY is true, any other live copies of the same variable part are
1903 also deleted from the dataflow set, otherwise the variable part is
1904 assumed to be copied from another location holding the same
1905 part. */
1907 static void
1908 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1909 enum var_init_status initialized, rtx set_src)
1911 tree decl = REG_EXPR (loc);
1912 HOST_WIDE_INT offset = REG_OFFSET (loc);
1913 attrs node, next;
1914 attrs *nextp;
1916 decl = var_debug_decl (decl);
1918 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1919 initialized = get_init_value (set, loc, dv_from_decl (decl));
1921 nextp = &set->regs[REGNO (loc)];
1922 for (node = *nextp; node; node = next)
1924 next = node->next;
1925 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1927 delete_variable_part (set, node->loc, node->dv, node->offset);
1928 pool_free (attrs_pool, node);
1929 *nextp = next;
1931 else
1933 node->loc = loc;
1934 nextp = &node->next;
1937 if (modify)
1938 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1939 var_reg_set (set, loc, initialized, set_src);
1942 /* Delete the association of register LOC in dataflow set SET with any
1943 variables that aren't onepart. If CLOBBER is true, also delete any
1944 other live copies of the same variable part, and delete the
1945 association with onepart dvs too. */
1947 static void
1948 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1950 attrs *nextp = &set->regs[REGNO (loc)];
1951 attrs node, next;
1953 if (clobber)
1955 tree decl = REG_EXPR (loc);
1956 HOST_WIDE_INT offset = REG_OFFSET (loc);
1958 decl = var_debug_decl (decl);
1960 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1963 for (node = *nextp; node; node = next)
1965 next = node->next;
1966 if (clobber || !dv_onepart_p (node->dv))
1968 delete_variable_part (set, node->loc, node->dv, node->offset);
1969 pool_free (attrs_pool, node);
1970 *nextp = next;
1972 else
1973 nextp = &node->next;
1977 /* Delete content of register with number REGNO in dataflow set SET. */
1979 static void
1980 var_regno_delete (dataflow_set *set, int regno)
1982 attrs *reg = &set->regs[regno];
1983 attrs node, next;
1985 for (node = *reg; node; node = next)
1987 next = node->next;
1988 delete_variable_part (set, node->loc, node->dv, node->offset);
1989 pool_free (attrs_pool, node);
1991 *reg = NULL;
1994 /* Return true if I is the negated value of a power of two. */
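/* E.g. for I == -8, X becomes 8 and 8 == (8 & -8), so -8 qualifies.
   Such constants arise as the masks of stack-alignment ANDs like
   (and:SI (reg:SI sp) (const_int -8)).  */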
1995 static bool
1996 negative_power_of_two_p (HOST_WIDE_INT i)
1998 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1999 return x == (x & -x);
2002 /* Strip constant offsets and alignments off of LOC. Return the base
2003 expression. */
2005 static rtx
2006 vt_get_canonicalize_base (rtx loc)
2008 while ((GET_CODE (loc) == PLUS
2009 || GET_CODE (loc) == AND)
2010 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2011 && (GET_CODE (loc) != AND
2012 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2013 loc = XEXP (loc, 0);
2015 return loc;
2018 /* This caches canonicalized addresses for VALUEs, computed using
2019 information in the global cselib table. */
2020 static hash_map<rtx, rtx> *global_get_addr_cache;
2022 /* This caches canonicalized addresses for VALUEs, computed using
2023 information from the global cache and information pertaining to a
2024 basic block being analyzed. */
2025 static hash_map<rtx, rtx> *local_get_addr_cache;
2027 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2029 /* Return the canonical address for LOC, which must be a VALUE, using a
2030 cached global equivalence or computing it and storing it in the
2031 global cache. */
2033 static rtx
2034 get_addr_from_global_cache (rtx const loc)
2036 rtx x;
2038 gcc_checking_assert (GET_CODE (loc) == VALUE);
2040 bool existed;
2041 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2042 if (existed)
2043 return *slot;
2045 x = canon_rtx (get_addr (loc));
2047 /* Tentative, avoiding infinite recursion. */
2048 *slot = x;
2050 if (x != loc)
2052 rtx nx = vt_canonicalize_addr (NULL, x);
2053 if (nx != x)
2055 /* The table may have moved during recursion, recompute
2056 SLOT. */
2057 *global_get_addr_cache->get (loc) = x = nx;
2061 return x;
2064 /* Return the canonical address for LOC, which must be a VALUE, using a
2065 cached local equivalence or computing it and storing it in the
2066 local cache. */
2068 static rtx
2069 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2071 rtx x;
2072 decl_or_value dv;
2073 variable var;
2074 location_chain l;
2076 gcc_checking_assert (GET_CODE (loc) == VALUE);
2078 bool existed;
2079 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2080 if (existed)
2081 return *slot;
2083 x = get_addr_from_global_cache (loc);
2085 /* Tentative, avoiding infinite recursion. */
2086 *slot = x;
2088 /* Recurse to cache the local expansion of X, or to search for a
2089 VALUE in the expansion. */
2090 if (x != loc)
2092 rtx nx = vt_canonicalize_addr (set, x);
2093 if (nx != x)
2095 slot = local_get_addr_cache->get (loc);
2096 *slot = x = nx;
2098 return x;
2101 dv = dv_from_rtx (x);
2102 var = shared_hash_find (set->vars, dv);
2103 if (!var)
2104 return x;
2106 /* Look for an improved equivalent expression. */
2107 for (l = var->var_part[0].loc_chain; l; l = l->next)
2109 rtx base = vt_get_canonicalize_base (l->loc);
2110 if (GET_CODE (base) == VALUE
2111 && canon_value_cmp (base, loc))
2113 rtx nx = vt_canonicalize_addr (set, l->loc);
2114 if (x != nx)
2116 slot = local_get_addr_cache->get (loc);
2117 *slot = x = nx;
2119 break;
2123 return x;
2126 /* Canonicalize LOC using equivalences from SET in addition to those
2127 in the cselib static table. It expects a VALUE-based expression,
2128 and it will only substitute VALUEs with other VALUEs or
2129 function-global equivalences, so that, if two addresses have base
2130 VALUEs that are locally or globally related in ways that
2131 memrefs_conflict_p cares about, they will both canonicalize to
2132 expressions that have the same base VALUE.
2134 The use of VALUEs as canonical base addresses enables the canonical
2135 RTXs to remain unchanged globally, if they resolve to a constant,
2136 or throughout a basic block otherwise, so that they can be cached
2137 and the cache need not be invalidated when REGs, MEMs or such
2138 change. */
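/* For example, (plus (plus (value V) (const_int 8)) (const_int -4))
   has its constant offsets accumulated into OFST == 4, the base
   (value V) is resolved through the local or global cache to some
   canonical base V', and the result is rebuilt as
   (plus V' (const_int 4)).  */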
2140 static rtx
2141 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2143 HOST_WIDE_INT ofst = 0;
2144 machine_mode mode = GET_MODE (oloc);
2145 rtx loc = oloc;
2146 rtx x;
2147 bool retry = true;
2149 while (retry)
2151 while (GET_CODE (loc) == PLUS
2152 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2154 ofst += INTVAL (XEXP (loc, 1));
2155 loc = XEXP (loc, 0);
2158 /* Alignment operations can't normally be combined, so just
2159 canonicalize the base and we're done. We'll normally have
2160 only one stack alignment anyway. */
2161 if (GET_CODE (loc) == AND
2162 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2163 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2165 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2166 if (x != XEXP (loc, 0))
2167 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2168 retry = false;
2171 if (GET_CODE (loc) == VALUE)
2173 if (set)
2174 loc = get_addr_from_local_cache (set, loc);
2175 else
2176 loc = get_addr_from_global_cache (loc);
2178 /* Consolidate plus_constants. */
2179 while (ofst && GET_CODE (loc) == PLUS
2180 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2182 ofst += INTVAL (XEXP (loc, 1));
2183 loc = XEXP (loc, 0);
2186 retry = false;
2188 else
2190 x = canon_rtx (loc);
2191 if (retry)
2192 retry = (x != loc);
2193 loc = x;
2197 /* Add OFST back in. */
2198 if (ofst)
2200 /* Don't build new RTL if we can help it. */
2201 if (GET_CODE (oloc) == PLUS
2202 && XEXP (oloc, 0) == loc
2203 && INTVAL (XEXP (oloc, 1)) == ofst)
2204 return oloc;
2206 loc = plus_constant (mode, loc, ofst);
2209 return loc;
2212 /* Return true iff there's a true dependence between MLOC and LOC.
2213 MADDR must be a canonicalized version of MLOC's address. */
2215 static inline bool
2216 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2218 if (GET_CODE (loc) != MEM)
2219 return false;
2221 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2222 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2223 return false;
2225 return true;
2228 /* Hold parameters for the hashtab traversal function
2229 drop_overlapping_mem_locs, see below. */
2231 struct overlapping_mems
2233 dataflow_set *set;
2234 rtx loc, addr;
2235 };
2237 /* Remove all MEMs that overlap with COMS->LOC from the location list
2238 of a hash table entry for a value. COMS->ADDR must be a
2239 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2240 canonicalized itself. */
2242 static int
2243 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2245 dataflow_set *set = coms->set;
2246 rtx mloc = coms->loc, addr = coms->addr;
2247 variable var = *slot;
2249 if (var->onepart == ONEPART_VALUE)
2251 location_chain loc, *locp;
2252 bool changed = false;
2253 rtx cur_loc;
2255 gcc_assert (var->n_var_parts == 1);
2257 if (shared_var_p (var, set->vars))
2259 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2260 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2261 break;
2263 if (!loc)
2264 return 1;
2266 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2267 var = *slot;
2268 gcc_assert (var->n_var_parts == 1);
2271 if (VAR_LOC_1PAUX (var))
2272 cur_loc = VAR_LOC_FROM (var);
2273 else
2274 cur_loc = var->var_part[0].cur_loc;
2276 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2277 loc; loc = *locp)
2279 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2281 locp = &loc->next;
2282 continue;
2285 *locp = loc->next;
2286 /* If we have deleted the location which was last emitted,
2287 we have to emit a new location, so add the variable to the
2288 set of changed variables. */
2289 if (cur_loc == loc->loc)
2291 changed = true;
2292 var->var_part[0].cur_loc = NULL;
2293 if (VAR_LOC_1PAUX (var))
2294 VAR_LOC_FROM (var) = NULL;
2296 pool_free (loc_chain_pool, loc);
2299 if (!var->var_part[0].loc_chain)
2301 var->n_var_parts--;
2302 changed = true;
2304 if (changed)
2305 variable_was_changed (var, set);
2308 return 1;
2311 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2313 static void
2314 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2316 struct overlapping_mems coms;
2318 gcc_checking_assert (GET_CODE (loc) == MEM);
2320 coms.set = set;
2321 coms.loc = canon_rtx (loc);
2322 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2324 set->traversed_vars = set->vars;
2325 shared_hash_htab (set->vars)
2326 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2327 set->traversed_vars = NULL;
2330 /* Set the location of DV, OFFSET as the MEM LOC. */
2332 static void
2333 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2334 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2335 enum insert_option iopt)
2337 if (dv_is_decl_p (dv))
2338 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2340 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2343 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2344 SET to LOC.
2345 Adjust the address first if it is stack pointer based. */
2347 static void
2348 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2349 rtx set_src)
2351 tree decl = MEM_EXPR (loc);
2352 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2354 var_mem_decl_set (set, loc, initialized,
2355 dv_from_decl (decl), offset, set_src, INSERT);
2358 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2359 dataflow set SET to LOC. If MODIFY is true, any other live copies
2360 of the same variable part are also deleted from the dataflow set,
2361 otherwise the variable part is assumed to be copied from another
2362 location holding the same part.
2363 Adjust the address first if it is stack pointer based. */
2365 static void
2366 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2367 enum var_init_status initialized, rtx set_src)
2369 tree decl = MEM_EXPR (loc);
2370 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2372 clobber_overlapping_mems (set, loc);
2373 decl = var_debug_decl (decl);
2375 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2376 initialized = get_init_value (set, loc, dv_from_decl (decl));
2378 if (modify)
2379 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2380 var_mem_set (set, loc, initialized, set_src);
2383 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2384 true, also delete any other live copies of the same variable part.
2385 Adjust the address first if it is stack pointer based. */
2387 static void
2388 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2390 tree decl = MEM_EXPR (loc);
2391 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2393 clobber_overlapping_mems (set, loc);
2394 decl = var_debug_decl (decl);
2395 if (clobber)
2396 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2397 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2400 /* Return true if LOC should not be expanded for location expressions,
2401 or used in them. */
2403 static inline bool
2404 unsuitable_loc (rtx loc)
2406 switch (GET_CODE (loc))
2408 case PC:
2409 case SCRATCH:
2410 case CC0:
2411 case ASM_INPUT:
2412 case ASM_OPERANDS:
2413 return true;
2415 default:
2416 return false;
2420 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2421 bound to it. */
2423 static inline void
2424 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2426 if (REG_P (loc))
2428 if (modified)
2429 var_regno_delete (set, REGNO (loc));
2430 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2431 dv_from_value (val), 0, NULL_RTX, INSERT);
2433 else if (MEM_P (loc))
2435 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2437 if (modified)
2438 clobber_overlapping_mems (set, loc);
2440 if (l && GET_CODE (l->loc) == VALUE)
2441 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2443 /* If this MEM is a global constant, we don't need it in the
2444 dynamic tables. ??? We should test this before emitting the
2445 micro-op in the first place. */
2446 while (l)
2447 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2448 break;
2449 else
2450 l = l->next;
2452 if (!l)
2453 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2454 dv_from_value (val), 0, NULL_RTX, INSERT);
2456 else
2458 /* Other kinds of equivalences are necessarily static, at least
2459 so long as we do not perform substitutions while merging
2460 expressions. */
2461 gcc_unreachable ();
2462 set_variable_part (set, loc, dv_from_value (val), 0,
2463 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2467 /* Bind a value to a location it was just stored in. If MODIFIED
2468 holds, assume the location was modified, detaching it from any
2469 values bound to it. */
2471 static void
2472 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2473 bool modified)
2475 cselib_val *v = CSELIB_VAL_PTR (val);
2477 gcc_assert (cselib_preserved_value_p (v));
2479 if (dump_file)
2481 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2482 print_inline_rtx (dump_file, loc, 0);
2483 fprintf (dump_file, " evaluates to ");
2484 print_inline_rtx (dump_file, val, 0);
2485 if (v->locs)
2487 struct elt_loc_list *l;
2488 for (l = v->locs; l; l = l->next)
2490 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2491 print_inline_rtx (dump_file, l->loc, 0);
2494 fprintf (dump_file, "\n");
2497 gcc_checking_assert (!unsuitable_loc (loc));
2499 val_bind (set, val, loc, modified);
2502 /* Clear (canonical address) slots that reference X. */
2504 bool
2505 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2507 if (vt_get_canonicalize_base (*slot) == x)
2508 *slot = NULL;
2509 return true;
2512 /* Reset this node, detaching all its equivalences. */
2515 static void
2516 val_reset (dataflow_set *set, decl_or_value dv)
2518 variable var = shared_hash_find (set->vars, dv) ;
2519 location_chain node;
2520 rtx cval;
2522 if (!var || !var->n_var_parts)
2523 return;
2525 gcc_assert (var->n_var_parts == 1);
2527 if (var->onepart == ONEPART_VALUE)
2529 rtx x = dv_as_value (dv);
2531 /* Relationships in the global cache don't change, so reset the
2532 local cache entry only. */
2533 rtx *slot = local_get_addr_cache->get (x);
2534 if (slot)
2536 /* If the value resolved back to itself, odds are that other
2537 values may have cached it too. These entries now refer
2538 to the old X, so detach them too. Entries that used the
2539 old X but resolved to something else remain ok as long as
2540 that something else isn't also reset. */
2541 if (*slot == x)
2542 local_get_addr_cache
2543 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2544 *slot = NULL;
2548 cval = NULL;
2549 for (node = var->var_part[0].loc_chain; node; node = node->next)
2550 if (GET_CODE (node->loc) == VALUE
2551 && canon_value_cmp (node->loc, cval))
2552 cval = node->loc;
2554 for (node = var->var_part[0].loc_chain; node; node = node->next)
2555 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2557 /* Redirect the equivalence link to the new canonical
2558 value, or simply remove it if it would point at
2559 itself. */
2560 if (cval)
2561 set_variable_part (set, cval, dv_from_value (node->loc),
2562 0, node->init, node->set_src, NO_INSERT);
2563 delete_variable_part (set, dv_as_value (dv),
2564 dv_from_value (node->loc), 0);
2567 if (cval)
2569 decl_or_value cdv = dv_from_value (cval);
2571 /* Keep the remaining values connected, accumulating links
2572 in the canonical value. */
2573 for (node = var->var_part[0].loc_chain; node; node = node->next)
2575 if (node->loc == cval)
2576 continue;
2577 else if (GET_CODE (node->loc) == REG)
2578 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2579 node->set_src, NO_INSERT);
2580 else if (GET_CODE (node->loc) == MEM)
2581 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2582 node->set_src, NO_INSERT);
2583 else
2584 set_variable_part (set, node->loc, cdv, 0,
2585 node->init, node->set_src, NO_INSERT);
2589 /* We remove this last, to make sure that the canonical value is not
2590 removed to the point of requiring reinsertion. */
2591 if (cval)
2592 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2594 clobber_variable_part (set, NULL, dv, 0, NULL);
2597 /* Find the values in a given location and map the val to another
2598 value, if it is unique, or add the location as one holding the
2599 value. */
2601 static void
2602 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2604 decl_or_value dv = dv_from_value (val);
2606 if (dump_file && (dump_flags & TDF_DETAILS))
2608 if (insn)
2609 fprintf (dump_file, "%i: ", INSN_UID (insn));
2610 else
2611 fprintf (dump_file, "head: ");
2612 print_inline_rtx (dump_file, val, 0);
2613 fputs (" is at ", dump_file);
2614 print_inline_rtx (dump_file, loc, 0);
2615 fputc ('\n', dump_file);
2618 val_reset (set, dv);
2620 gcc_checking_assert (!unsuitable_loc (loc));
2622 if (REG_P (loc))
2624 attrs node, found = NULL;
2626 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2627 if (dv_is_value_p (node->dv)
2628 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2630 found = node;
2632 /* Map incoming equivalences. ??? Wouldn't it be nice if
2633 we just started sharing the location lists? Maybe a
2634 circular list ending at the value itself or some
2635 such. */
2636 set_variable_part (set, dv_as_value (node->dv),
2637 dv_from_value (val), node->offset,
2638 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2639 set_variable_part (set, val, node->dv, node->offset,
2640 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2643 /* If we didn't find any equivalence, we need to remember that
2644 this value is held in the named register. */
2645 if (found)
2646 return;
2648 /* ??? Attempt to find and merge equivalent MEMs or other
2649 expressions too. */
2651 val_bind (set, val, loc, false);
2654 /* Initialize dataflow set SET to be empty. */
2657 static void
2658 dataflow_set_init (dataflow_set *set)
2660 init_attrs_list_set (set->regs);
2661 set->vars = shared_hash_copy (empty_shared_hash);
2662 set->stack_adjust = 0;
2663 set->traversed_vars = NULL;
2666 /* Delete the contents of dataflow set SET. */
2668 static void
2669 dataflow_set_clear (dataflow_set *set)
2671 int i;
2673 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2674 attrs_list_clear (&set->regs[i]);
2676 shared_hash_destroy (set->vars);
2677 set->vars = shared_hash_copy (empty_shared_hash);
2680 /* Copy the contents of dataflow set SRC to DST. */
2682 static void
2683 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2685 int i;
2687 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2688 attrs_list_copy (&dst->regs[i], src->regs[i]);
2690 shared_hash_destroy (dst->vars);
2691 dst->vars = shared_hash_copy (src->vars);
2692 dst->stack_adjust = src->stack_adjust;
2695 /* Information for merging lists of locations for a given offset of a variable. */
2697 struct variable_union_info
2699 /* Node of the location chain. */
2700 location_chain lc;
2702 /* The sum of positions in the input chains. */
2703 int pos;
2705 /* The position in the chain of DST dataflow set. */
2706 int pos_dst;
2707 };
2709 /* Buffer for location list sorting and its allocated size. */
2710 static struct variable_union_info *vui_vec;
2711 static int vui_allocated;
2713 /* Comparison function for qsort; orders the structures by their POS element. */
2715 static int
2716 variable_union_info_cmp_pos (const void *n1, const void *n2)
2718 const struct variable_union_info *const i1 =
2719 (const struct variable_union_info *) n1;
2720 const struct variable_union_info *const i2 =
2721 (const struct variable_union_info *) n2;
2723 if (i1->pos != i2->pos)
2724 return i1->pos - i2->pos;
2726 return (i1->pos_dst - i2->pos_dst);
2729 /* Compute union of location parts of variable *SLOT and the same variable
2730 from hash table DATA. Compute "sorted" union of the location chains
2731 for common offsets, i.e. the locations of a variable part are sorted by
2732 a priority where the priority is the sum of the positions in the 2 chains
2733 (if a location is only in one list the position in the second list is
2734 defined to be larger than the length of the chains).
2735 When we are updating the location parts the newest location is at the
2736 beginning of the chain, so when we do the described "sorted" union
2737 we keep the newest locations at the beginning. */
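/* For example, merging the SRC chain (A C B) into the DST chain
   (B A C) gives the priority sums A: 1+0, B: 0+2 and C: 2+1, so the
   merged chain comes out as (A B C).  A location present in only one
   chain gets its missing position padded by src_l + dst_l, so it
   sorts after the locations common to both chains.  */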
2739 static int
2740 variable_union (variable src, dataflow_set *set)
2742 variable dst;
2743 variable_def **dstp;
2744 int i, j, k;
2746 dstp = shared_hash_find_slot (set->vars, src->dv);
2747 if (!dstp || !*dstp)
2749 src->refcount++;
2751 dst_can_be_shared = false;
2752 if (!dstp)
2753 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2755 *dstp = src;
2757 /* Continue traversing the hash table. */
2758 return 1;
2760 else
2761 dst = *dstp;
2763 gcc_assert (src->n_var_parts);
2764 gcc_checking_assert (src->onepart == dst->onepart);
2766 /* We can combine one-part variables very efficiently, because their
2767 entries are in canonical order. */
2768 if (src->onepart)
2770 location_chain *nodep, dnode, snode;
2772 gcc_assert (src->n_var_parts == 1
2773 && dst->n_var_parts == 1);
2775 snode = src->var_part[0].loc_chain;
2776 gcc_assert (snode);
2778 restart_onepart_unshared:
2779 nodep = &dst->var_part[0].loc_chain;
2780 dnode = *nodep;
2781 gcc_assert (dnode);
2783 while (snode)
2785 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2787 if (r > 0)
2789 location_chain nnode;
2791 if (shared_var_p (dst, set->vars))
2793 dstp = unshare_variable (set, dstp, dst,
2794 VAR_INIT_STATUS_INITIALIZED);
2795 dst = *dstp;
2796 goto restart_onepart_unshared;
2799 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2800 nnode->loc = snode->loc;
2801 nnode->init = snode->init;
2802 if (!snode->set_src || MEM_P (snode->set_src))
2803 nnode->set_src = NULL;
2804 else
2805 nnode->set_src = snode->set_src;
2806 nnode->next = dnode;
2807 dnode = nnode;
2809 else if (r == 0)
2810 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2812 if (r >= 0)
2813 snode = snode->next;
2815 nodep = &dnode->next;
2816 dnode = *nodep;
2819 return 1;
2822 gcc_checking_assert (!src->onepart);
2824 /* Count the number of location parts, result is K. */
2825 for (i = 0, j = 0, k = 0;
2826 i < src->n_var_parts && j < dst->n_var_parts; k++)
2828 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2830 i++;
2831 j++;
2833 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2834 i++;
2835 else
2836 j++;
2838 k += src->n_var_parts - i;
2839 k += dst->n_var_parts - j;
2841 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2842 thus there are at most MAX_VAR_PARTS different offsets. */
2843 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2845 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2847 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2848 dst = *dstp;
2851 i = src->n_var_parts - 1;
2852 j = dst->n_var_parts - 1;
2853 dst->n_var_parts = k;
2855 for (k--; k >= 0; k--)
2857 location_chain node, node2;
2859 if (i >= 0 && j >= 0
2860 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2862 /* Compute the "sorted" union of the chains, i.e. the locations which
2863 are in both chains go first, they are sorted by the sum of
2864 positions in the chains. */
2865 int dst_l, src_l;
2866 int ii, jj, n;
2867 struct variable_union_info *vui;
2869 /* If DST is shared, compare the location chains.
2870 If they differ, we will most likely modify the chain in DST,
2871 so make a copy of DST. */
2872 if (shared_var_p (dst, set->vars))
2874 for (node = src->var_part[i].loc_chain,
2875 node2 = dst->var_part[j].loc_chain; node && node2;
2876 node = node->next, node2 = node2->next)
2878 if (!((REG_P (node2->loc)
2879 && REG_P (node->loc)
2880 && REGNO (node2->loc) == REGNO (node->loc))
2881 || rtx_equal_p (node2->loc, node->loc)))
2883 if (node2->init < node->init)
2884 node2->init = node->init;
2885 break;
2888 if (node || node2)
2890 dstp = unshare_variable (set, dstp, dst,
2891 VAR_INIT_STATUS_UNKNOWN);
2892 dst = (variable)*dstp;
2896 src_l = 0;
2897 for (node = src->var_part[i].loc_chain; node; node = node->next)
2898 src_l++;
2899 dst_l = 0;
2900 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2901 dst_l++;
2903 if (dst_l == 1)
2905 /* The most common case, much simpler, no qsort is needed. */
2906 location_chain dstnode = dst->var_part[j].loc_chain;
2907 dst->var_part[k].loc_chain = dstnode;
2908 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2909 node2 = dstnode;
2910 for (node = src->var_part[i].loc_chain; node; node = node->next)
2911 if (!((REG_P (dstnode->loc)
2912 && REG_P (node->loc)
2913 && REGNO (dstnode->loc) == REGNO (node->loc))
2914 || rtx_equal_p (dstnode->loc, node->loc)))
2916 location_chain new_node;
2918 /* Copy the location from SRC. */
2919 new_node = (location_chain) pool_alloc (loc_chain_pool);
2920 new_node->loc = node->loc;
2921 new_node->init = node->init;
2922 if (!node->set_src || MEM_P (node->set_src))
2923 new_node->set_src = NULL;
2924 else
2925 new_node->set_src = node->set_src;
2926 node2->next = new_node;
2927 node2 = new_node;
2929 node2->next = NULL;
2931 else
2933 if (src_l + dst_l > vui_allocated)
2935 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2936 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2937 vui_allocated);
2939 vui = vui_vec;
2941 /* Fill in the locations from DST. */
2942 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2943 node = node->next, jj++)
2945 vui[jj].lc = node;
2946 vui[jj].pos_dst = jj;
2948 /* Set POS larger than any sum of two valid positions; it is overwritten below if the location is also found in SRC. */
2949 vui[jj].pos = jj + src_l + dst_l;
2952 /* Fill in the locations from SRC. */
2953 n = dst_l;
2954 for (node = src->var_part[i].loc_chain, ii = 0; node;
2955 node = node->next, ii++)
2957 /* Find location from NODE. */
2958 for (jj = 0; jj < dst_l; jj++)
2960 if ((REG_P (vui[jj].lc->loc)
2961 && REG_P (node->loc)
2962 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2963 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2965 vui[jj].pos = jj + ii;
2966 break;
2969 if (jj >= dst_l) /* The location has not been found. */
2971 location_chain new_node;
2973 /* Copy the location from SRC. */
2974 new_node = (location_chain) pool_alloc (loc_chain_pool);
2975 new_node->loc = node->loc;
2976 new_node->init = node->init;
2977 if (!node->set_src || MEM_P (node->set_src))
2978 new_node->set_src = NULL;
2979 else
2980 new_node->set_src = node->set_src;
2981 vui[n].lc = new_node;
2982 vui[n].pos_dst = src_l + dst_l;
2983 vui[n].pos = ii + src_l + dst_l;
2984 n++;
2988 if (dst_l == 2)
2990 /* Special-case another still very common case: for dst_l == 2,
2991 all entries dst_l ... n-1 are already sorted, since for i >= dst_l
2992 vui[i].pos == i + src_l + dst_l. */
2993 if (vui[0].pos > vui[1].pos)
2995 /* Order should be 1, 0, 2... */
2996 dst->var_part[k].loc_chain = vui[1].lc;
2997 vui[1].lc->next = vui[0].lc;
2998 if (n >= 3)
3000 vui[0].lc->next = vui[2].lc;
3001 vui[n - 1].lc->next = NULL;
3003 else
3004 vui[0].lc->next = NULL;
3005 ii = 3;
3007 else
3009 dst->var_part[k].loc_chain = vui[0].lc;
3010 if (n >= 3 && vui[2].pos < vui[1].pos)
3012 /* Order should be 0, 2, 1, 3... */
3013 vui[0].lc->next = vui[2].lc;
3014 vui[2].lc->next = vui[1].lc;
3015 if (n >= 4)
3017 vui[1].lc->next = vui[3].lc;
3018 vui[n - 1].lc->next = NULL;
3020 else
3021 vui[1].lc->next = NULL;
3022 ii = 4;
3024 else
3026 /* Order should be 0, 1, 2... */
3027 ii = 1;
3028 vui[n - 1].lc->next = NULL;
3031 for (; ii < n; ii++)
3032 vui[ii - 1].lc->next = vui[ii].lc;
3034 else
3036 qsort (vui, n, sizeof (struct variable_union_info),
3037 variable_union_info_cmp_pos);
3039 /* Reconnect the nodes in sorted order. */
3040 for (ii = 1; ii < n; ii++)
3041 vui[ii - 1].lc->next = vui[ii].lc;
3042 vui[n - 1].lc->next = NULL;
3043 dst->var_part[k].loc_chain = vui[0].lc;
3046 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3048 i--;
3049 j--;
3051 else if ((i >= 0 && j >= 0
3052 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3053 || i < 0)
3055 dst->var_part[k] = dst->var_part[j];
3056 j--;
3058 else if ((i >= 0 && j >= 0
3059 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3060 || j < 0)
3062 location_chain *nextp;
3064 /* Copy the chain from SRC. */
3065 nextp = &dst->var_part[k].loc_chain;
3066 for (node = src->var_part[i].loc_chain; node; node = node->next)
3068 location_chain new_lc;
3070 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3071 new_lc->next = NULL;
3072 new_lc->init = node->init;
3073 if (!node->set_src || MEM_P (node->set_src))
3074 new_lc->set_src = NULL;
3075 else
3076 new_lc->set_src = node->set_src;
3077 new_lc->loc = node->loc;
3079 *nextp = new_lc;
3080 nextp = &new_lc->next;
3083 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3084 i--;
3086 dst->var_part[k].cur_loc = NULL;
3089 if (flag_var_tracking_uninit)
3090 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3092 location_chain node, node2;
3093 for (node = src->var_part[i].loc_chain; node; node = node->next)
3094 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3095 if (rtx_equal_p (node->loc, node2->loc))
3097 if (node->init > node2->init)
3098 node2->init = node->init;
3102 /* Continue traversing the hash table. */
3103 return 1;
3106 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3108 static void
3109 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3111 int i;
3113 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3114 attrs_list_union (&dst->regs[i], src->regs[i]);
3116 if (dst->vars == empty_shared_hash)
3118 shared_hash_destroy (dst->vars);
3119 dst->vars = shared_hash_copy (src->vars);
3121 else
3123 variable_iterator_type hi;
3124 variable var;
3126 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3127 var, variable, hi)
3128 variable_union (var, dst);
3132 /* Whether the value is currently being expanded. */
3133 #define VALUE_RECURSED_INTO(x) \
3134 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3136 /* Whether no expansion was found, saving useless lookups.
3137 It must only be set when VALUE_CHANGED is clear. */
3138 #define NO_LOC_P(x) \
3139 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3141 /* Whether cur_loc in the value needs to be (re)computed. */
3142 #define VALUE_CHANGED(x) \
3143 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3144 /* Whether cur_loc in the decl needs to be (re)computed. */
3145 #define DECL_CHANGED(x) TREE_VISITED (x)
3147 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3148 user DECLs, this means they're in changed_variables. Values and
3149 debug exprs may be left with this flag set if no user variable
3150 requires them to be evaluated. */
3152 static inline void
3153 set_dv_changed (decl_or_value dv, bool newv)
3155 switch (dv_onepart_p (dv))
3157 case ONEPART_VALUE:
3158 if (newv)
3159 NO_LOC_P (dv_as_value (dv)) = false;
3160 VALUE_CHANGED (dv_as_value (dv)) = newv;
3161 break;
3163 case ONEPART_DEXPR:
3164 if (newv)
3165 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3166 /* Fall through... */
3168 default:
3169 DECL_CHANGED (dv_as_decl (dv)) = newv;
3170 break;
3174 /* Return true if DV needs to have its cur_loc recomputed. */
3176 static inline bool
3177 dv_changed_p (decl_or_value dv)
3179 return (dv_is_value_p (dv)
3180 ? VALUE_CHANGED (dv_as_value (dv))
3181 : DECL_CHANGED (dv_as_decl (dv)));
3184 /* Return a location list node whose loc is rtx_equal to LOC, in the
3185 location list of a one-part variable or value VAR, or in that of
3186 any values recursively mentioned in the location lists. VARS must
3187 be in star-canonical form. */
3189 static location_chain
3190 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3192 location_chain node;
3193 enum rtx_code loc_code;
3195 if (!var)
3196 return NULL;
3198 gcc_checking_assert (var->onepart);
3200 if (!var->n_var_parts)
3201 return NULL;
3203 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3205 loc_code = GET_CODE (loc);
3206 for (node = var->var_part[0].loc_chain; node; node = node->next)
3208 decl_or_value dv;
3209 variable rvar;
3211 if (GET_CODE (node->loc) != loc_code)
3213 if (GET_CODE (node->loc) != VALUE)
3214 continue;
3216 else if (loc == node->loc)
3217 return node;
3218 else if (loc_code != VALUE)
3220 if (rtx_equal_p (loc, node->loc))
3221 return node;
3222 continue;
3225 /* Since we're in star-canonical form, we don't need to visit
3226 non-canonical nodes: one-part variables and non-canonical
3227 values would only point back to the canonical node. */
3228 if (dv_is_value_p (var->dv)
3229 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3231 /* Skip all subsequent VALUEs. */
3232 while (node->next && GET_CODE (node->next->loc) == VALUE)
3234 node = node->next;
3235 gcc_checking_assert (!canon_value_cmp (node->loc,
3236 dv_as_value (var->dv)));
3237 if (loc == node->loc)
3238 return node;
3240 continue;
3243 gcc_checking_assert (node == var->var_part[0].loc_chain);
3244 gcc_checking_assert (!node->next);
3246 dv = dv_from_value (node->loc);
3247 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3248 return find_loc_in_1pdv (loc, rvar, vars);
3251 /* ??? Gotta look in cselib_val locations too. */
3253 return NULL;
3256 /* Hash table iteration argument passed to variable_merge_over_cur and variable_merge_over_src. */
3257 struct dfset_merge
3259 /* The set in which the merge is to be inserted. */
3260 dataflow_set *dst;
3261 /* The set that we're iterating in. */
3262 dataflow_set *cur;
3263 /* The set that may contain the other dv we are to merge with. */
3264 dataflow_set *src;
3265 /* Number of onepart dvs in src. */
3266 int src_onepart_cnt;
3267 };
3269 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3270 loc_cmp order, and it is maintained as such. */
3272 static void
3273 insert_into_intersection (location_chain *nodep, rtx loc,
3274 enum var_init_status status)
3276 location_chain node;
3277 int r;
3279 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3280 if ((r = loc_cmp (node->loc, loc)) == 0)
3282 node->init = MIN (node->init, status);
3283 return;
3285 else if (r > 0)
3286 break;
3288 node = (location_chain) pool_alloc (loc_chain_pool);
3290 node->loc = loc;
3291 node->set_src = NULL;
3292 node->init = status;
3293 node->next = *nodep;
3294 *nodep = node;
3297 /* Insert in DEST the intersection of the locations present in both
3298 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3299 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3300 DSM->dst. */
3302 static void
3303 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3304 location_chain s1node, variable s2var)
3306 dataflow_set *s1set = dsm->cur;
3307 dataflow_set *s2set = dsm->src;
3308 location_chain found;
3310 if (s2var)
3312 location_chain s2node;
3314 gcc_checking_assert (s2var->onepart);
3316 if (s2var->n_var_parts)
3318 s2node = s2var->var_part[0].loc_chain;
3320 for (; s1node && s2node;
3321 s1node = s1node->next, s2node = s2node->next)
3322 if (s1node->loc != s2node->loc)
3323 break;
3324 else if (s1node->loc == val)
3325 continue;
3326 else
3327 insert_into_intersection (dest, s1node->loc,
3328 MIN (s1node->init, s2node->init));
3332 for (; s1node; s1node = s1node->next)
3334 if (s1node->loc == val)
3335 continue;
3337 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3338 shared_hash_htab (s2set->vars))))
3340 insert_into_intersection (dest, s1node->loc,
3341 MIN (s1node->init, found->init));
3342 continue;
3345 if (GET_CODE (s1node->loc) == VALUE
3346 && !VALUE_RECURSED_INTO (s1node->loc))
3348 decl_or_value dv = dv_from_value (s1node->loc);
3349 variable svar = shared_hash_find (s1set->vars, dv);
3350 if (svar)
3352 if (svar->n_var_parts == 1)
3354 VALUE_RECURSED_INTO (s1node->loc) = true;
3355 intersect_loc_chains (val, dest, dsm,
3356 svar->var_part[0].loc_chain,
3357 s2var);
3358 VALUE_RECURSED_INTO (s1node->loc) = false;
3363 /* ??? gotta look in cselib_val locations too. */
3365 /* ??? if the location is equivalent to any location in src,
3366 searched recursively
3368 add to dst the values needed to represent the equivalence
3370 telling whether location S is equivalent to another dv's
3371 location list:
3373 for each location D in the list
3375 if S and D satisfy rtx_equal_p, then it is present
3377 else if D is a value, recurse without cycles
3379 else if S and D have the same CODE and MODE
3381 for each operand oS and the corresponding oD
3383 if oS and oD are not equivalent, then S and D are not equivalent
3385 else if they are RTX vectors
3387 if any vector oS element is not equivalent to its respective oD,
3388 then S and D are not equivalent
3396 /* Return -1 if X should be before Y in a location list for a 1-part
3397 variable, 1 if Y should be before X, and 0 if they're equivalent
3398 and should not appear in the list. */
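/* The resulting total order sorts registers first, then MEMs, then
   VALUEs (ordered by canon_value_cmp), then all remaining codes by
   GET_CODE and operands, with ENTRY_VALUEs last of all.  */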
3400 static int
3401 loc_cmp (rtx x, rtx y)
3403 int i, j, r;
3404 RTX_CODE code = GET_CODE (x);
3405 const char *fmt;
3407 if (x == y)
3408 return 0;
3410 if (REG_P (x))
3412 if (!REG_P (y))
3413 return -1;
3414 gcc_assert (GET_MODE (x) == GET_MODE (y));
3415 if (REGNO (x) == REGNO (y))
3416 return 0;
3417 else if (REGNO (x) < REGNO (y))
3418 return -1;
3419 else
3420 return 1;
3423 if (REG_P (y))
3424 return 1;
3426 if (MEM_P (x))
3428 if (!MEM_P (y))
3429 return -1;
3430 gcc_assert (GET_MODE (x) == GET_MODE (y));
3431 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3434 if (MEM_P (y))
3435 return 1;
3437 if (GET_CODE (x) == VALUE)
3439 if (GET_CODE (y) != VALUE)
3440 return -1;
3441 /* Don't assert the modes are the same; that is true only
3442 when not recursing. (subreg:QI (value:SI 1:1) 0)
3443 and (subreg:QI (value:DI 2:2) 0) can be compared,
3444 even when the modes are different. */
3445 if (canon_value_cmp (x, y))
3446 return -1;
3447 else
3448 return 1;
3451 if (GET_CODE (y) == VALUE)
3452 return 1;
3454 /* Entry value is the least preferable kind of expression. */
3455 if (GET_CODE (x) == ENTRY_VALUE)
3457 if (GET_CODE (y) != ENTRY_VALUE)
3458 return 1;
3459 gcc_assert (GET_MODE (x) == GET_MODE (y));
3460 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3463 if (GET_CODE (y) == ENTRY_VALUE)
3464 return -1;
3466 if (GET_CODE (x) == GET_CODE (y))
3467 /* Compare operands below. */;
3468 else if (GET_CODE (x) < GET_CODE (y))
3469 return -1;
3470 else
3471 return 1;
3473 gcc_assert (GET_MODE (x) == GET_MODE (y));
3475 if (GET_CODE (x) == DEBUG_EXPR)
3477 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3478 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3479 return -1;
3480 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3481 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3482 return 1;
3485 fmt = GET_RTX_FORMAT (code);
3486 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3487 switch (fmt[i])
3489 case 'w':
3490 if (XWINT (x, i) == XWINT (y, i))
3491 break;
3492 else if (XWINT (x, i) < XWINT (y, i))
3493 return -1;
3494 else
3495 return 1;
3497 case 'n':
3498 case 'i':
3499 if (XINT (x, i) == XINT (y, i))
3500 break;
3501 else if (XINT (x, i) < XINT (y, i))
3502 return -1;
3503 else
3504 return 1;
3506 case 'V':
3507 case 'E':
3508 /* Compare the vector length first. */
3509 if (XVECLEN (x, i) == XVECLEN (y, i))
3510 /* Compare the vectors' elements. */;
3511 else if (XVECLEN (x, i) < XVECLEN (y, i))
3512 return -1;
3513 else
3514 return 1;
3516 for (j = 0; j < XVECLEN (x, i); j++)
3517 if ((r = loc_cmp (XVECEXP (x, i, j),
3518 XVECEXP (y, i, j))))
3519 return r;
3520 break;
3522 case 'e':
3523 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3524 return r;
3525 break;
3527 case 'S':
3528 case 's':
3529 if (XSTR (x, i) == XSTR (y, i))
3530 break;
3531 if (!XSTR (x, i))
3532 return -1;
3533 if (!XSTR (y, i))
3534 return 1;
3535 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3536 break;
3537 else if (r < 0)
3538 return -1;
3539 else
3540 return 1;
3542 case 'u':
3543 /* These are just backpointers, so they don't matter. */
3544 break;
3546 case '0':
3547 case 't':
3548 break;
3550 /* It is believed that rtx's at this level will never
3551 contain anything but integers and other rtx's,
3552 except for within LABEL_REFs and SYMBOL_REFs. */
3553 default:
3554 gcc_unreachable ();
3556 if (CONST_WIDE_INT_P (x))
3558 /* Compare the vector length first. */
3559 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3560 return 1;
3561 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3562 return -1;
3564 /* Compare the vectors' elements. */;
3565 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3567 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3568 return -1;
3569 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3570 return 1;
3574 return 0;
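/* Keeping one-part location chains sorted by loc_cmp is what lets
   variable_union above merge two such chains in a single linear pass,
   and what lets insert_into_intersection maintain its list with a
   simple ordered insertion scan.  */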
3577 #if ENABLE_CHECKING
3578 /* Check the order of entries in one-part variables. */
3580 static int
3581 canonicalize_loc_order_check (variable_def **slot,
3582 dataflow_set *data ATTRIBUTE_UNUSED)
3584 variable var = *slot;
3585 location_chain node, next;
3587 #ifdef ENABLE_RTL_CHECKING
3588 int i;
3589 for (i = 0; i < var->n_var_parts; i++)
3590 gcc_assert (var->var_part[i].cur_loc == NULL);
3591 gcc_assert (!var->in_changed_variables);
3592 #endif
3594 if (!var->onepart)
3595 return 1;
3597 gcc_assert (var->n_var_parts == 1);
3598 node = var->var_part[0].loc_chain;
3599 gcc_assert (node);
3601 while ((next = node->next))
3603 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3604 node = next;
3607 return 1;
3609 #endif
3611 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3612 more likely to be chosen as canonical for an equivalence set.
3613 Ensure less likely values can reach more likely neighbors, making
3614 the connections bidirectional. */
3616 static int
3617 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3619 variable var = *slot;
3620 decl_or_value dv = var->dv;
3621 rtx val;
3622 location_chain node;
3624 if (!dv_is_value_p (dv))
3625 return 1;
3627 gcc_checking_assert (var->n_var_parts == 1);
3629 val = dv_as_value (dv);
3631 for (node = var->var_part[0].loc_chain; node; node = node->next)
3632 if (GET_CODE (node->loc) == VALUE)
3634 if (canon_value_cmp (node->loc, val))
3635 VALUE_RECURSED_INTO (val) = true;
3636 else
3638 decl_or_value odv = dv_from_value (node->loc);
3639 variable_def **oslot;
3640 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3642 set_slot_part (set, val, oslot, odv, 0,
3643 node->init, NULL_RTX);
3645 VALUE_RECURSED_INTO (node->loc) = true;
3649 return 1;
3652 /* Remove redundant entries from equivalence lists in onepart
3653 variables, canonicalizing equivalence sets into star shapes. */
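/* E.g. if V1, V2 and V3 are equivalent and canon_value_cmp prefers
   V3, a chain of links V1 -> V2 -> V3 is flattened so that V1 and V2
   each keep a single link to V3, while V3's own location list
   accumulates everything else: a star centered on the canonical
   value.  */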
3655 static int
3656 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3658 variable var = *slot;
3659 decl_or_value dv = var->dv;
3660 location_chain node;
3661 decl_or_value cdv;
3662 rtx val, cval;
3663 variable_def **cslot;
3664 bool has_value;
3665 bool has_marks;
3667 if (!var->onepart)
3668 return 1;
3670 gcc_checking_assert (var->n_var_parts == 1);
3672 if (dv_is_value_p (dv))
3674 cval = dv_as_value (dv);
3675 if (!VALUE_RECURSED_INTO (cval))
3676 return 1;
3677 VALUE_RECURSED_INTO (cval) = false;
3679 else
3680 cval = NULL_RTX;
3682 restart:
3683 val = cval;
3684 has_value = false;
3685 has_marks = false;
3687 gcc_assert (var->n_var_parts == 1);
3689 for (node = var->var_part[0].loc_chain; node; node = node->next)
3690 if (GET_CODE (node->loc) == VALUE)
3692 has_value = true;
3693 if (VALUE_RECURSED_INTO (node->loc))
3694 has_marks = true;
3695 if (canon_value_cmp (node->loc, cval))
3696 cval = node->loc;
3699 if (!has_value)
3700 return 1;
3702 if (cval == val)
3704 if (!has_marks || dv_is_decl_p (dv))
3705 return 1;
3707 /* Keep it marked so that we revisit it, either after visiting a
3708 child node, or after visiting a new parent that might be
3709 found out. */
3710 VALUE_RECURSED_INTO (val) = true;
3712 for (node = var->var_part[0].loc_chain; node; node = node->next)
3713 if (GET_CODE (node->loc) == VALUE
3714 && VALUE_RECURSED_INTO (node->loc))
3716 cval = node->loc;
3717 restart_with_cval:
3718 VALUE_RECURSED_INTO (cval) = false;
3719 dv = dv_from_value (cval);
3720 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3721 if (!slot)
3723 gcc_assert (dv_is_decl_p (var->dv));
3724 /* The canonical value was reset and dropped.
3725 Remove it. */
3726 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3727 return 1;
3729 var = *slot;
3730 gcc_assert (dv_is_value_p (var->dv));
3731 if (var->n_var_parts == 0)
3732 return 1;
3733 gcc_assert (var->n_var_parts == 1);
3734 goto restart;
3737 VALUE_RECURSED_INTO (val) = false;
3739 return 1;
3742 /* Push values to the canonical one. */
3743 cdv = dv_from_value (cval);
3744 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3746 for (node = var->var_part[0].loc_chain; node; node = node->next)
3747 if (node->loc != cval)
3749 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3750 node->init, NULL_RTX);
3751 if (GET_CODE (node->loc) == VALUE)
3753 decl_or_value ndv = dv_from_value (node->loc);
3755 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3756 NO_INSERT);
3758 if (canon_value_cmp (node->loc, val))
3760 /* If it could have been a local minimum, it's not any more,
3761 since it's now neighbor to cval, so it may have to push
3762 to it. Conversely, if it wouldn't have prevailed over
3763 val, then whatever mark it has is fine: if it was to
3764 push, it will now push to a more canonical node, but if
3765 it wasn't, then it has already pushed any values it might
3766 have to. */
3767 VALUE_RECURSED_INTO (node->loc) = true;
3768 /* Make sure we visit node->loc by ensuring cval is
3769 visited too. */
3770 VALUE_RECURSED_INTO (cval) = true;
3772 else if (!VALUE_RECURSED_INTO (node->loc))
3773 /* If we have no need to "recurse" into this node, it's
3774 already "canonicalized", so drop the link to the old
3775 parent. */
3776 clobber_variable_part (set, cval, ndv, 0, NULL);
3778 else if (GET_CODE (node->loc) == REG)
3780 attrs list = set->regs[REGNO (node->loc)], *listp;
3782 /* Change an existing attribute referring to dv so that it
3783 refers to cdv, removing any duplicate this might
3784 introduce, and checking that no previous duplicates
3785 existed, all in a single pass. */
3787 while (list)
3789 if (list->offset == 0
3790 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3791 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3792 break;
3794 list = list->next;
3797 gcc_assert (list);
3798 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3800 list->dv = cdv;
3801 for (listp = &list->next; (list = *listp); listp = &list->next)
3803 if (list->offset)
3804 continue;
3806 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3808 *listp = list->next;
3809 pool_free (attrs_pool, list);
3810 list = *listp;
3811 break;
3814 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3817 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3819 for (listp = &list->next; (list = *listp); listp = &list->next)
3821 if (list->offset)
3822 continue;
3824 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3826 *listp = list->next;
3827 pool_free (attrs_pool, list);
3828 list = *listp;
3829 break;
3832 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3835 else
3836 gcc_unreachable ();
3838 #if ENABLE_CHECKING
3839 while (list)
3841 if (list->offset == 0
3842 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3843 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3844 gcc_unreachable ();
3846 list = list->next;
3848 #endif
3852 if (val)
3853 set_slot_part (set, val, cslot, cdv, 0,
3854 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3856 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3858 /* Variable may have been unshared. */
3859 var = *slot;
3860 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3861 && var->var_part[0].loc_chain->next == NULL);
3863 if (VALUE_RECURSED_INTO (cval))
3864 goto restart_with_cval;
3866 return 1;
3869 /* Bind one-part variables to the canonical value in an equivalence
3870 set. Not doing this causes dataflow convergence failure in rare
3871 circumstances, see PR42873. Unfortunately we can't do this
3872 efficiently as part of canonicalize_values_star, since we may not
3873 have determined or even seen the canonical value of a set when we
3874 get to a variable that references another member of the set. */
3876 static int
3877 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3879 variable var = *slot;
3880 decl_or_value dv = var->dv;
3881 location_chain node;
3882 rtx cval;
3883 decl_or_value cdv;
3884 variable_def **cslot;
3885 variable cvar;
3886 location_chain cnode;
3888 if (!var->onepart || var->onepart == ONEPART_VALUE)
3889 return 1;
3891 gcc_assert (var->n_var_parts == 1);
3893 node = var->var_part[0].loc_chain;
3895 if (GET_CODE (node->loc) != VALUE)
3896 return 1;
3898 gcc_assert (!node->next);
3899 cval = node->loc;
3901 /* Push values to the canonical one. */
3902 cdv = dv_from_value (cval);
3903 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3904 if (!cslot)
3905 return 1;
3906 cvar = *cslot;
3907 gcc_assert (cvar->n_var_parts == 1);
3909 cnode = cvar->var_part[0].loc_chain;
3911 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3912 that are not "more canonical" than it. */
3913 if (GET_CODE (cnode->loc) != VALUE
3914 || !canon_value_cmp (cnode->loc, cval))
3915 return 1;
3917 /* CVAL was found to be non-canonical. Change the variable to point
3918 to the canonical VALUE. */
3919 gcc_assert (!cnode->next);
3920 cval = cnode->loc;
3922 slot = set_slot_part (set, cval, slot, dv, 0,
3923 node->init, node->set_src);
3924 clobber_slot_part (set, cval, slot, 0, node->set_src);
3926 return 1;
3929 /* Combine the variable or value S1VAR (in DSM->cur) with the
3930 corresponding entry in DSM->src. Multi-part variables are combined
3931 with variable_union, whereas onepart dvs are combined with
3932 intersection. */
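/* Intersection is the safe combination for onepart dvs: at a merge
   point, a location can be trusted to hold a value only if it holds
   it on every incoming path.  Multi-part variables, by contrast, are
   unioned so that no part's known location is dropped.  */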
3934 static int
3935 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3937 dataflow_set *dst = dsm->dst;
3938 variable_def **dstslot;
3939 variable s2var, dvar = NULL;
3940 decl_or_value dv = s1var->dv;
3941 onepart_enum_t onepart = s1var->onepart;
3942 rtx val;
3943 hashval_t dvhash;
3944 location_chain node, *nodep;
3946 /* If the incoming onepart variable has an empty location list, then
3947 the intersection will be just as empty. For other variables,
3948 it's always union. */
3949 gcc_checking_assert (s1var->n_var_parts
3950 && s1var->var_part[0].loc_chain);
3952 if (!onepart)
3953 return variable_union (s1var, dst);
3955 gcc_checking_assert (s1var->n_var_parts == 1);
3957 dvhash = dv_htab_hash (dv);
3958 if (dv_is_value_p (dv))
3959 val = dv_as_value (dv);
3960 else
3961 val = NULL;
3963 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3964 if (!s2var)
3966 dst_can_be_shared = false;
3967 return 1;
3970 dsm->src_onepart_cnt--;
3971 gcc_assert (s2var->var_part[0].loc_chain
3972 && s2var->onepart == onepart
3973 && s2var->n_var_parts == 1);
3975 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3976 if (dstslot)
3978 dvar = *dstslot;
3979 gcc_assert (dvar->refcount == 1
3980 && dvar->onepart == onepart
3981 && dvar->n_var_parts == 1);
3982 nodep = &dvar->var_part[0].loc_chain;
3984 else
3986 nodep = &node;
3987 node = NULL;
3990 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3992 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3993 dvhash, INSERT);
3994 *dstslot = dvar = s2var;
3995 dvar->refcount++;
3997 else
3999 dst_can_be_shared = false;
4001 intersect_loc_chains (val, nodep, dsm,
4002 s1var->var_part[0].loc_chain, s2var);
4004 if (!dstslot)
4006 if (node)
4008 dvar = (variable) pool_alloc (onepart_pool (onepart));
4009 dvar->dv = dv;
4010 dvar->refcount = 1;
4011 dvar->n_var_parts = 1;
4012 dvar->onepart = onepart;
4013 dvar->in_changed_variables = false;
4014 dvar->var_part[0].loc_chain = node;
4015 dvar->var_part[0].cur_loc = NULL;
4016 if (onepart)
4017 VAR_LOC_1PAUX (dvar) = NULL;
4018 else
4019 VAR_PART_OFFSET (dvar, 0) = 0;
4021 dstslot
4022 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4023 INSERT);
4024 gcc_assert (!*dstslot);
4025 *dstslot = dvar;
4027 else
4028 return 1;
4032 nodep = &dvar->var_part[0].loc_chain;
4033 while ((node = *nodep))
4035 location_chain *nextp = &node->next;
4037 if (GET_CODE (node->loc) == REG)
4039 attrs list;
4041 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4042 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4043 && dv_is_value_p (list->dv))
4044 break;
4046 if (!list)
4047 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4048 dv, 0, node->loc);
4049 /* If this value became canonical for another value that had
4050 this register, we want to leave it alone. */
4051 else if (dv_as_value (list->dv) != val)
4053 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4054 dstslot, dv, 0,
4055 node->init, NULL_RTX);
4056 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4058 /* Since nextp points into the removed node, we can't
4059 use it. The pointer to the next node moved to nodep.
4060 However, if the variable we're walking is unshared
4061 during our walk, we'll keep walking the location list
4062 of the previously-shared variable, in which case the
4063 node won't have been removed, and we'll want to skip
4064 it. That's why we test *nodep here. */
4065 if (*nodep != node)
4066 nextp = nodep;
4069 else
4070 /* Canonicalization puts registers first, so we don't have to
4071 walk it all. */
4072 break;
4073 nodep = nextp;
4076 if (dvar != *dstslot)
4077 dvar = *dstslot;
4078 nodep = &dvar->var_part[0].loc_chain;
4080 if (val)
4082 /* Mark all referenced nodes for canonicalization, and make sure
4083 we have mutual equivalence links. */
4084 VALUE_RECURSED_INTO (val) = true;
4085 for (node = *nodep; node; node = node->next)
4086 if (GET_CODE (node->loc) == VALUE)
4088 VALUE_RECURSED_INTO (node->loc) = true;
4089 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4090 node->init, NULL, INSERT);
4093 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4094 gcc_assert (*dstslot == dvar);
4095 canonicalize_values_star (dstslot, dst);
4096 gcc_checking_assert (dstslot
4097 == shared_hash_find_slot_noinsert_1 (dst->vars,
4098 dv, dvhash));
4099 dvar = *dstslot;
4101 else
4103 bool has_value = false, has_other = false;
4105 /* If we have one value and anything else, we're going to
4106 canonicalize this, so make sure all values have an entry in
4107 the table and are marked for canonicalization. */
4108 for (node = *nodep; node; node = node->next)
4110 if (GET_CODE (node->loc) == VALUE)
4112 /* If this was marked during register canonicalization,
4113 we know we have to canonicalize values. */
4114 if (has_value)
4115 has_other = true;
4116 has_value = true;
4117 if (has_other)
4118 break;
4120 else
4122 has_other = true;
4123 if (has_value)
4124 break;
4128 if (has_value && has_other)
4130 for (node = *nodep; node; node = node->next)
4132 if (GET_CODE (node->loc) == VALUE)
4134 decl_or_value dv = dv_from_value (node->loc);
4135 variable_def **slot = NULL;
4137 if (shared_hash_shared (dst->vars))
4138 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4139 if (!slot)
4140 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4141 INSERT);
4142 if (!*slot)
4144 variable var = (variable) pool_alloc (onepart_pool
4145 (ONEPART_VALUE));
4146 var->dv = dv;
4147 var->refcount = 1;
4148 var->n_var_parts = 1;
4149 var->onepart = ONEPART_VALUE;
4150 var->in_changed_variables = false;
4151 var->var_part[0].loc_chain = NULL;
4152 var->var_part[0].cur_loc = NULL;
4153 VAR_LOC_1PAUX (var) = NULL;
4154 *slot = var;
4157 VALUE_RECURSED_INTO (node->loc) = true;
4161 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4162 gcc_assert (*dstslot == dvar);
4163 canonicalize_values_star (dstslot, dst);
4164 gcc_checking_assert (dstslot
4165 == shared_hash_find_slot_noinsert_1 (dst->vars,
4166 dv, dvhash));
4167 dvar = *dstslot;
4171 if (!onepart_variable_different_p (dvar, s2var))
4173 variable_htab_free (dvar);
4174 *dstslot = dvar = s2var;
4175 dvar->refcount++;
4177 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4179 variable_htab_free (dvar);
4180 *dstslot = dvar = s1var;
4181 dvar->refcount++;
4182 dst_can_be_shared = false;
4184 else
4185 dst_can_be_shared = false;
4187 return 1;
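/* Illustrative sketch (not part of the pass): if a one-part decl D has
   locations {(reg:SI 0), (value V)} in DSM->cur and {(value V),
   (mem:SI ...)} in DSM->src, intersect_loc_chains above keeps only the
   common (value V) for D in DSM->dst; the register and memory
   locations can survive only through equivalences recorded for V.  */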
4190 /* Copy s2var (in DSM->src) to DSM->dst if the variable is a
4191 multi-part variable. Unions of multi-part variables and
4192 intersections of one-part ones will be handled in
4193 variable_merge_over_cur(). */
4195 static int
4196 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4198 dataflow_set *dst = dsm->dst;
4199 decl_or_value dv = s2var->dv;
4201 if (!s2var->onepart)
4203 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4204 *dstp = s2var;
4205 s2var->refcount++;
4206 return 1;
4209 dsm->src_onepart_cnt++;
4210 return 1;
4213 /* Combine dataflow set information from SRC2 into DST; the original
4214 contents of DST serve as the other input to the merge. */
4216 static void
4217 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4219 dataflow_set cur = *dst;
4220 dataflow_set *src1 = &cur;
4221 struct dfset_merge dsm;
4222 int i;
4223 size_t src1_elems, src2_elems;
4224 variable_iterator_type hi;
4225 variable var;
4227 src1_elems = shared_hash_htab (src1->vars)->elements ();
4228 src2_elems = shared_hash_htab (src2->vars)->elements ();
4229 dataflow_set_init (dst);
4230 dst->stack_adjust = cur.stack_adjust;
4231 shared_hash_destroy (dst->vars);
4232 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4233 dst->vars->refcount = 1;
4234 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4236 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4237 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4239 dsm.dst = dst;
4240 dsm.src = src2;
4241 dsm.cur = src1;
4242 dsm.src_onepart_cnt = 0;
4244 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4245 var, variable, hi)
4246 variable_merge_over_src (var, &dsm);
4247 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4248 var, variable, hi)
4249 variable_merge_over_cur (var, &dsm);
4251 if (dsm.src_onepart_cnt)
4252 dst_can_be_shared = false;
4254 dataflow_set_destroy (src1);
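/* Rough sketch of a merge at a join point: multi-part variables from
   SRC2 are unioned in by variable_merge_over_src, multi-part variables
   from the original DST are unioned in by variable_merge_over_cur, and
   one-part variables are intersected there, so a one-part decl known
   on only one incoming edge is dropped from the merged set.  */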
4257 /* Mark register equivalences. */
4259 static void
4260 dataflow_set_equiv_regs (dataflow_set *set)
4262 int i;
4263 attrs list, *listp;
4265 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4267 rtx canon[NUM_MACHINE_MODES];
4269 /* If the list is empty or one entry, no need to canonicalize
4270 anything. */
4271 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4272 continue;
4274 memset (canon, 0, sizeof (canon));
4276 for (list = set->regs[i]; list; list = list->next)
4277 if (list->offset == 0 && dv_is_value_p (list->dv))
4279 rtx val = dv_as_value (list->dv);
4280 rtx *cvalp = &canon[(int)GET_MODE (val)];
4281 rtx cval = *cvalp;
4283 if (canon_value_cmp (val, cval))
4284 *cvalp = val;
4287 for (list = set->regs[i]; list; list = list->next)
4288 if (list->offset == 0 && dv_onepart_p (list->dv))
4290 rtx cval = canon[(int)GET_MODE (list->loc)];
4292 if (!cval)
4293 continue;
4295 if (dv_is_value_p (list->dv))
4297 rtx val = dv_as_value (list->dv);
4299 if (val == cval)
4300 continue;
4302 VALUE_RECURSED_INTO (val) = true;
4303 set_variable_part (set, val, dv_from_value (cval), 0,
4304 VAR_INIT_STATUS_INITIALIZED,
4305 NULL, NO_INSERT);
4308 VALUE_RECURSED_INTO (cval) = true;
4309 set_variable_part (set, cval, list->dv, 0,
4310 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4313 for (listp = &set->regs[i]; (list = *listp);
4314 listp = list ? &list->next : listp)
4315 if (list->offset == 0 && dv_onepart_p (list->dv))
4317 rtx cval = canon[(int)GET_MODE (list->loc)];
4318 variable_def **slot;
4320 if (!cval)
4321 continue;
4323 if (dv_is_value_p (list->dv))
4325 rtx val = dv_as_value (list->dv);
4326 if (!VALUE_RECURSED_INTO (val))
4327 continue;
4330 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4331 canonicalize_values_star (slot, set);
4332 if (*listp != list)
4333 list = NULL;
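/* For example, if (value V1) and (value V2) are both recorded at
   offset 0 of the same register in the same mode, the canon_value_cmp
   winner, say V1, becomes canonical: mutual equivalence links between
   V1 and V2 are added, and canonicalize_values_star then collapses
   them so V1 is the single preferred name.  (Illustrative description
   only; the ordering comes from canon_value_cmp.)  */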
4338 /* Remove any redundant values in the location list of VAR, which must
4339 be unshared and 1-part. */
4341 static void
4342 remove_duplicate_values (variable var)
4344 location_chain node, *nodep;
4346 gcc_assert (var->onepart);
4347 gcc_assert (var->n_var_parts == 1);
4348 gcc_assert (var->refcount == 1);
4350 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4352 if (GET_CODE (node->loc) == VALUE)
4354 if (VALUE_RECURSED_INTO (node->loc))
4356 /* Remove duplicate value node. */
4357 *nodep = node->next;
4358 pool_free (loc_chain_pool, node);
4359 continue;
4361 else
4362 VALUE_RECURSED_INTO (node->loc) = true;
4364 nodep = &node->next;
4367 for (node = var->var_part[0].loc_chain; node; node = node->next)
4368 if (GET_CODE (node->loc) == VALUE)
4370 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4371 VALUE_RECURSED_INTO (node->loc) = false;
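/* The VALUE_RECURSED_INTO bit doubles as a visited mark above: the
   first loop deletes any node whose VALUE is already marked, e.g.
   reducing the chain (V1, reg, V1) to (V1, reg), and the second loop
   clears the marks so the bit can be reused later.  */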
4376 /* Hash table iteration argument passed to variable_post_merge. */
4377 struct dfset_post_merge
4379 /* The new input set for the current block. */
4380 dataflow_set *set;
4381 /* Pointer to the permanent input set for the current block, or
4382 NULL. */
4383 dataflow_set **permp;
4386 /* Create values for incoming expressions associated with one-part
4387 variables that don't have value numbers for them. */
4389 static int
4390 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4392 dataflow_set *set = dfpm->set;
4393 variable var = *slot;
4394 location_chain node;
4396 if (!var->onepart || !var->n_var_parts)
4397 return 1;
4399 gcc_assert (var->n_var_parts == 1);
4401 if (dv_is_decl_p (var->dv))
4403 bool check_dupes = false;
4405 restart:
4406 for (node = var->var_part[0].loc_chain; node; node = node->next)
4408 if (GET_CODE (node->loc) == VALUE)
4409 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4410 else if (GET_CODE (node->loc) == REG)
4412 attrs att, *attp, *curp = NULL;
4414 if (var->refcount != 1)
4416 slot = unshare_variable (set, slot, var,
4417 VAR_INIT_STATUS_INITIALIZED);
4418 var = *slot;
4419 goto restart;
4422 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4423 attp = &att->next)
4424 if (att->offset == 0
4425 && GET_MODE (att->loc) == GET_MODE (node->loc))
4427 if (dv_is_value_p (att->dv))
4429 rtx cval = dv_as_value (att->dv);
4430 node->loc = cval;
4431 check_dupes = true;
4432 break;
4434 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4435 curp = attp;
4438 if (!curp)
4440 curp = attp;
4441 while (*curp)
4442 if ((*curp)->offset == 0
4443 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4444 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4445 break;
4446 else
4447 curp = &(*curp)->next;
4448 gcc_assert (*curp);
4451 if (!att)
4453 decl_or_value cdv;
4454 rtx cval;
4456 if (!*dfpm->permp)
4458 *dfpm->permp = XNEW (dataflow_set);
4459 dataflow_set_init (*dfpm->permp);
4462 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4463 att; att = att->next)
4464 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4466 gcc_assert (att->offset == 0
4467 && dv_is_value_p (att->dv));
4468 val_reset (set, att->dv);
4469 break;
4472 if (att)
4474 cdv = att->dv;
4475 cval = dv_as_value (cdv);
4477 else
4479 /* Create a unique value to hold this register; it
4480 ought to be found and reused in
4481 subsequent rounds. */
4482 cselib_val *v;
4483 gcc_assert (!cselib_lookup (node->loc,
4484 GET_MODE (node->loc), 0,
4485 VOIDmode));
4486 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4487 VOIDmode);
4488 cselib_preserve_value (v);
4489 cselib_invalidate_rtx (node->loc);
4490 cval = v->val_rtx;
4491 cdv = dv_from_value (cval);
4492 if (dump_file)
4493 fprintf (dump_file,
4494 "Created new value %u:%u for reg %i\n",
4495 v->uid, v->hash, REGNO (node->loc));
4498 var_reg_decl_set (*dfpm->permp, node->loc,
4499 VAR_INIT_STATUS_INITIALIZED,
4500 cdv, 0, NULL, INSERT);
4502 node->loc = cval;
4503 check_dupes = true;
4506 /* Remove the attribute referring to the decl, which now
4507 uses the value for the register; that value either already
4508 exists or will be added when we bring the permanent set in. */
4509 att = *curp;
4510 *curp = att->next;
4511 pool_free (attrs_pool, att);
4515 if (check_dupes)
4516 remove_duplicate_values (var);
4519 return 1;
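/* E.g. if decl D is only known to live in (reg:SI 5) and no VALUE was
   ever associated with that register, a fresh cselib value (call it
   VP; the name is ours, for illustration) is created, preserved and
   recorded in the permanent set, D's location is rewritten to VP, and
   the same VP is found and reused on later iterations of the dataflow
   fixed point.  */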
4522 /* Reset values in the permanent set that are not associated with the
4523 chosen expression. */
4525 static int
4526 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4528 dataflow_set *set = dfpm->set;
4529 variable pvar = *pslot, var;
4530 location_chain pnode;
4531 decl_or_value dv;
4532 attrs att;
4534 gcc_assert (dv_is_value_p (pvar->dv)
4535 && pvar->n_var_parts == 1);
4536 pnode = pvar->var_part[0].loc_chain;
4537 gcc_assert (pnode
4538 && !pnode->next
4539 && REG_P (pnode->loc));
4541 dv = pvar->dv;
4543 var = shared_hash_find (set->vars, dv);
4544 if (var)
4546 /* Although variable_post_merge_new_vals may have made decls
4547 non-star-canonical, values that pre-existed in canonical form
4548 remain canonical, and newly-created values reference a single
4549 REG, so they are canonical as well. Since VAR has the
4550 location list for a VALUE, using find_loc_in_1pdv for it is
4551 fine, since VALUEs don't map back to DECLs. */
4552 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4553 return 1;
4554 val_reset (set, dv);
4557 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4558 if (att->offset == 0
4559 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4560 && dv_is_value_p (att->dv))
4561 break;
4563 /* If there is a value associated with this register already, create
4564 an equivalence. */
4565 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4567 rtx cval = dv_as_value (att->dv);
4568 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4569 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4570 NULL, INSERT);
4572 else if (!att)
4574 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4575 dv, 0, pnode->loc);
4576 variable_union (pvar, set);
4579 return 1;
4582 /* Create values for incoming expressions, reconcile them with the
4583 permanent set, and canonicalize the values and variables in SET. */
4585 static void
4586 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4588 struct dfset_post_merge dfpm;
4590 dfpm.set = set;
4591 dfpm.permp = permp;
4593 shared_hash_htab (set->vars)
4594 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4595 if (*permp)
4596 shared_hash_htab ((*permp)->vars)
4597 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4598 shared_hash_htab (set->vars)
4599 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4600 shared_hash_htab (set->vars)
4601 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4604 /* Return a node whose loc is a MEM that refers to EXPR in the
4605 location list of a one-part variable or value VAL, or in that of
4606 any values recursively mentioned in the location lists. */
4608 static location_chain
4609 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4611 location_chain node;
4612 decl_or_value dv;
4613 variable var;
4614 location_chain where = NULL;
4616 if (!val)
4617 return NULL;
4619 gcc_assert (GET_CODE (val) == VALUE
4620 && !VALUE_RECURSED_INTO (val));
4622 dv = dv_from_value (val);
4623 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4625 if (!var)
4626 return NULL;
4628 gcc_assert (var->onepart);
4630 if (!var->n_var_parts)
4631 return NULL;
4633 VALUE_RECURSED_INTO (val) = true;
4635 for (node = var->var_part[0].loc_chain; node; node = node->next)
4636 if (MEM_P (node->loc)
4637 && MEM_EXPR (node->loc) == expr
4638 && INT_MEM_OFFSET (node->loc) == 0)
4640 where = node;
4641 break;
4643 else if (GET_CODE (node->loc) == VALUE
4644 && !VALUE_RECURSED_INTO (node->loc)
4645 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4646 break;
4648 VALUE_RECURSED_INTO (val) = false;
4650 return where;
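/* Setting VALUE_RECURSED_INTO above guards the recursion against
   cycles: if V1's location list mentions V2 and V2's mentions V1, the
   nested walk stops when it reaches a value already being visited.  */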
4653 /* Return TRUE if the value of MEM may vary across a call. */
4655 static bool
4656 mem_dies_at_call (rtx mem)
4658 tree expr = MEM_EXPR (mem);
4659 tree decl;
4661 if (!expr)
4662 return true;
4664 decl = get_base_address (expr);
4666 if (!decl)
4667 return true;
4669 if (!DECL_P (decl))
4670 return true;
4672 return (may_be_aliased (decl)
4673 || (!TREE_READONLY (decl) && is_global_var (decl)));
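/* So, for instance, a MEM for a non-aliased, non-global local is
   assumed to survive a call, while a MEM for a global or possibly
   aliased decl must be assumed clobbered by the callee.  */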
4676 /* Remove all MEMs from the location list of a hash table entry for a
4677 one-part variable, except those whose MEM attributes map back to
4678 the variable itself, directly or within a VALUE. */
4680 static int
4681 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4683 variable var = *slot;
4685 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4687 tree decl = dv_as_decl (var->dv);
4688 location_chain loc, *locp;
4689 bool changed = false;
4691 if (!var->n_var_parts)
4692 return 1;
4694 gcc_assert (var->n_var_parts == 1);
4696 if (shared_var_p (var, set->vars))
4698 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4700 /* We want to remove dying MEMs that don't refer to DECL. */
4701 if (GET_CODE (loc->loc) == MEM
4702 && (MEM_EXPR (loc->loc) != decl
4703 || INT_MEM_OFFSET (loc->loc) != 0)
4704 && !mem_dies_at_call (loc->loc))
4705 break;
4706 /* We want to move MEMs that do refer to DECL here. */
4707 else if (GET_CODE (loc->loc) == VALUE
4708 && find_mem_expr_in_1pdv (decl, loc->loc,
4709 shared_hash_htab (set->vars)))
4710 break;
4713 if (!loc)
4714 return 1;
4716 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4717 var = *slot;
4718 gcc_assert (var->n_var_parts == 1);
4721 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4722 loc; loc = *locp)
4724 rtx old_loc = loc->loc;
4725 if (GET_CODE (old_loc) == VALUE)
4727 location_chain mem_node
4728 = find_mem_expr_in_1pdv (decl, loc->loc,
4729 shared_hash_htab (set->vars));
4731 /* ??? This picks up only one out of multiple MEMs that
4732 refer to the same variable. Do we ever need to be
4733 concerned about dealing with more than one, or, given
4734 that they should all map to the same variable
4735 location, will their addresses have been merged so
4736 that they are regarded as equivalent? */
4737 if (mem_node)
4739 loc->loc = mem_node->loc;
4740 loc->set_src = mem_node->set_src;
4741 loc->init = MIN (loc->init, mem_node->init);
4745 if (GET_CODE (loc->loc) != MEM
4746 || (MEM_EXPR (loc->loc) == decl
4747 && INT_MEM_OFFSET (loc->loc) == 0)
4748 || !mem_dies_at_call (loc->loc))
4750 if (old_loc != loc->loc && emit_notes)
4752 if (old_loc == var->var_part[0].cur_loc)
4754 changed = true;
4755 var->var_part[0].cur_loc = NULL;
4758 locp = &loc->next;
4759 continue;
4762 if (emit_notes)
4764 if (old_loc == var->var_part[0].cur_loc)
4766 changed = true;
4767 var->var_part[0].cur_loc = NULL;
4770 *locp = loc->next;
4771 pool_free (loc_chain_pool, loc);
4774 if (!var->var_part[0].loc_chain)
4776 var->n_var_parts--;
4777 changed = true;
4779 if (changed)
4780 variable_was_changed (var, set);
4783 return 1;
4786 /* Remove all MEMs from the location list of a hash table entry for a
4787 value. */
4789 static int
4790 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4792 variable var = *slot;
4794 if (var->onepart == ONEPART_VALUE)
4796 location_chain loc, *locp;
4797 bool changed = false;
4798 rtx cur_loc;
4800 gcc_assert (var->n_var_parts == 1);
4802 if (shared_var_p (var, set->vars))
4804 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4805 if (GET_CODE (loc->loc) == MEM
4806 && mem_dies_at_call (loc->loc))
4807 break;
4809 if (!loc)
4810 return 1;
4812 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4813 var = *slot;
4814 gcc_assert (var->n_var_parts == 1);
4817 if (VAR_LOC_1PAUX (var))
4818 cur_loc = VAR_LOC_FROM (var);
4819 else
4820 cur_loc = var->var_part[0].cur_loc;
4822 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4823 loc; loc = *locp)
4825 if (GET_CODE (loc->loc) != MEM
4826 || !mem_dies_at_call (loc->loc))
4828 locp = &loc->next;
4829 continue;
4832 *locp = loc->next;
4833 /* If we have deleted the location that was last emitted,
4834 we have to emit a new location, so add the variable to the
4835 set of changed variables. */
4836 if (cur_loc == loc->loc)
4838 changed = true;
4839 var->var_part[0].cur_loc = NULL;
4840 if (VAR_LOC_1PAUX (var))
4841 VAR_LOC_FROM (var) = NULL;
4843 pool_free (loc_chain_pool, loc);
4846 if (!var->var_part[0].loc_chain)
4848 var->n_var_parts--;
4849 changed = true;
4851 if (changed)
4852 variable_was_changed (var, set);
4855 return 1;
4858 /* Remove all variable-location information about call-clobbered
4859 registers, as well as associations between MEMs and VALUEs. */
4861 static void
4862 dataflow_set_clear_at_call (dataflow_set *set)
4864 unsigned int r;
4865 hard_reg_set_iterator hrsi;
4867 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4868 var_regno_delete (set, r);
4870 if (MAY_HAVE_DEBUG_INSNS)
4872 set->traversed_vars = set->vars;
4873 shared_hash_htab (set->vars)
4874 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4875 set->traversed_vars = set->vars;
4876 shared_hash_htab (set->vars)
4877 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4878 set->traversed_vars = NULL;
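/* Sketch of the effect: after a call insn, any association between a
   variable and a register in regs_invalidated_by_call is dropped, and
   a MEM location survives only if it does not die at the call or it
   maps back to the variable itself.  */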
4882 static bool
4883 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4885 location_chain lc1, lc2;
4887 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4889 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4891 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4893 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4894 break;
4896 if (rtx_equal_p (lc1->loc, lc2->loc))
4897 break;
4899 if (!lc2)
4900 return true;
4902 return false;
4905 /* Return true if one-part variables VAR1 and VAR2 are different.
4906 They must be in canonical order. */
4908 static bool
4909 onepart_variable_different_p (variable var1, variable var2)
4911 location_chain lc1, lc2;
4913 if (var1 == var2)
4914 return false;
4916 gcc_assert (var1->n_var_parts == 1
4917 && var2->n_var_parts == 1);
4919 lc1 = var1->var_part[0].loc_chain;
4920 lc2 = var2->var_part[0].loc_chain;
4922 gcc_assert (lc1 && lc2);
4924 while (lc1 && lc2)
4926 if (loc_cmp (lc1->loc, lc2->loc))
4927 return true;
4928 lc1 = lc1->next;
4929 lc2 = lc2->next;
4932 return lc1 != lc2;
4935 /* Return true if variables VAR1 and VAR2 are different. */
4937 static bool
4938 variable_different_p (variable var1, variable var2)
4940 int i;
4942 if (var1 == var2)
4943 return false;
4945 if (var1->onepart != var2->onepart)
4946 return true;
4948 if (var1->n_var_parts != var2->n_var_parts)
4949 return true;
4951 if (var1->onepart && var1->n_var_parts)
4953 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4954 && var1->n_var_parts == 1);
4955 /* One-part values have locations in a canonical order. */
4956 return onepart_variable_different_p (var1, var2);
4959 for (i = 0; i < var1->n_var_parts; i++)
4961 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4962 return true;
4963 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4964 return true;
4965 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4966 return true;
4968 return false;
4971 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4973 static bool
4974 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4976 variable_iterator_type hi;
4977 variable var1;
4979 if (old_set->vars == new_set->vars)
4980 return false;
4982 if (shared_hash_htab (old_set->vars)->elements ()
4983 != shared_hash_htab (new_set->vars)->elements ())
4984 return true;
4986 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
4987 var1, variable, hi)
4989 variable_table_type *htab = shared_hash_htab (new_set->vars);
4990 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4991 if (!var2)
4993 if (dump_file && (dump_flags & TDF_DETAILS))
4995 fprintf (dump_file, "dataflow difference found: removal of:\n");
4996 dump_var (var1);
4998 return true;
5001 if (variable_different_p (var1, var2))
5003 if (dump_file && (dump_flags & TDF_DETAILS))
5005 fprintf (dump_file, "dataflow difference found: "
5006 "old and new follow:\n");
5007 dump_var (var1);
5008 dump_var (var2);
5010 return true;
5014 /* There is no need to traverse the second hashtab: if both have the
5015 same number of elements and every entry of the first was found in
5016 the second, the second cannot have any extra entries. */
5017 return false;
5020 /* Free the contents of dataflow set SET. */
5022 static void
5023 dataflow_set_destroy (dataflow_set *set)
5025 int i;
5027 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5028 attrs_list_clear (&set->regs[i]);
5030 shared_hash_destroy (set->vars);
5031 set->vars = NULL;
5034 /* Return true if RTL X contains a SYMBOL_REF. */
5036 static bool
5037 contains_symbol_ref (rtx x)
5039 const char *fmt;
5040 RTX_CODE code;
5041 int i;
5043 if (!x)
5044 return false;
5046 code = GET_CODE (x);
5047 if (code == SYMBOL_REF)
5048 return true;
5050 fmt = GET_RTX_FORMAT (code);
5051 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5053 if (fmt[i] == 'e')
5055 if (contains_symbol_ref (XEXP (x, i)))
5056 return true;
5058 else if (fmt[i] == 'E')
5060 int j;
5061 for (j = 0; j < XVECLEN (x, i); j++)
5062 if (contains_symbol_ref (XVECEXP (x, i, j)))
5063 return true;
5067 return false;
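/* E.g. (const (plus (symbol_ref ("x")) (const_int 4))) contains a
   SYMBOL_REF, whereas (plus (reg 1) (const_int 4)) does not.  */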
5070 /* Shall EXPR be tracked? */
5072 static bool
5073 track_expr_p (tree expr, bool need_rtl)
5075 rtx decl_rtl;
5076 tree realdecl;
5078 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5079 return DECL_RTL_SET_P (expr);
5081 /* If EXPR is not a parameter or a variable, do not track it. */
5082 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5083 return 0;
5085 /* It also must have a name... */
5086 if (!DECL_NAME (expr) && need_rtl)
5087 return 0;
5089 /* ... and an RTL assigned to it. */
5090 decl_rtl = DECL_RTL_IF_SET (expr);
5091 if (!decl_rtl && need_rtl)
5092 return 0;
5094 /* If this expression is really a debug alias of some other declaration, we
5095 don't need to track this expression if the ultimate declaration is
5096 ignored. */
5097 realdecl = expr;
5098 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5100 realdecl = DECL_DEBUG_EXPR (realdecl);
5101 if (!DECL_P (realdecl))
5103 if (handled_component_p (realdecl)
5104 || (TREE_CODE (realdecl) == MEM_REF
5105 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5107 HOST_WIDE_INT bitsize, bitpos, maxsize;
5108 tree innerdecl
5109 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5110 &maxsize);
5111 if (!DECL_P (innerdecl)
5112 || DECL_IGNORED_P (innerdecl)
5113 /* Do not track declarations for parts of tracked parameters
5114 since we want to track them as a whole instead. */
5115 || (TREE_CODE (innerdecl) == PARM_DECL
5116 && DECL_MODE (innerdecl) != BLKmode
5117 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5118 || TREE_STATIC (innerdecl)
5119 || bitsize <= 0
5120 || bitpos + bitsize > 256
5121 || bitsize != maxsize)
5122 return 0;
5123 else
5124 realdecl = expr;
5126 else
5127 return 0;
5131 /* Do not track EXPR if its REALDECL should be ignored for debugging
5132 purposes. */
5133 if (DECL_IGNORED_P (realdecl))
5134 return 0;
5136 /* Do not track global variables until we are able to emit correct
5137 location lists for them. */
5138 if (TREE_STATIC (realdecl))
5139 return 0;
5141 /* When EXPR is a DECL that aliases some other variable (see the
5142 example below), the TREE_STATIC flag is not used. Disable tracking
5143 of all DECLs whose DECL_RTL contains a SYMBOL_REF.
5145 Example:
5146 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5147 char **_dl_argv;
5149 if (decl_rtl && MEM_P (decl_rtl)
5150 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5151 return 0;
5153 /* If the RTL is a memory, it should not be very large (because that
5154 would be an array or a struct). */
5155 if (decl_rtl && MEM_P (decl_rtl))
5157 /* Do not track structures and arrays. */
5158 if (GET_MODE (decl_rtl) == BLKmode
5159 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5160 return 0;
5161 if (MEM_SIZE_KNOWN_P (decl_rtl)
5162 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5163 return 0;
5166 DECL_CHANGED (expr) = 0;
5167 DECL_CHANGED (realdecl) = 0;
5168 return 1;
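/* So a named scalar local or parameter with RTL assigned is tracked,
   while a global (TREE_STATIC), a DECL_IGNORED_P decl, a BLKmode or
   aggregate MEM, or a MEM larger than MAX_VAR_PARTS bytes is not.  */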
5171 /* Determine whether a given LOC refers to the same variable part as
5172 EXPR+OFFSET. */
5174 static bool
5175 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5177 tree expr2;
5178 HOST_WIDE_INT offset2;
5180 if (! DECL_P (expr))
5181 return false;
5183 if (REG_P (loc))
5185 expr2 = REG_EXPR (loc);
5186 offset2 = REG_OFFSET (loc);
5188 else if (MEM_P (loc))
5190 expr2 = MEM_EXPR (loc);
5191 offset2 = INT_MEM_OFFSET (loc);
5193 else
5194 return false;
5196 if (! expr2 || ! DECL_P (expr2))
5197 return false;
5199 expr = var_debug_decl (expr);
5200 expr2 = var_debug_decl (expr2);
5202 return (expr == expr2 && offset == offset2);
5205 /* LOC is a REG or MEM that we would like to track if possible.
5206 If EXPR is null, we don't know what expression LOC refers to,
5207 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5208 LOC is an lvalue register.
5210 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5211 is something we can track. When returning true, store the mode of
5212 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5213 from EXPR in *OFFSET_OUT (if nonnull). */
5215 static bool
5216 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5217 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5219 machine_mode mode;
5221 if (expr == NULL || !track_expr_p (expr, true))
5222 return false;
5224 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5225 whole subreg, but only the old inner part is really relevant. */
5226 mode = GET_MODE (loc);
5227 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5229 machine_mode pseudo_mode;
5231 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5232 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5234 offset += byte_lowpart_offset (pseudo_mode, mode);
5235 mode = pseudo_mode;
5239 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5240 Do the same if we are storing to a register and EXPR occupies
5241 the whole of register LOC; in that case, the whole of EXPR is
5242 being changed. We exclude complex modes from the second case
5243 because the real and imaginary parts are represented as separate
5244 pseudo registers, even if the whole complex value fits into one
5245 hard register. */
5246 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5247 || (store_reg_p
5248 && !COMPLEX_MODE_P (DECL_MODE (expr))
5249 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5250 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5252 mode = DECL_MODE (expr);
5253 offset = 0;
5256 if (offset < 0 || offset >= MAX_VAR_PARTS)
5257 return false;
5259 if (mode_out)
5260 *mode_out = mode;
5261 if (offset_out)
5262 *offset_out = offset;
5263 return true;
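/* For instance, when a store writes the whole of EXPR into a register
   (and DECL_MODE (expr) is not complex), or when LOC is a paradoxical
   lowpart of EXPR, the tracked mode is widened to DECL_MODE (expr)
   and the offset reset to 0, so the whole variable is considered
   changed.  (Illustrative; the exact offsets come from
   byte_lowpart_offset.)  */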
5266 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5267 want to track. When returning nonnull, make sure that the attributes
5268 on the returned value are updated. */
5270 static rtx
5271 var_lowpart (machine_mode mode, rtx loc)
5273 unsigned int offset, reg_offset, regno;
5275 if (GET_MODE (loc) == mode)
5276 return loc;
5278 if (!REG_P (loc) && !MEM_P (loc))
5279 return NULL;
5281 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5283 if (MEM_P (loc))
5284 return adjust_address_nv (loc, mode, offset);
5286 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5287 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5288 reg_offset, mode);
5289 return gen_rtx_REG_offset (loc, mode, regno, offset);
5292 /* Carry information about uses and stores while walking rtx. */
5294 struct count_use_info
5296 /* The insn where the RTX is. */
5297 rtx_insn *insn;
5299 /* The basic block where insn is. */
5300 basic_block bb;
5302 /* The array of n_sets sets in the insn, as determined by cselib. */
5303 struct cselib_set *sets;
5304 int n_sets;
5306 /* True if we're counting stores, false otherwise. */
5307 bool store_p;
5310 /* Find a VALUE corresponding to X. */
5312 static inline cselib_val *
5313 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5315 int i;
5317 if (cui->sets)
5319 /* This is called after uses are set up and before stores are
5320 processed by cselib, so it's safe to look up srcs, but not
5321 dsts. So we look up expressions that appear in srcs or in
5322 dest expressions, but we search the sets array for dests of
5323 stores. */
5324 if (cui->store_p)
5326 /* Some targets represent memset and memcpy patterns
5327 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5328 (set (mem:BLK ...) (const_int ...)) or
5329 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5330 in that case, otherwise we end up with mode mismatches. */
5331 if (mode == BLKmode && MEM_P (x))
5332 return NULL;
5333 for (i = 0; i < cui->n_sets; i++)
5334 if (cui->sets[i].dest == x)
5335 return cui->sets[i].src_elt;
5337 else
5338 return cselib_lookup (x, mode, 0, VOIDmode);
5341 return NULL;
5344 /* Replace all registers and addresses in an expression with VALUE
5345 expressions that map back to them, unless the expression is a
5346 register. If no mapping is or can be performed, returns NULL. */
5348 static rtx
5349 replace_expr_with_values (rtx loc)
5351 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5352 return NULL;
5353 else if (MEM_P (loc))
5355 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5356 get_address_mode (loc), 0,
5357 GET_MODE (loc));
5358 if (addr)
5359 return replace_equiv_address_nv (loc, addr->val_rtx);
5360 else
5361 return NULL;
5363 else
5364 return cselib_subst_to_values (loc, VOIDmode);
5367 /* Return true if X contains a DEBUG_EXPR. */
5369 static bool
5370 rtx_debug_expr_p (const_rtx x)
5372 subrtx_iterator::array_type array;
5373 FOR_EACH_SUBRTX (iter, array, x, ALL)
5374 if (GET_CODE (*iter) == DEBUG_EXPR)
5375 return true;
5376 return false;
5379 /* Determine what kind of micro operation to choose for a USE. Return
5380 MO_CLOBBER if no micro operation is to be generated. */
5382 static enum micro_operation_type
5383 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5385 tree expr;
5387 if (cui && cui->sets)
5389 if (GET_CODE (loc) == VAR_LOCATION)
5391 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5393 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5394 if (! VAR_LOC_UNKNOWN_P (ploc))
5396 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5397 VOIDmode);
5399 /* ??? flag_float_store and volatile mems are never
5400 given values, but we could in theory use them for
5401 locations. */
5402 gcc_assert (val || 1);
5404 return MO_VAL_LOC;
5406 else
5407 return MO_CLOBBER;
5410 if (REG_P (loc) || MEM_P (loc))
5412 if (modep)
5413 *modep = GET_MODE (loc);
5414 if (cui->store_p)
5416 if (REG_P (loc)
5417 || (find_use_val (loc, GET_MODE (loc), cui)
5418 && cselib_lookup (XEXP (loc, 0),
5419 get_address_mode (loc), 0,
5420 GET_MODE (loc))))
5421 return MO_VAL_SET;
5423 else
5425 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5427 if (val && !cselib_preserved_value_p (val))
5428 return MO_VAL_USE;
5433 if (REG_P (loc))
5435 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5437 if (loc == cfa_base_rtx)
5438 return MO_CLOBBER;
5439 expr = REG_EXPR (loc);
5441 if (!expr)
5442 return MO_USE_NO_VAR;
5443 else if (target_for_debug_bind (var_debug_decl (expr)))
5444 return MO_CLOBBER;
5445 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5446 false, modep, NULL))
5447 return MO_USE;
5448 else
5449 return MO_USE_NO_VAR;
5451 else if (MEM_P (loc))
5453 expr = MEM_EXPR (loc);
5455 if (!expr)
5456 return MO_CLOBBER;
5457 else if (target_for_debug_bind (var_debug_decl (expr)))
5458 return MO_CLOBBER;
5459 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5460 false, modep, NULL)
5461 /* Multi-part variables shouldn't refer to one-part
5462 variable names such as VALUEs (never happens) or
5463 DEBUG_EXPRs (only happens in the presence of debug
5464 insns). */
5465 && (!MAY_HAVE_DEBUG_INSNS
5466 || !rtx_debug_expr_p (XEXP (loc, 0))))
5467 return MO_USE;
5468 else
5469 return MO_CLOBBER;
5472 return MO_CLOBBER;
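/* Rough classification examples, assuming cselib sets are available:
   a read of a tracked register yields MO_USE and of an untracked one
   MO_USE_NO_VAR; a location whose VALUE is not yet preserved yields
   MO_VAL_USE; a store target yields MO_VAL_SET; and a VAR_LOCATION
   pattern yields MO_VAL_LOC.  */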
5475 /* Log to OUT information about micro-operation MOPT involving X in
5476 INSN of BB. */
5478 static inline void
5479 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5480 enum micro_operation_type mopt, FILE *out)
5482 fprintf (out, "bb %i op %i insn %i %s ",
5483 bb->index, VTI (bb)->mos.length (),
5484 INSN_UID (insn), micro_operation_type_name[mopt]);
5485 print_inline_rtx (out, x, 2);
5486 fputc ('\n', out);
5489 /* Tell whether the CONCAT used to hold a VALUE and its location
5490 needs value resolution, i.e., an attempt to map the location
5491 back to other incoming values. */
5492 #define VAL_NEEDS_RESOLUTION(x) \
5493 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5494 /* Whether the location in the CONCAT is a tracked expression, that
5495 should also be handled like a MO_USE. */
5496 #define VAL_HOLDS_TRACK_EXPR(x) \
5497 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5498 /* Whether the location in the CONCAT should be handled like a MO_COPY
5499 as well. */
5500 #define VAL_EXPR_IS_COPIED(x) \
5501 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5502 /* Whether the location in the CONCAT should be handled like a
5503 MO_CLOBBER as well. */
5504 #define VAL_EXPR_IS_CLOBBERED(x) \
5505 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5507 /* All preserved VALUEs. */
5508 static vec<rtx> preserved_values;
5510 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5512 static void
5513 preserve_value (cselib_val *val)
5515 cselib_preserve_value (val);
5516 preserved_values.safe_push (val->val_rtx);
5519 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5520 any rtxes not suitable for CONST use not replaced by VALUEs
5521 are discovered. */
5523 static bool
5524 non_suitable_const (const_rtx x)
5526 subrtx_iterator::array_type array;
5527 FOR_EACH_SUBRTX (iter, array, x, ALL)
5529 const_rtx x = *iter;
5530 switch (GET_CODE (x))
5532 case REG:
5533 case DEBUG_EXPR:
5534 case PC:
5535 case SCRATCH:
5536 case CC0:
5537 case ASM_INPUT:
5538 case ASM_OPERANDS:
5539 return true;
5540 case MEM:
5541 if (!MEM_READONLY_P (x))
5542 return true;
5543 break;
5544 default:
5545 break;
5548 return false;
5551 /* Add uses (register and memory references) LOC which will be tracked
5552 to VTI (bb)->mos. */
5554 static void
5555 add_uses (rtx loc, struct count_use_info *cui)
5557 machine_mode mode = VOIDmode;
5558 enum micro_operation_type type = use_type (loc, cui, &mode);
5560 if (type != MO_CLOBBER)
5562 basic_block bb = cui->bb;
5563 micro_operation mo;
5565 mo.type = type;
5566 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5567 mo.insn = cui->insn;
5569 if (type == MO_VAL_LOC)
5571 rtx oloc = loc;
5572 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5573 cselib_val *val;
5575 gcc_assert (cui->sets);
5577 if (MEM_P (vloc)
5578 && !REG_P (XEXP (vloc, 0))
5579 && !MEM_P (XEXP (vloc, 0)))
5581 rtx mloc = vloc;
5582 machine_mode address_mode = get_address_mode (mloc);
5583 cselib_val *val
5584 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5585 GET_MODE (mloc));
5587 if (val && !cselib_preserved_value_p (val))
5588 preserve_value (val);
5591 if (CONSTANT_P (vloc)
5592 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5593 /* For constants don't look up any value. */;
5594 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5595 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5597 machine_mode mode2;
5598 enum micro_operation_type type2;
5599 rtx nloc = NULL;
5600 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5602 if (resolvable)
5603 nloc = replace_expr_with_values (vloc);
5605 if (nloc)
5607 oloc = shallow_copy_rtx (oloc);
5608 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5611 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5613 type2 = use_type (vloc, 0, &mode2);
5615 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5616 || type2 == MO_CLOBBER);
5618 if (type2 == MO_CLOBBER
5619 && !cselib_preserved_value_p (val))
5621 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5622 preserve_value (val);
5625 else if (!VAR_LOC_UNKNOWN_P (vloc))
5627 oloc = shallow_copy_rtx (oloc);
5628 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5631 mo.u.loc = oloc;
5633 else if (type == MO_VAL_USE)
5635 machine_mode mode2 = VOIDmode;
5636 enum micro_operation_type type2;
5637 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5638 rtx vloc, oloc = loc, nloc;
5640 gcc_assert (cui->sets);
5642 if (MEM_P (oloc)
5643 && !REG_P (XEXP (oloc, 0))
5644 && !MEM_P (XEXP (oloc, 0)))
5646 rtx mloc = oloc;
5647 machine_mode address_mode = get_address_mode (mloc);
5648 cselib_val *val
5649 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5650 GET_MODE (mloc));
5652 if (val && !cselib_preserved_value_p (val))
5653 preserve_value (val);
5656 type2 = use_type (loc, 0, &mode2);
5658 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5659 || type2 == MO_CLOBBER);
5661 if (type2 == MO_USE)
5662 vloc = var_lowpart (mode2, loc);
5663 else
5664 vloc = oloc;
5666 /* The loc of a MO_VAL_USE may have two forms:
5668 (concat val src): val is at src, a value-based
5669 representation.
5671 (concat (concat val use) src): same as above, with use as
5672 the MO_USE tracked value, if it differs from src.
5676 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5677 nloc = replace_expr_with_values (loc);
5678 if (!nloc)
5679 nloc = oloc;
5681 if (vloc != nloc)
5682 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5683 else
5684 oloc = val->val_rtx;
5686 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5688 if (type2 == MO_USE)
5689 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5690 if (!cselib_preserved_value_p (val))
5692 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5693 preserve_value (val);
5696 else
5697 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5699 if (dump_file && (dump_flags & TDF_DETAILS))
5700 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5701 VTI (bb)->mos.safe_push (mo);
5705 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5707 static void
5708 add_uses_1 (rtx *x, void *cui)
5710 subrtx_var_iterator::array_type array;
5711 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5712 add_uses (*iter, (struct count_use_info *) cui);
5715 /* This is the value used during expansion of locations. We want it
5716 to be unbounded, so that variables expanded deep in a recursion
5717 nest are fully evaluated, so that their values are cached
5718 correctly. We avoid recursion cycles through other means, and we
5719 don't unshare RTL, so excess complexity is not a problem. */
5720 #define EXPR_DEPTH (INT_MAX)
5721 /* We use this to keep too-complex expressions from being emitted as
5722 location notes and ending up in debug information. Users can trade
5723 compile time for ridiculously complex expressions, although they're
5724 seldom useful, and they may often have to be discarded as not
5725 representable anyway. */
5726 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5728 /* Attempt to reverse the EXPR operation in the debug info and record
5729 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5730 no longer live we can express its value as VAL - 6. */
5732 static void
5733 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5735 rtx src, arg, ret;
5736 cselib_val *v;
5737 struct elt_loc_list *l;
5738 enum rtx_code code;
5739 int count;
5741 if (GET_CODE (expr) != SET)
5742 return;
5744 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5745 return;
5747 src = SET_SRC (expr);
5748 switch (GET_CODE (src))
5750 case PLUS:
5751 case MINUS:
5752 case XOR:
5753 case NOT:
5754 case NEG:
5755 if (!REG_P (XEXP (src, 0)))
5756 return;
5757 break;
5758 case SIGN_EXTEND:
5759 case ZERO_EXTEND:
5760 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5761 return;
5762 break;
5763 default:
5764 return;
5767 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5768 return;
5770 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5771 if (!v || !cselib_preserved_value_p (v))
5772 return;
5774 /* Use canonical V to avoid creating multiple redundant expressions
5775 for different VALUES equivalent to V. */
5776 v = canonical_cselib_val (v);
5778 /* Adding a reverse op isn't useful if V already has an always valid
5779 location. Ignore ENTRY_VALUE: while it is always constant, we should
5780 prefer non-ENTRY_VALUE locations whenever possible. */
5781 for (l = v->locs, count = 0; l; l = l->next, count++)
5782 if (CONSTANT_P (l->loc)
5783 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5784 return;
5785 /* Avoid creating too large locs lists. */
5786 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5787 return;
5789 switch (GET_CODE (src))
5791 case NOT:
5792 case NEG:
5793 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5794 return;
5795 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5796 break;
5797 case SIGN_EXTEND:
5798 case ZERO_EXTEND:
5799 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5800 break;
5801 case XOR:
5802 code = XOR;
5803 goto binary;
5804 case PLUS:
5805 code = MINUS;
5806 goto binary;
5807 case MINUS:
5808 code = PLUS;
5809 goto binary;
5810 binary:
5811 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5812 return;
5813 arg = XEXP (src, 1);
5814 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5816 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5817 if (arg == NULL_RTX)
5818 return;
5819 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5820 return;
5822 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5823 if (ret == val)
5824 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5825 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5826 breaks a lot of routines during var-tracking. */
5827 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5828 break;
5829 default:
5830 gcc_unreachable ();
5833 cselib_add_permanent_equiv (v, ret, insn);
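/* E.g. for (set (reg1) (plus (reg2) (const_int 6))), once reg2's
   preserved value V is found, the permanent equivalence
   V == (minus VAL(reg1) (const_int 6)) is recorded, so reg2's value
   remains expressible even after reg2 itself is overwritten.  */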
5836 /* Add stores (register and memory references) LOC which will be tracked
5837 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5838 CUIP->insn is instruction which the LOC is part of. */
5840 static void
5841 add_stores (rtx loc, const_rtx expr, void *cuip)
5843 machine_mode mode = VOIDmode, mode2;
5844 struct count_use_info *cui = (struct count_use_info *)cuip;
5845 basic_block bb = cui->bb;
5846 micro_operation mo;
5847 rtx oloc = loc, nloc, src = NULL;
5848 enum micro_operation_type type = use_type (loc, cui, &mode);
5849 bool track_p = false;
5850 cselib_val *v;
5851 bool resolve, preserve;
5853 if (type == MO_CLOBBER)
5854 return;
5856 mode2 = mode;
5858 if (REG_P (loc))
5860 gcc_assert (loc != cfa_base_rtx);
5861 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5862 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5863 || GET_CODE (expr) == CLOBBER)
5865 mo.type = MO_CLOBBER;
5866 mo.u.loc = loc;
5867 if (GET_CODE (expr) == SET
5868 && SET_DEST (expr) == loc
5869 && !unsuitable_loc (SET_SRC (expr))
5870 && find_use_val (loc, mode, cui))
5872 gcc_checking_assert (type == MO_VAL_SET);
5873 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5876 else
5878 if (GET_CODE (expr) == SET
5879 && SET_DEST (expr) == loc
5880 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5881 src = var_lowpart (mode2, SET_SRC (expr));
5882 loc = var_lowpart (mode2, loc);
5884 if (src == NULL)
5886 mo.type = MO_SET;
5887 mo.u.loc = loc;
5889 else
5891 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5892 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5894 /* If this is an instruction copying (part of) a parameter
5895 passed by invisible reference to its register location,
5896 pretend it's a SET so that the initial memory location
5897 is discarded, as the parameter register can be reused
5898 for other purposes and we do not track locations based
5899 on generic registers. */
5900 if (MEM_P (src)
5901 && REG_EXPR (loc)
5902 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5903 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5904 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5905 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5906 != arg_pointer_rtx)
5907 mo.type = MO_SET;
5908 else
5909 mo.type = MO_COPY;
5911 else
5912 mo.type = MO_SET;
5913 mo.u.loc = xexpr;
5916 mo.insn = cui->insn;
5918 else if (MEM_P (loc)
5919 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5920 || cui->sets))
5922 if (MEM_P (loc) && type == MO_VAL_SET
5923 && !REG_P (XEXP (loc, 0))
5924 && !MEM_P (XEXP (loc, 0)))
5926 rtx mloc = loc;
5927 machine_mode address_mode = get_address_mode (mloc);
5928 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5929 address_mode, 0,
5930 GET_MODE (mloc));
5932 if (val && !cselib_preserved_value_p (val))
5933 preserve_value (val);
5936 if (GET_CODE (expr) == CLOBBER || !track_p)
5938 mo.type = MO_CLOBBER;
5939 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5941 else
5943 if (GET_CODE (expr) == SET
5944 && SET_DEST (expr) == loc
5945 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5946 src = var_lowpart (mode2, SET_SRC (expr));
5947 loc = var_lowpart (mode2, loc);
5949 if (src == NULL)
5951 mo.type = MO_SET;
5952 mo.u.loc = loc;
5954 else
5956 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5957 if (same_variable_part_p (SET_SRC (xexpr),
5958 MEM_EXPR (loc),
5959 INT_MEM_OFFSET (loc)))
5960 mo.type = MO_COPY;
5961 else
5962 mo.type = MO_SET;
5963 mo.u.loc = xexpr;
5966 mo.insn = cui->insn;
5968 else
5969 return;
5971 if (type != MO_VAL_SET)
5972 goto log_and_return;
5974 v = find_use_val (oloc, mode, cui);
5976 if (!v)
5977 goto log_and_return;
5979 resolve = preserve = !cselib_preserved_value_p (v);
5981 /* We cannot track values for multiple-part variables, so we track only
5982 locations for tracked parameters passed either by invisible reference
5983 or directly in multiple locations. */
5984 if (track_p
5985 && REG_P (loc)
5986 && REG_EXPR (loc)
5987 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5988 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5989 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5990 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5991 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5992 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5993 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5995 /* Although we don't use the value here, it could be used later by the
5996 mere virtue of its existence as the operand of the reverse operation
5997 that gave rise to it (typically extension/truncation). Make sure it
5998 is preserved as required by vt_expand_var_loc_chain. */
5999 if (preserve)
6000 preserve_value (v);
6001 goto log_and_return;
6004 if (loc == stack_pointer_rtx
6005 && hard_frame_pointer_adjustment != -1
6006 && preserve)
6007 cselib_set_value_sp_based (v);
6009 nloc = replace_expr_with_values (oloc);
6010 if (nloc)
6011 oloc = nloc;
6013 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6015 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6017 if (oval == v)
6018 return;
6019 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6021 if (oval && !cselib_preserved_value_p (oval))
6023 micro_operation moa;
6025 preserve_value (oval);
6027 moa.type = MO_VAL_USE;
6028 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6029 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6030 moa.insn = cui->insn;
6032 if (dump_file && (dump_flags & TDF_DETAILS))
6033 log_op_type (moa.u.loc, cui->bb, cui->insn,
6034 moa.type, dump_file);
6035 VTI (bb)->mos.safe_push (moa);
6038 resolve = false;
6040 else if (resolve && GET_CODE (mo.u.loc) == SET)
6042 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6043 nloc = replace_expr_with_values (SET_SRC (expr));
6044 else
6045 nloc = NULL_RTX;
6047 /* Avoid the mode mismatch between oexpr and expr. */
6048 if (!nloc && mode != mode2)
6050 nloc = SET_SRC (expr);
6051 gcc_assert (oloc == SET_DEST (expr));
6054 if (nloc && nloc != SET_SRC (mo.u.loc))
6055 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6056 else
6058 if (oloc == SET_DEST (mo.u.loc))
6059 /* No point in duplicating. */
6060 oloc = mo.u.loc;
6061 if (!REG_P (SET_SRC (mo.u.loc)))
6062 resolve = false;
6065 else if (!resolve)
6067 if (GET_CODE (mo.u.loc) == SET
6068 && oloc == SET_DEST (mo.u.loc))
6069 /* No point in duplicating. */
6070 oloc = mo.u.loc;
6072 else
6073 resolve = false;
6075 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6077 if (mo.u.loc != oloc)
6078 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6080 /* The loc of a MO_VAL_SET may have various forms:
6082 (concat val dst): dst now holds val
6084 (concat val (set dst src)): dst now holds val, copied from src
6086 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6087 after replacing mems and non-top-level regs with values.
6089 (concat (concat val dstv) (set dst src)): dst now holds val,
6090 copied from src. dstv is a value-based representation of dst, if
6091 it differs from dst. If resolution is needed, src is a REG, and
6092 its mode is the same as that of val.
6094 (concat (concat val (set dstv srcv)) (set dst src)): src
6095 copied to dst, holding val. dstv and srcv are value-based
6096 representations of dst and src, respectively.
6100 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6101 reverse_op (v->val_rtx, expr, cui->insn);
6103 mo.u.loc = loc;
6105 if (track_p)
6106 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6107 if (preserve)
6109 VAL_NEEDS_RESOLUTION (loc) = resolve;
6110 preserve_value (v);
6112 if (mo.type == MO_CLOBBER)
6113 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6114 if (mo.type == MO_COPY)
6115 VAL_EXPR_IS_COPIED (loc) = 1;
6117 mo.type = MO_VAL_SET;
6119 log_and_return:
6120 if (dump_file && (dump_flags & TDF_DETAILS))
6121 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6122 VTI (bb)->mos.safe_push (mo);
6125 /* Arguments to the call. */
6126 static rtx call_arguments;
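/* The list built by prepare_call_arguments below is a chain of
   EXPR_LISTs whose elements typically have the form
   (concat arg-location value), e.g. (concat (reg:SI 5) (value ...)),
   describing where each argument of the call lives so the information
   can later be emitted in the call's argument-location note.  */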
6128 /* Compute call_arguments. */
6130 static void
6131 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6133 rtx link, x, call;
6134 rtx prev, cur, next;
6135 rtx this_arg = NULL_RTX;
6136 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6137 tree obj_type_ref = NULL_TREE;
6138 CUMULATIVE_ARGS args_so_far_v;
6139 cumulative_args_t args_so_far;
6141 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6142 args_so_far = pack_cumulative_args (&args_so_far_v);
6143 call = get_call_rtx_from (insn);
6144 if (call)
6146 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6148 rtx symbol = XEXP (XEXP (call, 0), 0);
6149 if (SYMBOL_REF_DECL (symbol))
6150 fndecl = SYMBOL_REF_DECL (symbol);
6152 if (fndecl == NULL_TREE)
6153 fndecl = MEM_EXPR (XEXP (call, 0));
6154 if (fndecl
6155 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6156 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6157 fndecl = NULL_TREE;
6158 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6159 type = TREE_TYPE (fndecl);
6160 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6162 if (TREE_CODE (fndecl) == INDIRECT_REF
6163 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6164 obj_type_ref = TREE_OPERAND (fndecl, 0);
6165 fndecl = NULL_TREE;
6167 if (type)
6169 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6170 t = TREE_CHAIN (t))
6171 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6172 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6173 break;
6174 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6175 type = NULL;
6176 else
6178 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6179 link = CALL_INSN_FUNCTION_USAGE (insn);
6180 #ifndef PCC_STATIC_STRUCT_RETURN
6181 if (aggregate_value_p (TREE_TYPE (type), type)
6182 && targetm.calls.struct_value_rtx (type, 0) == 0)
6184 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6185 machine_mode mode = TYPE_MODE (struct_addr);
6186 rtx reg;
6187 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6188 nargs + 1);
6189 reg = targetm.calls.function_arg (args_so_far, mode,
6190 struct_addr, true);
6191 targetm.calls.function_arg_advance (args_so_far, mode,
6192 struct_addr, true);
6193 if (reg == NULL_RTX)
6195 for (; link; link = XEXP (link, 1))
6196 if (GET_CODE (XEXP (link, 0)) == USE
6197 && MEM_P (XEXP (XEXP (link, 0), 0)))
6199 link = XEXP (link, 1);
6200 break;
6204 else
6205 #endif
6206 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6207 nargs);
6208 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6210 machine_mode mode;
6211 t = TYPE_ARG_TYPES (type);
6212 mode = TYPE_MODE (TREE_VALUE (t));
6213 this_arg = targetm.calls.function_arg (args_so_far, mode,
6214 TREE_VALUE (t), true);
6215 if (this_arg && !REG_P (this_arg))
6216 this_arg = NULL_RTX;
6217 else if (this_arg == NULL_RTX)
6219 for (; link; link = XEXP (link, 1))
6220 if (GET_CODE (XEXP (link, 0)) == USE
6221 && MEM_P (XEXP (XEXP (link, 0), 0)))
6223 this_arg = XEXP (XEXP (link, 0), 0);
6224 break;
6231 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6233 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6234 if (GET_CODE (XEXP (link, 0)) == USE)
6236 rtx item = NULL_RTX;
6237 x = XEXP (XEXP (link, 0), 0);
6238 if (GET_MODE (link) == VOIDmode
6239 || GET_MODE (link) == BLKmode
6240 || (GET_MODE (link) != GET_MODE (x)
6241 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6242 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6243 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6244 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6245 /* Can't do anything for these, if the original type mode
6246 isn't known or can't be converted. */;
6247 else if (REG_P (x))
6249 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6250 if (val && cselib_preserved_value_p (val))
6251 item = val->val_rtx;
6252 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6253 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6255 machine_mode mode = GET_MODE (x);
6257 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6258 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6260 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6262 if (reg == NULL_RTX || !REG_P (reg))
6263 continue;
6264 val = cselib_lookup (reg, mode, 0, VOIDmode);
6265 if (val && cselib_preserved_value_p (val))
6267 item = val->val_rtx;
6268 break;
6273 else if (MEM_P (x))
6275 rtx mem = x;
6276 cselib_val *val;
6278 if (!frame_pointer_needed)
6280 struct adjust_mem_data amd;
6281 amd.mem_mode = VOIDmode;
6282 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6283 amd.side_effects = NULL;
6284 amd.store = true;
6285 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6286 &amd);
6287 gcc_assert (amd.side_effects == NULL_RTX);
6289 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6290 if (val && cselib_preserved_value_p (val))
6291 item = val->val_rtx;
6292 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6293 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6295 /* For non-integer stack arguments, also check whether they were
6296 initialized with integers.  */
6297 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6298 if (imode != GET_MODE (mem) && imode != BLKmode)
6300 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6301 imode, 0, VOIDmode);
6302 if (val && cselib_preserved_value_p (val))
6303 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6304 imode);
6308 if (item)
6310 rtx x2 = x;
6311 if (GET_MODE (item) != GET_MODE (link))
6312 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6313 if (GET_MODE (x2) != GET_MODE (link))
6314 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6315 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6316 call_arguments
6317 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6319 if (t && t != void_list_node)
6321 tree argtype = TREE_VALUE (t);
6322 machine_mode mode = TYPE_MODE (argtype);
6323 rtx reg;
6324 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6326 argtype = build_pointer_type (argtype);
6327 mode = TYPE_MODE (argtype);
6329 reg = targetm.calls.function_arg (args_so_far, mode,
6330 argtype, true);
6331 if (TREE_CODE (argtype) == REFERENCE_TYPE
6332 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6333 && reg
6334 && REG_P (reg)
6335 && GET_MODE (reg) == mode
6336 && (GET_MODE_CLASS (mode) == MODE_INT
6337 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6338 && REG_P (x)
6339 && REGNO (x) == REGNO (reg)
6340 && GET_MODE (x) == mode
6341 && item)
6343 machine_mode indmode
6344 = TYPE_MODE (TREE_TYPE (argtype));
6345 rtx mem = gen_rtx_MEM (indmode, x);
6346 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6347 if (val && cselib_preserved_value_p (val))
6349 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6350 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6351 call_arguments);
6353 else
6355 struct elt_loc_list *l;
6356 tree initial;
6358 /* Try harder: when passing the address of a constant-pool
6359 integer, the value can easily be read back.  */
6360 item = XEXP (item, 1);
6361 if (GET_CODE (item) == SUBREG)
6362 item = SUBREG_REG (item);
6363 gcc_assert (GET_CODE (item) == VALUE);
6364 val = CSELIB_VAL_PTR (item);
6365 for (l = val->locs; l; l = l->next)
6366 if (GET_CODE (l->loc) == SYMBOL_REF
6367 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6368 && SYMBOL_REF_DECL (l->loc)
6369 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6371 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6372 if (tree_fits_shwi_p (initial))
6374 item = GEN_INT (tree_to_shwi (initial));
6375 item = gen_rtx_CONCAT (indmode, mem, item);
6376 call_arguments
6377 = gen_rtx_EXPR_LIST (VOIDmode, item,
6378 call_arguments);
6380 break;
6384 targetm.calls.function_arg_advance (args_so_far, mode,
6385 argtype, true);
6386 t = TREE_CHAIN (t);
6390 /* Add debug arguments. */
6391 if (fndecl
6392 && TREE_CODE (fndecl) == FUNCTION_DECL
6393 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6395 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6396 if (debug_args)
6398 unsigned int ix;
6399 tree param;
6400 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6402 rtx item;
6403 tree dtemp = (**debug_args)[ix + 1];
6404 machine_mode mode = DECL_MODE (dtemp);
6405 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6406 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6407 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6408 call_arguments);
6413 /* Reverse call_arguments chain. */
6414 prev = NULL_RTX;
6415 for (cur = call_arguments; cur; cur = next)
6417 next = XEXP (cur, 1);
6418 XEXP (cur, 1) = prev;
6419 prev = cur;
6421 call_arguments = prev;
6423 x = get_call_rtx_from (insn);
6424 if (x)
6426 x = XEXP (XEXP (x, 0), 0);
6427 if (GET_CODE (x) == SYMBOL_REF)
6428 /* Don't record anything. */;
6429 else if (CONSTANT_P (x))
6431 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6432 pc_rtx, x);
6433 call_arguments
6434 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6436 else
6438 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6439 if (val && cselib_preserved_value_p (val))
6441 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6442 call_arguments
6443 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6447 if (this_arg)
6449 machine_mode mode
6450 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6451 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6452 HOST_WIDE_INT token
6453 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6454 if (token)
6455 clobbered = plus_constant (mode, clobbered,
6456 token * GET_MODE_SIZE (mode));
6457 clobbered = gen_rtx_MEM (mode, clobbered);
6458 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6459 call_arguments
6460 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
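/* An illustrative sketch (the helper name is hypothetical; nothing
   below is part of the pass) of how the argument list built above can
   be walked; each element is a CONCAT of where an argument lives and
   what value it carries:

     static void
     walk_call_arguments (rtx arguments)
     {
       for (rtx node = arguments; node; node = XEXP (node, 1))
         {
           rtx concat = XEXP (node, 0);
           rtx where = XEXP (concat, 0);   -- REG, MEM or pc_rtx
           rtx what = XEXP (concat, 1);    -- VALUE, constant or CLOBBER
         }
     }  */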
6464 /* Callback for cselib_record_sets_hook.  Records the uses and
6465 stores in an insn as micro operations, after cselib_record_sets
6466 has analyzed the sets in the insn but before it modifies the
6467 stored values in its internal tables.  It may also be called
6468 directly when cselib is not being used at all, in which case
6469 SETS and N_SETS will be 0.  */
6471 static void
6472 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6474 basic_block bb = BLOCK_FOR_INSN (insn);
6475 int n1, n2;
6476 struct count_use_info cui;
6477 micro_operation *mos;
6479 cselib_hook_called = true;
6481 cui.insn = insn;
6482 cui.bb = bb;
6483 cui.sets = sets;
6484 cui.n_sets = n_sets;
6486 n1 = VTI (bb)->mos.length ();
6487 cui.store_p = false;
6488 note_uses (&PATTERN (insn), add_uses_1, &cui);
6489 n2 = VTI (bb)->mos.length () - 1;
6490 mos = VTI (bb)->mos.address ();
6492 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6493 MO_VAL_LOC last. */
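/* The loop pairs below are simple two-pointer partitions: N1 scans
   forward over elements already in place, N2 scans backward over
   elements that are not, and each out-of-place pair found is
   swapped.  */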
6494 while (n1 < n2)
6496 while (n1 < n2 && mos[n1].type == MO_USE)
6497 n1++;
6498 while (n1 < n2 && mos[n2].type != MO_USE)
6499 n2--;
6500 if (n1 < n2)
6502 micro_operation sw;
6504 sw = mos[n1];
6505 mos[n1] = mos[n2];
6506 mos[n2] = sw;
6510 n2 = VTI (bb)->mos.length () - 1;
6511 while (n1 < n2)
6513 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6514 n1++;
6515 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6516 n2--;
6517 if (n1 < n2)
6519 micro_operation sw;
6521 sw = mos[n1];
6522 mos[n1] = mos[n2];
6523 mos[n2] = sw;
6527 if (CALL_P (insn))
6529 micro_operation mo;
6531 mo.type = MO_CALL;
6532 mo.insn = insn;
6533 mo.u.loc = call_arguments;
6534 call_arguments = NULL_RTX;
6536 if (dump_file && (dump_flags & TDF_DETAILS))
6537 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6538 VTI (bb)->mos.safe_push (mo);
6541 n1 = VTI (bb)->mos.length ();
6542 /* This will record NEXT_INSN (insn), such that we can
6543 insert notes before it without worrying about any
6544 notes that MO_USEs might emit after the insn. */
6545 cui.store_p = true;
6546 note_stores (PATTERN (insn), add_stores, &cui);
6547 n2 = VTI (bb)->mos.length () - 1;
6548 mos = VTI (bb)->mos.address ();
6550 /* Order the MO_VAL_USEs first (note_stores does nothing
6551 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6552 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6553 while (n1 < n2)
6555 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6556 n1++;
6557 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6558 n2--;
6559 if (n1 < n2)
6561 micro_operation sw;
6563 sw = mos[n1];
6564 mos[n1] = mos[n2];
6565 mos[n2] = sw;
6569 n2 = VTI (bb)->mos.length () - 1;
6570 while (n1 < n2)
6572 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6573 n1++;
6574 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6575 n2--;
6576 if (n1 < n2)
6578 micro_operation sw;
6580 sw = mos[n1];
6581 mos[n1] = mos[n2];
6582 mos[n2] = sw;
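/* Return the initialization status recorded in dataflow set IN for
   the variable, if any, whose location is SRC.  */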
6587 static enum var_init_status
6588 find_src_status (dataflow_set *in, rtx src)
6590 tree decl = NULL_TREE;
6591 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6593 if (! flag_var_tracking_uninit)
6594 status = VAR_INIT_STATUS_INITIALIZED;
6596 if (src && REG_P (src))
6597 decl = var_debug_decl (REG_EXPR (src));
6598 else if (src && MEM_P (src))
6599 decl = var_debug_decl (MEM_EXPR (src));
6601 if (src && decl)
6602 status = get_init_value (in, src, dv_from_decl (decl));
6604 return status;
6607 /* SRC is the source of an assignment. Use SET to try to find what
6608 was ultimately assigned to SRC. Return that value if known,
6609 otherwise return SRC itself. */
6611 static rtx
6612 find_src_set_src (dataflow_set *set, rtx src)
6614 tree decl = NULL_TREE; /* The variable being copied around. */
6615 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6616 variable var;
6617 location_chain nextp;
6618 int i;
6619 bool found;
6621 if (src && REG_P (src))
6622 decl = var_debug_decl (REG_EXPR (src));
6623 else if (src && MEM_P (src))
6624 decl = var_debug_decl (MEM_EXPR (src));
6626 if (src && decl)
6628 decl_or_value dv = dv_from_decl (decl);
6630 var = shared_hash_find (set->vars, dv);
6631 if (var)
6633 found = false;
6634 for (i = 0; i < var->n_var_parts && !found; i++)
6635 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6636 nextp = nextp->next)
6637 if (rtx_equal_p (nextp->loc, src))
6639 set_src = nextp->set_src;
6640 found = true;
6646 return set_src;
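/* For example, after a copy chain such as R2 = R1; R3 = R2; the set
   source recorded for location R2 is R1, so find_src_set_src lets
   later notes refer to the original location rather than to the
   intermediate copy.  */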
6649 /* Compute the changes of variable locations in the basic block BB. */
6651 static bool
6652 compute_bb_dataflow (basic_block bb)
6654 unsigned int i;
6655 micro_operation *mo;
6656 bool changed;
6657 dataflow_set old_out;
6658 dataflow_set *in = &VTI (bb)->in;
6659 dataflow_set *out = &VTI (bb)->out;
6661 dataflow_set_init (&old_out);
6662 dataflow_set_copy (&old_out, out);
6663 dataflow_set_copy (out, in);
6665 if (MAY_HAVE_DEBUG_INSNS)
6666 local_get_addr_cache = new hash_map<rtx, rtx>;
6668 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6670 rtx_insn *insn = mo->insn;
6672 switch (mo->type)
6674 case MO_CALL:
6675 dataflow_set_clear_at_call (out);
6676 break;
6678 case MO_USE:
6680 rtx loc = mo->u.loc;
6682 if (REG_P (loc))
6683 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6684 else if (MEM_P (loc))
6685 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6687 break;
6689 case MO_VAL_LOC:
6691 rtx loc = mo->u.loc;
6692 rtx val, vloc;
6693 tree var;
6695 if (GET_CODE (loc) == CONCAT)
6697 val = XEXP (loc, 0);
6698 vloc = XEXP (loc, 1);
6700 else
6702 val = NULL_RTX;
6703 vloc = loc;
6706 var = PAT_VAR_LOCATION_DECL (vloc);
6708 clobber_variable_part (out, NULL_RTX,
6709 dv_from_decl (var), 0, NULL_RTX);
6710 if (val)
6712 if (VAL_NEEDS_RESOLUTION (loc))
6713 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6714 set_variable_part (out, val, dv_from_decl (var), 0,
6715 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6716 INSERT);
6718 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6719 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6720 dv_from_decl (var), 0,
6721 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6722 INSERT);
6724 break;
6726 case MO_VAL_USE:
6728 rtx loc = mo->u.loc;
6729 rtx val, vloc, uloc;
6731 vloc = uloc = XEXP (loc, 1);
6732 val = XEXP (loc, 0);
6734 if (GET_CODE (val) == CONCAT)
6736 uloc = XEXP (val, 1);
6737 val = XEXP (val, 0);
6740 if (VAL_NEEDS_RESOLUTION (loc))
6741 val_resolve (out, val, vloc, insn);
6742 else
6743 val_store (out, val, uloc, insn, false);
6745 if (VAL_HOLDS_TRACK_EXPR (loc))
6747 if (GET_CODE (uloc) == REG)
6748 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6749 NULL);
6750 else if (GET_CODE (uloc) == MEM)
6751 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6752 NULL);
6755 break;
6757 case MO_VAL_SET:
6759 rtx loc = mo->u.loc;
6760 rtx val, vloc, uloc;
6761 rtx dstv, srcv;
6763 vloc = loc;
6764 uloc = XEXP (vloc, 1);
6765 val = XEXP (vloc, 0);
6766 vloc = uloc;
6768 if (GET_CODE (uloc) == SET)
6770 dstv = SET_DEST (uloc);
6771 srcv = SET_SRC (uloc);
6773 else
6775 dstv = uloc;
6776 srcv = NULL;
6779 if (GET_CODE (val) == CONCAT)
6781 dstv = vloc = XEXP (val, 1);
6782 val = XEXP (val, 0);
6785 if (GET_CODE (vloc) == SET)
6787 srcv = SET_SRC (vloc);
6789 gcc_assert (val != srcv);
6790 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6792 dstv = vloc = SET_DEST (vloc);
6794 if (VAL_NEEDS_RESOLUTION (loc))
6795 val_resolve (out, val, srcv, insn);
6797 else if (VAL_NEEDS_RESOLUTION (loc))
6799 gcc_assert (GET_CODE (uloc) == SET
6800 && GET_CODE (SET_SRC (uloc)) == REG);
6801 val_resolve (out, val, SET_SRC (uloc), insn);
6804 if (VAL_HOLDS_TRACK_EXPR (loc))
6806 if (VAL_EXPR_IS_CLOBBERED (loc))
6808 if (REG_P (uloc))
6809 var_reg_delete (out, uloc, true);
6810 else if (MEM_P (uloc))
6812 gcc_assert (MEM_P (dstv));
6813 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6814 var_mem_delete (out, dstv, true);
6817 else
6819 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6820 rtx src = NULL, dst = uloc;
6821 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6823 if (GET_CODE (uloc) == SET)
6825 src = SET_SRC (uloc);
6826 dst = SET_DEST (uloc);
6829 if (copied_p)
6831 if (flag_var_tracking_uninit)
6833 status = find_src_status (in, src);
6835 if (status == VAR_INIT_STATUS_UNKNOWN)
6836 status = find_src_status (out, src);
6839 src = find_src_set_src (in, src);
6842 if (REG_P (dst))
6843 var_reg_delete_and_set (out, dst, !copied_p,
6844 status, srcv);
6845 else if (MEM_P (dst))
6847 gcc_assert (MEM_P (dstv));
6848 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6849 var_mem_delete_and_set (out, dstv, !copied_p,
6850 status, srcv);
6854 else if (REG_P (uloc))
6855 var_regno_delete (out, REGNO (uloc));
6856 else if (MEM_P (uloc))
6858 gcc_checking_assert (GET_CODE (vloc) == MEM);
6859 gcc_checking_assert (dstv == vloc);
6860 if (dstv != vloc)
6861 clobber_overlapping_mems (out, vloc);
6864 val_store (out, val, dstv, insn, true);
6866 break;
6868 case MO_SET:
6870 rtx loc = mo->u.loc;
6871 rtx set_src = NULL;
6873 if (GET_CODE (loc) == SET)
6875 set_src = SET_SRC (loc);
6876 loc = SET_DEST (loc);
6879 if (REG_P (loc))
6880 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6881 set_src);
6882 else if (MEM_P (loc))
6883 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6884 set_src);
6886 break;
6888 case MO_COPY:
6890 rtx loc = mo->u.loc;
6891 enum var_init_status src_status;
6892 rtx set_src = NULL;
6894 if (GET_CODE (loc) == SET)
6896 set_src = SET_SRC (loc);
6897 loc = SET_DEST (loc);
6900 if (! flag_var_tracking_uninit)
6901 src_status = VAR_INIT_STATUS_INITIALIZED;
6902 else
6904 src_status = find_src_status (in, set_src);
6906 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6907 src_status = find_src_status (out, set_src);
6910 set_src = find_src_set_src (in, set_src);
6912 if (REG_P (loc))
6913 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6914 else if (MEM_P (loc))
6915 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6917 break;
6919 case MO_USE_NO_VAR:
6921 rtx loc = mo->u.loc;
6923 if (REG_P (loc))
6924 var_reg_delete (out, loc, false);
6925 else if (MEM_P (loc))
6926 var_mem_delete (out, loc, false);
6928 break;
6930 case MO_CLOBBER:
6932 rtx loc = mo->u.loc;
6934 if (REG_P (loc))
6935 var_reg_delete (out, loc, true);
6936 else if (MEM_P (loc))
6937 var_mem_delete (out, loc, true);
6939 break;
6941 case MO_ADJUST:
6942 out->stack_adjust += mo->u.adjust;
6943 break;
6947 if (MAY_HAVE_DEBUG_INSNS)
6949 delete local_get_addr_cache;
6950 local_get_addr_cache = NULL;
6952 dataflow_set_equiv_regs (out);
6953 shared_hash_htab (out->vars)
6954 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6955 shared_hash_htab (out->vars)
6956 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6957 #if ENABLE_CHECKING
6958 shared_hash_htab (out->vars)
6959 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6960 #endif
6962 changed = dataflow_set_different (&old_out, out);
6963 dataflow_set_destroy (&old_out);
6964 return changed;
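/* In dataflow terms, compute_bb_dataflow implements the transfer
   function OUT(BB) = f_BB (IN(BB)): OUT starts as a copy of IN and
   each micro operation of BB is applied to it in order.  The return
   value tells the solver in vt_find_locations below whether OUT
   changed, i.e. whether BB's successors must be reconsidered.  */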
6967 /* Find the locations of variables in the whole function. */
6969 static bool
6970 vt_find_locations (void)
6972 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
6973 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
6974 bb_heap_t *fibheap_swap = NULL;
6975 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6976 basic_block bb;
6977 edge e;
6978 int *bb_order;
6979 int *rc_order;
6980 int i;
6981 int htabsz = 0;
6982 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6983 bool success = true;
6985 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6986 /* Compute the reverse completion order of a depth-first search of
6987 the CFG so that the dataflow analysis converges faster.  */
6988 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6989 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6990 pre_and_rev_post_order_compute (NULL, rc_order, false);
6991 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6992 bb_order[rc_order[i]] = i;
6993 free (rc_order);
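/* BB_ORDER[B] now holds the priority key of basic block B: blocks
   earlier in the reverse completion order get smaller keys, so
   extract_min on the heaps below tends to process a block's
   predecessors before the block itself.  */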
6995 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
6996 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
6997 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
6998 bitmap_clear (in_worklist);
7000 FOR_EACH_BB_FN (bb, cfun)
7001 pending->insert (bb_order[bb->index], bb);
7002 bitmap_ones (in_pending);
7004 while (success && !pending->empty ())
7006 fibheap_swap = pending;
7007 pending = worklist;
7008 worklist = fibheap_swap;
7009 sbitmap_swap = in_pending;
7010 in_pending = in_worklist;
7011 in_worklist = sbitmap_swap;
7013 bitmap_clear (visited);
7015 while (!worklist->empty ())
7017 bb = worklist->extract_min ();
7018 bitmap_clear_bit (in_worklist, bb->index);
7019 gcc_assert (!bitmap_bit_p (visited, bb->index));
7020 if (!bitmap_bit_p (visited, bb->index))
7022 bool changed;
7023 edge_iterator ei;
7024 int oldinsz, oldoutsz;
7026 bitmap_set_bit (visited, bb->index);
7028 if (VTI (bb)->in.vars)
7030 htabsz
7031 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7032 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7033 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7034 oldoutsz
7035 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7037 else
7038 oldinsz = oldoutsz = 0;
7040 if (MAY_HAVE_DEBUG_INSNS)
7042 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7043 bool first = true, adjust = false;
7045 /* Calculate the IN set as the intersection of
7046 predecessor OUT sets. */
7048 dataflow_set_clear (in);
7049 dst_can_be_shared = true;
7051 FOR_EACH_EDGE (e, ei, bb->preds)
7052 if (!VTI (e->src)->flooded)
7053 gcc_assert (bb_order[bb->index]
7054 <= bb_order[e->src->index]);
7055 else if (first)
7057 dataflow_set_copy (in, &VTI (e->src)->out);
7058 first_out = &VTI (e->src)->out;
7059 first = false;
7061 else
7063 dataflow_set_merge (in, &VTI (e->src)->out);
7064 adjust = true;
7067 if (adjust)
7069 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7070 #if ENABLE_CHECKING
7071 /* Merge and merge_adjust should keep entries in
7072 canonical order. */
7073 shared_hash_htab (in->vars)
7074 ->traverse <dataflow_set *,
7075 canonicalize_loc_order_check> (in);
7076 #endif
7077 if (dst_can_be_shared)
7079 shared_hash_destroy (in->vars);
7080 in->vars = shared_hash_copy (first_out->vars);
7084 VTI (bb)->flooded = true;
7086 else
7088 /* Calculate the IN set as the union of predecessor OUT sets.  */
7089 dataflow_set_clear (&VTI (bb)->in);
7090 FOR_EACH_EDGE (e, ei, bb->preds)
7091 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7094 changed = compute_bb_dataflow (bb);
7095 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7096 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7098 if (htabmax && htabsz > htabmax)
7100 if (MAY_HAVE_DEBUG_INSNS)
7101 inform (DECL_SOURCE_LOCATION (cfun->decl),
7102 "variable tracking size limit exceeded with "
7103 "-fvar-tracking-assignments, retrying without");
7104 else
7105 inform (DECL_SOURCE_LOCATION (cfun->decl),
7106 "variable tracking size limit exceeded");
7107 success = false;
7108 break;
7111 if (changed)
7113 FOR_EACH_EDGE (e, ei, bb->succs)
7115 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7116 continue;
7118 if (bitmap_bit_p (visited, e->dest->index))
7120 if (!bitmap_bit_p (in_pending, e->dest->index))
7122 /* Send E->DEST to next round. */
7123 bitmap_set_bit (in_pending, e->dest->index);
7124 pending->insert (bb_order[e->dest->index],
7125 e->dest);
7128 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7130 /* Add E->DEST to current round. */
7131 bitmap_set_bit (in_worklist, e->dest->index);
7132 worklist->insert (bb_order[e->dest->index],
7133 e->dest);
7138 if (dump_file)
7139 fprintf (dump_file,
7140 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7141 bb->index,
7142 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7143 oldinsz,
7144 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7145 oldoutsz,
7146 (int)worklist->nodes (), (int)pending->nodes (),
7147 htabsz);
7149 if (dump_file && (dump_flags & TDF_DETAILS))
7151 fprintf (dump_file, "BB %i IN:\n", bb->index);
7152 dump_dataflow_set (&VTI (bb)->in);
7153 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7154 dump_dataflow_set (&VTI (bb)->out);
7160 if (success && MAY_HAVE_DEBUG_INSNS)
7161 FOR_EACH_BB_FN (bb, cfun)
7162 gcc_assert (VTI (bb)->flooded);
7164 free (bb_order);
7165 delete worklist;
7166 delete pending;
7167 sbitmap_free (visited);
7168 sbitmap_free (in_worklist);
7169 sbitmap_free (in_pending);
7171 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7172 return success;
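/* With the CFG specifics elided, the double-worklist scheme above
   reduces to the following skeleton (illustrative only, not
   compiled):

     while (!pending->empty ())
       {
         swap (pending, worklist); swap (in_pending, in_worklist);
         while (!worklist->empty ())
           {
             bb = worklist->extract_min ();
             if (compute_bb_dataflow (bb))
               queue each successor of bb on WORKLIST if already
               visited in this pass, otherwise on PENDING;
           }
       }

   Deferring already-visited successors to PENDING bounds the work
   done per pass and keeps each pass close to the reverse completion
   order computed above.  */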
7175 /* Print the contents of LIST to the dump file.  */
7177 static void
7178 dump_attrs_list (attrs list)
7180 for (; list; list = list->next)
7182 if (dv_is_decl_p (list->dv))
7183 print_mem_expr (dump_file, dv_as_decl (list->dv));
7184 else
7185 print_rtl_single (dump_file, dv_as_value (list->dv));
7186 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7188 fprintf (dump_file, "\n");
7191 /* Print the information about variable *SLOT to dump file. */
7193 static int
7194 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7196 variable var = *slot;
7198 dump_var (var);
7200 /* Continue traversing the hash table. */
7201 return 1;
7204 /* Print the information about variable VAR to dump file. */
7206 static void
7207 dump_var (variable var)
7209 int i;
7210 location_chain node;
7212 if (dv_is_decl_p (var->dv))
7214 const_tree decl = dv_as_decl (var->dv);
7216 if (DECL_NAME (decl))
7218 fprintf (dump_file, " name: %s",
7219 IDENTIFIER_POINTER (DECL_NAME (decl)));
7220 if (dump_flags & TDF_UID)
7221 fprintf (dump_file, "D.%u", DECL_UID (decl));
7223 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7224 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7225 else
7226 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7227 fprintf (dump_file, "\n");
7229 else
7231 fputc (' ', dump_file);
7232 print_rtl_single (dump_file, dv_as_value (var->dv));
7235 for (i = 0; i < var->n_var_parts; i++)
7237 fprintf (dump_file, " offset %ld\n",
7238 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7239 for (node = var->var_part[i].loc_chain; node; node = node->next)
7241 fprintf (dump_file, " ");
7242 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7243 fprintf (dump_file, "[uninit]");
7244 print_rtl_single (dump_file, node->loc);
7249 /* Print the information about variables from hash table VARS to dump file. */
7251 static void
7252 dump_vars (variable_table_type *vars)
7254 if (vars->elements () > 0)
7256 fprintf (dump_file, "Variables:\n");
7257 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7261 /* Print the dataflow set SET to dump file. */
7263 static void
7264 dump_dataflow_set (dataflow_set *set)
7266 int i;
7268 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7269 set->stack_adjust);
7270 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7272 if (set->regs[i])
7274 fprintf (dump_file, "Reg %d:", i);
7275 dump_attrs_list (set->regs[i]);
7278 dump_vars (shared_hash_htab (set->vars));
7279 fprintf (dump_file, "\n");
7282 /* Print the IN and OUT sets for each basic block to dump file. */
7284 static void
7285 dump_dataflow_sets (void)
7287 basic_block bb;
7289 FOR_EACH_BB_FN (bb, cfun)
7291 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7292 fprintf (dump_file, "IN:\n");
7293 dump_dataflow_set (&VTI (bb)->in);
7294 fprintf (dump_file, "OUT:\n");
7295 dump_dataflow_set (&VTI (bb)->out);
7299 /* Return the variable for DV in dropped_values, inserting one if
7300 requested with INSERT. */
7302 static inline variable
7303 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7305 variable_def **slot;
7306 variable empty_var;
7307 onepart_enum_t onepart;
7309 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7311 if (!slot)
7312 return NULL;
7314 if (*slot)
7315 return *slot;
7317 gcc_checking_assert (insert == INSERT);
7319 onepart = dv_onepart_p (dv);
7321 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7323 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7324 empty_var->dv = dv;
7325 empty_var->refcount = 1;
7326 empty_var->n_var_parts = 0;
7327 empty_var->onepart = onepart;
7328 empty_var->in_changed_variables = false;
7329 empty_var->var_part[0].loc_chain = NULL;
7330 empty_var->var_part[0].cur_loc = NULL;
7331 VAR_LOC_1PAUX (empty_var) = NULL;
7332 set_dv_changed (dv, true);
7334 *slot = empty_var;
7336 return empty_var;
7339 /* Recover the one-part aux from dropped_values. */
7341 static struct onepart_aux *
7342 recover_dropped_1paux (variable var)
7344 variable dvar;
7346 gcc_checking_assert (var->onepart);
7348 if (VAR_LOC_1PAUX (var))
7349 return VAR_LOC_1PAUX (var);
7351 if (var->onepart == ONEPART_VDECL)
7352 return NULL;
7354 dvar = variable_from_dropped (var->dv, NO_INSERT);
7356 if (!dvar)
7357 return NULL;
7359 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7360 VAR_LOC_1PAUX (dvar) = NULL;
7362 return VAR_LOC_1PAUX (var);
7365 /* Add variable VAR to the hash table of changed variables and
7366 if it has no locations delete it from SET's hash table. */
7368 static void
7369 variable_was_changed (variable var, dataflow_set *set)
7371 hashval_t hash = dv_htab_hash (var->dv);
7373 if (emit_notes)
7375 variable_def **slot;
7377 /* Remember this decl or VALUE has been added to changed_variables. */
7378 set_dv_changed (var->dv, true);
7380 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7382 if (*slot)
7384 variable old_var = *slot;
7385 gcc_assert (old_var->in_changed_variables);
7386 old_var->in_changed_variables = false;
7387 if (var != old_var && var->onepart)
7389 /* Restore the auxiliary info from an empty variable
7390 previously created for changed_variables, so it is
7391 not lost. */
7392 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7393 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7394 VAR_LOC_1PAUX (old_var) = NULL;
7396 variable_htab_free (*slot);
7399 if (set && var->n_var_parts == 0)
7401 onepart_enum_t onepart = var->onepart;
7402 variable empty_var = NULL;
7403 variable_def **dslot = NULL;
7405 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7407 dslot = dropped_values->find_slot_with_hash (var->dv,
7408 dv_htab_hash (var->dv),
7409 INSERT);
7410 empty_var = *dslot;
7412 if (empty_var)
7414 gcc_checking_assert (!empty_var->in_changed_variables);
7415 if (!VAR_LOC_1PAUX (var))
7417 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7418 VAR_LOC_1PAUX (empty_var) = NULL;
7420 else
7421 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7425 if (!empty_var)
7427 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7428 empty_var->dv = var->dv;
7429 empty_var->refcount = 1;
7430 empty_var->n_var_parts = 0;
7431 empty_var->onepart = onepart;
7432 if (dslot)
7434 empty_var->refcount++;
7435 *dslot = empty_var;
7438 else
7439 empty_var->refcount++;
7440 empty_var->in_changed_variables = true;
7441 *slot = empty_var;
7442 if (onepart)
7444 empty_var->var_part[0].loc_chain = NULL;
7445 empty_var->var_part[0].cur_loc = NULL;
7446 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7447 VAR_LOC_1PAUX (var) = NULL;
7449 goto drop_var;
7451 else
7453 if (var->onepart && !VAR_LOC_1PAUX (var))
7454 recover_dropped_1paux (var);
7455 var->refcount++;
7456 var->in_changed_variables = true;
7457 *slot = var;
7460 else
7462 gcc_assert (set);
7463 if (var->n_var_parts == 0)
7465 variable_def **slot;
7467 drop_var:
7468 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7469 if (slot)
7471 if (shared_hash_shared (set->vars))
7472 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7473 NO_INSERT);
7474 shared_hash_htab (set->vars)->clear_slot (slot);
7480 /* Look for the index in VAR->var_part corresponding to OFFSET.
7481 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7482 referenced int will be set to the index that the part has or should
7483 have, if it should be inserted. */
7485 static inline int
7486 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7487 int *insertion_point)
7489 int pos, low, high;
7491 if (var->onepart)
7493 if (offset != 0)
7494 return -1;
7496 if (insertion_point)
7497 *insertion_point = 0;
7499 return var->n_var_parts - 1;
7502 /* Find the location part. */
7503 low = 0;
7504 high = var->n_var_parts;
7505 while (low != high)
7507 pos = (low + high) / 2;
7508 if (VAR_PART_OFFSET (var, pos) < offset)
7509 low = pos + 1;
7510 else
7511 high = pos;
7513 pos = low;
7515 if (insertion_point)
7516 *insertion_point = pos;
7518 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7519 return pos;
7521 return -1;
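/* Worker for set_variable_part.  Record location LOC for the
   variable part (DV, OFFSET) in *SLOT of SET, keeping the location
   chain in canonical order and unsharing the variable first if it is
   shared.  Return SLOT, which may have been updated by unsharing.  */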
7524 static variable_def **
7525 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7526 decl_or_value dv, HOST_WIDE_INT offset,
7527 enum var_init_status initialized, rtx set_src)
7529 int pos;
7530 location_chain node, next;
7531 location_chain *nextp;
7532 variable var;
7533 onepart_enum_t onepart;
7535 var = *slot;
7537 if (var)
7538 onepart = var->onepart;
7539 else
7540 onepart = dv_onepart_p (dv);
7542 gcc_checking_assert (offset == 0 || !onepart);
7543 gcc_checking_assert (loc != dv_as_opaque (dv));
7545 if (! flag_var_tracking_uninit)
7546 initialized = VAR_INIT_STATUS_INITIALIZED;
7548 if (!var)
7550 /* Create new variable information. */
7551 var = (variable) pool_alloc (onepart_pool (onepart));
7552 var->dv = dv;
7553 var->refcount = 1;
7554 var->n_var_parts = 1;
7555 var->onepart = onepart;
7556 var->in_changed_variables = false;
7557 if (var->onepart)
7558 VAR_LOC_1PAUX (var) = NULL;
7559 else
7560 VAR_PART_OFFSET (var, 0) = offset;
7561 var->var_part[0].loc_chain = NULL;
7562 var->var_part[0].cur_loc = NULL;
7563 *slot = var;
7564 pos = 0;
7565 nextp = &var->var_part[0].loc_chain;
7567 else if (onepart)
7569 int r = -1, c = 0;
7571 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7573 pos = 0;
7575 if (GET_CODE (loc) == VALUE)
7577 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7578 nextp = &node->next)
7579 if (GET_CODE (node->loc) == VALUE)
7581 if (node->loc == loc)
7583 r = 0;
7584 break;
7586 if (canon_value_cmp (node->loc, loc))
7587 c++;
7588 else
7590 r = 1;
7591 break;
7594 else if (REG_P (node->loc) || MEM_P (node->loc))
7595 c++;
7596 else
7598 r = 1;
7599 break;
7602 else if (REG_P (loc))
7604 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7605 nextp = &node->next)
7606 if (REG_P (node->loc))
7608 if (REGNO (node->loc) < REGNO (loc))
7609 c++;
7610 else
7612 if (REGNO (node->loc) == REGNO (loc))
7613 r = 0;
7614 else
7615 r = 1;
7616 break;
7619 else
7621 r = 1;
7622 break;
7625 else if (MEM_P (loc))
7627 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7628 nextp = &node->next)
7629 if (REG_P (node->loc))
7630 c++;
7631 else if (MEM_P (node->loc))
7633 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7634 break;
7635 else
7636 c++;
7638 else
7640 r = 1;
7641 break;
7644 else
7645 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7646 nextp = &node->next)
7647 if ((r = loc_cmp (node->loc, loc)) >= 0)
7648 break;
7649 else
7650 c++;
7652 if (r == 0)
7653 return slot;
7655 if (shared_var_p (var, set->vars))
7657 slot = unshare_variable (set, slot, var, initialized);
7658 var = *slot;
7659 for (nextp = &var->var_part[0].loc_chain; c;
7660 nextp = &(*nextp)->next)
7661 c--;
7662 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7665 else
7667 int inspos = 0;
7669 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7671 pos = find_variable_location_part (var, offset, &inspos);
7673 if (pos >= 0)
7675 node = var->var_part[pos].loc_chain;
7677 if (node
7678 && ((REG_P (node->loc) && REG_P (loc)
7679 && REGNO (node->loc) == REGNO (loc))
7680 || rtx_equal_p (node->loc, loc)))
7682 /* LOC is at the beginning of the chain, so we have nothing
7683 to do.  */
7684 if (node->init < initialized)
7685 node->init = initialized;
7686 if (set_src != NULL)
7687 node->set_src = set_src;
7689 return slot;
7691 else
7693 /* We have to make a copy of a shared variable. */
7694 if (shared_var_p (var, set->vars))
7696 slot = unshare_variable (set, slot, var, initialized);
7697 var = *slot;
7701 else
7703 /* We have not found the location part, so a new one will be created.  */
7705 /* We have to make a copy of the shared variable. */
7706 if (shared_var_p (var, set->vars))
7708 slot = unshare_variable (set, slot, var, initialized);
7709 var = *slot;
7712 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7713 thus there are at most MAX_VAR_PARTS different offsets.  */
7714 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7715 && (!var->n_var_parts || !onepart));
7717 /* We have to move the elements of the array starting at index
7718 inspos to the next position.  */
7719 for (pos = var->n_var_parts; pos > inspos; pos--)
7720 var->var_part[pos] = var->var_part[pos - 1];
7722 var->n_var_parts++;
7723 gcc_checking_assert (!onepart);
7724 VAR_PART_OFFSET (var, pos) = offset;
7725 var->var_part[pos].loc_chain = NULL;
7726 var->var_part[pos].cur_loc = NULL;
7729 /* Delete the location from the list. */
7730 nextp = &var->var_part[pos].loc_chain;
7731 for (node = var->var_part[pos].loc_chain; node; node = next)
7733 next = node->next;
7734 if ((REG_P (node->loc) && REG_P (loc)
7735 && REGNO (node->loc) == REGNO (loc))
7736 || rtx_equal_p (node->loc, loc))
7738 /* Save these values, to assign to the new node, before
7739 deleting this one. */
7740 if (node->init > initialized)
7741 initialized = node->init;
7742 if (node->set_src != NULL && set_src == NULL)
7743 set_src = node->set_src;
7744 if (var->var_part[pos].cur_loc == node->loc)
7745 var->var_part[pos].cur_loc = NULL;
7746 pool_free (loc_chain_pool, node);
7747 *nextp = next;
7748 break;
7750 else
7751 nextp = &node->next;
7754 nextp = &var->var_part[pos].loc_chain;
7757 /* Add the location to the beginning. */
7758 node = (location_chain) pool_alloc (loc_chain_pool);
7759 node->loc = loc;
7760 node->init = initialized;
7761 node->set_src = set_src;
7762 node->next = *nextp;
7763 *nextp = node;
7765 /* If no location has been emitted for this variable part yet, mark it as changed so that one will be.  */
7766 if (var->var_part[pos].cur_loc == NULL)
7767 variable_was_changed (var, set);
7769 return slot;
7772 /* Set the part of the variable's location in the dataflow set SET.
7773 The variable part is specified by the variable's declaration in DV
7774 and offset OFFSET, and the part's location by LOC.  IOPT should be
7775 NO_INSERT if the variable is known to be in SET already and the
7776 variable hash table must not be resized, and INSERT otherwise.  */
7778 static void
7779 set_variable_part (dataflow_set *set, rtx loc,
7780 decl_or_value dv, HOST_WIDE_INT offset,
7781 enum var_init_status initialized, rtx set_src,
7782 enum insert_option iopt)
7784 variable_def **slot;
7786 if (iopt == NO_INSERT)
7787 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7788 else
7790 slot = shared_hash_find_slot (set->vars, dv);
7791 if (!slot)
7792 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7794 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7797 /* Remove all recorded register locations for the given variable part
7798 from dataflow set SET, except for those that are identical to LOC.
7799 The variable part is specified by its SET->vars slot SLOT and
7800 offset OFFSET.  */
7802 static variable_def **
7803 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7804 HOST_WIDE_INT offset, rtx set_src)
7806 variable var = *slot;
7807 int pos = find_variable_location_part (var, offset, NULL);
7809 if (pos >= 0)
7811 location_chain node, next;
7813 /* Remove the register locations from the dataflow set. */
7814 next = var->var_part[pos].loc_chain;
7815 for (node = next; node; node = next)
7817 next = node->next;
7818 if (node->loc != loc
7819 && (!flag_var_tracking_uninit
7820 || !set_src
7821 || MEM_P (set_src)
7822 || !rtx_equal_p (set_src, node->set_src)))
7824 if (REG_P (node->loc))
7826 attrs anode, anext;
7827 attrs *anextp;
7829 /* Remove the variable part from the register's
7830 list, but preserve any other variable parts
7831 that might be regarded as live in that same
7832 register. */
7833 anextp = &set->regs[REGNO (node->loc)];
7834 for (anode = *anextp; anode; anode = anext)
7836 anext = anode->next;
7837 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7838 && anode->offset == offset)
7840 pool_free (attrs_pool, anode);
7841 *anextp = anext;
7843 else
7844 anextp = &anode->next;
7848 slot = delete_slot_part (set, node->loc, slot, offset);
7853 return slot;
7856 /* Remove all recorded register locations for the given variable part
7857 from dataflow set SET, except for those that are identical to LOC.
7858 The variable part is specified by the variable's declaration or
7859 value DV and offset OFFSET.  */
7861 static void
7862 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7863 HOST_WIDE_INT offset, rtx set_src)
7865 variable_def **slot;
7867 if (!dv_as_opaque (dv)
7868 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7869 return;
7871 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7872 if (!slot)
7873 return;
7875 clobber_slot_part (set, loc, slot, offset, set_src);
7878 /* Delete the part of variable's location from dataflow set SET. The
7879 variable part is specified by its SET->vars slot SLOT and offset
7880 OFFSET and the part's location by LOC. */
7882 static variable_def **
7883 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7884 HOST_WIDE_INT offset)
7886 variable var = *slot;
7887 int pos = find_variable_location_part (var, offset, NULL);
7889 if (pos >= 0)
7891 location_chain node, next;
7892 location_chain *nextp;
7893 bool changed;
7894 rtx cur_loc;
7896 if (shared_var_p (var, set->vars))
7898 /* If the variable contains the location part we have to
7899 make a copy of the variable. */
7900 for (node = var->var_part[pos].loc_chain; node;
7901 node = node->next)
7903 if ((REG_P (node->loc) && REG_P (loc)
7904 && REGNO (node->loc) == REGNO (loc))
7905 || rtx_equal_p (node->loc, loc))
7907 slot = unshare_variable (set, slot, var,
7908 VAR_INIT_STATUS_UNKNOWN);
7909 var = *slot;
7910 break;
7915 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7916 cur_loc = VAR_LOC_FROM (var);
7917 else
7918 cur_loc = var->var_part[pos].cur_loc;
7920 /* Delete the location part. */
7921 changed = false;
7922 nextp = &var->var_part[pos].loc_chain;
7923 for (node = *nextp; node; node = next)
7925 next = node->next;
7926 if ((REG_P (node->loc) && REG_P (loc)
7927 && REGNO (node->loc) == REGNO (loc))
7928 || rtx_equal_p (node->loc, loc))
7930 /* If we have deleted the location which was last emitted,
7931 we have to emit a new location, so add the variable to the
7932 set of changed variables.  */
7933 if (cur_loc == node->loc)
7935 changed = true;
7936 var->var_part[pos].cur_loc = NULL;
7937 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7938 VAR_LOC_FROM (var) = NULL;
7940 pool_free (loc_chain_pool, node);
7941 *nextp = next;
7942 break;
7944 else
7945 nextp = &node->next;
7948 if (var->var_part[pos].loc_chain == NULL)
7950 changed = true;
7951 var->n_var_parts--;
7952 while (pos < var->n_var_parts)
7954 var->var_part[pos] = var->var_part[pos + 1];
7955 pos++;
7958 if (changed)
7959 variable_was_changed (var, set);
7962 return slot;
7965 /* Delete the part of variable's location from dataflow set SET. The
7966 variable part is specified by variable's declaration or value DV
7967 and offset OFFSET and the part's location by LOC. */
7969 static void
7970 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7971 HOST_WIDE_INT offset)
7973 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7974 if (!slot)
7975 return;
7977 delete_slot_part (set, loc, slot, offset);
7981 /* Structure for passing some other parameters to function
7982 vt_expand_loc_callback. */
7983 struct expand_loc_callback_data
7985 /* The variables and values active at this point. */
7986 variable_table_type *vars;
7988 /* Stack of values and debug_exprs under expansion, and their
7989 children. */
7990 auto_vec<rtx, 4> expanding;
7992 /* Stack of values and debug_exprs whose expansion hit recursion
7993 cycles. They will have VALUE_RECURSED_INTO marked when added to
7994 this list. This flag will be cleared if any of its dependencies
7995 resolves to a valid location. So, if the flag remains set at the
7996 end of the search, we know no valid location for this one can
7997 possibly exist. */
7998 auto_vec<rtx, 4> pending;
8000 /* The maximum depth among the sub-expressions under expansion.
8001 Zero indicates no expansion so far. */
8002 expand_depth depth;
8005 /* Allocate the one-part auxiliary data structure for VAR, with enough
8006 room for COUNT dependencies. */
8008 static void
8009 loc_exp_dep_alloc (variable var, int count)
8011 size_t allocsize;
8013 gcc_checking_assert (var->onepart);
8015 /* We can be called with COUNT == 0 to allocate the data structure
8016 without any dependencies, e.g. for the backlinks only. However,
8017 if we are specifying a COUNT, then the dependency list must have
8018 been emptied before. It would be possible to adjust pointers or
8019 force it empty here, but this is better done at an earlier point
8020 in the algorithm, so we instead leave an assertion to catch
8021 errors. */
8022 gcc_checking_assert (!count
8023 || VAR_LOC_DEP_VEC (var) == NULL
8024 || VAR_LOC_DEP_VEC (var)->is_empty ());
8026 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8027 return;
8029 allocsize = offsetof (struct onepart_aux, deps)
8030 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8032 if (VAR_LOC_1PAUX (var))
8034 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8035 VAR_LOC_1PAUX (var), allocsize);
8036 /* If the reallocation moves the onepaux structure, the
8037 back-pointer to BACKLINKS in the first list member will still
8038 point to its old location. Adjust it. */
8039 if (VAR_LOC_DEP_LST (var))
8040 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8042 else
8044 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8045 *VAR_LOC_DEP_LSTP (var) = NULL;
8046 VAR_LOC_FROM (var) = NULL;
8047 VAR_LOC_DEPTH (var).complexity = 0;
8048 VAR_LOC_DEPTH (var).entryvals = 0;
8050 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8053 /* Remove all entries from the vector of active dependencies of VAR,
8054 removing them from the back-links lists too. */
8056 static void
8057 loc_exp_dep_clear (variable var)
8059 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8061 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8062 if (led->next)
8063 led->next->pprev = led->pprev;
8064 if (led->pprev)
8065 *led->pprev = led->next;
8066 VAR_LOC_DEP_VEC (var)->pop ();
8070 /* Insert an active dependency from VAR on X to the vector of
8071 dependencies, and add the corresponding back-link to X's list of
8072 back-links in VARS. */
8074 static void
8075 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8077 decl_or_value dv;
8078 variable xvar;
8079 loc_exp_dep *led;
8081 dv = dv_from_rtx (x);
8083 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8084 an additional look up? */
8085 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8087 if (!xvar)
8089 xvar = variable_from_dropped (dv, NO_INSERT);
8090 gcc_checking_assert (xvar);
8093 /* No point in adding the same backlink more than once. This may
8094 arise if say the same value appears in two complex expressions in
8095 the same loc_list, or even more than once in a single
8096 expression. */
8097 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8098 return;
8100 if (var->onepart == NOT_ONEPART)
8101 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8102 else
8104 loc_exp_dep empty;
8105 memset (&empty, 0, sizeof (empty));
8106 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8107 led = &VAR_LOC_DEP_VEC (var)->last ();
8109 led->dv = var->dv;
8110 led->value = x;
8112 loc_exp_dep_alloc (xvar, 0);
8113 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8114 led->next = *led->pprev;
8115 if (led->next)
8116 led->next->pprev = &led->next;
8117 *led->pprev = led;
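/* The backlinks form an intrusive doubly-linked list: LED->pprev
   points at whatever pointer currently addresses LED (the list head
   in XVAR's onepart_aux, or the previous entry's NEXT field), so an
   entry can unlink itself in O(1) without knowing its owner.  */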
8120 /* Create active dependencies of VAR on COUNT values starting at
8121 VALUE, and corresponding back-links to the entries in VARS. Return
8122 true if we found any pending-recursion results. */
8124 static bool
8125 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8126 variable_table_type *vars)
8128 bool pending_recursion = false;
8130 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8131 || VAR_LOC_DEP_VEC (var)->is_empty ());
8133 /* Set up all dependencies from last_child (as set up at the end of
8134 the loop above) to the end. */
8135 loc_exp_dep_alloc (var, count);
8137 while (count--)
8139 rtx x = *value++;
8141 if (!pending_recursion)
8142 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8144 loc_exp_insert_dep (var, x, vars);
8147 return pending_recursion;
8150 /* Notify the back-links of IVAR that are pending recursion that we
8151 have found a non-NIL value for it, so they are cleared for another
8152 attempt to compute a current location. */
8154 static void
8155 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8157 loc_exp_dep *led, *next;
8159 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8161 decl_or_value dv = led->dv;
8162 variable var;
8164 next = led->next;
8166 if (dv_is_value_p (dv))
8168 rtx value = dv_as_value (dv);
8170 /* If we have already resolved it, leave it alone. */
8171 if (!VALUE_RECURSED_INTO (value))
8172 continue;
8174 /* Check that VALUE_RECURSED_INTO, true from the test above,
8175 implies NO_LOC_P. */
8176 gcc_checking_assert (NO_LOC_P (value));
8178 /* We won't notify variables that are being expanded,
8179 because their dependency list is cleared before
8180 recursing. */
8181 NO_LOC_P (value) = false;
8182 VALUE_RECURSED_INTO (value) = false;
8184 gcc_checking_assert (dv_changed_p (dv));
8186 else
8188 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8189 if (!dv_changed_p (dv))
8190 continue;
8193 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8195 if (!var)
8196 var = variable_from_dropped (dv, NO_INSERT);
8198 if (var)
8199 notify_dependents_of_resolved_value (var, vars);
8201 if (next)
8202 next->pprev = led->pprev;
8203 if (led->pprev)
8204 *led->pprev = next;
8205 led->next = NULL;
8206 led->pprev = NULL;
8210 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8211 int max_depth, void *data);
8213 /* Return the combined depth, when one sub-expression evaluated to
8214 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8216 static inline expand_depth
8217 update_depth (expand_depth saved_depth, expand_depth best_depth)
8219 /* If we didn't find anything, stick with what we had. */
8220 if (!best_depth.complexity)
8221 return saved_depth;
8223 /* If we hadn't found anything, use the depth of the current
8224 expression. Do NOT add one extra level, we want to compute the
8225 maximum depth among sub-expressions. We'll increment it later,
8226 if appropriate. */
8227 if (!saved_depth.complexity)
8228 return best_depth;
8230 /* Combine the entryval count so that regardless of which one we
8231 return, the entryval count is accurate. */
8232 best_depth.entryvals = saved_depth.entryvals
8233 = best_depth.entryvals + saved_depth.entryvals;
8235 if (saved_depth.complexity < best_depth.complexity)
8236 return best_depth;
8237 else
8238 return saved_depth;
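/* For example, combining a sub-expression that expanded with depth
   { complexity 3, entryvals 1 } and a saved depth of { 2, 1 } yields
   { 3, 2 }: the larger complexity wins and the ENTRY_VALUE counts
   are summed.  */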
8241 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8242 DATA for cselib expand callback. If PENDRECP is given, indicate in
8243 it whether any sub-expression couldn't be fully evaluated because
8244 it is pending recursion resolution. */
8246 static inline rtx
8247 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8249 struct expand_loc_callback_data *elcd
8250 = (struct expand_loc_callback_data *) data;
8251 location_chain loc, next;
8252 rtx result = NULL;
8253 int first_child, result_first_child, last_child;
8254 bool pending_recursion;
8255 rtx loc_from = NULL;
8256 struct elt_loc_list *cloc = NULL;
8257 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8258 int wanted_entryvals, found_entryvals = 0;
8260 /* Clear all backlinks pointing at this, so that we're not notified
8261 while we're active. */
8262 loc_exp_dep_clear (var);
8264 retry:
8265 if (var->onepart == ONEPART_VALUE)
8267 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8269 gcc_checking_assert (cselib_preserved_value_p (val));
8271 cloc = val->locs;
8274 first_child = result_first_child = last_child
8275 = elcd->expanding.length ();
8277 wanted_entryvals = found_entryvals;
8279 /* Attempt to expand each available location in turn. */
8280 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8281 loc || cloc; loc = next)
8283 result_first_child = last_child;
8285 if (!loc)
8287 loc_from = cloc->loc;
8288 next = loc;
8289 cloc = cloc->next;
8290 if (unsuitable_loc (loc_from))
8291 continue;
8293 else
8295 loc_from = loc->loc;
8296 next = loc->next;
8299 gcc_checking_assert (!unsuitable_loc (loc_from));
8301 elcd->depth.complexity = elcd->depth.entryvals = 0;
8302 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8303 vt_expand_loc_callback, data);
8304 last_child = elcd->expanding.length ();
8306 if (result)
8308 depth = elcd->depth;
8310 gcc_checking_assert (depth.complexity
8311 || result_first_child == last_child);
8313 if (last_child - result_first_child != 1)
8315 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8316 depth.entryvals++;
8317 depth.complexity++;
8320 if (depth.complexity <= EXPR_USE_DEPTH)
8322 if (depth.entryvals <= wanted_entryvals)
8323 break;
8324 else if (!found_entryvals || depth.entryvals < found_entryvals)
8325 found_entryvals = depth.entryvals;
8328 result = NULL;
8331 /* Set it up in case we leave the loop. */
8332 depth.complexity = depth.entryvals = 0;
8333 loc_from = NULL;
8334 result_first_child = first_child;
8337 if (!loc_from && wanted_entryvals < found_entryvals)
8339 /* We found entries with ENTRY_VALUEs and skipped them. Since
8340 we could not find any expansions without ENTRY_VALUEs, but we
8341 found at least one with them, go back and get an entry with
8342 the minimum number ENTRY_VALUE count that we found. We could
8343 avoid looping, but since each sub-loc is already resolved,
8344 the re-expansion should be trivial. ??? Should we record all
8345 attempted locs as dependencies, so that we retry the
8346 expansion should any of them change, in the hope it can give
8347 us a new entry without an ENTRY_VALUE? */
8348 elcd->expanding.truncate (first_child);
8349 goto retry;
8352 /* Register all encountered dependencies as active. */
8353 pending_recursion = loc_exp_dep_set
8354 (var, result, elcd->expanding.address () + result_first_child,
8355 last_child - result_first_child, elcd->vars);
8357 elcd->expanding.truncate (first_child);
8359 /* Record where the expansion came from. */
8360 gcc_checking_assert (!result || !pending_recursion);
8361 VAR_LOC_FROM (var) = loc_from;
8362 VAR_LOC_DEPTH (var) = depth;
8364 gcc_checking_assert (!depth.complexity == !result);
8366 elcd->depth = update_depth (saved_depth, depth);
8368 /* Indicate whether any of the dependencies are pending recursion
8369 resolution. */
8370 if (pendrecp)
8371 *pendrecp = pending_recursion;
8373 if (!pendrecp || !pending_recursion)
8374 var->var_part[0].cur_loc = result;
8376 return result;
8379 /* Callback for cselib_expand_value, that looks for expressions
8380 holding the value in the var-tracking hash tables. Return X for
8381 standard processing, anything else is to be used as-is. */
8383 static rtx
8384 vt_expand_loc_callback (rtx x, bitmap regs,
8385 int max_depth ATTRIBUTE_UNUSED,
8386 void *data)
8388 struct expand_loc_callback_data *elcd
8389 = (struct expand_loc_callback_data *) data;
8390 decl_or_value dv;
8391 variable var;
8392 rtx result, subreg;
8393 bool pending_recursion = false;
8394 bool from_empty = false;
8396 switch (GET_CODE (x))
8398 case SUBREG:
8399 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8400 EXPR_DEPTH,
8401 vt_expand_loc_callback, data);
8403 if (!subreg)
8404 return NULL;
8406 result = simplify_gen_subreg (GET_MODE (x), subreg,
8407 GET_MODE (SUBREG_REG (x)),
8408 SUBREG_BYTE (x));
8410 /* Invalid SUBREGs are ok in debug info. ??? We could try
8411 alternate expansions for the VALUE as well. */
8412 if (!result)
8413 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8415 return result;
8417 case DEBUG_EXPR:
8418 case VALUE:
8419 dv = dv_from_rtx (x);
8420 break;
8422 default:
8423 return x;
8426 elcd->expanding.safe_push (x);
8428 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8429 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8431 if (NO_LOC_P (x))
8433 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8434 return NULL;
8437 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8439 if (!var)
8441 from_empty = true;
8442 var = variable_from_dropped (dv, INSERT);
8445 gcc_checking_assert (var);
8447 if (!dv_changed_p (dv))
8449 gcc_checking_assert (!NO_LOC_P (x));
8450 gcc_checking_assert (var->var_part[0].cur_loc);
8451 gcc_checking_assert (VAR_LOC_1PAUX (var));
8452 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8454 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8456 return var->var_part[0].cur_loc;
8459 VALUE_RECURSED_INTO (x) = true;
8460 /* This is tentative, but it makes some tests simpler. */
8461 NO_LOC_P (x) = true;
8463 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8465 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8467 if (pending_recursion)
8469 gcc_checking_assert (!result);
8470 elcd->pending.safe_push (x);
8472 else
8474 NO_LOC_P (x) = !result;
8475 VALUE_RECURSED_INTO (x) = false;
8476 set_dv_changed (dv, false);
8478 if (result)
8479 notify_dependents_of_resolved_value (var, elcd->vars);
8482 return result;
8485 /* While expanding variables, we may encounter recursion cycles
8486 because of mutual (possibly indirect) dependencies between two
8487 particular variables (or values), say A and B. If we're trying to
8488 expand A when we get to B, which in turn attempts to expand A, if
8489 we can't find any other expansion for B, we'll add B to this
8490 pending-recursion stack, and tentatively return NULL for its
8491 location. This tentative value will be used for any other
8492 occurrences of B, unless A gets some other location, in which case
8493 it will notify B that it is worth another try at computing a
8494 location for it, and it will use the location computed for A then.
8495 At the end of the expansion, the tentative NULL locations become
8496 final for all members of PENDING that didn't get a notification.
8497 This function performs this finalization of NULL locations. */
8499 static void
8500 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8502 while (!pending->is_empty ())
8504 rtx x = pending->pop ();
8505 decl_or_value dv;
8507 if (!VALUE_RECURSED_INTO (x))
8508 continue;
8510 gcc_checking_assert (NO_LOC_P (x));
8511 VALUE_RECURSED_INTO (x) = false;
8512 dv = dv_from_rtx (x);
8513 gcc_checking_assert (dv_changed_p (dv));
8514 set_dv_changed (dv, false);
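/* Illustrative sketch (hypothetical, not part of this file): the
   pending-recursion scheme described above, reduced to a toy
   dependency graph.  All names here are made up.  A node reached
   again while it is being expanded is pushed on PENDING with a
   tentative "no location"; that answer becomes final unless resolving
   its dependency later retries it, mirroring
   notify_dependents_of_resolved_value.  */
#if 0
struct toy_node
{
  struct toy_node *dep;	/* Single dependency; may close a cycle.  */
  int visiting, resolved, no_loc;
};

static int
toy_expand (struct toy_node *n, struct toy_node **pending, int *npending)
{
  if (n->resolved)
    return !n->no_loc;
  if (n->visiting)
    {
      /* Recursion cycle: defer N, tentatively location-less.  */
      n->no_loc = 1;
      pending[(*npending)++] = n;
      return 0;
    }
  n->visiting = 1;
  int ok = !n->dep || toy_expand (n->dep, pending, npending);
  n->visiting = 0;
  n->resolved = 1;
  n->no_loc = !ok;	/* Final unless a later resolution retries N.  */
  return ok;
}
#endif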
8518 /* Initialize expand_loc_callback_data D with variable hash table V.
8519 It must be a macro because of alloca (vec stack). */
8520 #define INIT_ELCD(d, v) \
8521 do \
8523 (d).vars = (v); \
8524 (d).depth.complexity = (d).depth.entryvals = 0; \
8526 while (0)
8527 /* Finalize expand_loc_callback_data D, resolved to location L. */
8528 #define FINI_ELCD(d, l) \
8529 do \
8531 resolve_expansions_pending_recursion (&(d).pending); \
8532 (d).pending.release (); \
8533 (d).expanding.release (); \
8535 if ((l) && MEM_P (l)) \
8536 (l) = targetm.delegitimize_address (l); \
8538 while (0)
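/* INIT_ELCD and FINI_ELCD must be used as a matched pair within a
   single frame: FINI_ELCD finalizes the entries queued for pending
   recursion and releases the vectors that expansion filled in.  */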
8540 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8541 equivalences in VARS, updating their CUR_LOCs in the process. */
8543 static rtx
8544 vt_expand_loc (rtx loc, variable_table_type *vars)
8546 struct expand_loc_callback_data data;
8547 rtx result;
8549 if (!MAY_HAVE_DEBUG_INSNS)
8550 return loc;
8552 INIT_ELCD (data, vars);
8554 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8555 vt_expand_loc_callback, &data);
8557 FINI_ELCD (data, result);
8559 return result;
8562 /* Expand the one-part VARiable to a location, using the equivalences
8563 in VARS, updating their CUR_LOCs in the process. */
8565 static rtx
8566 vt_expand_1pvar (variable var, variable_table_type *vars)
8568 struct expand_loc_callback_data data;
8569 rtx loc;
8571 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8573 if (!dv_changed_p (var->dv))
8574 return var->var_part[0].cur_loc;
8576 INIT_ELCD (data, vars);
8578 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8580 gcc_checking_assert (data.expanding.is_empty ());
8582 FINI_ELCD (data, loc);
8584 return loc;
8587 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8588 additional parameters: WHERE specifies whether the note shall be emitted
8589 before or after instruction INSN. */
8591 static int
8592 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8594 variable var = *varp;
8595 rtx_insn *insn = data->insn;
8596 enum emit_note_where where = data->where;
8597 variable_table_type *vars = data->vars;
8598 rtx_note *note;
8599 rtx note_vl;
8600 int i, j, n_var_parts;
8601 bool complete;
8602 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8603 HOST_WIDE_INT last_limit;
8604 tree type_size_unit;
8605 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8606 rtx loc[MAX_VAR_PARTS];
8607 tree decl;
8608 location_chain lc;
8610 gcc_checking_assert (var->onepart == NOT_ONEPART
8611 || var->onepart == ONEPART_VDECL);
8613 decl = dv_as_decl (var->dv);
8615 complete = true;
8616 last_limit = 0;
8617 n_var_parts = 0;
8618 if (!var->onepart)
8619 for (i = 0; i < var->n_var_parts; i++)
8620 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8621 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8622 for (i = 0; i < var->n_var_parts; i++)
8624 machine_mode mode, wider_mode;
8625 rtx loc2;
8626 HOST_WIDE_INT offset;
8628 if (i == 0 && var->onepart)
8630 gcc_checking_assert (var->n_var_parts == 1);
8631 offset = 0;
8632 initialized = VAR_INIT_STATUS_INITIALIZED;
8633 loc2 = vt_expand_1pvar (var, vars);
8635 else
8637 if (last_limit < VAR_PART_OFFSET (var, i))
8639 complete = false;
8640 break;
8642 else if (last_limit > VAR_PART_OFFSET (var, i))
8643 continue;
8644 offset = VAR_PART_OFFSET (var, i);
8645 loc2 = var->var_part[i].cur_loc;
8646 if (loc2 && GET_CODE (loc2) == MEM
8647 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8649 rtx depval = XEXP (loc2, 0);
8651 loc2 = vt_expand_loc (loc2, vars);
8653 if (loc2)
8654 loc_exp_insert_dep (var, depval, vars);
8656 if (!loc2)
8658 complete = false;
8659 continue;
8661 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8662 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8663 if (var->var_part[i].cur_loc == lc->loc)
8665 initialized = lc->init;
8666 break;
8668 gcc_assert (lc);
8671 offsets[n_var_parts] = offset;
8672 if (!loc2)
8674 complete = false;
8675 continue;
8677 loc[n_var_parts] = loc2;
8678 mode = GET_MODE (var->var_part[i].cur_loc);
8679 if (mode == VOIDmode && var->onepart)
8680 mode = DECL_MODE (decl);
8681 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8683 /* Attempt to merge adjacent registers or memory. */
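/* Two parts can be merged only if a wider mode exists, the next part
   has the same mode and RTX kind, and it starts exactly where this one
   ends (LAST_LIMIT): in consecutive hard registers for REGs, at
   consecutive addresses for MEMs.  */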
8684 wider_mode = GET_MODE_WIDER_MODE (mode);
8685 for (j = i + 1; j < var->n_var_parts; j++)
8686 if (last_limit <= VAR_PART_OFFSET (var, j))
8687 break;
8688 if (j < var->n_var_parts
8689 && wider_mode != VOIDmode
8690 && var->var_part[j].cur_loc
8691 && mode == GET_MODE (var->var_part[j].cur_loc)
8692 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8693 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8694 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8695 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8697 rtx new_loc = NULL;
8699 if (REG_P (loc[n_var_parts])
8700 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8701 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8702 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8703 == REGNO (loc2))
8705 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8706 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8707 mode, 0);
8708 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8709 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8710 if (new_loc)
8712 if (!REG_P (new_loc)
8713 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8714 new_loc = NULL;
8715 else
8716 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8719 else if (MEM_P (loc[n_var_parts])
8720 && GET_CODE (XEXP (loc2, 0)) == PLUS
8721 && REG_P (XEXP (XEXP (loc2, 0), 0))
8722 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8724 if ((REG_P (XEXP (loc[n_var_parts], 0))
8725 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8726 XEXP (XEXP (loc2, 0), 0))
8727 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8728 == GET_MODE_SIZE (mode))
8729 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8730 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8731 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8732 XEXP (XEXP (loc2, 0), 0))
8733 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8734 + GET_MODE_SIZE (mode)
8735 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8736 new_loc = adjust_address_nv (loc[n_var_parts],
8737 wider_mode, 0);
8740 if (new_loc)
8742 loc[n_var_parts] = new_loc;
8743 mode = wider_mode;
8744 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8745 i = j;
8748 ++n_var_parts;
8750 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8751 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8752 complete = false;
8754 if (! flag_var_tracking_uninit)
8755 initialized = VAR_INIT_STATUS_INITIALIZED;
8757 note_vl = NULL_RTX;
8758 if (!complete)
8759 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8760 else if (n_var_parts == 1)
8762 rtx expr_list;
8764 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8765 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8766 else
8767 expr_list = loc[0];
8769 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8771 else if (n_var_parts)
8773 rtx parallel;
8775 for (i = 0; i < n_var_parts; i++)
8776 loc[i]
8777 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8779 parallel = gen_rtx_PARALLEL (VOIDmode,
8780 gen_rtvec_v (n_var_parts, loc));
8781 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8782 parallel, initialized);
8785 if (where != EMIT_NOTE_BEFORE_INSN)
8787 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8788 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8789 NOTE_DURING_CALL_P (note) = true;
8791 else
8793 /* Make sure that the call-related notes come first. */
8794 while (NEXT_INSN (insn)
8795 && NOTE_P (insn)
8796 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8797 && NOTE_DURING_CALL_P (insn))
8798 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8799 insn = NEXT_INSN (insn);
8800 if (NOTE_P (insn)
8801 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8802 && NOTE_DURING_CALL_P (insn))
8803 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8804 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8805 else
8806 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8808 NOTE_VAR_LOCATION (note) = note_vl;
8810 set_dv_changed (var->dv, false);
8811 gcc_assert (var->in_changed_variables);
8812 var->in_changed_variables = false;
8813 changed_variables->clear_slot (varp);
8815 /* Continue traversing the hash table. */
8816 return 1;
8819 /* While traversing changed_variables, push onto DATA (a stack of RTX
8820 values) entries that aren't user variables. */
8822 static int
8823 var_track_values_to_stack (variable_def **slot,
8824 vec<rtx, va_heap> *changed_values_stack)
8826 variable var = *slot;
8828 if (var->onepart == ONEPART_VALUE)
8829 changed_values_stack->safe_push (dv_as_value (var->dv));
8830 else if (var->onepart == ONEPART_DEXPR)
8831 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8833 return 1;
8836 /* Remove from changed_variables the entry whose DV corresponds to
8837 value or debug_expr VAL. */
8838 static void
8839 remove_value_from_changed_variables (rtx val)
8841 decl_or_value dv = dv_from_rtx (val);
8842 variable_def **slot;
8843 variable var;
8845 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8846 NO_INSERT);
8847 var = *slot;
8848 var->in_changed_variables = false;
8849 changed_variables->clear_slot (slot);
8852 /* If VAL (a value or debug_expr) has backlinks to variables actively
8853 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8854 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8855 have dependencies of their own to notify. */
8857 static void
8858 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8859 vec<rtx, va_heap> *changed_values_stack)
8861 variable_def **slot;
8862 variable var;
8863 loc_exp_dep *led;
8864 decl_or_value dv = dv_from_rtx (val);
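/* The variable backing VAL may still be in CHANGED_VARIABLES, in the
   current hash table, or among the dropped values; try each table in
   turn.  */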
8866 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8867 NO_INSERT);
8868 if (!slot)
8869 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8870 if (!slot)
8871 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8872 NO_INSERT);
8873 var = *slot;
8875 while ((led = VAR_LOC_DEP_LST (var)))
8877 decl_or_value ldv = led->dv;
8878 variable ivar;
8880 /* Deactivate and remove the backlink, as it was "used up". It
8881 makes no sense to attempt to notify the same entity again:
8882 either it will be recomputed and re-register an active
8883 dependency, or it will still have the changed mark. */
8884 if (led->next)
8885 led->next->pprev = led->pprev;
8886 if (led->pprev)
8887 *led->pprev = led->next;
8888 led->next = NULL;
8889 led->pprev = NULL;
8891 if (dv_changed_p (ldv))
8892 continue;
8894 switch (dv_onepart_p (ldv))
8896 case ONEPART_VALUE:
8897 case ONEPART_DEXPR:
8898 set_dv_changed (ldv, true);
8899 changed_values_stack->safe_push (dv_as_rtx (ldv));
8900 break;
8902 case ONEPART_VDECL:
8903 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8904 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8905 variable_was_changed (ivar, NULL);
8906 break;
8908 case NOT_ONEPART:
8909 pool_free (loc_exp_dep_pool, led);
8910 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8911 if (ivar)
8913 int i = ivar->n_var_parts;
8914 while (i--)
8916 rtx loc = ivar->var_part[i].cur_loc;
8918 if (loc && GET_CODE (loc) == MEM
8919 && XEXP (loc, 0) == val)
8921 variable_was_changed (ivar, NULL);
8922 break;
8926 break;
8928 default:
8929 gcc_unreachable ();
8934 /* Take out of changed_variables any entries that don't refer to user
8935 variables. Back-propagate change notifications from values and
8936 debug_exprs to their active dependencies in HTAB or in
8937 CHANGED_VARIABLES. */
8939 static void
8940 process_changed_values (variable_table_type *htab)
8942 int i, n;
8943 rtx val;
8944 auto_vec<rtx, 20> changed_values_stack;
8946 /* Move values from changed_variables to changed_values_stack. */
8947 changed_variables
8948 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8949 (&changed_values_stack);
8951 /* Back-propagate change notifications in values while popping
8952 them from the stack. */
8953 for (n = i = changed_values_stack.length ();
8954 i > 0; i = changed_values_stack.length ())
8956 val = changed_values_stack.pop ();
8957 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8959 /* This condition will hold when visiting each of the entries
8960 originally in changed_variables. We can't remove them
8961 earlier because this could drop the backlinks before we got a
8962 chance to use them. */
8963 if (i == n)
8965 remove_value_from_changed_variables (val);
8966 n--;
8971 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8972 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8973 the notes shall be emitted before or after instruction INSN. */
8975 static void
8976 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8977 shared_hash vars)
8979 emit_note_data data;
8980 variable_table_type *htab = shared_hash_htab (vars);
8982 if (!changed_variables->elements ())
8983 return;
8985 if (MAY_HAVE_DEBUG_INSNS)
8986 process_changed_values (htab);
8988 data.insn = insn;
8989 data.where = where;
8990 data.vars = htab;
8992 changed_variables
8993 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8996 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8997 same variable in hash table DATA or is not there at all. */
8999 static int
9000 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9002 variable old_var, new_var;
9004 old_var = *slot;
9005 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9007 if (!new_var)
9009 /* Variable has disappeared. */
9010 variable empty_var = NULL;
9012 if (old_var->onepart == ONEPART_VALUE
9013 || old_var->onepart == ONEPART_DEXPR)
9015 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9016 if (empty_var)
9018 gcc_checking_assert (!empty_var->in_changed_variables);
9019 if (!VAR_LOC_1PAUX (old_var))
9021 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9022 VAR_LOC_1PAUX (empty_var) = NULL;
9024 else
9025 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9029 if (!empty_var)
9031 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9032 empty_var->dv = old_var->dv;
9033 empty_var->refcount = 0;
9034 empty_var->n_var_parts = 0;
9035 empty_var->onepart = old_var->onepart;
9036 empty_var->in_changed_variables = false;
9039 if (empty_var->onepart)
9041 /* Propagate the auxiliary data to (ultimately)
9042 changed_variables. */
9043 empty_var->var_part[0].loc_chain = NULL;
9044 empty_var->var_part[0].cur_loc = NULL;
9045 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9046 VAR_LOC_1PAUX (old_var) = NULL;
9048 variable_was_changed (empty_var, NULL);
9049 /* Continue traversing the hash table. */
9050 return 1;
9052 /* Update cur_loc and one-part auxiliary data, before new_var goes
9053 through variable_was_changed. */
9054 if (old_var != new_var && new_var->onepart)
9056 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9057 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9058 VAR_LOC_1PAUX (old_var) = NULL;
9059 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9061 if (variable_different_p (old_var, new_var))
9062 variable_was_changed (new_var, NULL);
9064 /* Continue traversing the hash table. */
9065 return 1;
9068 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9069 table DATA. */
9071 static int
9072 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9074 variable old_var, new_var;
9076 new_var = *slot;
9077 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9078 if (!old_var)
9080 int i;
9081 for (i = 0; i < new_var->n_var_parts; i++)
9082 new_var->var_part[i].cur_loc = NULL;
9083 variable_was_changed (new_var, NULL);
9086 /* Continue traversing the hash table. */
9087 return 1;
9090 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9091 NEW_SET. */
9093 static void
9094 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9095 dataflow_set *new_set)
9097 shared_hash_htab (old_set->vars)
9098 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9099 (shared_hash_htab (new_set->vars));
9100 shared_hash_htab (new_set->vars)
9101 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9102 (shared_hash_htab (old_set->vars));
9103 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9106 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9108 static rtx_insn *
9109 next_non_note_insn_var_location (rtx_insn *insn)
9111 while (insn)
9113 insn = NEXT_INSN (insn);
9114 if (insn == 0
9115 || !NOTE_P (insn)
9116 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9117 break;
9120 return insn;
9123 /* Emit the notes for changes of location parts in the basic block BB. */
9125 static void
9126 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9128 unsigned int i;
9129 micro_operation *mo;
9131 dataflow_set_clear (set);
9132 dataflow_set_copy (set, &VTI (bb)->in);
9134 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9136 rtx_insn *insn = mo->insn;
9137 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9139 switch (mo->type)
9141 case MO_CALL:
9142 dataflow_set_clear_at_call (set);
9143 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
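/* The recorded arguments form an EXPR_LIST of CONCATs pairing each
   argument slot with the value it held; expand each value and prune
   the entries that no longer expand to any location.  */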
9145 rtx arguments = mo->u.loc, *p = &arguments;
9146 rtx_note *note;
9147 while (*p)
9149 XEXP (XEXP (*p, 0), 1)
9150 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9151 shared_hash_htab (set->vars));
9152 /* If expansion is successful, keep it in the list. */
9153 if (XEXP (XEXP (*p, 0), 1))
9154 p = &XEXP (*p, 1);
9155 /* Otherwise, if the following item is the data_value for it,
9156 drop it too. */
9157 else if (XEXP (*p, 1)
9158 && REG_P (XEXP (XEXP (*p, 0), 0))
9159 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9160 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9161 0))
9162 && REGNO (XEXP (XEXP (*p, 0), 0))
9163 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9164 0), 0)))
9165 *p = XEXP (XEXP (*p, 1), 1);
9166 /* Just drop this item. */
9167 else
9168 *p = XEXP (*p, 1);
9170 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9171 NOTE_VAR_LOCATION (note) = arguments;
9173 break;
9175 case MO_USE:
9177 rtx loc = mo->u.loc;
9179 if (REG_P (loc))
9180 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9181 else
9182 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9184 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9186 break;
9188 case MO_VAL_LOC:
9190 rtx loc = mo->u.loc;
9191 rtx val, vloc;
9192 tree var;
9194 if (GET_CODE (loc) == CONCAT)
9196 val = XEXP (loc, 0);
9197 vloc = XEXP (loc, 1);
9199 else
9201 val = NULL_RTX;
9202 vloc = loc;
9205 var = PAT_VAR_LOCATION_DECL (vloc);
9207 clobber_variable_part (set, NULL_RTX,
9208 dv_from_decl (var), 0, NULL_RTX);
9209 if (val)
9211 if (VAL_NEEDS_RESOLUTION (loc))
9212 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9213 set_variable_part (set, val, dv_from_decl (var), 0,
9214 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9215 INSERT);
9217 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9218 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9219 dv_from_decl (var), 0,
9220 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9221 INSERT);
9223 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9225 break;
9227 case MO_VAL_USE:
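/* As decoded below, LOC is either (concat VAL VLOC) or
   (concat (concat VAL ULOC) VLOC), with ULOC defaulting to VLOC.  */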
9229 rtx loc = mo->u.loc;
9230 rtx val, vloc, uloc;
9232 vloc = uloc = XEXP (loc, 1);
9233 val = XEXP (loc, 0);
9235 if (GET_CODE (val) == CONCAT)
9237 uloc = XEXP (val, 1);
9238 val = XEXP (val, 0);
9241 if (VAL_NEEDS_RESOLUTION (loc))
9242 val_resolve (set, val, vloc, insn);
9243 else
9244 val_store (set, val, uloc, insn, false);
9246 if (VAL_HOLDS_TRACK_EXPR (loc))
9248 if (GET_CODE (uloc) == REG)
9249 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9250 NULL);
9251 else if (GET_CODE (uloc) == MEM)
9252 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9253 NULL);
9256 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9258 break;
9260 case MO_VAL_SET:
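/* As decoded below, XEXP (LOC, 0) is VAL (possibly a CONCAT carrying
   an alternate destination) and XEXP (LOC, 1) is the SET or the bare
   destination of the store.  */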
9262 rtx loc = mo->u.loc;
9263 rtx val, vloc, uloc;
9264 rtx dstv, srcv;
9266 vloc = loc;
9267 uloc = XEXP (vloc, 1);
9268 val = XEXP (vloc, 0);
9269 vloc = uloc;
9271 if (GET_CODE (uloc) == SET)
9273 dstv = SET_DEST (uloc);
9274 srcv = SET_SRC (uloc);
9276 else
9278 dstv = uloc;
9279 srcv = NULL;
9282 if (GET_CODE (val) == CONCAT)
9284 dstv = vloc = XEXP (val, 1);
9285 val = XEXP (val, 0);
9288 if (GET_CODE (vloc) == SET)
9290 srcv = SET_SRC (vloc);
9292 gcc_assert (val != srcv);
9293 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9295 dstv = vloc = SET_DEST (vloc);
9297 if (VAL_NEEDS_RESOLUTION (loc))
9298 val_resolve (set, val, srcv, insn);
9300 else if (VAL_NEEDS_RESOLUTION (loc))
9302 gcc_assert (GET_CODE (uloc) == SET
9303 && GET_CODE (SET_SRC (uloc)) == REG);
9304 val_resolve (set, val, SET_SRC (uloc), insn);
9307 if (VAL_HOLDS_TRACK_EXPR (loc))
9309 if (VAL_EXPR_IS_CLOBBERED (loc))
9311 if (REG_P (uloc))
9312 var_reg_delete (set, uloc, true);
9313 else if (MEM_P (uloc))
9315 gcc_assert (MEM_P (dstv));
9316 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9317 var_mem_delete (set, dstv, true);
9320 else
9322 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9323 rtx src = NULL, dst = uloc;
9324 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9326 if (GET_CODE (uloc) == SET)
9328 src = SET_SRC (uloc);
9329 dst = SET_DEST (uloc);
9332 if (copied_p)
9334 status = find_src_status (set, src);
9336 src = find_src_set_src (set, src);
9339 if (REG_P (dst))
9340 var_reg_delete_and_set (set, dst, !copied_p,
9341 status, srcv);
9342 else if (MEM_P (dst))
9344 gcc_assert (MEM_P (dstv));
9345 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9346 var_mem_delete_and_set (set, dstv, !copied_p,
9347 status, srcv);
9351 else if (REG_P (uloc))
9352 var_regno_delete (set, REGNO (uloc));
9353 else if (MEM_P (uloc))
9355 gcc_checking_assert (GET_CODE (vloc) == MEM);
9356 gcc_checking_assert (vloc == dstv);
9357 if (vloc != dstv)
9358 clobber_overlapping_mems (set, vloc);
9361 val_store (set, val, dstv, insn, true);
9363 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9364 set->vars);
9366 break;
9368 case MO_SET:
9370 rtx loc = mo->u.loc;
9371 rtx set_src = NULL;
9373 if (GET_CODE (loc) == SET)
9375 set_src = SET_SRC (loc);
9376 loc = SET_DEST (loc);
9379 if (REG_P (loc))
9380 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9381 set_src);
9382 else
9383 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9384 set_src);
9386 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9387 set->vars);
9389 break;
9391 case MO_COPY:
9393 rtx loc = mo->u.loc;
9394 enum var_init_status src_status;
9395 rtx set_src = NULL;
9397 if (GET_CODE (loc) == SET)
9399 set_src = SET_SRC (loc);
9400 loc = SET_DEST (loc);
9403 src_status = find_src_status (set, set_src);
9404 set_src = find_src_set_src (set, set_src);
9406 if (REG_P (loc))
9407 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9408 else
9409 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9411 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9412 set->vars);
9414 break;
9416 case MO_USE_NO_VAR:
9418 rtx loc = mo->u.loc;
9420 if (REG_P (loc))
9421 var_reg_delete (set, loc, false);
9422 else
9423 var_mem_delete (set, loc, false);
9425 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9427 break;
9429 case MO_CLOBBER:
9431 rtx loc = mo->u.loc;
9433 if (REG_P (loc))
9434 var_reg_delete (set, loc, true);
9435 else
9436 var_mem_delete (set, loc, true);
9438 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9439 set->vars);
9441 break;
9443 case MO_ADJUST:
9444 set->stack_adjust += mo->u.adjust;
9445 break;
9450 /* Emit notes for the whole function. */
9452 static void
9453 vt_emit_notes (void)
9455 basic_block bb;
9456 dataflow_set cur;
9458 gcc_assert (!changed_variables->elements ());
9460 /* Free memory occupied by the out hash tables, as they aren't used
9461 anymore. */
9462 FOR_EACH_BB_FN (bb, cfun)
9463 dataflow_set_clear (&VTI (bb)->out);
9465 /* Enable emitting notes by functions (mainly by set_variable_part and
9466 delete_variable_part). */
9467 emit_notes = true;
9469 if (MAY_HAVE_DEBUG_INSNS)
9471 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9472 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9473 sizeof (loc_exp_dep), 64);
9476 dataflow_set_init (&cur);
9478 FOR_EACH_BB_FN (bb, cfun)
9480 /* Emit the notes for changes of variable locations between two
9481 subsequent basic blocks. */
9482 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9484 if (MAY_HAVE_DEBUG_INSNS)
9485 local_get_addr_cache = new hash_map<rtx, rtx>;
9487 /* Emit the notes for the changes in the basic block itself. */
9488 emit_notes_in_bb (bb, &cur);
9490 if (MAY_HAVE_DEBUG_INSNS)
9491 delete local_get_addr_cache;
9492 local_get_addr_cache = NULL;
9494 /* Free memory occupied by the in hash table; we won't need it
9495 again. */
9496 dataflow_set_clear (&VTI (bb)->in);
9498 #ifdef ENABLE_CHECKING
9499 shared_hash_htab (cur.vars)
9500 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9501 (shared_hash_htab (empty_shared_hash));
9502 #endif
9503 dataflow_set_destroy (&cur);
9505 if (MAY_HAVE_DEBUG_INSNS)
9506 delete dropped_values;
9507 dropped_values = NULL;
9509 emit_notes = false;
9512 /* If there is a declaration and offset associated with register/memory RTL
9513 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9515 static bool
9516 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9518 if (REG_P (rtl))
9520 if (REG_ATTRS (rtl))
9522 *declp = REG_EXPR (rtl);
9523 *offsetp = REG_OFFSET (rtl);
9524 return true;
9527 else if (GET_CODE (rtl) == PARALLEL)
9529 tree decl = NULL_TREE;
9530 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9531 int len = XVECLEN (rtl, 0), i;
9533 for (i = 0; i < len; i++)
9535 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9536 if (!REG_P (reg) || !REG_ATTRS (reg))
9537 break;
9538 if (!decl)
9539 decl = REG_EXPR (reg);
9540 if (REG_EXPR (reg) != decl)
9541 break;
9542 if (REG_OFFSET (reg) < offset)
9543 offset = REG_OFFSET (reg);
9546 if (i == len)
9548 *declp = decl;
9549 *offsetp = offset;
9550 return true;
9553 else if (MEM_P (rtl))
9555 if (MEM_ATTRS (rtl))
9557 *declp = MEM_EXPR (rtl);
9558 *offsetp = INT_MEM_OFFSET (rtl);
9559 return true;
9562 return false;
9565 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9566 of VAL. */
9568 static void
9569 record_entry_value (cselib_val *val, rtx rtl)
9571 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9573 ENTRY_VALUE_EXP (ev) = rtl;
9575 cselib_add_permanent_equiv (val, ev, get_insns ());
9578 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9580 static void
9581 vt_add_function_parameter (tree parm)
9583 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9584 rtx incoming = DECL_INCOMING_RTL (parm);
9585 tree decl;
9586 machine_mode mode;
9587 HOST_WIDE_INT offset;
9588 dataflow_set *out;
9589 decl_or_value dv;
9591 if (TREE_CODE (parm) != PARM_DECL)
9592 return;
9594 if (!decl_rtl || !incoming)
9595 return;
9597 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9598 return;
9600 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9601 rewrite the incoming location of parameters passed on the stack
9602 into MEMs based on the argument pointer, so that incoming doesn't
9603 depend on a pseudo. */
9604 if (MEM_P (incoming)
9605 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9606 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9607 && XEXP (XEXP (incoming, 0), 0)
9608 == crtl->args.internal_arg_pointer
9609 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9611 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9612 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9613 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9614 incoming
9615 = replace_equiv_address_nv (incoming,
9616 plus_constant (Pmode,
9617 arg_pointer_rtx, off));
9620 #ifdef HAVE_window_save
9621 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9622 If the target machine has an explicit window save instruction, the
9623 actual entry value is the corresponding OUTGOING_REGNO instead. */
9624 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9626 if (REG_P (incoming)
9627 && HARD_REGISTER_P (incoming)
9628 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9630 parm_reg_t p;
9631 p.incoming = incoming;
9632 incoming
9633 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9634 OUTGOING_REGNO (REGNO (incoming)), 0);
9635 p.outgoing = incoming;
9636 vec_safe_push (windowed_parm_regs, p);
9638 else if (GET_CODE (incoming) == PARALLEL)
9640 rtx outgoing
9641 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9642 int i;
9644 for (i = 0; i < XVECLEN (incoming, 0); i++)
9646 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9647 parm_reg_t p;
9648 p.incoming = reg;
9649 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9650 OUTGOING_REGNO (REGNO (reg)), 0);
9651 p.outgoing = reg;
9652 XVECEXP (outgoing, 0, i)
9653 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9654 XEXP (XVECEXP (incoming, 0, i), 1));
9655 vec_safe_push (windowed_parm_regs, p);
9658 incoming = outgoing;
9660 else if (MEM_P (incoming)
9661 && REG_P (XEXP (incoming, 0))
9662 && HARD_REGISTER_P (XEXP (incoming, 0)))
9664 rtx reg = XEXP (incoming, 0);
9665 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9667 parm_reg_t p;
9668 p.incoming = reg;
9669 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9670 p.outgoing = reg;
9671 vec_safe_push (windowed_parm_regs, p);
9672 incoming = replace_equiv_address_nv (incoming, reg);
9676 #endif
9678 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9680 if (MEM_P (incoming))
9682 /* This means argument is passed by invisible reference. */
9683 offset = 0;
9684 decl = parm;
9686 else
9688 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9689 return;
9690 offset += byte_lowpart_offset (GET_MODE (incoming),
9691 GET_MODE (decl_rtl));
9695 if (!decl)
9696 return;
9698 if (parm != decl)
9700 /* If that DECL_RTL wasn't a pseudo that got spilled to
9701 memory, bail out. Otherwise, the spill slot sharing code
9702 will force the memory to reference spill_slot_decl (%sfp),
9703 so we don't match above. That's ok, the pseudo must have
9704 referenced the entire parameter, so just reset OFFSET. */
9705 if (decl != get_spill_slot_decl (false))
9706 return;
9707 offset = 0;
9710 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9711 return;
9713 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9715 dv = dv_from_decl (parm);
9717 if (target_for_debug_bind (parm)
9718 /* We can't deal with these right now, because this kind of
9719 variable is single-part. ??? We could handle parallels
9720 that describe multiple locations for the same single
9721 value, but ATM we don't. */
9722 && GET_CODE (incoming) != PARALLEL)
9724 cselib_val *val;
9725 rtx lowpart;
9727 /* ??? We shouldn't ever hit this, but it may happen because
9728 arguments passed by invisible reference aren't dealt with
9729 above: incoming-rtl will have Pmode rather than the
9730 expected mode for the type. */
9731 if (offset)
9732 return;
9734 lowpart = var_lowpart (mode, incoming);
9735 if (!lowpart)
9736 return;
9738 val = cselib_lookup_from_insn (lowpart, mode, true,
9739 VOIDmode, get_insns ());
9741 /* ??? Float-typed values in memory are not handled by
9742 cselib. */
9743 if (val)
9745 preserve_value (val);
9746 set_variable_part (out, val->val_rtx, dv, offset,
9747 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9748 dv = dv_from_value (val->val_rtx);
9751 if (MEM_P (incoming))
9753 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9754 VOIDmode, get_insns ());
9755 if (val)
9757 preserve_value (val);
9758 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9763 if (REG_P (incoming))
9765 incoming = var_lowpart (mode, incoming);
9766 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9767 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9768 incoming);
9769 set_variable_part (out, incoming, dv, offset,
9770 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9771 if (dv_is_value_p (dv))
9773 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9774 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9775 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9777 machine_mode indmode
9778 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9779 rtx mem = gen_rtx_MEM (indmode, incoming);
9780 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9781 VOIDmode,
9782 get_insns ());
9783 if (val)
9785 preserve_value (val);
9786 record_entry_value (val, mem);
9787 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9788 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9793 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9795 int i;
9797 for (i = 0; i < XVECLEN (incoming, 0); i++)
9799 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9800 offset = REG_OFFSET (reg);
9801 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9802 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9803 set_variable_part (out, reg, dv, offset,
9804 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9807 else if (MEM_P (incoming))
9809 incoming = var_lowpart (mode, incoming);
9810 set_variable_part (out, incoming, dv, offset,
9811 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9815 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9817 static void
9818 vt_add_function_parameters (void)
9820 tree parm;
9822 for (parm = DECL_ARGUMENTS (current_function_decl);
9823 parm; parm = DECL_CHAIN (parm))
9824 if (!POINTER_BOUNDS_P (parm))
9825 vt_add_function_parameter (parm);
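/* If the result is returned through a hidden reference, the value
   expression of DECL_RESULT refers to an artificial PARM_DECL holding
   that pointer; track it like any other parameter.  */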
9827 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9829 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9831 if (TREE_CODE (vexpr) == INDIRECT_REF)
9832 vexpr = TREE_OPERAND (vexpr, 0);
9834 if (TREE_CODE (vexpr) == PARM_DECL
9835 && DECL_ARTIFICIAL (vexpr)
9836 && !DECL_IGNORED_P (vexpr)
9837 && DECL_NAMELESS (vexpr))
9838 vt_add_function_parameter (vexpr);
9842 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9843 ensure it isn't flushed during cselib_reset_table.
9844 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9845 has been eliminated. */
9847 static void
9848 vt_init_cfa_base (void)
9850 cselib_val *val;
9852 #ifdef FRAME_POINTER_CFA_OFFSET
9853 cfa_base_rtx = frame_pointer_rtx;
9854 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9855 #else
9856 cfa_base_rtx = arg_pointer_rtx;
9857 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9858 #endif
9859 if (cfa_base_rtx == hard_frame_pointer_rtx
9860 || !fixed_regs[REGNO (cfa_base_rtx)])
9862 cfa_base_rtx = NULL_RTX;
9863 return;
9865 if (!MAY_HAVE_DEBUG_INSNS)
9866 return;
9868 /* Tell alias analysis that cfa_base_rtx should share
9869 find_base_term value with stack pointer or hard frame pointer. */
9870 if (!frame_pointer_needed)
9871 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9872 else if (!crtl->stack_realign_tried)
9873 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9875 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9876 VOIDmode, get_insns ());
9877 preserve_value (val);
9878 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9881 /* Allocate and initialize the data structures for variable tracking
9882 and parse the RTL to get the micro operations. */
9884 static bool
9885 vt_initialize (void)
9887 basic_block bb;
9888 HOST_WIDE_INT fp_cfa_offset = -1;
9890 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9892 attrs_pool = create_alloc_pool ("attrs_def pool",
9893 sizeof (struct attrs_def), 1024);
9894 var_pool = create_alloc_pool ("variable_def pool",
9895 sizeof (struct variable_def)
9896 + (MAX_VAR_PARTS - 1)
9897 * sizeof (((variable)NULL)->var_part[0]), 64);
9898 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9899 sizeof (struct location_chain_def),
9900 1024);
9901 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9902 sizeof (struct shared_hash_def), 256);
9903 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9904 empty_shared_hash->refcount = 1;
9905 empty_shared_hash->htab = new variable_table_type (1);
9906 changed_variables = new variable_table_type (10);
9908 /* Init the IN and OUT sets. */
9909 FOR_ALL_BB_FN (bb, cfun)
9911 VTI (bb)->visited = false;
9912 VTI (bb)->flooded = false;
9913 dataflow_set_init (&VTI (bb)->in);
9914 dataflow_set_init (&VTI (bb)->out);
9915 VTI (bb)->permp = NULL;
9918 if (MAY_HAVE_DEBUG_INSNS)
9920 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9921 scratch_regs = BITMAP_ALLOC (NULL);
9922 valvar_pool = create_alloc_pool ("small variable_def pool",
9923 sizeof (struct variable_def), 256);
9924 preserved_values.create (256);
9925 global_get_addr_cache = new hash_map<rtx, rtx>;
9927 else
9929 scratch_regs = NULL;
9930 valvar_pool = NULL;
9931 global_get_addr_cache = NULL;
9934 if (MAY_HAVE_DEBUG_INSNS)
9936 rtx reg, expr;
9937 int ofst;
9938 cselib_val *val;
9940 #ifdef FRAME_POINTER_CFA_OFFSET
9941 reg = frame_pointer_rtx;
9942 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9943 #else
9944 reg = arg_pointer_rtx;
9945 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9946 #endif
9948 ofst -= INCOMING_FRAME_SP_OFFSET;
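/* Record as permanent equivalences that, at the entry point, the CFA
   base register equals the incoming stack pointer minus OFST and,
   when OFST is nonzero, vice versa, so that cselib can canonicalize
   addresses based on either register.  */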
9950 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9951 VOIDmode, get_insns ());
9952 preserve_value (val);
9953 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9954 cselib_preserve_cfa_base_value (val, REGNO (reg));
9955 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9956 stack_pointer_rtx, -ofst);
9957 cselib_add_permanent_equiv (val, expr, get_insns ());
9959 if (ofst)
9961 val = cselib_lookup_from_insn (stack_pointer_rtx,
9962 GET_MODE (stack_pointer_rtx), 1,
9963 VOIDmode, get_insns ());
9964 preserve_value (val);
9965 expr = plus_constant (GET_MODE (reg), reg, ofst);
9966 cselib_add_permanent_equiv (val, expr, get_insns ());
9970 /* In order to factor out the adjustments made to the stack pointer or to
9971 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9972 instead of individual location lists, we're going to rewrite MEMs based
9973 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9974 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9975 resp. arg_pointer_rtx. We can do this either when there is no frame
9976 pointer in the function and stack adjustments are consistent for all
9977 basic blocks or when there is a frame pointer and no stack realignment.
9978 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9979 has been eliminated. */
9980 if (!frame_pointer_needed)
9982 rtx reg, elim;
9984 if (!vt_stack_adjustments ())
9985 return false;
9987 #ifdef FRAME_POINTER_CFA_OFFSET
9988 reg = frame_pointer_rtx;
9989 #else
9990 reg = arg_pointer_rtx;
9991 #endif
9992 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9993 if (elim != reg)
9995 if (GET_CODE (elim) == PLUS)
9996 elim = XEXP (elim, 0);
9997 if (elim == stack_pointer_rtx)
9998 vt_init_cfa_base ();
10001 else if (!crtl->stack_realign_tried)
10003 rtx reg, elim;
10005 #ifdef FRAME_POINTER_CFA_OFFSET
10006 reg = frame_pointer_rtx;
10007 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10008 #else
10009 reg = arg_pointer_rtx;
10010 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10011 #endif
10012 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10013 if (elim != reg)
10015 if (GET_CODE (elim) == PLUS)
10017 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10018 elim = XEXP (elim, 0);
10020 if (elim != hard_frame_pointer_rtx)
10021 fp_cfa_offset = -1;
10023 else
10024 fp_cfa_offset = -1;
10027 /* If the stack is realigned and a DRAP register is used, we're going to
10028 rewrite MEMs based on it representing incoming locations of parameters
10029 passed on the stack into MEMs based on the argument pointer. Although
10030 we aren't going to rewrite other MEMs, we still need to initialize the
10031 virtual CFA pointer in order to ensure that the argument pointer will
10032 be seen as a constant throughout the function.
10034 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10035 else if (stack_realign_drap)
10037 rtx reg, elim;
10039 #ifdef FRAME_POINTER_CFA_OFFSET
10040 reg = frame_pointer_rtx;
10041 #else
10042 reg = arg_pointer_rtx;
10043 #endif
10044 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10045 if (elim != reg)
10047 if (GET_CODE (elim) == PLUS)
10048 elim = XEXP (elim, 0);
10049 if (elim == hard_frame_pointer_rtx)
10050 vt_init_cfa_base ();
10054 hard_frame_pointer_adjustment = -1;
10056 vt_add_function_parameters ();
10058 FOR_EACH_BB_FN (bb, cfun)
10060 rtx_insn *insn;
10061 HOST_WIDE_INT pre, post = 0;
10062 basic_block first_bb, last_bb;
10064 if (MAY_HAVE_DEBUG_INSNS)
10066 cselib_record_sets_hook = add_with_sets;
10067 if (dump_file && (dump_flags & TDF_DETAILS))
10068 fprintf (dump_file, "first value: %i\n",
10069 cselib_get_next_uid ());
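/* Extend the scan over a maximal group of basic blocks chained by
   single-predecessor fallthru edges, so that the cselib state built
   up here survives across the whole group.  */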
10072 first_bb = bb;
10073 for (;;)
10075 edge e;
10076 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10077 || ! single_pred_p (bb->next_bb))
10078 break;
10079 e = find_edge (bb, bb->next_bb);
10080 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10081 break;
10082 bb = bb->next_bb;
10084 last_bb = bb;
10086 /* Add the micro-operations to the vector. */
10087 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10089 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10090 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10091 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10092 insn = NEXT_INSN (insn))
10094 if (INSN_P (insn))
10096 if (!frame_pointer_needed)
10098 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10099 if (pre)
10101 micro_operation mo;
10102 mo.type = MO_ADJUST;
10103 mo.u.adjust = pre;
10104 mo.insn = insn;
10105 if (dump_file && (dump_flags & TDF_DETAILS))
10106 log_op_type (PATTERN (insn), bb, insn,
10107 MO_ADJUST, dump_file);
10108 VTI (bb)->mos.safe_push (mo);
10109 VTI (bb)->out.stack_adjust += pre;
10113 cselib_hook_called = false;
10114 adjust_insn (bb, insn);
10115 if (MAY_HAVE_DEBUG_INSNS)
10117 if (CALL_P (insn))
10118 prepare_call_arguments (bb, insn);
10119 cselib_process_insn (insn);
10120 if (dump_file && (dump_flags & TDF_DETAILS))
10122 print_rtl_single (dump_file, insn);
10123 dump_cselib_table (dump_file);
10126 if (!cselib_hook_called)
10127 add_with_sets (insn, 0, 0);
10128 cancel_changes (0);
10130 if (!frame_pointer_needed && post)
10132 micro_operation mo;
10133 mo.type = MO_ADJUST;
10134 mo.u.adjust = post;
10135 mo.insn = insn;
10136 if (dump_file && (dump_flags & TDF_DETAILS))
10137 log_op_type (PATTERN (insn), bb, insn,
10138 MO_ADJUST, dump_file);
10139 VTI (bb)->mos.safe_push (mo);
10140 VTI (bb)->out.stack_adjust += post;
10143 if (fp_cfa_offset != -1
10144 && hard_frame_pointer_adjustment == -1
10145 && fp_setter_insn (insn))
10147 vt_init_cfa_base ();
10148 hard_frame_pointer_adjustment = fp_cfa_offset;
10149 /* Disassociate sp from fp now. */
10150 if (MAY_HAVE_DEBUG_INSNS)
10152 cselib_val *v;
10153 cselib_invalidate_rtx (stack_pointer_rtx);
10154 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10155 VOIDmode);
10156 if (v && !cselib_preserved_value_p (v))
10158 cselib_set_value_sp_based (v);
10159 preserve_value (v);
10165 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10168 bb = last_bb;
10170 if (MAY_HAVE_DEBUG_INSNS)
10172 cselib_preserve_only_values ();
10173 cselib_reset_table (cselib_get_next_uid ());
10174 cselib_record_sets_hook = NULL;
10178 hard_frame_pointer_adjustment = -1;
10179 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10180 cfa_base_rtx = NULL_RTX;
10181 return true;
10184 /* This is *not* reset after each function. It gives each
10185 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10186 a unique label number. */
10188 static int debug_label_num = 1;
10190 /* Get rid of all debug insns from the insn stream. */
10192 static void
10193 delete_debug_insns (void)
10195 basic_block bb;
10196 rtx_insn *insn, *next;
10198 if (!MAY_HAVE_DEBUG_INSNS)
10199 return;
10201 FOR_EACH_BB_FN (bb, cfun)
10203 FOR_BB_INSNS_SAFE (bb, insn, next)
10204 if (DEBUG_INSN_P (insn))
10206 tree decl = INSN_VAR_LOCATION_DECL (insn);
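/* Bindings of named labels are not simply deleted: they become
   NOTE_INSN_DELETED_DEBUG_LABEL notes, so the label name remains
   available for later debug output.  */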
10207 if (TREE_CODE (decl) == LABEL_DECL
10208 && DECL_NAME (decl)
10209 && !DECL_RTL_SET_P (decl))
10211 PUT_CODE (insn, NOTE);
10212 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10213 NOTE_DELETED_LABEL_NAME (insn)
10214 = IDENTIFIER_POINTER (DECL_NAME (decl));
10215 SET_DECL_RTL (decl, insn);
10216 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10218 else
10219 delete_insn (insn);
10224 /* Run a fast, BB-local-only version of var tracking, to take care of
10225 information that we don't do global analysis on, so that not all
10226 information is lost. If SKIPPED holds, we're skipping the global
10227 pass entirely, so we should try to use information it would have
10228 handled as well. */
10230 static void
10231 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10233 /* ??? Just skip it all for now. */
10234 delete_debug_insns ();
10237 /* Free the data structures needed for variable tracking. */
10239 static void
10240 vt_finalize (void)
10242 basic_block bb;
10244 FOR_EACH_BB_FN (bb, cfun)
10246 VTI (bb)->mos.release ();
10249 FOR_ALL_BB_FN (bb, cfun)
10251 dataflow_set_destroy (&VTI (bb)->in);
10252 dataflow_set_destroy (&VTI (bb)->out);
10253 if (VTI (bb)->permp)
10255 dataflow_set_destroy (VTI (bb)->permp);
10256 XDELETE (VTI (bb)->permp);
10259 free_aux_for_blocks ();
10260 delete empty_shared_hash->htab;
10261 empty_shared_hash->htab = NULL;
10262 delete changed_variables;
10263 changed_variables = NULL;
10264 free_alloc_pool (attrs_pool);
10265 free_alloc_pool (var_pool);
10266 free_alloc_pool (loc_chain_pool);
10267 free_alloc_pool (shared_hash_pool);
10269 if (MAY_HAVE_DEBUG_INSNS)
10271 if (global_get_addr_cache)
10272 delete global_get_addr_cache;
10273 global_get_addr_cache = NULL;
10274 if (loc_exp_dep_pool)
10275 free_alloc_pool (loc_exp_dep_pool);
10276 loc_exp_dep_pool = NULL;
10277 free_alloc_pool (valvar_pool);
10278 preserved_values.release ();
10279 cselib_finish ();
10280 BITMAP_FREE (scratch_regs);
10281 scratch_regs = NULL;
10284 #ifdef HAVE_window_save
10285 vec_free (windowed_parm_regs);
10286 #endif
10288 if (vui_vec)
10289 XDELETEVEC (vui_vec);
10290 vui_vec = NULL;
10291 vui_allocated = 0;
10294 /* The entry point to variable tracking pass. */
10296 static inline unsigned int
10297 variable_tracking_main_1 (void)
10299 bool success;
10301 if (flag_var_tracking_assignments < 0)
10303 delete_debug_insns ();
10304 return 0;
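/* Heuristically give up on very large, densely connected CFGs, where
   the iterative dataflow propagation would be too expensive.  */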
10307 if (n_basic_blocks_for_fn (cfun) > 500
10308 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10310 vt_debug_insns_local (true);
10311 return 0;
10314 mark_dfs_back_edges ();
10315 if (!vt_initialize ())
10317 vt_finalize ();
10318 vt_debug_insns_local (true);
10319 return 0;
10322 success = vt_find_locations ();
10324 if (!success && flag_var_tracking_assignments > 0)
10326 vt_finalize ();
10328 delete_debug_insns ();
10330 /* This is later restored by our caller. */
10331 flag_var_tracking_assignments = 0;
10333 success = vt_initialize ();
10334 gcc_assert (success);
10336 success = vt_find_locations ();
10339 if (!success)
10341 vt_finalize ();
10342 vt_debug_insns_local (false);
10343 return 0;
10346 if (dump_file && (dump_flags & TDF_DETAILS))
10348 dump_dataflow_sets ();
10349 dump_reg_info (dump_file);
10350 dump_flow_info (dump_file, dump_flags);
10353 timevar_push (TV_VAR_TRACKING_EMIT);
10354 vt_emit_notes ();
10355 timevar_pop (TV_VAR_TRACKING_EMIT);
10357 vt_finalize ();
10358 vt_debug_insns_local (false);
10359 return 0;
10362 unsigned int
10363 variable_tracking_main (void)
10365 unsigned int ret;
10366 int save = flag_var_tracking_assignments;
10368 ret = variable_tracking_main_1 ();
10370 flag_var_tracking_assignments = save;
10372 return ret;
10375 namespace {
10377 const pass_data pass_data_variable_tracking =
10379 RTL_PASS, /* type */
10380 "vartrack", /* name */
10381 OPTGROUP_NONE, /* optinfo_flags */
10382 TV_VAR_TRACKING, /* tv_id */
10383 0, /* properties_required */
10384 0, /* properties_provided */
10385 0, /* properties_destroyed */
10386 0, /* todo_flags_start */
10387 0, /* todo_flags_finish */
10390 class pass_variable_tracking : public rtl_opt_pass
10392 public:
10393 pass_variable_tracking (gcc::context *ctxt)
10394 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10397 /* opt_pass methods: */
10398 virtual bool gate (function *)
10400 return (flag_var_tracking && !targetm.delay_vartrack);
10403 virtual unsigned int execute (function *)
10405 return variable_tracking_main ();
10408 }; // class pass_variable_tracking
10410 } // anon namespace
10412 rtl_opt_pass *
10413 make_pass_variable_tracking (gcc::context *ctxt)
10415 return new pass_variable_tracking (ctxt);