/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
     < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list.  The linked list is a
   list of variable parts stored in the register, i.e. it is a list of
   triplets (reg, decl, offset) where decl is REG_EXPR (reg) and offset is
   REG_OFFSET (reg).  The linked list is used for efficiently deleting the
   appropriate variable parts when we set or clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so it is a good data structure here.
   For example in the following code, the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such note describes
   the location of one variable at the point in the instruction stream where
   the note is.  There is no need to emit a note for each variable before each
   instruction, we only emit these notes where the location of a variable
   changes (this means that we also emit notes for changes between the OUT
   set of the previous block and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).

*/
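/* As an illustrative sketch only -- simplified from the real driver,
   vt_find_locations, which visits blocks in a worklist order -- the
   confluence and transfer steps described above amount to a classic
   forward dataflow iteration over helpers defined later in this file:  */
#if 0
static void
vt_dataflow_sketch (void)
{
  bool changed = true;
  while (changed)
    {
      basic_block bb;
      changed = false;
      FOR_EACH_BB_FN (bb, cfun)
	{
	  edge e;
	  edge_iterator ei;
	  /* IN[BB] is the union of OUT[P] over all predecessors P.  */
	  dataflow_set_clear (&VTI (bb)->in);
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
	  /* OUT[BB] is IN[BB] transformed by BB's micro operations;
	     compute_bb_dataflow returns whether OUT[BB] changed.  */
	  if (compute_bb_dataflow (bb))
	    changed = true;
	}
    }
}
#endif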
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "hash-map.h"
#include "hash-table.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "fibheap.h"
#include "regs.h"
#include "expr.h"
#include "tree-pass.h"
#include "bitmap.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "tm_p.h"
#include "alias.h"
#include "rtl-iter.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};
static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
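/* A minimal usage sketch (hypothetical, compiled out): a decl_or_value
   carries either a decl or a VALUE rtx through the same opaque pointer,
   discriminated by dv_is_decl_p / dv_is_value_p; dv_from_decl and
   dv_from_value are defined further below.  */
#if 0
static void
dv_roundtrip_sketch (tree decl, rtx value)
{
  decl_or_value dv1 = dv_from_decl (decl);
  decl_or_value dv2 = dv_from_value (value);
  gcc_checking_assert (dv_is_decl_p (dv1) && dv_as_decl (dv1) == decl);
  gcc_checking_assert (dv_is_value_p (dv2) && dv_as_value (dv2) == value);
}
#endif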
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16
/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
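/* Because the macro evaluates MEM twice, callers must not pass an
   expression with side effects; e.g. (with a hypothetical helper)
   INT_MEM_OFFSET (pop_next_mem ()) would pop twice.  */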
#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
			      ? VAR_LOC_1PAUX (var)->backlinks	  \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		  \
			      ? &VAR_LOC_1PAUX (var)->deps	  \
			      : NULL)
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher
{
  typedef variable_def value_type;
  typedef void compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const value_type *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const value_type *v, const compare_type *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (value_type *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amount of stack adjustment it contains,
   both PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
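/* A worked example of the sign convention: for
   (set (reg sp) (plus (reg sp) (const_int -16))), CODE is PLUS, so
   *post -= -16, i.e. *post += 16.  On a downward-growing stack,
   allocating stack space is thus recorded as a positive adjustment.  */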
/* Given an INSN, calculate the amount of stack adjustment it contains,
   both PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx resp. frame_pointer_rtx, if stack_pointer_rtx resp.
   hard_frame_pointer_rtx is being mapped to it, and the offset for it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
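/* An illustrative example (hypothetical operands): given WMODE == DImode
   and MODE == SImode, (plus:DI (reg:DI R) (const_int 8)) becomes
   (plus:SI (subreg:SI (reg:DI R) 0) (const_int 8)) -- the operation is
   redone on the lowparts in the narrower mode (the subreg byte offset
   depends on endianness).  use_narrower_mode_test must have accepted
   the expression first; among other things it checks that no involved
   register is tracked by cselib in the wider mode and that the lowpart
   subreg of each register is valid.  */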
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
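/* An illustrative case (not from any particular target): an insn
   (set (mem:SI (pre_dec (reg sp))) (reg:SI R)) is rewritten so that the
   use reads (set (mem:SI (plus (reg sp) (const_int -4))) (reg:SI R)),
   the size coming from the memory's mode, and the side effect
   (set (reg sp) (plus (reg sp) (const_int -4))) is appended as an
   extra set in a PARALLEL with the original pattern.  */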
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}
/* Return the member of LIST matching the pair of DV and OFFSET, or
   NULL if there is none.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      pool_free (shared_hash_pool, vars);
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}
/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && TREE_CODE (decl) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}
/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}
static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
	{
	  location_chain nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}
1892 /* Delete current content of register LOC in dataflow set SET and set
1893 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1894 MODIFY is true, any other live copies of the same variable part are
1895 also deleted from the dataflow set, otherwise the variable part is
1896 assumed to be copied from another location holding the same
1897 part. */
1899 static void
1900 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1901 enum var_init_status initialized, rtx set_src)
1903 tree decl = REG_EXPR (loc);
1904 HOST_WIDE_INT offset = REG_OFFSET (loc);
1905 attrs node, next;
1906 attrs *nextp;
1908 decl = var_debug_decl (decl);
1910 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1911 initialized = get_init_value (set, loc, dv_from_decl (decl));
1913 nextp = &set->regs[REGNO (loc)];
1914 for (node = *nextp; node; node = next)
1916 next = node->next;
1917 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1919 delete_variable_part (set, node->loc, node->dv, node->offset);
1920 pool_free (attrs_pool, node);
1921 *nextp = next;
1923 else
1925 node->loc = loc;
1926 nextp = &node->next;
1929 if (modify)
1930 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1931 var_reg_set (set, loc, initialized, set_src);
1934 /* Delete the association of register LOC in dataflow set SET with any
1935 variables that aren't onepart. If CLOBBER is true, also delete any
1936 other live copies of the same variable part, and delete the
1937 association with onepart dvs too. */
1939 static void
1940 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1942 attrs *nextp = &set->regs[REGNO (loc)];
1943 attrs node, next;
1945 if (clobber)
1947 tree decl = REG_EXPR (loc);
1948 HOST_WIDE_INT offset = REG_OFFSET (loc);
1950 decl = var_debug_decl (decl);
1952 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1955 for (node = *nextp; node; node = next)
1957 next = node->next;
1958 if (clobber || !dv_onepart_p (node->dv))
1960 delete_variable_part (set, node->loc, node->dv, node->offset);
1961 pool_free (attrs_pool, node);
1962 *nextp = next;
1964 else
1965 nextp = &node->next;
1969 /* Delete content of register with number REGNO in dataflow set SET. */
1971 static void
1972 var_regno_delete (dataflow_set *set, int regno)
1974 attrs *reg = &set->regs[regno];
1975 attrs node, next;
1977 for (node = *reg; node; node = next)
1979 next = node->next;
1980 delete_variable_part (set, node->loc, node->dv, node->offset);
1981 pool_free (attrs_pool, node);
1983 *reg = NULL;
1986 /* Return true if I is the negated value of a power of two. */
1987 static bool
1988 negative_power_of_two_p (HOST_WIDE_INT i)
1990 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1991 return x == (x & -x);
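/* Editorial sketch (standalone, not part of the original sources): the
   test above relies on X & -X isolating the lowest set bit of X, so a
   nonzero unsigned X is a power of two exactly when X == (X & -X);
   like the original, zero passes trivially.  The name below is
   hypothetical.  */
#if 0
static int
neg_pow2_demo (long i)
{
  unsigned long x = -(unsigned long) i;
  return x == (x & -x);
}
/* neg_pow2_demo (-16): x = 16 = 0b10000, 16 & -16 == 16, so true.
   neg_pow2_demo (-12): x = 12 = 0b01100, 12 & -12 == 4, so false.  */
#endif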
1994 /* Strip constant offsets and alignments off of LOC. Return the base
1995 expression. */
1997 static rtx
1998 vt_get_canonicalize_base (rtx loc)
2000 while ((GET_CODE (loc) == PLUS
2001 || GET_CODE (loc) == AND)
2002 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2003 && (GET_CODE (loc) != AND
2004 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2005 loc = XEXP (loc, 0);
2007 return loc;
2010 /* This caches canonicalized addresses for VALUEs, computed using
2011 information in the global cselib table. */
2012 static hash_map<rtx, rtx> *global_get_addr_cache;
2014 /* This caches canonicalized addresses for VALUEs, computed using
2015 information from the global cache and information pertaining to a
2016 basic block being analyzed. */
2017 static hash_map<rtx, rtx> *local_get_addr_cache;
2019 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2021 /* Return the canonical address for LOC, which must be a VALUE, using a
2022 cached global equivalence or computing it and storing it in the
2023 global cache. */
2025 static rtx
2026 get_addr_from_global_cache (rtx const loc)
2028 rtx x;
2030 gcc_checking_assert (GET_CODE (loc) == VALUE);
2032 bool existed;
2033 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2034 if (existed)
2035 return *slot;
2037 x = canon_rtx (get_addr (loc));
2039 /* Tentative, avoiding infinite recursion. */
2040 *slot = x;
2042 if (x != loc)
2044 rtx nx = vt_canonicalize_addr (NULL, x);
2045 if (nx != x)
2047 /* The table may have moved during recursion, recompute
2048 SLOT. */
2049 *global_get_addr_cache->get (loc) = x = nx;
2053 return x;
2056 /* Return the canonical address for LOC, which must be a VALUE, using a
2057 cached local equivalence or computing it and storing it in the
2058 local cache. */
2060 static rtx
2061 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2063 rtx x;
2064 decl_or_value dv;
2065 variable var;
2066 location_chain l;
2068 gcc_checking_assert (GET_CODE (loc) == VALUE);
2070 bool existed;
2071 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2072 if (existed)
2073 return *slot;
2075 x = get_addr_from_global_cache (loc);
2077 /* Tentative, avoiding infinite recursion. */
2078 *slot = x;
2080 /* Recurse to cache the local expansion of X, or if we need to search
2081 for a VALUE in the expansion. */
2082 if (x != loc)
2084 rtx nx = vt_canonicalize_addr (set, x);
2085 if (nx != x)
2087 slot = local_get_addr_cache->get (loc);
2088 *slot = x = nx;
2090 return x;
2093 dv = dv_from_rtx (x);
2094 var = shared_hash_find (set->vars, dv);
2095 if (!var)
2096 return x;
2098 /* Look for an improved equivalent expression. */
2099 for (l = var->var_part[0].loc_chain; l; l = l->next)
2101 rtx base = vt_get_canonicalize_base (l->loc);
2102 if (GET_CODE (base) == VALUE
2103 && canon_value_cmp (base, loc))
2105 rtx nx = vt_canonicalize_addr (set, l->loc);
2106 if (x != nx)
2108 slot = local_get_addr_cache->get (loc);
2109 *slot = x = nx;
2111 break;
2115 return x;
2118 /* Canonicalize LOC using equivalences from SET in addition to those
2119 in the cselib static table. It expects a VALUE-based expression,
2120 and it will only substitute VALUEs with other VALUEs or
2121 function-global equivalences, so that, if two addresses have base
2122 VALUEs that are locally or globally related in ways that
2123 memrefs_conflict_p cares about, they will both canonicalize to
2124 expressions that have the same base VALUE.
2126 The use of VALUEs as canonical base addresses enables the canonical
2127 RTXs to remain unchanged globally, if they resolve to a constant,
2128 or throughout a basic block otherwise, so that they can be cached
2129 and the cache need not be invalidated when REGs, MEMs or such
2130 change. */
2132 static rtx
2133 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2135 HOST_WIDE_INT ofst = 0;
2136 machine_mode mode = GET_MODE (oloc);
2137 rtx loc = oloc;
2138 rtx x;
2139 bool retry = true;
2141 while (retry)
2143 while (GET_CODE (loc) == PLUS
2144 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2146 ofst += INTVAL (XEXP (loc, 1));
2147 loc = XEXP (loc, 0);
2150 /* Alignment operations can't normally be combined, so just
2151 canonicalize the base and we're done. We'll normally have
2152 only one stack alignment anyway. */
2153 if (GET_CODE (loc) == AND
2154 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2155 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2157 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2158 if (x != XEXP (loc, 0))
2159 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2160 retry = false;
2163 if (GET_CODE (loc) == VALUE)
2165 if (set)
2166 loc = get_addr_from_local_cache (set, loc);
2167 else
2168 loc = get_addr_from_global_cache (loc);
2170 /* Consolidate plus_constants. */
2171 while (ofst && GET_CODE (loc) == PLUS
2172 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2174 ofst += INTVAL (XEXP (loc, 1));
2175 loc = XEXP (loc, 0);
2178 retry = false;
2180 else
2182 x = canon_rtx (loc);
2183 if (retry)
2184 retry = (x != loc);
2185 loc = x;
2189 /* Add OFST back in. */
2190 if (ofst)
2192 /* Don't build new RTL if we can help it. */
2193 if (GET_CODE (oloc) == PLUS
2194 && XEXP (oloc, 0) == loc
2195 && INTVAL (XEXP (oloc, 1)) == ofst)
2196 return oloc;
2198 loc = plus_constant (mode, loc, ofst);
2201 return loc;
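/* Editorial example (hypothetical RTL, not from the original file):
   given OLOC = (plus (plus (value V) (const_int 4)) (const_int 8)),
   the loop above first folds the constants into OFST = 12, resolves V
   through the local (or, with no SET, the global) cache to its
   canonical base V', and finally returns plus_constant (mode, V', 12).
   A stack-alignment mask such as (and X (const_int -16)) is kept as
   is; only X is canonicalized underneath it, since such masks cannot
   be combined with the accumulated offset.  */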
2204 /* Return true iff there's a true dependence between MLOC and LOC.
2205 MADDR must be a canonicalized version of MLOC's address. */
2207 static inline bool
2208 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2210 if (GET_CODE (loc) != MEM)
2211 return false;
2213 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2214 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2215 return false;
2217 return true;
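/* Editorial example (hypothetical RTL): once both addresses
   canonicalize to the same base VALUE V, a store to
   (mem:SI (value V)) truly depends on
   (mem:SI (plus (value V) (const_int 2))), since the byte ranges
   [V, V+4) and [V+2, V+6) overlap, but not on
   (mem:SI (plus (value V) (const_int 8))), which canon_true_dependence
   can disambiguate precisely because the bases match.  */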
2220 /* Hold parameters for the hashtab traversal function
2221 drop_overlapping_mem_locs, see below. */
2223 struct overlapping_mems
2225 dataflow_set *set;
2226 rtx loc, addr;
2229 /* Remove all MEMs that overlap with COMS->LOC from the location list
2230 of a hash table entry for a value. COMS->ADDR must be a
2231 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2232 canonicalized itself. */
2234 static int
2235 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2237 dataflow_set *set = coms->set;
2238 rtx mloc = coms->loc, addr = coms->addr;
2239 variable var = *slot;
2241 if (var->onepart == ONEPART_VALUE)
2243 location_chain loc, *locp;
2244 bool changed = false;
2245 rtx cur_loc;
2247 gcc_assert (var->n_var_parts == 1);
2249 if (shared_var_p (var, set->vars))
2251 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2252 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2253 break;
2255 if (!loc)
2256 return 1;
2258 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2259 var = *slot;
2260 gcc_assert (var->n_var_parts == 1);
2263 if (VAR_LOC_1PAUX (var))
2264 cur_loc = VAR_LOC_FROM (var);
2265 else
2266 cur_loc = var->var_part[0].cur_loc;
2268 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2269 loc; loc = *locp)
2271 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2273 locp = &loc->next;
2274 continue;
2277 *locp = loc->next;
2278 /* If we have deleted the location which was last emitted
2279 we have to emit a new location, so add the variable to the
2280 set of changed variables. */
2281 if (cur_loc == loc->loc)
2283 changed = true;
2284 var->var_part[0].cur_loc = NULL;
2285 if (VAR_LOC_1PAUX (var))
2286 VAR_LOC_FROM (var) = NULL;
2288 pool_free (loc_chain_pool, loc);
2291 if (!var->var_part[0].loc_chain)
2293 var->n_var_parts--;
2294 changed = true;
2296 if (changed)
2297 variable_was_changed (var, set);
2300 return 1;
2303 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2305 static void
2306 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2308 struct overlapping_mems coms;
2310 gcc_checking_assert (GET_CODE (loc) == MEM);
2312 coms.set = set;
2313 coms.loc = canon_rtx (loc);
2314 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2316 set->traversed_vars = set->vars;
2317 shared_hash_htab (set->vars)
2318 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2319 set->traversed_vars = NULL;
2322 /* Set the location of DV, OFFSET as the MEM LOC. */
2324 static void
2325 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2326 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2327 enum insert_option iopt)
2329 if (dv_is_decl_p (dv))
2330 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2332 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2335 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2336 SET to LOC.
2337 Adjust the address first if it is stack pointer based. */
2339 static void
2340 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2341 rtx set_src)
2343 tree decl = MEM_EXPR (loc);
2344 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2346 var_mem_decl_set (set, loc, initialized,
2347 dv_from_decl (decl), offset, set_src, INSERT);
2350 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2351 dataflow set SET to LOC. If MODIFY is true, any other live copies
2352 of the same variable part are also deleted from the dataflow set,
2353 otherwise the variable part is assumed to be copied from another
2354 location holding the same part.
2355 Adjust the address first if it is stack pointer based. */
2357 static void
2358 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2359 enum var_init_status initialized, rtx set_src)
2361 tree decl = MEM_EXPR (loc);
2362 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2364 clobber_overlapping_mems (set, loc);
2365 decl = var_debug_decl (decl);
2367 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2368 initialized = get_init_value (set, loc, dv_from_decl (decl));
2370 if (modify)
2371 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2372 var_mem_set (set, loc, initialized, set_src);
2375 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2376 true, also delete any other live copies of the same variable part.
2377 Adjust the address first if it is stack pointer based. */
2379 static void
2380 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2382 tree decl = MEM_EXPR (loc);
2383 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2385 clobber_overlapping_mems (set, loc);
2386 decl = var_debug_decl (decl);
2387 if (clobber)
2388 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2389 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2392 /* Return true if LOC should not be expanded for location expressions,
2393 or used in them. */
2395 static inline bool
2396 unsuitable_loc (rtx loc)
2398 switch (GET_CODE (loc))
2400 case PC:
2401 case SCRATCH:
2402 case CC0:
2403 case ASM_INPUT:
2404 case ASM_OPERANDS:
2405 return true;
2407 default:
2408 return false;
2412 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2413 bound to it. */
2415 static inline void
2416 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2418 if (REG_P (loc))
2420 if (modified)
2421 var_regno_delete (set, REGNO (loc));
2422 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2423 dv_from_value (val), 0, NULL_RTX, INSERT);
2425 else if (MEM_P (loc))
2427 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2429 if (modified)
2430 clobber_overlapping_mems (set, loc);
2432 if (l && GET_CODE (l->loc) == VALUE)
2433 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2435 /* If this MEM is a global constant, we don't need it in the
2436 dynamic tables. ??? We should test this before emitting the
2437 micro-op in the first place. */
2438 while (l)
2439 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2440 break;
2441 else
2442 l = l->next;
2444 if (!l)
2445 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2446 dv_from_value (val), 0, NULL_RTX, INSERT);
2448 else
2450 /* Other kinds of equivalences are necessarily static, at least
2451 so long as we do not perform substitutions while merging
2452 expressions. */
2453 gcc_unreachable ();
2454 set_variable_part (set, loc, dv_from_value (val), 0,
2455 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2459 /* Bind a value to a location it was just stored in. If MODIFIED
2460 holds, assume the location was modified, detaching it from any
2461 values bound to it. */
2463 static void
2464 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2465 bool modified)
2467 cselib_val *v = CSELIB_VAL_PTR (val);
2469 gcc_assert (cselib_preserved_value_p (v));
2471 if (dump_file)
2473 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2474 print_inline_rtx (dump_file, loc, 0);
2475 fprintf (dump_file, " evaluates to ");
2476 print_inline_rtx (dump_file, val, 0);
2477 if (v->locs)
2479 struct elt_loc_list *l;
2480 for (l = v->locs; l; l = l->next)
2482 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2483 print_inline_rtx (dump_file, l->loc, 0);
2486 fprintf (dump_file, "\n");
2489 gcc_checking_assert (!unsuitable_loc (loc));
2491 val_bind (set, val, loc, modified);
2494 /* Clear (canonical address) slots that reference X. */
2496 bool
2497 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2499 if (vt_get_canonicalize_base (*slot) == x)
2500 *slot = NULL;
2501 return true;
2504 /* Reset this node, detaching all its equivalences. */
2507 static void
2508 val_reset (dataflow_set *set, decl_or_value dv)
2510 variable var = shared_hash_find (set->vars, dv);
2511 location_chain node;
2512 rtx cval;
2514 if (!var || !var->n_var_parts)
2515 return;
2517 gcc_assert (var->n_var_parts == 1);
2519 if (var->onepart == ONEPART_VALUE)
2521 rtx x = dv_as_value (dv);
2523 /* Relationships in the global cache don't change, so reset the
2524 local cache entry only. */
2525 rtx *slot = local_get_addr_cache->get (x);
2526 if (slot)
2528 /* If the value resolved back to itself, odds are that other
2529 values may have cached it too. These entries now refer
2530 to the old X, so detach them too. Entries that used the
2531 old X but resolved to something else remain ok as long as
2532 that something else isn't also reset. */
2533 if (*slot == x)
2534 local_get_addr_cache
2535 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2536 *slot = NULL;
2540 cval = NULL;
2541 for (node = var->var_part[0].loc_chain; node; node = node->next)
2542 if (GET_CODE (node->loc) == VALUE
2543 && canon_value_cmp (node->loc, cval))
2544 cval = node->loc;
2546 for (node = var->var_part[0].loc_chain; node; node = node->next)
2547 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2549 /* Redirect the equivalence link to the new canonical
2550 value, or simply remove it if it would point at
2551 itself. */
2552 if (cval)
2553 set_variable_part (set, cval, dv_from_value (node->loc),
2554 0, node->init, node->set_src, NO_INSERT);
2555 delete_variable_part (set, dv_as_value (dv),
2556 dv_from_value (node->loc), 0);
2559 if (cval)
2561 decl_or_value cdv = dv_from_value (cval);
2563 /* Keep the remaining values connected, accumulating links
2564 in the canonical value. */
2565 for (node = var->var_part[0].loc_chain; node; node = node->next)
2567 if (node->loc == cval)
2568 continue;
2569 else if (GET_CODE (node->loc) == REG)
2570 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2571 node->set_src, NO_INSERT);
2572 else if (GET_CODE (node->loc) == MEM)
2573 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2574 node->set_src, NO_INSERT);
2575 else
2576 set_variable_part (set, node->loc, cdv, 0,
2577 node->init, node->set_src, NO_INSERT);
2581 /* We remove this last, to make sure that the canonical value is not
2582 removed to the point of requiring reinsertion. */
2583 if (cval)
2584 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2586 clobber_variable_part (set, NULL, dv, 0, NULL);
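/* Editorial walk-through (hypothetical VALUEs): suppose V is reset
   while its chain is ((reg 1), V1, V2) and V1 is the most canonical
   VALUE in it.  Then cval = V1; the V2 equivalence is redirected so
   that V1 and V2 stay connected directly, (reg 1) is re-bound to V1,
   and finally the V <-> V1 link itself is deleted, leaving V fully
   detached.  */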
2589 /* Find the values in a given location and map VAL to another
2590 value, if it is unique, or add the location as one holding the
2591 value. */
2593 static void
2594 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2596 decl_or_value dv = dv_from_value (val);
2598 if (dump_file && (dump_flags & TDF_DETAILS))
2600 if (insn)
2601 fprintf (dump_file, "%i: ", INSN_UID (insn));
2602 else
2603 fprintf (dump_file, "head: ");
2604 print_inline_rtx (dump_file, val, 0);
2605 fputs (" is at ", dump_file);
2606 print_inline_rtx (dump_file, loc, 0);
2607 fputc ('\n', dump_file);
2610 val_reset (set, dv);
2612 gcc_checking_assert (!unsuitable_loc (loc));
2614 if (REG_P (loc))
2616 attrs node, found = NULL;
2618 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2619 if (dv_is_value_p (node->dv)
2620 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2622 found = node;
2624 /* Map incoming equivalences. ??? Wouldn't it be nice if
2625 we just started sharing the location lists? Maybe a
2626 circular list ending at the value itself or some
2627 such. */
2628 set_variable_part (set, dv_as_value (node->dv),
2629 dv_from_value (val), node->offset,
2630 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2631 set_variable_part (set, val, node->dv, node->offset,
2632 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2635 /* If we didn't find any equivalence, we need to remember that
2636 this value is held in the named register. */
2637 if (found)
2638 return;
2640 /* ??? Attempt to find and merge equivalent MEMs or other
2641 expressions too. */
2643 val_bind (set, val, loc, false);
2646 /* Initialize dataflow set SET to be empty. */
2649 static void
2650 dataflow_set_init (dataflow_set *set)
2652 init_attrs_list_set (set->regs);
2653 set->vars = shared_hash_copy (empty_shared_hash);
2654 set->stack_adjust = 0;
2655 set->traversed_vars = NULL;
2658 /* Delete the contents of dataflow set SET. */
2660 static void
2661 dataflow_set_clear (dataflow_set *set)
2663 int i;
2665 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2666 attrs_list_clear (&set->regs[i]);
2668 shared_hash_destroy (set->vars);
2669 set->vars = shared_hash_copy (empty_shared_hash);
2672 /* Copy the contents of dataflow set SRC to DST. */
2674 static void
2675 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2677 int i;
2679 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2680 attrs_list_copy (&dst->regs[i], src->regs[i]);
2682 shared_hash_destroy (dst->vars);
2683 dst->vars = shared_hash_copy (src->vars);
2684 dst->stack_adjust = src->stack_adjust;
2687 /* Information for merging lists of locations for a given offset of a variable. */
2689 struct variable_union_info
2691 /* Node of the location chain. */
2692 location_chain lc;
2694 /* The sum of positions in the input chains. */
2695 int pos;
2697 /* The position in the chain of DST dataflow set. */
2698 int pos_dst;
2701 /* Buffer for location list sorting and its allocated size. */
2702 static struct variable_union_info *vui_vec;
2703 static int vui_allocated;
2705 /* Comparison function for qsort, ordering the structures by POS. */
2707 static int
2708 variable_union_info_cmp_pos (const void *n1, const void *n2)
2710 const struct variable_union_info *const i1 =
2711 (const struct variable_union_info *) n1;
2712 const struct variable_union_info *const i2 =
2713 (const struct variable_union_info *) n2;
2715 if (i1->pos != i2->pos)
2716 return i1->pos - i2->pos;
2718 return (i1->pos_dst - i2->pos_dst);
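/* Editorial sketch (standalone, not from the original sources):
   exercising a comparator of this shape with qsort.  Plain subtraction
   is safe because POS and POS_DST are small non-negative chain
   positions, so the difference cannot overflow.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
struct vui_demo { int pos, pos_dst; };
static int
vui_demo_cmp (const void *n1, const void *n2)
{
  const struct vui_demo *i1 = (const struct vui_demo *) n1;
  const struct vui_demo *i2 = (const struct vui_demo *) n2;
  if (i1->pos != i2->pos)
    return i1->pos - i2->pos;
  return i1->pos_dst - i2->pos_dst;	/* Tie-break by DST position.  */
}
int
main (void)
{
  struct vui_demo v[] = { { 5, 1 }, { 1, 0 }, { 4, 2 } };
  qsort (v, 3, sizeof (v[0]), vui_demo_cmp);
  printf ("%d %d %d\n", v[0].pos, v[1].pos, v[2].pos);	/* Prints 1 4 5.  */
  return 0;
}
#endif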
2721 /* Compute the union of the location parts of variable SRC and of the
2722 same variable from dataflow set SET. Compute a "sorted" union of the
2723 location chains for common offsets, i.e. the locations of a variable
2724 part are sorted by a priority, where the priority is the sum of the
2725 positions in the two chains (if a location is only in one list, its
2726 position in the other is defined to be larger than the length of the
2727 chains). When we are updating the location parts, the newest location
2728 is at the beginning of the chain, so when we do the described "sorted"
2729 union we keep the newest locations at the beginning. */
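/* Editorial worked example (hypothetical locations): let the DST
   chain be (A, B) and the SRC chain be (C, A), so src_l = dst_l = 2.
   A is in both: pos = 0 + 1 = 1 (position 0 in DST, 1 in SRC).
   B is DST-only: pos = 1 + src_l + dst_l = 5.
   C is SRC-only: pos = 0 + src_l + dst_l = 4.
   Sorting by pos yields the union chain (A, C, B): common (and thus
   newest) locations first, then the rest in their chain order.  */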
2731 static int
2732 variable_union (variable src, dataflow_set *set)
2734 variable dst;
2735 variable_def **dstp;
2736 int i, j, k;
2738 dstp = shared_hash_find_slot (set->vars, src->dv);
2739 if (!dstp || !*dstp)
2741 src->refcount++;
2743 dst_can_be_shared = false;
2744 if (!dstp)
2745 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2747 *dstp = src;
2749 /* Continue traversing the hash table. */
2750 return 1;
2752 else
2753 dst = *dstp;
2755 gcc_assert (src->n_var_parts);
2756 gcc_checking_assert (src->onepart == dst->onepart);
2758 /* We can combine one-part variables very efficiently, because their
2759 entries are in canonical order. */
2760 if (src->onepart)
2762 location_chain *nodep, dnode, snode;
2764 gcc_assert (src->n_var_parts == 1
2765 && dst->n_var_parts == 1);
2767 snode = src->var_part[0].loc_chain;
2768 gcc_assert (snode);
2770 restart_onepart_unshared:
2771 nodep = &dst->var_part[0].loc_chain;
2772 dnode = *nodep;
2773 gcc_assert (dnode);
2775 while (snode)
2777 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2779 if (r > 0)
2781 location_chain nnode;
2783 if (shared_var_p (dst, set->vars))
2785 dstp = unshare_variable (set, dstp, dst,
2786 VAR_INIT_STATUS_INITIALIZED);
2787 dst = *dstp;
2788 goto restart_onepart_unshared;
2791 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2792 nnode->loc = snode->loc;
2793 nnode->init = snode->init;
2794 if (!snode->set_src || MEM_P (snode->set_src))
2795 nnode->set_src = NULL;
2796 else
2797 nnode->set_src = snode->set_src;
2798 nnode->next = dnode;
2799 dnode = nnode;
2801 else if (r == 0)
2802 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2804 if (r >= 0)
2805 snode = snode->next;
2807 nodep = &dnode->next;
2808 dnode = *nodep;
2811 return 1;
2814 gcc_checking_assert (!src->onepart);
2816 /* Count the number of location parts, result is K. */
2817 for (i = 0, j = 0, k = 0;
2818 i < src->n_var_parts && j < dst->n_var_parts; k++)
2820 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2822 i++;
2823 j++;
2825 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2826 i++;
2827 else
2828 j++;
2830 k += src->n_var_parts - i;
2831 k += dst->n_var_parts - j;
2833 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2834 thus there are at most MAX_VAR_PARTS different offsets. */
2835 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2837 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2839 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2840 dst = *dstp;
2843 i = src->n_var_parts - 1;
2844 j = dst->n_var_parts - 1;
2845 dst->n_var_parts = k;
2847 for (k--; k >= 0; k--)
2849 location_chain node, node2;
2851 if (i >= 0 && j >= 0
2852 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2854 /* Compute the "sorted" union of the chains, i.e. the locations which
2855 are in both chains go first; they are sorted by the sum of
2856 positions in the chains. */
2857 int dst_l, src_l;
2858 int ii, jj, n;
2859 struct variable_union_info *vui;
2861 /* If DST is shared, compare the location chains.
2862 If they are different, we will most likely modify the chain
2863 in DST, so make a copy of DST. */
2864 if (shared_var_p (dst, set->vars))
2866 for (node = src->var_part[i].loc_chain,
2867 node2 = dst->var_part[j].loc_chain; node && node2;
2868 node = node->next, node2 = node2->next)
2870 if (!((REG_P (node2->loc)
2871 && REG_P (node->loc)
2872 && REGNO (node2->loc) == REGNO (node->loc))
2873 || rtx_equal_p (node2->loc, node->loc)))
2875 if (node2->init < node->init)
2876 node2->init = node->init;
2877 break;
2880 if (node || node2)
2882 dstp = unshare_variable (set, dstp, dst,
2883 VAR_INIT_STATUS_UNKNOWN);
2884 dst = (variable) *dstp;
2888 src_l = 0;
2889 for (node = src->var_part[i].loc_chain; node; node = node->next)
2890 src_l++;
2891 dst_l = 0;
2892 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2893 dst_l++;
2895 if (dst_l == 1)
2897 /* The most common case, much simpler, no qsort is needed. */
2898 location_chain dstnode = dst->var_part[j].loc_chain;
2899 dst->var_part[k].loc_chain = dstnode;
2900 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2901 node2 = dstnode;
2902 for (node = src->var_part[i].loc_chain; node; node = node->next)
2903 if (!((REG_P (dstnode->loc)
2904 && REG_P (node->loc)
2905 && REGNO (dstnode->loc) == REGNO (node->loc))
2906 || rtx_equal_p (dstnode->loc, node->loc)))
2908 location_chain new_node;
2910 /* Copy the location from SRC. */
2911 new_node = (location_chain) pool_alloc (loc_chain_pool);
2912 new_node->loc = node->loc;
2913 new_node->init = node->init;
2914 if (!node->set_src || MEM_P (node->set_src))
2915 new_node->set_src = NULL;
2916 else
2917 new_node->set_src = node->set_src;
2918 node2->next = new_node;
2919 node2 = new_node;
2921 node2->next = NULL;
2923 else
2925 if (src_l + dst_l > vui_allocated)
2927 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2928 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2929 vui_allocated);
2931 vui = vui_vec;
2933 /* Fill in the locations from DST. */
2934 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2935 node = node->next, jj++)
2937 vui[jj].lc = node;
2938 vui[jj].pos_dst = jj;
2940 /* Give POS a value larger than any sum of 2 valid positions. */
2941 vui[jj].pos = jj + src_l + dst_l;
2944 /* Fill in the locations from SRC. */
2945 n = dst_l;
2946 for (node = src->var_part[i].loc_chain, ii = 0; node;
2947 node = node->next, ii++)
2949 /* Find location from NODE. */
2950 for (jj = 0; jj < dst_l; jj++)
2952 if ((REG_P (vui[jj].lc->loc)
2953 && REG_P (node->loc)
2954 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2955 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2957 vui[jj].pos = jj + ii;
2958 break;
2961 if (jj >= dst_l) /* The location has not been found. */
2963 location_chain new_node;
2965 /* Copy the location from SRC. */
2966 new_node = (location_chain) pool_alloc (loc_chain_pool);
2967 new_node->loc = node->loc;
2968 new_node->init = node->init;
2969 if (!node->set_src || MEM_P (node->set_src))
2970 new_node->set_src = NULL;
2971 else
2972 new_node->set_src = node->set_src;
2973 vui[n].lc = new_node;
2974 vui[n].pos_dst = src_l + dst_l;
2975 vui[n].pos = ii + src_l + dst_l;
2976 n++;
2980 if (dst_l == 2)
2982 /* Special-case the still very common case of dst_l == 2: all
2983 entries dst_l ... n-1 are already sorted, with
2984 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
2985 if (vui[0].pos > vui[1].pos)
2987 /* Order should be 1, 0, 2... */
2988 dst->var_part[k].loc_chain = vui[1].lc;
2989 vui[1].lc->next = vui[0].lc;
2990 if (n >= 3)
2992 vui[0].lc->next = vui[2].lc;
2993 vui[n - 1].lc->next = NULL;
2995 else
2996 vui[0].lc->next = NULL;
2997 ii = 3;
2999 else
3001 dst->var_part[k].loc_chain = vui[0].lc;
3002 if (n >= 3 && vui[2].pos < vui[1].pos)
3004 /* Order should be 0, 2, 1, 3... */
3005 vui[0].lc->next = vui[2].lc;
3006 vui[2].lc->next = vui[1].lc;
3007 if (n >= 4)
3009 vui[1].lc->next = vui[3].lc;
3010 vui[n - 1].lc->next = NULL;
3012 else
3013 vui[1].lc->next = NULL;
3014 ii = 4;
3016 else
3018 /* Order should be 0, 1, 2... */
3019 ii = 1;
3020 vui[n - 1].lc->next = NULL;
3023 for (; ii < n; ii++)
3024 vui[ii - 1].lc->next = vui[ii].lc;
3026 else
3028 qsort (vui, n, sizeof (struct variable_union_info),
3029 variable_union_info_cmp_pos);
3031 /* Reconnect the nodes in sorted order. */
3032 for (ii = 1; ii < n; ii++)
3033 vui[ii - 1].lc->next = vui[ii].lc;
3034 vui[n - 1].lc->next = NULL;
3035 dst->var_part[k].loc_chain = vui[0].lc;
3038 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3040 i--;
3041 j--;
3043 else if ((i >= 0 && j >= 0
3044 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3045 || i < 0)
3047 dst->var_part[k] = dst->var_part[j];
3048 j--;
3050 else if ((i >= 0 && j >= 0
3051 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3052 || j < 0)
3054 location_chain *nextp;
3056 /* Copy the chain from SRC. */
3057 nextp = &dst->var_part[k].loc_chain;
3058 for (node = src->var_part[i].loc_chain; node; node = node->next)
3060 location_chain new_lc;
3062 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3063 new_lc->next = NULL;
3064 new_lc->init = node->init;
3065 if (!node->set_src || MEM_P (node->set_src))
3066 new_lc->set_src = NULL;
3067 else
3068 new_lc->set_src = node->set_src;
3069 new_lc->loc = node->loc;
3071 *nextp = new_lc;
3072 nextp = &new_lc->next;
3075 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3076 i--;
3078 dst->var_part[k].cur_loc = NULL;
3081 if (flag_var_tracking_uninit)
3082 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3084 location_chain node, node2;
3085 for (node = src->var_part[i].loc_chain; node; node = node->next)
3086 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3087 if (rtx_equal_p (node->loc, node2->loc))
3089 if (node->init > node2->init)
3090 node2->init = node->init;
3094 /* Continue traversing the hash table. */
3095 return 1;
3098 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3100 static void
3101 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3103 int i;
3105 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3106 attrs_list_union (&dst->regs[i], src->regs[i]);
3108 if (dst->vars == empty_shared_hash)
3110 shared_hash_destroy (dst->vars);
3111 dst->vars = shared_hash_copy (src->vars);
3113 else
3115 variable_iterator_type hi;
3116 variable var;
3118 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3119 var, variable, hi)
3120 variable_union (var, dst);
3124 /* Whether the value is currently being expanded. */
3125 #define VALUE_RECURSED_INTO(x) \
3126 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3128 /* Whether no expansion was found, saving useless lookups.
3129 It must only be set when VALUE_CHANGED is clear. */
3130 #define NO_LOC_P(x) \
3131 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3133 /* Whether cur_loc in the value needs to be (re)computed. */
3134 #define VALUE_CHANGED(x) \
3135 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3136 /* Whether cur_loc in the decl needs to be (re)computed. */
3137 #define DECL_CHANGED(x) TREE_VISITED (x)
3139 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3140 user DECLs, this means they're in changed_variables. Values and
3141 debug exprs may be left with this flag set if no user variable
3142 requires them to be evaluated. */
3144 static inline void
3145 set_dv_changed (decl_or_value dv, bool newv)
3147 switch (dv_onepart_p (dv))
3149 case ONEPART_VALUE:
3150 if (newv)
3151 NO_LOC_P (dv_as_value (dv)) = false;
3152 VALUE_CHANGED (dv_as_value (dv)) = newv;
3153 break;
3155 case ONEPART_DEXPR:
3156 if (newv)
3157 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3158 /* Fall through... */
3160 default:
3161 DECL_CHANGED (dv_as_decl (dv)) = newv;
3162 break;
3166 /* Return true if DV needs to have its cur_loc recomputed. */
3168 static inline bool
3169 dv_changed_p (decl_or_value dv)
3171 return (dv_is_value_p (dv)
3172 ? VALUE_CHANGED (dv_as_value (dv))
3173 : DECL_CHANGED (dv_as_decl (dv)));
3176 /* Return a location list node whose loc is rtx_equal to LOC, in the
3177 location list of a one-part variable or value VAR, or in that of
3178 any values recursively mentioned in the location lists. VARS must
3179 be in star-canonical form. */
3181 static location_chain
3182 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3184 location_chain node;
3185 enum rtx_code loc_code;
3187 if (!var)
3188 return NULL;
3190 gcc_checking_assert (var->onepart);
3192 if (!var->n_var_parts)
3193 return NULL;
3195 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3197 loc_code = GET_CODE (loc);
3198 for (node = var->var_part[0].loc_chain; node; node = node->next)
3200 decl_or_value dv;
3201 variable rvar;
3203 if (GET_CODE (node->loc) != loc_code)
3205 if (GET_CODE (node->loc) != VALUE)
3206 continue;
3208 else if (loc == node->loc)
3209 return node;
3210 else if (loc_code != VALUE)
3212 if (rtx_equal_p (loc, node->loc))
3213 return node;
3214 continue;
3217 /* Since we're in star-canonical form, we don't need to visit
3218 non-canonical nodes: one-part variables and non-canonical
3219 values would only point back to the canonical node. */
3220 if (dv_is_value_p (var->dv)
3221 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3223 /* Skip all subsequent VALUEs. */
3224 while (node->next && GET_CODE (node->next->loc) == VALUE)
3226 node = node->next;
3227 gcc_checking_assert (!canon_value_cmp (node->loc,
3228 dv_as_value (var->dv)));
3229 if (loc == node->loc)
3230 return node;
3232 continue;
3235 gcc_checking_assert (node == var->var_part[0].loc_chain);
3236 gcc_checking_assert (!node->next);
3238 dv = dv_from_value (node->loc);
3239 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3240 return find_loc_in_1pdv (loc, rvar, vars);
3243 /* ??? Gotta look in cselib_val locations too. */
3245 return NULL;
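/* Editorial example (hypothetical VALUEs, not from the original
   file): in star-canonical form each equivalence set has one
   canonical VALUE, say V1, whose chain carries the concrete
   locations followed by the less canonical members, e.g.
     V1: ((reg r1), (mem m), V2, V3)
   while each non-canonical member lists only the canonical one:
     V2: (V1)      V3: (V1)
   So the recursion above chases at most one VALUE link -- from a
   non-canonical member straight to its canonical hub.  */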
3248 /* Hash table iteration argument passed to variable_merge. */
3249 struct dfset_merge
3251 /* The set in which the merge is to be inserted. */
3252 dataflow_set *dst;
3253 /* The set that we're iterating in. */
3254 dataflow_set *cur;
3255 /* The set that may contain the other dv we are to merge with. */
3256 dataflow_set *src;
3257 /* Number of onepart dvs in src. */
3258 int src_onepart_cnt;
3261 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3262 loc_cmp order, and it is maintained as such. */
3264 static void
3265 insert_into_intersection (location_chain *nodep, rtx loc,
3266 enum var_init_status status)
3268 location_chain node;
3269 int r;
3271 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3272 if ((r = loc_cmp (node->loc, loc)) == 0)
3274 node->init = MIN (node->init, status);
3275 return;
3277 else if (r > 0)
3278 break;
3280 node = (location_chain) pool_alloc (loc_chain_pool);
3282 node->loc = loc;
3283 node->set_src = NULL;
3284 node->init = status;
3285 node->next = *nodep;
3286 *nodep = node;
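/* Editorial sketch (standalone analogy, not from the original
   sources): ordered insertion into a singly linked list via a
   pointer-to-pointer cursor, as done above; on a match only the
   weaker init status is kept (mirrored here with a plain minimum).  */
#if 0
#include <stdlib.h>
struct demo_node { int key, status; struct demo_node *next; };
static void
insert_sorted (struct demo_node **nodep, int key, int status)
{
  struct demo_node *n;
  for (n = *nodep; n; nodep = &n->next, n = *nodep)
    if (n->key == key)
      {
	if (status < n->status)
	  n->status = status;	/* Keep the minimum status.  */
	return;
      }
    else if (n->key > key)
      break;
  n = (struct demo_node *) malloc (sizeof *n);
  n->key = key;
  n->status = status;
  n->next = *nodep;
  *nodep = n;
}
#endif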
3289 /* Insert in DEST the intersection of the locations present in both
3290 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3291 variable in DSM->cur, whereas S2VAR is from DSM->src. The
3292 destination variable being built is in DSM->dst. */
3294 static void
3295 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3296 location_chain s1node, variable s2var)
3298 dataflow_set *s1set = dsm->cur;
3299 dataflow_set *s2set = dsm->src;
3300 location_chain found;
3302 if (s2var)
3304 location_chain s2node;
3306 gcc_checking_assert (s2var->onepart);
3308 if (s2var->n_var_parts)
3310 s2node = s2var->var_part[0].loc_chain;
3312 for (; s1node && s2node;
3313 s1node = s1node->next, s2node = s2node->next)
3314 if (s1node->loc != s2node->loc)
3315 break;
3316 else if (s1node->loc == val)
3317 continue;
3318 else
3319 insert_into_intersection (dest, s1node->loc,
3320 MIN (s1node->init, s2node->init));
3324 for (; s1node; s1node = s1node->next)
3326 if (s1node->loc == val)
3327 continue;
3329 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3330 shared_hash_htab (s2set->vars))))
3332 insert_into_intersection (dest, s1node->loc,
3333 MIN (s1node->init, found->init));
3334 continue;
3337 if (GET_CODE (s1node->loc) == VALUE
3338 && !VALUE_RECURSED_INTO (s1node->loc))
3340 decl_or_value dv = dv_from_value (s1node->loc);
3341 variable svar = shared_hash_find (s1set->vars, dv);
3342 if (svar)
3344 if (svar->n_var_parts == 1)
3346 VALUE_RECURSED_INTO (s1node->loc) = true;
3347 intersect_loc_chains (val, dest, dsm,
3348 svar->var_part[0].loc_chain,
3349 s2var);
3350 VALUE_RECURSED_INTO (s1node->loc) = false;
3355 /* ??? gotta look in cselib_val locations too. */
3357 /* ??? if the location is equivalent to any location in src,
3358 searched recursively
3360 add to dst the values needed to represent the equivalence
3362 telling whether location S is equivalent to another dv's
3363 location list:
3365 for each location D in the list
3367 if S and D satisfy rtx_equal_p, then it is present
3369 else if D is a value, recurse without cycles
3371 else if S and D have the same CODE and MODE
3373 for each operand oS and the corresponding oD
3375 if oS and oD are not equivalent, then S and D are not equivalent
3377 else if they are RTX vectors
3379 if any vector oS element is not equivalent to its respective oD,
3380 then S and D are not equivalent
3388 /* Return -1 if X should be before Y in a location list for a 1-part
3389 variable, 1 if Y should be before X, and 0 if they're equivalent
3390 and should not appear in the list. */
3392 static int
3393 loc_cmp (rtx x, rtx y)
3395 int i, j, r;
3396 RTX_CODE code = GET_CODE (x);
3397 const char *fmt;
3399 if (x == y)
3400 return 0;
3402 if (REG_P (x))
3404 if (!REG_P (y))
3405 return -1;
3406 gcc_assert (GET_MODE (x) == GET_MODE (y));
3407 if (REGNO (x) == REGNO (y))
3408 return 0;
3409 else if (REGNO (x) < REGNO (y))
3410 return -1;
3411 else
3412 return 1;
3415 if (REG_P (y))
3416 return 1;
3418 if (MEM_P (x))
3420 if (!MEM_P (y))
3421 return -1;
3422 gcc_assert (GET_MODE (x) == GET_MODE (y));
3423 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3426 if (MEM_P (y))
3427 return 1;
3429 if (GET_CODE (x) == VALUE)
3431 if (GET_CODE (y) != VALUE)
3432 return -1;
3433 /* Don't assert the modes are the same, that is true only
3434 when not recursing. (subreg:QI (value:SI 1:1) 0)
3435 and (subreg:QI (value:DI 2:2) 0) can be compared,
3436 even when the modes are different. */
3437 if (canon_value_cmp (x, y))
3438 return -1;
3439 else
3440 return 1;
3443 if (GET_CODE (y) == VALUE)
3444 return 1;
3446 /* Entry value is the least preferable kind of expression. */
3447 if (GET_CODE (x) == ENTRY_VALUE)
3449 if (GET_CODE (y) != ENTRY_VALUE)
3450 return 1;
3451 gcc_assert (GET_MODE (x) == GET_MODE (y));
3452 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3455 if (GET_CODE (y) == ENTRY_VALUE)
3456 return -1;
3458 if (GET_CODE (x) == GET_CODE (y))
3459 /* Compare operands below. */;
3460 else if (GET_CODE (x) < GET_CODE (y))
3461 return -1;
3462 else
3463 return 1;
3465 gcc_assert (GET_MODE (x) == GET_MODE (y));
3467 if (GET_CODE (x) == DEBUG_EXPR)
3469 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3470 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3471 return -1;
3472 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3473 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3474 return 1;
3477 fmt = GET_RTX_FORMAT (code);
3478 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3479 switch (fmt[i])
3481 case 'w':
3482 if (XWINT (x, i) == XWINT (y, i))
3483 break;
3484 else if (XWINT (x, i) < XWINT (y, i))
3485 return -1;
3486 else
3487 return 1;
3489 case 'n':
3490 case 'i':
3491 if (XINT (x, i) == XINT (y, i))
3492 break;
3493 else if (XINT (x, i) < XINT (y, i))
3494 return -1;
3495 else
3496 return 1;
3498 case 'V':
3499 case 'E':
3500 /* Compare the vector length first. */
3501 if (XVECLEN (x, i) == XVECLEN (y, i))
3502 /* Compare the vector's elements. */;
3503 else if (XVECLEN (x, i) < XVECLEN (y, i))
3504 return -1;
3505 else
3506 return 1;
3508 for (j = 0; j < XVECLEN (x, i); j++)
3509 if ((r = loc_cmp (XVECEXP (x, i, j),
3510 XVECEXP (y, i, j))))
3511 return r;
3512 break;
3514 case 'e':
3515 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3516 return r;
3517 break;
3519 case 'S':
3520 case 's':
3521 if (XSTR (x, i) == XSTR (y, i))
3522 break;
3523 if (!XSTR (x, i))
3524 return -1;
3525 if (!XSTR (y, i))
3526 return 1;
3527 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3528 break;
3529 else if (r < 0)
3530 return -1;
3531 else
3532 return 1;
3534 case 'u':
3535 /* These are just backpointers, so they don't matter. */
3536 break;
3538 case '0':
3539 case 't':
3540 break;
3542 /* It is believed that rtx's at this level will never
3543 contain anything but integers and other rtx's,
3544 except for within LABEL_REFs and SYMBOL_REFs. */
3545 default:
3546 gcc_unreachable ();
3548 if (CONST_WIDE_INT_P (x))
3550 /* Compare the vector length first. */
3551 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3552 return 1;
3553 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3554 return -1;
3556 /* Compare the vector's elements. */
3557 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3559 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3560 return -1;
3561 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3562 return 1;
3566 return 0;
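/* Editorial example (hypothetical locations): loc_cmp orders a chain
   as registers first (by REGNO), then MEMs (recursively by address),
   then VALUEs (by canon_value_cmp), then all other expressions by
   code and operands, with ENTRY_VALUEs last of all.  A chain might
   thus read:
     ((reg 1), (reg 3), (mem (reg 1)), (value V1),
      (plus (reg 1) (const_int 4)), (entry_value (reg 1)))  */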
3569 #if ENABLE_CHECKING
3570 /* Check the order of entries in one-part variables. */
3572 static int
3573 canonicalize_loc_order_check (variable_def **slot,
3574 dataflow_set *data ATTRIBUTE_UNUSED)
3576 variable var = *slot;
3577 location_chain node, next;
3579 #ifdef ENABLE_RTL_CHECKING
3580 int i;
3581 for (i = 0; i < var->n_var_parts; i++)
3582 gcc_assert (var->var_part[i].cur_loc == NULL);
3583 gcc_assert (!var->in_changed_variables);
3584 #endif
3586 if (!var->onepart)
3587 return 1;
3589 gcc_assert (var->n_var_parts == 1);
3590 node = var->var_part[0].loc_chain;
3591 gcc_assert (node);
3593 while ((next = node->next))
3595 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3596 node = next;
3599 return 1;
3601 #endif
3603 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3604 more likely to be chosen as canonical for an equivalence set.
3605 Ensure less likely values can reach more likely neighbors, making
3606 the connections bidirectional. */
3608 static int
3609 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3611 variable var = *slot;
3612 decl_or_value dv = var->dv;
3613 rtx val;
3614 location_chain node;
3616 if (!dv_is_value_p (dv))
3617 return 1;
3619 gcc_checking_assert (var->n_var_parts == 1);
3621 val = dv_as_value (dv);
3623 for (node = var->var_part[0].loc_chain; node; node = node->next)
3624 if (GET_CODE (node->loc) == VALUE)
3626 if (canon_value_cmp (node->loc, val))
3627 VALUE_RECURSED_INTO (val) = true;
3628 else
3630 decl_or_value odv = dv_from_value (node->loc);
3631 variable_def **oslot;
3632 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3634 set_slot_part (set, val, oslot, odv, 0,
3635 node->init, NULL_RTX);
3637 VALUE_RECURSED_INTO (node->loc) = true;
3641 return 1;
3644 /* Remove redundant entries from equivalence lists in onepart
3645 variables, canonicalizing equivalence sets into star shapes. */
3647 static int
3648 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3650 variable var = *slot;
3651 decl_or_value dv = var->dv;
3652 location_chain node;
3653 decl_or_value cdv;
3654 rtx val, cval;
3655 variable_def **cslot;
3656 bool has_value;
3657 bool has_marks;
3659 if (!var->onepart)
3660 return 1;
3662 gcc_checking_assert (var->n_var_parts == 1);
3664 if (dv_is_value_p (dv))
3666 cval = dv_as_value (dv);
3667 if (!VALUE_RECURSED_INTO (cval))
3668 return 1;
3669 VALUE_RECURSED_INTO (cval) = false;
3671 else
3672 cval = NULL_RTX;
3674 restart:
3675 val = cval;
3676 has_value = false;
3677 has_marks = false;
3679 gcc_assert (var->n_var_parts == 1);
3681 for (node = var->var_part[0].loc_chain; node; node = node->next)
3682 if (GET_CODE (node->loc) == VALUE)
3684 has_value = true;
3685 if (VALUE_RECURSED_INTO (node->loc))
3686 has_marks = true;
3687 if (canon_value_cmp (node->loc, cval))
3688 cval = node->loc;
3691 if (!has_value)
3692 return 1;
3694 if (cval == val)
3696 if (!has_marks || dv_is_decl_p (dv))
3697 return 1;
3699 /* Keep it marked so that we revisit it, either after visiting a
3700 child node, or after visiting a new parent that might be
3701 discovered later. */
3702 VALUE_RECURSED_INTO (val) = true;
3704 for (node = var->var_part[0].loc_chain; node; node = node->next)
3705 if (GET_CODE (node->loc) == VALUE
3706 && VALUE_RECURSED_INTO (node->loc))
3708 cval = node->loc;
3709 restart_with_cval:
3710 VALUE_RECURSED_INTO (cval) = false;
3711 dv = dv_from_value (cval);
3712 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3713 if (!slot)
3715 gcc_assert (dv_is_decl_p (var->dv));
3716 /* The canonical value was reset and dropped.
3717 Remove it. */
3718 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3719 return 1;
3721 var = *slot;
3722 gcc_assert (dv_is_value_p (var->dv));
3723 if (var->n_var_parts == 0)
3724 return 1;
3725 gcc_assert (var->n_var_parts == 1);
3726 goto restart;
3729 VALUE_RECURSED_INTO (val) = false;
3731 return 1;
3734 /* Push values to the canonical one. */
3735 cdv = dv_from_value (cval);
3736 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3738 for (node = var->var_part[0].loc_chain; node; node = node->next)
3739 if (node->loc != cval)
3741 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3742 node->init, NULL_RTX);
3743 if (GET_CODE (node->loc) == VALUE)
3745 decl_or_value ndv = dv_from_value (node->loc);
3747 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3748 NO_INSERT);
3750 if (canon_value_cmp (node->loc, val))
3752 /* If it could have been a local minimum, it's not any more,
3753 since it's now neighbor to cval, so it may have to push
3754 to it. Conversely, if it wouldn't have prevailed over
3755 val, then whatever mark it has is fine: if it was to
3756 push, it will now push to a more canonical node, but if
3757 it wasn't, then it has already pushed any values it might
3758 have to. */
3759 VALUE_RECURSED_INTO (node->loc) = true;
3760 /* Make sure we visit node->loc by ensuring that cval is
3761 visited too. */
3762 VALUE_RECURSED_INTO (cval) = true;
3764 else if (!VALUE_RECURSED_INTO (node->loc))
3765 /* If we have no need to "recurse" into this node, it's
3766 already "canonicalized", so drop the link to the old
3767 parent. */
3768 clobber_variable_part (set, cval, ndv, 0, NULL);
3770 else if (GET_CODE (node->loc) == REG)
3772 attrs list = set->regs[REGNO (node->loc)], *listp;
3774 /* Change an existing attribute referring to dv so that it
3775 refers to cdv, removing any duplicate this might
3776 introduce, and checking that no previous duplicates
3777 existed, all in a single pass. */
3779 while (list)
3781 if (list->offset == 0
3782 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3783 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3784 break;
3786 list = list->next;
3789 gcc_assert (list);
3790 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3792 list->dv = cdv;
3793 for (listp = &list->next; (list = *listp); listp = &list->next)
3795 if (list->offset)
3796 continue;
3798 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3800 *listp = list->next;
3801 pool_free (attrs_pool, list);
3802 list = *listp;
3803 break;
3806 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3809 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3811 for (listp = &list->next; (list = *listp); listp = &list->next)
3813 if (list->offset)
3814 continue;
3816 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3818 *listp = list->next;
3819 pool_free (attrs_pool, list);
3820 list = *listp;
3821 break;
3824 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3827 else
3828 gcc_unreachable ();
3830 #if ENABLE_CHECKING
3831 while (list)
3833 if (list->offset == 0
3834 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3835 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3836 gcc_unreachable ();
3838 list = list->next;
3840 #endif
3844 if (val)
3845 set_slot_part (set, val, cslot, cdv, 0,
3846 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3848 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3850 /* Variable may have been unshared. */
3851 var = *slot;
3852 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3853 && var->var_part[0].loc_chain->next == NULL);
3855 if (VALUE_RECURSED_INTO (cval))
3856 goto restart_with_cval;
3858 return 1;
3861 /* Bind one-part variables to the canonical value in an equivalence
3862 set. Not doing this causes dataflow convergence failure in rare
3863 circumstances, see PR42873. Unfortunately we can't do this
3864 efficiently as part of canonicalize_values_star, since we may not
3865 have determined or even seen the canonical value of a set when we
3866 get to a variable that references another member of the set. */
3868 static int
3869 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3871 variable var = *slot;
3872 decl_or_value dv = var->dv;
3873 location_chain node;
3874 rtx cval;
3875 decl_or_value cdv;
3876 variable_def **cslot;
3877 variable cvar;
3878 location_chain cnode;
3880 if (!var->onepart || var->onepart == ONEPART_VALUE)
3881 return 1;
3883 gcc_assert (var->n_var_parts == 1);
3885 node = var->var_part[0].loc_chain;
3887 if (GET_CODE (node->loc) != VALUE)
3888 return 1;
3890 gcc_assert (!node->next);
3891 cval = node->loc;
3893 /* Push values to the canonical one. */
3894 cdv = dv_from_value (cval);
3895 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3896 if (!cslot)
3897 return 1;
3898 cvar = *cslot;
3899 gcc_assert (cvar->n_var_parts == 1);
3901 cnode = cvar->var_part[0].loc_chain;
3903 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3904 that are not "more canonical" than it. */
3905 if (GET_CODE (cnode->loc) != VALUE
3906 || !canon_value_cmp (cnode->loc, cval))
3907 return 1;
3909 /* CVAL was found to be non-canonical. Change the variable to point
3910 to the canonical VALUE. */
3911 gcc_assert (!cnode->next);
3912 cval = cnode->loc;
3914 slot = set_slot_part (set, cval, slot, dv, 0,
3915 node->init, node->set_src);
3916 clobber_slot_part (set, cval, slot, 0, node->set_src);
3918 return 1;
3921 /* Combine the variable or value S1VAR (in DSM->cur) with the
3922 corresponding entry in DSM->src. Multi-part variables are combined
3923 with variable_union, whereas onepart dvs are combined with
3924 intersection. */
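/* Editorial example (hypothetical chains): for a onepart dv present
   in both inputs, with chains ((reg 1), (mem m)) in DSM->cur and
   ((reg 1), (reg 2)) in DSM->src, the destination keeps only the
   intersection ((reg 1)), possibly extended through VALUE
   equivalences.  A multi-part variable with the same chains would
   instead receive their union via variable_union.  */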
3926 static int
3927 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3929 dataflow_set *dst = dsm->dst;
3930 variable_def **dstslot;
3931 variable s2var, dvar = NULL;
3932 decl_or_value dv = s1var->dv;
3933 onepart_enum_t onepart = s1var->onepart;
3934 rtx val;
3935 hashval_t dvhash;
3936 location_chain node, *nodep;
3938 /* If the incoming onepart variable has an empty location list, then
3939 the intersection will be just as empty. For other variables,
3940 it's always union. */
3941 gcc_checking_assert (s1var->n_var_parts
3942 && s1var->var_part[0].loc_chain);
3944 if (!onepart)
3945 return variable_union (s1var, dst);
3947 gcc_checking_assert (s1var->n_var_parts == 1);
3949 dvhash = dv_htab_hash (dv);
3950 if (dv_is_value_p (dv))
3951 val = dv_as_value (dv);
3952 else
3953 val = NULL;
3955 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3956 if (!s2var)
3958 dst_can_be_shared = false;
3959 return 1;
3962 dsm->src_onepart_cnt--;
3963 gcc_assert (s2var->var_part[0].loc_chain
3964 && s2var->onepart == onepart
3965 && s2var->n_var_parts == 1);
3967 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3968 if (dstslot)
3970 dvar = *dstslot;
3971 gcc_assert (dvar->refcount == 1
3972 && dvar->onepart == onepart
3973 && dvar->n_var_parts == 1);
3974 nodep = &dvar->var_part[0].loc_chain;
3976 else
3978 nodep = &node;
3979 node = NULL;
3982 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3984 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3985 dvhash, INSERT);
3986 *dstslot = dvar = s2var;
3987 dvar->refcount++;
3989 else
3991 dst_can_be_shared = false;
3993 intersect_loc_chains (val, nodep, dsm,
3994 s1var->var_part[0].loc_chain, s2var);
3996 if (!dstslot)
3998 if (node)
4000 dvar = (variable) pool_alloc (onepart_pool (onepart));
4001 dvar->dv = dv;
4002 dvar->refcount = 1;
4003 dvar->n_var_parts = 1;
4004 dvar->onepart = onepart;
4005 dvar->in_changed_variables = false;
4006 dvar->var_part[0].loc_chain = node;
4007 dvar->var_part[0].cur_loc = NULL;
4008 if (onepart)
4009 VAR_LOC_1PAUX (dvar) = NULL;
4010 else
4011 VAR_PART_OFFSET (dvar, 0) = 0;
4013 dstslot
4014 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4015 INSERT);
4016 gcc_assert (!*dstslot);
4017 *dstslot = dvar;
4019 else
4020 return 1;
4024 nodep = &dvar->var_part[0].loc_chain;
4025 while ((node = *nodep))
4027 location_chain *nextp = &node->next;
4029 if (GET_CODE (node->loc) == REG)
4031 attrs list;
4033 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4034 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4035 && dv_is_value_p (list->dv))
4036 break;
4038 if (!list)
4039 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4040 dv, 0, node->loc);
4041 /* If this value became canonical for another value that had
4042 this register, we want to leave it alone. */
4043 else if (dv_as_value (list->dv) != val)
4045 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4046 dstslot, dv, 0,
4047 node->init, NULL_RTX);
4048 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4050 /* Since nextp points into the removed node, we can't
4051 use it. The pointer to the next node moved to nodep.
4052 However, if the variable we're walking is unshared
4053 during our walk, we'll keep walking the location list
4054 of the previously-shared variable, in which case the
4055 node won't have been removed, and we'll want to skip
4056 it. That's why we test *nodep here. */
4057 if (*nodep != node)
4058 nextp = nodep;
4061 else
4062 /* Canonicalization puts registers first, so we don't have to
4063 walk it all. */
4064 break;
4065 nodep = nextp;
4068 if (dvar != *dstslot)
4069 dvar = *dstslot;
4070 nodep = &dvar->var_part[0].loc_chain;
4072 if (val)
4074 /* Mark all referenced nodes for canonicalization, and make sure
4075 we have mutual equivalence links. */
4076 VALUE_RECURSED_INTO (val) = true;
4077 for (node = *nodep; node; node = node->next)
4078 if (GET_CODE (node->loc) == VALUE)
4080 VALUE_RECURSED_INTO (node->loc) = true;
4081 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4082 node->init, NULL, INSERT);
4085 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4086 gcc_assert (*dstslot == dvar);
4087 canonicalize_values_star (dstslot, dst);
4088 gcc_checking_assert (dstslot
4089 == shared_hash_find_slot_noinsert_1 (dst->vars,
4090 dv, dvhash));
4091 dvar = *dstslot;
4093 else
4095 bool has_value = false, has_other = false;
4097 /* If we have one value and anything else, we're going to
4098 canonicalize this, so make sure all values have an entry in
4099 the table and are marked for canonicalization. */
4100 for (node = *nodep; node; node = node->next)
4102 if (GET_CODE (node->loc) == VALUE)
4104 /* If this was marked during register canonicalization,
4105 we know we have to canonicalize values. */
4106 if (has_value)
4107 has_other = true;
4108 has_value = true;
4109 if (has_other)
4110 break;
4112 else
4114 has_other = true;
4115 if (has_value)
4116 break;
4120 if (has_value && has_other)
4122 for (node = *nodep; node; node = node->next)
4124 if (GET_CODE (node->loc) == VALUE)
4126 decl_or_value dv = dv_from_value (node->loc);
4127 variable_def **slot = NULL;
4129 if (shared_hash_shared (dst->vars))
4130 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4131 if (!slot)
4132 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4133 INSERT);
4134 if (!*slot)
4136 variable var = (variable) pool_alloc (onepart_pool
4137 (ONEPART_VALUE));
4138 var->dv = dv;
4139 var->refcount = 1;
4140 var->n_var_parts = 1;
4141 var->onepart = ONEPART_VALUE;
4142 var->in_changed_variables = false;
4143 var->var_part[0].loc_chain = NULL;
4144 var->var_part[0].cur_loc = NULL;
4145 VAR_LOC_1PAUX (var) = NULL;
4146 *slot = var;
4149 VALUE_RECURSED_INTO (node->loc) = true;
4153 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4154 gcc_assert (*dstslot == dvar);
4155 canonicalize_values_star (dstslot, dst);
4156 gcc_checking_assert (dstslot
4157 == shared_hash_find_slot_noinsert_1 (dst->vars,
4158 dv, dvhash));
4159 dvar = *dstslot;
4163 if (!onepart_variable_different_p (dvar, s2var))
4165 variable_htab_free (dvar);
4166 *dstslot = dvar = s2var;
4167 dvar->refcount++;
4169 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4171 variable_htab_free (dvar);
4172 *dstslot = dvar = s1var;
4173 dvar->refcount++;
4174 dst_can_be_shared = false;
4176 else
4177 dst_can_be_shared = false;
4179 return 1;
4182 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4183 multi-part variable. Unions of multi-part variables and
4184 intersections of one-part ones will be handled in
4185 variable_merge_over_cur(). */
4187 static int
4188 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4190 dataflow_set *dst = dsm->dst;
4191 decl_or_value dv = s2var->dv;
4193 if (!s2var->onepart)
4195 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4196 *dstp = s2var;
4197 s2var->refcount++;
4198 return 1;
4201 dsm->src_onepart_cnt++;
4202 return 1;
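/* For illustration, how the two merge callbacks divide the work: a
   multi-part variable coming from SRC2 is copied into DST verbatim
   here, and any multi-part variable in the current set is then
   unioned into DST by variable_merge_over_cur; a one-part variable or
   VALUE is only counted here, and its location chain is intersected
   with the current set's chain in variable_merge_over_cur.  One-part
   entries that exist only in SRC2 leave src_onepart_cnt nonzero and
   merely prevent DST from being shared.  */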
4205 /* Combine dataflow set information from SRC2 into DST; the previous
4206 contents of DST participate in the merge as the current set. */
4208 static void
4209 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4211 dataflow_set cur = *dst;
4212 dataflow_set *src1 = &cur;
4213 struct dfset_merge dsm;
4214 int i;
4215 size_t src1_elems, src2_elems;
4216 variable_iterator_type hi;
4217 variable var;
4219 src1_elems = shared_hash_htab (src1->vars)->elements ();
4220 src2_elems = shared_hash_htab (src2->vars)->elements ();
4221 dataflow_set_init (dst);
4222 dst->stack_adjust = cur.stack_adjust;
4223 shared_hash_destroy (dst->vars);
4224 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4225 dst->vars->refcount = 1;
4226 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4228 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4229 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4231 dsm.dst = dst;
4232 dsm.src = src2;
4233 dsm.cur = src1;
4234 dsm.src_onepart_cnt = 0;
4236 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4237 var, variable, hi)
4238 variable_merge_over_src (var, &dsm);
4239 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4240 var, variable, hi)
4241 variable_merge_over_cur (var, &dsm);
4243 if (dsm.src_onepart_cnt)
4244 dst_can_be_shared = false;
4246 dataflow_set_destroy (src1);
4249 /* Mark register equivalences. */
4251 static void
4252 dataflow_set_equiv_regs (dataflow_set *set)
4254 int i;
4255 attrs list, *listp;
4257 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4259 rtx canon[NUM_MACHINE_MODES];
4261 /* If the list is empty or one entry, no need to canonicalize
4262 anything. */
4263 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4264 continue;
4266 memset (canon, 0, sizeof (canon));
4268 for (list = set->regs[i]; list; list = list->next)
4269 if (list->offset == 0 && dv_is_value_p (list->dv))
4271 rtx val = dv_as_value (list->dv);
4272 rtx *cvalp = &canon[(int)GET_MODE (val)];
4273 rtx cval = *cvalp;
4275 if (canon_value_cmp (val, cval))
4276 *cvalp = val;
4279 for (list = set->regs[i]; list; list = list->next)
4280 if (list->offset == 0 && dv_onepart_p (list->dv))
4282 rtx cval = canon[(int)GET_MODE (list->loc)];
4284 if (!cval)
4285 continue;
4287 if (dv_is_value_p (list->dv))
4289 rtx val = dv_as_value (list->dv);
4291 if (val == cval)
4292 continue;
4294 VALUE_RECURSED_INTO (val) = true;
4295 set_variable_part (set, val, dv_from_value (cval), 0,
4296 VAR_INIT_STATUS_INITIALIZED,
4297 NULL, NO_INSERT);
4300 VALUE_RECURSED_INTO (cval) = true;
4301 set_variable_part (set, cval, list->dv, 0,
4302 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4305 for (listp = &set->regs[i]; (list = *listp);
4306 listp = list ? &list->next : listp)
4307 if (list->offset == 0 && dv_onepart_p (list->dv))
4309 rtx cval = canon[(int)GET_MODE (list->loc)];
4310 variable_def **slot;
4312 if (!cval)
4313 continue;
4315 if (dv_is_value_p (list->dv))
4317 rtx val = dv_as_value (list->dv);
4318 if (!VALUE_RECURSED_INTO (val))
4319 continue;
4322 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4323 canonicalize_values_star (slot, set);
4324 if (*listp != list)
4325 list = NULL;
4330 /* Remove any redundant values in the location list of VAR, which must
4331 be unshared and 1-part. */
4333 static void
4334 remove_duplicate_values (variable var)
4336 location_chain node, *nodep;
4338 gcc_assert (var->onepart);
4339 gcc_assert (var->n_var_parts == 1);
4340 gcc_assert (var->refcount == 1);
4342 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4344 if (GET_CODE (node->loc) == VALUE)
4346 if (VALUE_RECURSED_INTO (node->loc))
4348 /* Remove duplicate value node. */
4349 *nodep = node->next;
4350 pool_free (loc_chain_pool, node);
4351 continue;
4353 else
4354 VALUE_RECURSED_INTO (node->loc) = true;
4356 nodep = &node->next;
4359 for (node = var->var_part[0].loc_chain; node; node = node->next)
4360 if (GET_CODE (node->loc) == VALUE)
4362 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4363 VALUE_RECURSED_INTO (node->loc) = false;
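/* For illustration: given the unshared one-part chain
     (value V1) -> (reg R) -> (value V2) -> (value V1)
   the first loop above marks V1 and V2 with VALUE_RECURSED_INTO and
   frees the second V1 node as a duplicate; the second loop clears the
   marks again so the flag bit can be reused later.  */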
4368 /* Hash table iteration argument passed to variable_post_merge. */
4369 struct dfset_post_merge
4371 /* The new input set for the current block. */
4372 dataflow_set *set;
4373 /* Pointer to the permanent input set for the current block, or
4374 NULL. */
4375 dataflow_set **permp;
4378 /* Create values for incoming expressions associated with one-part
4379 variables that don't have value numbers for them. */
4381 static int
4382 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4384 dataflow_set *set = dfpm->set;
4385 variable var = *slot;
4386 location_chain node;
4388 if (!var->onepart || !var->n_var_parts)
4389 return 1;
4391 gcc_assert (var->n_var_parts == 1);
4393 if (dv_is_decl_p (var->dv))
4395 bool check_dupes = false;
4397 restart:
4398 for (node = var->var_part[0].loc_chain; node; node = node->next)
4400 if (GET_CODE (node->loc) == VALUE)
4401 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4402 else if (GET_CODE (node->loc) == REG)
4404 attrs att, *attp, *curp = NULL;
4406 if (var->refcount != 1)
4408 slot = unshare_variable (set, slot, var,
4409 VAR_INIT_STATUS_INITIALIZED);
4410 var = *slot;
4411 goto restart;
4414 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4415 attp = &att->next)
4416 if (att->offset == 0
4417 && GET_MODE (att->loc) == GET_MODE (node->loc))
4419 if (dv_is_value_p (att->dv))
4421 rtx cval = dv_as_value (att->dv);
4422 node->loc = cval;
4423 check_dupes = true;
4424 break;
4426 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4427 curp = attp;
4430 if (!curp)
4432 curp = attp;
4433 while (*curp)
4434 if ((*curp)->offset == 0
4435 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4436 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4437 break;
4438 else
4439 curp = &(*curp)->next;
4440 gcc_assert (*curp);
4443 if (!att)
4445 decl_or_value cdv;
4446 rtx cval;
4448 if (!*dfpm->permp)
4450 *dfpm->permp = XNEW (dataflow_set);
4451 dataflow_set_init (*dfpm->permp);
4454 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4455 att; att = att->next)
4456 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4458 gcc_assert (att->offset == 0
4459 && dv_is_value_p (att->dv));
4460 val_reset (set, att->dv);
4461 break;
4464 if (att)
4466 cdv = att->dv;
4467 cval = dv_as_value (cdv);
4469 else
4471 /* Create a unique value to hold this register,
4472 that ought to be found and reused in
4473 subsequent rounds. */
4474 cselib_val *v;
4475 gcc_assert (!cselib_lookup (node->loc,
4476 GET_MODE (node->loc), 0,
4477 VOIDmode));
4478 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4479 VOIDmode);
4480 cselib_preserve_value (v);
4481 cselib_invalidate_rtx (node->loc);
4482 cval = v->val_rtx;
4483 cdv = dv_from_value (cval);
4484 if (dump_file)
4485 fprintf (dump_file,
4486 "Created new value %u:%u for reg %i\n",
4487 v->uid, v->hash, REGNO (node->loc));
4490 var_reg_decl_set (*dfpm->permp, node->loc,
4491 VAR_INIT_STATUS_INITIALIZED,
4492 cdv, 0, NULL, INSERT);
4494 node->loc = cval;
4495 check_dupes = true;
4498 /* Remove the attribute referring to the decl, which now
4499 uses the value for the register, whether that value already
4500 exists or is to be added when we bring perm in. */
4501 att = *curp;
4502 *curp = att->next;
4503 pool_free (attrs_pool, att);
4507 if (check_dupes)
4508 remove_duplicate_values (var);
4511 return 1;
4514 /* Reset values in the permanent set that are not associated with the
4515 chosen expression. */
4517 static int
4518 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4520 dataflow_set *set = dfpm->set;
4521 variable pvar = *pslot, var;
4522 location_chain pnode;
4523 decl_or_value dv;
4524 attrs att;
4526 gcc_assert (dv_is_value_p (pvar->dv)
4527 && pvar->n_var_parts == 1);
4528 pnode = pvar->var_part[0].loc_chain;
4529 gcc_assert (pnode
4530 && !pnode->next
4531 && REG_P (pnode->loc));
4533 dv = pvar->dv;
4535 var = shared_hash_find (set->vars, dv);
4536 if (var)
4538 /* Although variable_post_merge_new_vals may have made decls
4539 non-star-canonical, values that pre-existed in canonical form
4540 remain canonical, and newly-created values reference a single
4541 REG, so they are canonical as well. Since VAR has the
4542 location list for a VALUE, using find_loc_in_1pdv for it is
4543 fine, since VALUEs don't map back to DECLs. */
4544 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4545 return 1;
4546 val_reset (set, dv);
4549 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4550 if (att->offset == 0
4551 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4552 && dv_is_value_p (att->dv))
4553 break;
4555 /* If there is a value associated with this register already, create
4556 an equivalence. */
4557 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4559 rtx cval = dv_as_value (att->dv);
4560 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4561 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4562 NULL, INSERT);
4564 else if (!att)
4566 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4567 dv, 0, pnode->loc);
4568 variable_union (pvar, set);
4571 return 1;
4574 /* Adjust SET after a merge: create values for incoming expressions,
4575 reconcile the permanent set in *PERMP, and re-canonicalize. */
4577 static void
4578 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4580 struct dfset_post_merge dfpm;
4582 dfpm.set = set;
4583 dfpm.permp = permp;
4585 shared_hash_htab (set->vars)
4586 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4587 if (*permp)
4588 shared_hash_htab ((*permp)->vars)
4589 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4590 shared_hash_htab (set->vars)
4591 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4592 shared_hash_htab (set->vars)
4593 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4596 /* Return a node whose loc is a MEM that refers to EXPR in the
4597 location list of a one-part variable or value VAR, or in that of
4598 any values recursively mentioned in the location lists. */
4600 static location_chain
4601 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4603 location_chain node;
4604 decl_or_value dv;
4605 variable var;
4606 location_chain where = NULL;
4608 if (!val)
4609 return NULL;
4611 gcc_assert (GET_CODE (val) == VALUE
4612 && !VALUE_RECURSED_INTO (val));
4614 dv = dv_from_value (val);
4615 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4617 if (!var)
4618 return NULL;
4620 gcc_assert (var->onepart);
4622 if (!var->n_var_parts)
4623 return NULL;
4625 VALUE_RECURSED_INTO (val) = true;
4627 for (node = var->var_part[0].loc_chain; node; node = node->next)
4628 if (MEM_P (node->loc)
4629 && MEM_EXPR (node->loc) == expr
4630 && INT_MEM_OFFSET (node->loc) == 0)
4632 where = node;
4633 break;
4635 else if (GET_CODE (node->loc) == VALUE
4636 && !VALUE_RECURSED_INTO (node->loc)
4637 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4638 break;
4640 VALUE_RECURSED_INTO (val) = false;
4642 return where;
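/* For illustration: with EXPR = x, VAL = V1 and chains roughly like
     V1: (value V2) -> (reg R)
     V2: (mem:SI (plus (reg fp) (const_int -4)) [ x ]) -> ...
   the walk recurses from V1 into V2 and returns V2's MEM node.  The
   VALUE_RECURSED_INTO marks set on the way down keep a cycle such as
   V2 mentioning V1 again from recursing forever.  */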
4645 /* Return TRUE if the value of MEM may vary across a call. */
4647 static bool
4648 mem_dies_at_call (rtx mem)
4650 tree expr = MEM_EXPR (mem);
4651 tree decl;
4653 if (!expr)
4654 return true;
4656 decl = get_base_address (expr);
4658 if (!decl)
4659 return true;
4661 if (!DECL_P (decl))
4662 return true;
4664 return (may_be_aliased (decl)
4665 || (!TREE_READONLY (decl) && is_global_var (decl)));
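/* For illustration: a MEM whose MEM_EXPR is a local variable that is
   never aliased survives a call, so its location remains usable
   afterwards; a MEM with no MEM_EXPR, or one based on a writable
   global, is conservatively assumed to die at the call.  */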
4668 /* Remove all MEMs from the location list of a hash table entry for a
4669 one-part variable, except those whose MEM attributes map back to
4670 the variable itself, directly or within a VALUE. */
4672 static int
4673 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4675 variable var = *slot;
4677 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4679 tree decl = dv_as_decl (var->dv);
4680 location_chain loc, *locp;
4681 bool changed = false;
4683 if (!var->n_var_parts)
4684 return 1;
4686 gcc_assert (var->n_var_parts == 1);
4688 if (shared_var_p (var, set->vars))
4690 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4692 /* We want to remove dying MEMs that don't refer to DECL. */
4693 if (GET_CODE (loc->loc) == MEM
4694 && (MEM_EXPR (loc->loc) != decl
4695 || INT_MEM_OFFSET (loc->loc) != 0)
4696 && !mem_dies_at_call (loc->loc))
4697 break;
4698 /* We want to move MEMs that do refer to DECL here. */
4699 else if (GET_CODE (loc->loc) == VALUE
4700 && find_mem_expr_in_1pdv (decl, loc->loc,
4701 shared_hash_htab (set->vars)))
4702 break;
4705 if (!loc)
4706 return 1;
4708 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4709 var = *slot;
4710 gcc_assert (var->n_var_parts == 1);
4713 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4714 loc; loc = *locp)
4716 rtx old_loc = loc->loc;
4717 if (GET_CODE (old_loc) == VALUE)
4719 location_chain mem_node
4720 = find_mem_expr_in_1pdv (decl, loc->loc,
4721 shared_hash_htab (set->vars));
4723 /* ??? This picks up only one out of multiple MEMs that
4724 refer to the same variable. Do we ever need to be
4725 concerned about dealing with more than one, or, given
4726 that they should all map to the same variable
4727 location, will their addresses have been merged so
4728 that they are regarded as equivalent? */
4729 if (mem_node)
4731 loc->loc = mem_node->loc;
4732 loc->set_src = mem_node->set_src;
4733 loc->init = MIN (loc->init, mem_node->init);
4737 if (GET_CODE (loc->loc) != MEM
4738 || (MEM_EXPR (loc->loc) == decl
4739 && INT_MEM_OFFSET (loc->loc) == 0)
4740 || !mem_dies_at_call (loc->loc))
4742 if (old_loc != loc->loc && emit_notes)
4744 if (old_loc == var->var_part[0].cur_loc)
4746 changed = true;
4747 var->var_part[0].cur_loc = NULL;
4750 locp = &loc->next;
4751 continue;
4754 if (emit_notes)
4756 if (old_loc == var->var_part[0].cur_loc)
4758 changed = true;
4759 var->var_part[0].cur_loc = NULL;
4762 *locp = loc->next;
4763 pool_free (loc_chain_pool, loc);
4766 if (!var->var_part[0].loc_chain)
4768 var->n_var_parts--;
4769 changed = true;
4771 if (changed)
4772 variable_was_changed (var, set);
4775 return 1;
4778 /* Remove all MEMs from the location list of a hash table entry for a
4779 value. */
4781 static int
4782 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4784 variable var = *slot;
4786 if (var->onepart == ONEPART_VALUE)
4788 location_chain loc, *locp;
4789 bool changed = false;
4790 rtx cur_loc;
4792 gcc_assert (var->n_var_parts == 1);
4794 if (shared_var_p (var, set->vars))
4796 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4797 if (GET_CODE (loc->loc) == MEM
4798 && mem_dies_at_call (loc->loc))
4799 break;
4801 if (!loc)
4802 return 1;
4804 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4805 var = *slot;
4806 gcc_assert (var->n_var_parts == 1);
4809 if (VAR_LOC_1PAUX (var))
4810 cur_loc = VAR_LOC_FROM (var);
4811 else
4812 cur_loc = var->var_part[0].cur_loc;
4814 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4815 loc; loc = *locp)
4817 if (GET_CODE (loc->loc) != MEM
4818 || !mem_dies_at_call (loc->loc))
4820 locp = &loc->next;
4821 continue;
4824 *locp = loc->next;
4825 /* If we have deleted the location which was last emitted,
4826 we have to emit a new location, so add the variable to the
4827 set of changed variables. */
4828 if (cur_loc == loc->loc)
4830 changed = true;
4831 var->var_part[0].cur_loc = NULL;
4832 if (VAR_LOC_1PAUX (var))
4833 VAR_LOC_FROM (var) = NULL;
4835 pool_free (loc_chain_pool, loc);
4838 if (!var->var_part[0].loc_chain)
4840 var->n_var_parts--;
4841 changed = true;
4843 if (changed)
4844 variable_was_changed (var, set);
4847 return 1;
4850 /* Remove all variable-location information about call-clobbered
4851 registers, as well as associations between MEMs and VALUEs. */
4853 static void
4854 dataflow_set_clear_at_call (dataflow_set *set)
4856 unsigned int r;
4857 hard_reg_set_iterator hrsi;
4859 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4860 var_regno_delete (set, r);
4862 if (MAY_HAVE_DEBUG_INSNS)
4864 set->traversed_vars = set->vars;
4865 shared_hash_htab (set->vars)
4866 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4867 set->traversed_vars = set->vars;
4868 shared_hash_htab (set->vars)
4869 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4870 set->traversed_vars = NULL;
4874 static bool
4875 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4877 location_chain lc1, lc2;
4879 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4881 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4883 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4885 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4886 break;
4888 if (rtx_equal_p (lc1->loc, lc2->loc))
4889 break;
4891 if (!lc2)
4892 return true;
4894 return false;
4897 /* Return true if one-part variables VAR1 and VAR2 are different.
4898 They must be in canonical order. */
4900 static bool
4901 onepart_variable_different_p (variable var1, variable var2)
4903 location_chain lc1, lc2;
4905 if (var1 == var2)
4906 return false;
4908 gcc_assert (var1->n_var_parts == 1
4909 && var2->n_var_parts == 1);
4911 lc1 = var1->var_part[0].loc_chain;
4912 lc2 = var2->var_part[0].loc_chain;
4914 gcc_assert (lc1 && lc2);
4916 while (lc1 && lc2)
4918 if (loc_cmp (lc1->loc, lc2->loc))
4919 return true;
4920 lc1 = lc1->next;
4921 lc2 = lc2->next;
4924 return lc1 != lc2;
4927 /* Return true if variables VAR1 and VAR2 are different. */
4929 static bool
4930 variable_different_p (variable var1, variable var2)
4932 int i;
4934 if (var1 == var2)
4935 return false;
4937 if (var1->onepart != var2->onepart)
4938 return true;
4940 if (var1->n_var_parts != var2->n_var_parts)
4941 return true;
4943 if (var1->onepart && var1->n_var_parts)
4945 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4946 && var1->n_var_parts == 1);
4947 /* One-part values have locations in a canonical order. */
4948 return onepart_variable_different_p (var1, var2);
4951 for (i = 0; i < var1->n_var_parts; i++)
4953 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4954 return true;
4955 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4956 return true;
4957 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4958 return true;
4960 return false;
4963 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4965 static bool
4966 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4968 variable_iterator_type hi;
4969 variable var1;
4971 if (old_set->vars == new_set->vars)
4972 return false;
4974 if (shared_hash_htab (old_set->vars)->elements ()
4975 != shared_hash_htab (new_set->vars)->elements ())
4976 return true;
4978 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
4979 var1, variable, hi)
4981 variable_table_type *htab = shared_hash_htab (new_set->vars);
4982 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4983 if (!var2)
4985 if (dump_file && (dump_flags & TDF_DETAILS))
4987 fprintf (dump_file, "dataflow difference found: removal of:\n");
4988 dump_var (var1);
4990 return true;
4993 if (variable_different_p (var1, var2))
4995 if (dump_file && (dump_flags & TDF_DETAILS))
4997 fprintf (dump_file, "dataflow difference found: "
4998 "old and new follow:\n");
4999 dump_var (var1);
5000 dump_var (var2);
5002 return true;
5006 /* No need to traverse the second hashtab: if both have the same number
5007 of elements and every entry of the first one was found in the second,
5008 then the second can't have any extra entries. */
5009 return false;
5012 /* Free the contents of dataflow set SET. */
5014 static void
5015 dataflow_set_destroy (dataflow_set *set)
5017 int i;
5019 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5020 attrs_list_clear (&set->regs[i]);
5022 shared_hash_destroy (set->vars);
5023 set->vars = NULL;
5026 /* Return true if RTL X contains a SYMBOL_REF. */
5028 static bool
5029 contains_symbol_ref (rtx x)
5031 const char *fmt;
5032 RTX_CODE code;
5033 int i;
5035 if (!x)
5036 return false;
5038 code = GET_CODE (x);
5039 if (code == SYMBOL_REF)
5040 return true;
5042 fmt = GET_RTX_FORMAT (code);
5043 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5045 if (fmt[i] == 'e')
5047 if (contains_symbol_ref (XEXP (x, i)))
5048 return true;
5050 else if (fmt[i] == 'E')
5052 int j;
5053 for (j = 0; j < XVECLEN (x, i); j++)
5054 if (contains_symbol_ref (XVECEXP (x, i, j)))
5055 return true;
5059 return false;
5062 /* Shall EXPR be tracked? */
5064 static bool
5065 track_expr_p (tree expr, bool need_rtl)
5067 rtx decl_rtl;
5068 tree realdecl;
5070 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5071 return DECL_RTL_SET_P (expr);
5073 /* If EXPR is not a parameter or a variable, do not track it. */
5074 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5075 return 0;
5077 /* It also must have a name... */
5078 if (!DECL_NAME (expr) && need_rtl)
5079 return 0;
5081 /* ... and an RTL assigned to it. */
5082 decl_rtl = DECL_RTL_IF_SET (expr);
5083 if (!decl_rtl && need_rtl)
5084 return 0;
5086 /* If this expression is really a debug alias of some other declaration, we
5087 don't need to track this expression if the ultimate declaration is
5088 ignored. */
5089 realdecl = expr;
5090 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5092 realdecl = DECL_DEBUG_EXPR (realdecl);
5093 if (!DECL_P (realdecl))
5095 if (handled_component_p (realdecl)
5096 || (TREE_CODE (realdecl) == MEM_REF
5097 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5099 HOST_WIDE_INT bitsize, bitpos, maxsize;
5100 tree innerdecl
5101 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5102 &maxsize);
5103 if (!DECL_P (innerdecl)
5104 || DECL_IGNORED_P (innerdecl)
5105 /* Do not track declarations for parts of tracked parameters
5106 since we want to track them as a whole instead. */
5107 || (TREE_CODE (innerdecl) == PARM_DECL
5108 && DECL_MODE (innerdecl) != BLKmode
5109 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5110 || TREE_STATIC (innerdecl)
5111 || bitsize <= 0
5112 || bitpos + bitsize > 256
5113 || bitsize != maxsize)
5114 return 0;
5115 else
5116 realdecl = expr;
5118 else
5119 return 0;
5123 /* Do not track EXPR if REALDECL should be ignored for debugging
5124 purposes. */
5125 if (DECL_IGNORED_P (realdecl))
5126 return 0;
5128 /* Do not track global variables until we are able to emit correct location
5129 lists for them. */
5130 if (TREE_STATIC (realdecl))
5131 return 0;
5133 /* When EXPR is a DECL for an alias of some variable (see example)
5134 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
5135 DECL_RTL contains a SYMBOL_REF.
5137 Example:
5138 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5139 char **_dl_argv; */
5141 if (decl_rtl && MEM_P (decl_rtl)
5142 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5143 return 0;
5145 /* If the RTL is a memory, it should not be very large (because that
5146 would be an array or a struct). */
5147 if (decl_rtl && MEM_P (decl_rtl))
5149 /* Do not track structures and arrays. */
5150 if (GET_MODE (decl_rtl) == BLKmode
5151 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5152 return 0;
5153 if (MEM_SIZE_KNOWN_P (decl_rtl)
5154 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5155 return 0;
5158 DECL_CHANGED (expr) = 0;
5159 DECL_CHANGED (realdecl) = 0;
5160 return 1;
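/* For illustration of the rules above: a named local such as
   "int i" with DECL_RTL set is tracked; an unnamed temporary, a
   TREE_STATIC global, a BLKmode aggregate, a MEM larger than
   MAX_VAR_PARTS, or a decl whose DECL_RTL contains a SYMBOL_REF (the
   alias case) is not.  */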
5163 /* Determine whether a given LOC refers to the same variable part as
5164 EXPR+OFFSET. */
5166 static bool
5167 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5169 tree expr2;
5170 HOST_WIDE_INT offset2;
5172 if (! DECL_P (expr))
5173 return false;
5175 if (REG_P (loc))
5177 expr2 = REG_EXPR (loc);
5178 offset2 = REG_OFFSET (loc);
5180 else if (MEM_P (loc))
5182 expr2 = MEM_EXPR (loc);
5183 offset2 = INT_MEM_OFFSET (loc);
5185 else
5186 return false;
5188 if (! expr2 || ! DECL_P (expr2))
5189 return false;
5191 expr = var_debug_decl (expr);
5192 expr2 = var_debug_decl (expr2);
5194 return (expr == expr2 && offset == offset2);
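/* For illustration: a REG with REG_EXPR == x and REG_OFFSET == 4 and
   a MEM with MEM_EXPR == x and INT_MEM_OFFSET == 4 denote the same
   part of x, so add_stores can classify a copy between them as
   MO_COPY instead of MO_SET.  */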
5197 /* LOC is a REG or MEM that we would like to track if possible.
5198 If EXPR is null, we don't know what expression LOC refers to,
5199 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5200 LOC is an lvalue register.
5202 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5203 is something we can track. When returning true, store the mode of
5204 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5205 from EXPR in *OFFSET_OUT (if nonnull). */
5207 static bool
5208 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5209 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5211 machine_mode mode;
5213 if (expr == NULL || !track_expr_p (expr, true))
5214 return false;
5216 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5217 whole subreg, but only the old inner part is really relevant. */
5218 mode = GET_MODE (loc);
5219 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5221 machine_mode pseudo_mode;
5223 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5224 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5226 offset += byte_lowpart_offset (pseudo_mode, mode);
5227 mode = pseudo_mode;
5231 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5232 Do the same if we are storing to a register and EXPR occupies
5233 the whole of register LOC; in that case, the whole of EXPR is
5234 being changed. We exclude complex modes from the second case
5235 because the real and imaginary parts are represented as separate
5236 pseudo registers, even if the whole complex value fits into one
5237 hard register. */
5238 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5239 || (store_reg_p
5240 && !COMPLEX_MODE_P (DECL_MODE (expr))
5241 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5242 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5244 mode = DECL_MODE (expr);
5245 offset = 0;
5248 if (offset < 0 || offset >= MAX_VAR_PARTS)
5249 return false;
5251 if (mode_out)
5252 *mode_out = mode;
5253 if (offset_out)
5254 *offset_out = offset;
5255 return true;
5258 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5259 want to track. When returning nonnull, make sure that the attributes
5260 on the returned value are updated. */
5262 static rtx
5263 var_lowpart (machine_mode mode, rtx loc)
5265 unsigned int offset, reg_offset, regno;
5267 if (GET_MODE (loc) == mode)
5268 return loc;
5270 if (!REG_P (loc) && !MEM_P (loc))
5271 return NULL;
5273 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5275 if (MEM_P (loc))
5276 return adjust_address_nv (loc, mode, offset);
5278 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5279 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5280 reg_offset, mode);
5281 return gen_rtx_REG_offset (loc, mode, regno, offset);
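/* For illustration: requesting the SImode lowpart of a DImode
   register yields a fresh SImode REG at the hard register and byte
   offset computed above, with REG_ATTRS adjusted so that REG_OFFSET
   points at the lowpart; for a MEM the address is simply advanced by
   byte_lowpart_offset.  */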
5284 /* Carry information about uses and stores while walking rtx. */
5286 struct count_use_info
5288 /* The insn where the RTX is. */
5289 rtx_insn *insn;
5291 /* The basic block where insn is. */
5292 basic_block bb;
5294 /* The array of n_sets sets in the insn, as determined by cselib. */
5295 struct cselib_set *sets;
5296 int n_sets;
5298 /* True if we're counting stores, false otherwise. */
5299 bool store_p;
5302 /* Find a VALUE corresponding to X. */
5304 static inline cselib_val *
5305 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5307 int i;
5309 if (cui->sets)
5311 /* This is called after uses are set up and before stores are
5312 processed by cselib, so it's safe to look up srcs, but not
5313 dsts. Thus we look up expressions that appear in srcs or in
5314 dest expressions, but we search the sets array for dests of
5315 stores. */
5316 if (cui->store_p)
5318 /* Some targets represent memset and memcpy patterns
5319 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5320 (set (mem:BLK ...) (const_int ...)) or
5321 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5322 in that case, otherwise we end up with mode mismatches. */
5323 if (mode == BLKmode && MEM_P (x))
5324 return NULL;
5325 for (i = 0; i < cui->n_sets; i++)
5326 if (cui->sets[i].dest == x)
5327 return cui->sets[i].src_elt;
5329 else
5330 return cselib_lookup (x, mode, 0, VOIDmode);
5333 return NULL;
5336 /* Replace all registers and addresses in an expression with VALUE
5337 expressions that map back to them, unless the expression is a
5338 register. If no mapping is or can be performed, returns NULL. */
5340 static rtx
5341 replace_expr_with_values (rtx loc)
5343 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5344 return NULL;
5345 else if (MEM_P (loc))
5347 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5348 get_address_mode (loc), 0,
5349 GET_MODE (loc));
5350 if (addr)
5351 return replace_equiv_address_nv (loc, addr->val_rtx);
5352 else
5353 return NULL;
5355 else
5356 return cselib_subst_to_values (loc, VOIDmode);
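/* For illustration: for a stack slot such as
     (mem:SI (plus (reg sp) (const_int 8)))
   this returns the same MEM with its address replaced by the cselib
   VALUE standing for it, e.g. (mem:SI (value V)); a bare REG or an
   ENTRY_VALUE deliberately yields NULL.  */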
5359 /* Return true if X contains a DEBUG_EXPR. */
5361 static bool
5362 rtx_debug_expr_p (const_rtx x)
5364 subrtx_iterator::array_type array;
5365 FOR_EACH_SUBRTX (iter, array, x, ALL)
5366 if (GET_CODE (*iter) == DEBUG_EXPR)
5367 return true;
5368 return false;
5371 /* Determine what kind of micro operation to choose for a USE. Return
5372 MO_CLOBBER if no micro operation is to be generated. */
5374 static enum micro_operation_type
5375 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5377 tree expr;
5379 if (cui && cui->sets)
5381 if (GET_CODE (loc) == VAR_LOCATION)
5383 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5385 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5386 if (! VAR_LOC_UNKNOWN_P (ploc))
5388 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5389 VOIDmode);
5391 /* ??? flag_float_store and volatile mems are never
5392 given values, but we could in theory use them for
5393 locations. */
5394 gcc_assert (val || 1);
5396 return MO_VAL_LOC;
5398 else
5399 return MO_CLOBBER;
5402 if (REG_P (loc) || MEM_P (loc))
5404 if (modep)
5405 *modep = GET_MODE (loc);
5406 if (cui->store_p)
5408 if (REG_P (loc)
5409 || (find_use_val (loc, GET_MODE (loc), cui)
5410 && cselib_lookup (XEXP (loc, 0),
5411 get_address_mode (loc), 0,
5412 GET_MODE (loc))))
5413 return MO_VAL_SET;
5415 else
5417 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5419 if (val && !cselib_preserved_value_p (val))
5420 return MO_VAL_USE;
5425 if (REG_P (loc))
5427 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5429 if (loc == cfa_base_rtx)
5430 return MO_CLOBBER;
5431 expr = REG_EXPR (loc);
5433 if (!expr)
5434 return MO_USE_NO_VAR;
5435 else if (target_for_debug_bind (var_debug_decl (expr)))
5436 return MO_CLOBBER;
5437 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5438 false, modep, NULL))
5439 return MO_USE;
5440 else
5441 return MO_USE_NO_VAR;
5443 else if (MEM_P (loc))
5445 expr = MEM_EXPR (loc);
5447 if (!expr)
5448 return MO_CLOBBER;
5449 else if (target_for_debug_bind (var_debug_decl (expr)))
5450 return MO_CLOBBER;
5451 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5452 false, modep, NULL)
5453 /* Multi-part variables shouldn't refer to one-part
5454 variable names such as VALUEs (never happens) or
5455 DEBUG_EXPRs (only happens in the presence of debug
5456 insns). */
5457 && (!MAY_HAVE_DEBUG_INSNS
5458 || !rtx_debug_expr_p (XEXP (loc, 0))))
5459 return MO_USE;
5460 else
5461 return MO_CLOBBER;
5464 return MO_CLOBBER;
5467 /* Log to OUT information about micro-operation MOPT involving X in
5468 INSN of BB. */
5470 static inline void
5471 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5472 enum micro_operation_type mopt, FILE *out)
5474 fprintf (out, "bb %i op %i insn %i %s ",
5475 bb->index, VTI (bb)->mos.length (),
5476 INSN_UID (insn), micro_operation_type_name[mopt]);
5477 print_inline_rtx (out, x, 2);
5478 fputc ('\n', out);
5481 /* Tell whether the CONCAT used to hold a VALUE and its location
5482 needs value resolution, i.e., an attempt at mapping the location
5483 back to other incoming values. */
5484 #define VAL_NEEDS_RESOLUTION(x) \
5485 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5486 /* Whether the location in the CONCAT is a tracked expression, that
5487 should also be handled like a MO_USE. */
5488 #define VAL_HOLDS_TRACK_EXPR(x) \
5489 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5490 /* Whether the location in the CONCAT should be handled like a MO_COPY
5491 as well. */
5492 #define VAL_EXPR_IS_COPIED(x) \
5493 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5494 /* Whether the location in the CONCAT should be handled like a
5495 MO_CLOBBER as well. */
5496 #define VAL_EXPR_IS_CLOBBERED(x) \
5497 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
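/* Note: the four macros above reuse existing RTL flag bits (volatil,
   used, jump and unchanging) on the CONCAT wrappers built by add_uses
   and add_stores below, so no extra storage is needed per micro
   operation.  */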
5499 /* All preserved VALUEs. */
5500 static vec<rtx> preserved_values;
5502 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5504 static void
5505 preserve_value (cselib_val *val)
5507 cselib_preserve_value (val);
5508 preserved_values.safe_push (val->val_rtx);
5511 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5512 any rtxes not suitable for CONST use not replaced by VALUEs
5513 are discovered. */
5515 static bool
5516 non_suitable_const (const_rtx x)
5518 subrtx_iterator::array_type array;
5519 FOR_EACH_SUBRTX (iter, array, x, ALL)
5521 const_rtx x = *iter;
5522 switch (GET_CODE (x))
5524 case REG:
5525 case DEBUG_EXPR:
5526 case PC:
5527 case SCRATCH:
5528 case CC0:
5529 case ASM_INPUT:
5530 case ASM_OPERANDS:
5531 return true;
5532 case MEM:
5533 if (!MEM_READONLY_P (x))
5534 return true;
5535 break;
5536 default:
5537 break;
5540 return false;
5543 /* Add a use (register or memory reference) LOC, which will be tracked,
5544 to VTI (bb)->mos. */
5546 static void
5547 add_uses (rtx loc, struct count_use_info *cui)
5549 machine_mode mode = VOIDmode;
5550 enum micro_operation_type type = use_type (loc, cui, &mode);
5552 if (type != MO_CLOBBER)
5554 basic_block bb = cui->bb;
5555 micro_operation mo;
5557 mo.type = type;
5558 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5559 mo.insn = cui->insn;
5561 if (type == MO_VAL_LOC)
5563 rtx oloc = loc;
5564 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5565 cselib_val *val;
5567 gcc_assert (cui->sets);
5569 if (MEM_P (vloc)
5570 && !REG_P (XEXP (vloc, 0))
5571 && !MEM_P (XEXP (vloc, 0)))
5573 rtx mloc = vloc;
5574 machine_mode address_mode = get_address_mode (mloc);
5575 cselib_val *val
5576 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5577 GET_MODE (mloc));
5579 if (val && !cselib_preserved_value_p (val))
5580 preserve_value (val);
5583 if (CONSTANT_P (vloc)
5584 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5585 /* For constants don't look up any value. */;
5586 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5587 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5589 machine_mode mode2;
5590 enum micro_operation_type type2;
5591 rtx nloc = NULL;
5592 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5594 if (resolvable)
5595 nloc = replace_expr_with_values (vloc);
5597 if (nloc)
5599 oloc = shallow_copy_rtx (oloc);
5600 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5603 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5605 type2 = use_type (vloc, 0, &mode2);
5607 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5608 || type2 == MO_CLOBBER);
5610 if (type2 == MO_CLOBBER
5611 && !cselib_preserved_value_p (val))
5613 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5614 preserve_value (val);
5617 else if (!VAR_LOC_UNKNOWN_P (vloc))
5619 oloc = shallow_copy_rtx (oloc);
5620 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5623 mo.u.loc = oloc;
5625 else if (type == MO_VAL_USE)
5627 machine_mode mode2 = VOIDmode;
5628 enum micro_operation_type type2;
5629 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5630 rtx vloc, oloc = loc, nloc;
5632 gcc_assert (cui->sets);
5634 if (MEM_P (oloc)
5635 && !REG_P (XEXP (oloc, 0))
5636 && !MEM_P (XEXP (oloc, 0)))
5638 rtx mloc = oloc;
5639 machine_mode address_mode = get_address_mode (mloc);
5640 cselib_val *val
5641 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5642 GET_MODE (mloc));
5644 if (val && !cselib_preserved_value_p (val))
5645 preserve_value (val);
5648 type2 = use_type (loc, 0, &mode2);
5650 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5651 || type2 == MO_CLOBBER);
5653 if (type2 == MO_USE)
5654 vloc = var_lowpart (mode2, loc);
5655 else
5656 vloc = oloc;
5658 /* The loc of a MO_VAL_USE may have two forms:
5660 (concat val src): val is at src, a value-based
5661 representation.
5663 (concat (concat val use) src): same as above, with use as
5664 the MO_USE tracked value, if it differs from src. */
5668 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5669 nloc = replace_expr_with_values (loc);
5670 if (!nloc)
5671 nloc = oloc;
5673 if (vloc != nloc)
5674 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5675 else
5676 oloc = val->val_rtx;
5678 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5680 if (type2 == MO_USE)
5681 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5682 if (!cselib_preserved_value_p (val))
5684 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5685 preserve_value (val);
5688 else
5689 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5691 if (dump_file && (dump_flags & TDF_DETAILS))
5692 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5693 VTI (bb)->mos.safe_push (mo);
5697 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5699 static void
5700 add_uses_1 (rtx *x, void *cui)
5702 subrtx_var_iterator::array_type array;
5703 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5704 add_uses (*iter, (struct count_use_info *) cui);
5707 /* This is the value used during expansion of locations. We want it
5708 to be unbounded, so that variables expanded deep in a recursion
5709 nest are fully evaluated, so that their values are cached
5710 correctly. We avoid recursion cycles through other means, and we
5711 don't unshare RTL, so excess complexity is not a problem. */
5712 #define EXPR_DEPTH (INT_MAX)
5713 /* We use this to keep too-complex expressions from being emitted as
5714 location notes and then into debug information. Users can trade
5715 compile time for ridiculously complex expressions, although they're
5716 seldom useful, and they may often have to be discarded as not
5717 representable anyway. */
5718 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5720 /* Attempt to reverse the EXPR operation in the debug info and record
5721 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5722 no longer live we can express its value as VAL - 6. */
5724 static void
5725 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5727 rtx src, arg, ret;
5728 cselib_val *v;
5729 struct elt_loc_list *l;
5730 enum rtx_code code;
5731 int count;
5733 if (GET_CODE (expr) != SET)
5734 return;
5736 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5737 return;
5739 src = SET_SRC (expr);
5740 switch (GET_CODE (src))
5742 case PLUS:
5743 case MINUS:
5744 case XOR:
5745 case NOT:
5746 case NEG:
5747 if (!REG_P (XEXP (src, 0)))
5748 return;
5749 break;
5750 case SIGN_EXTEND:
5751 case ZERO_EXTEND:
5752 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5753 return;
5754 break;
5755 default:
5756 return;
5759 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5760 return;
5762 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5763 if (!v || !cselib_preserved_value_p (v))
5764 return;
5766 /* Use canonical V to avoid creating multiple redundant expressions
5767 for different VALUES equivalent to V. */
5768 v = canonical_cselib_val (v);
5770 /* Adding a reverse op isn't useful if V already has an always valid
5771 location. Ignore ENTRY_VALUE, while it is always constant, we should
5772 prefer non-ENTRY_VALUE locations whenever possible. */
5773 for (l = v->locs, count = 0; l; l = l->next, count++)
5774 if (CONSTANT_P (l->loc)
5775 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5776 return;
5777 /* Avoid creating too large locs lists. */
5778 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5779 return;
5781 switch (GET_CODE (src))
5783 case NOT:
5784 case NEG:
5785 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5786 return;
5787 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5788 break;
5789 case SIGN_EXTEND:
5790 case ZERO_EXTEND:
5791 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5792 break;
5793 case XOR:
5794 code = XOR;
5795 goto binary;
5796 case PLUS:
5797 code = MINUS;
5798 goto binary;
5799 case MINUS:
5800 code = PLUS;
5801 goto binary;
5802 binary:
5803 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5804 return;
5805 arg = XEXP (src, 1);
5806 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5808 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5809 if (arg == NULL_RTX)
5810 return;
5811 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5812 return;
5814 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5815 if (ret == val)
5816 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5817 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5818 breaks a lot of routines during var-tracking. */
5819 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5820 break;
5821 default:
5822 gcc_unreachable ();
5825 cselib_add_permanent_equiv (v, ret, insn);
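/* For illustration: for (set (reg:SI A) (plus:SI (reg:SI B)
   (const_int 6))), with VAL the value of A, the switch above selects
   code = MINUS and B's canonical value gains the permanent
   equivalence (minus:SI VAL (const_int 6)); XOR is its own inverse,
   and extensions are reversed with a lowpart SUBREG.  */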
5828 /* Add a store (register or memory reference) LOC, which will be tracked,
5829 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5830 CUIP->insn is the instruction which LOC is part of. */
5832 static void
5833 add_stores (rtx loc, const_rtx expr, void *cuip)
5835 machine_mode mode = VOIDmode, mode2;
5836 struct count_use_info *cui = (struct count_use_info *)cuip;
5837 basic_block bb = cui->bb;
5838 micro_operation mo;
5839 rtx oloc = loc, nloc, src = NULL;
5840 enum micro_operation_type type = use_type (loc, cui, &mode);
5841 bool track_p = false;
5842 cselib_val *v;
5843 bool resolve, preserve;
5845 if (type == MO_CLOBBER)
5846 return;
5848 mode2 = mode;
5850 if (REG_P (loc))
5852 gcc_assert (loc != cfa_base_rtx);
5853 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5854 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5855 || GET_CODE (expr) == CLOBBER)
5857 mo.type = MO_CLOBBER;
5858 mo.u.loc = loc;
5859 if (GET_CODE (expr) == SET
5860 && SET_DEST (expr) == loc
5861 && !unsuitable_loc (SET_SRC (expr))
5862 && find_use_val (loc, mode, cui))
5864 gcc_checking_assert (type == MO_VAL_SET);
5865 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5868 else
5870 if (GET_CODE (expr) == SET
5871 && SET_DEST (expr) == loc
5872 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5873 src = var_lowpart (mode2, SET_SRC (expr));
5874 loc = var_lowpart (mode2, loc);
5876 if (src == NULL)
5878 mo.type = MO_SET;
5879 mo.u.loc = loc;
5881 else
5883 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5884 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5886 /* If this is an instruction copying (part of) a parameter
5887 passed by invisible reference to its register location,
5888 pretend it's a SET so that the initial memory location
5889 is discarded, as the parameter register can be reused
5890 for other purposes and we do not track locations based
5891 on generic registers. */
5892 if (MEM_P (src)
5893 && REG_EXPR (loc)
5894 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5895 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5896 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5897 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5898 != arg_pointer_rtx)
5899 mo.type = MO_SET;
5900 else
5901 mo.type = MO_COPY;
5903 else
5904 mo.type = MO_SET;
5905 mo.u.loc = xexpr;
5908 mo.insn = cui->insn;
5910 else if (MEM_P (loc)
5911 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5912 || cui->sets))
5914 if (MEM_P (loc) && type == MO_VAL_SET
5915 && !REG_P (XEXP (loc, 0))
5916 && !MEM_P (XEXP (loc, 0)))
5918 rtx mloc = loc;
5919 machine_mode address_mode = get_address_mode (mloc);
5920 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5921 address_mode, 0,
5922 GET_MODE (mloc));
5924 if (val && !cselib_preserved_value_p (val))
5925 preserve_value (val);
5928 if (GET_CODE (expr) == CLOBBER || !track_p)
5930 mo.type = MO_CLOBBER;
5931 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5933 else
5935 if (GET_CODE (expr) == SET
5936 && SET_DEST (expr) == loc
5937 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5938 src = var_lowpart (mode2, SET_SRC (expr));
5939 loc = var_lowpart (mode2, loc);
5941 if (src == NULL)
5943 mo.type = MO_SET;
5944 mo.u.loc = loc;
5946 else
5948 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5949 if (same_variable_part_p (SET_SRC (xexpr),
5950 MEM_EXPR (loc),
5951 INT_MEM_OFFSET (loc)))
5952 mo.type = MO_COPY;
5953 else
5954 mo.type = MO_SET;
5955 mo.u.loc = xexpr;
5958 mo.insn = cui->insn;
5960 else
5961 return;
5963 if (type != MO_VAL_SET)
5964 goto log_and_return;
5966 v = find_use_val (oloc, mode, cui);
5968 if (!v)
5969 goto log_and_return;
5971 resolve = preserve = !cselib_preserved_value_p (v);
5973 /* We cannot track values for multiple-part variables, so we track only
5974 locations for tracked parameters passed either by invisible reference
5975 or directly in multiple locations. */
5976 if (track_p
5977 && REG_P (loc)
5978 && REG_EXPR (loc)
5979 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5980 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5981 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5982 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5983 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5984 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5985 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5987 /* Although we don't use the value here, it could be used later by the
5988 mere virtue of its existence as the operand of the reverse operation
5989 that gave rise to it (typically extension/truncation). Make sure it
5990 is preserved as required by vt_expand_var_loc_chain. */
5991 if (preserve)
5992 preserve_value (v);
5993 goto log_and_return;
5996 if (loc == stack_pointer_rtx
5997 && hard_frame_pointer_adjustment != -1
5998 && preserve)
5999 cselib_set_value_sp_based (v);
6001 nloc = replace_expr_with_values (oloc);
6002 if (nloc)
6003 oloc = nloc;
6005 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6007 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6009 if (oval == v)
6010 return;
6011 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6013 if (oval && !cselib_preserved_value_p (oval))
6015 micro_operation moa;
6017 preserve_value (oval);
6019 moa.type = MO_VAL_USE;
6020 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6021 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6022 moa.insn = cui->insn;
6024 if (dump_file && (dump_flags & TDF_DETAILS))
6025 log_op_type (moa.u.loc, cui->bb, cui->insn,
6026 moa.type, dump_file);
6027 VTI (bb)->mos.safe_push (moa);
6030 resolve = false;
6032 else if (resolve && GET_CODE (mo.u.loc) == SET)
6034 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6035 nloc = replace_expr_with_values (SET_SRC (expr));
6036 else
6037 nloc = NULL_RTX;
6039 /* Avoid the mode mismatch between oexpr and expr. */
6040 if (!nloc && mode != mode2)
6042 nloc = SET_SRC (expr);
6043 gcc_assert (oloc == SET_DEST (expr));
6046 if (nloc && nloc != SET_SRC (mo.u.loc))
6047 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6048 else
6050 if (oloc == SET_DEST (mo.u.loc))
6051 /* No point in duplicating. */
6052 oloc = mo.u.loc;
6053 if (!REG_P (SET_SRC (mo.u.loc)))
6054 resolve = false;
6057 else if (!resolve)
6059 if (GET_CODE (mo.u.loc) == SET
6060 && oloc == SET_DEST (mo.u.loc))
6061 /* No point in duplicating. */
6062 oloc = mo.u.loc;
6064 else
6065 resolve = false;
6067 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6069 if (mo.u.loc != oloc)
6070 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6072 /* The loc of a MO_VAL_SET may have various forms:
6074 (concat val dst): dst now holds val
6076 (concat val (set dst src)): dst now holds val, copied from src
6078 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6079 after replacing mems and non-top-level regs with values.
6081 (concat (concat val dstv) (set dst src)): dst now holds val,
6082 copied from src. dstv is a value-based representation of dst, if
6083 it differs from dst. If resolution is needed, src is a REG, and
6084 its mode is the same as that of val.
6086 (concat (concat val (set dstv srcv)) (set dst src)): src
6087 copied to dst, holding val. dstv and srcv are value-based
6088 representations of dst and src, respectively. */
6092 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6093 reverse_op (v->val_rtx, expr, cui->insn);
6095 mo.u.loc = loc;
6097 if (track_p)
6098 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6099 if (preserve)
6101 VAL_NEEDS_RESOLUTION (loc) = resolve;
6102 preserve_value (v);
6104 if (mo.type == MO_CLOBBER)
6105 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6106 if (mo.type == MO_COPY)
6107 VAL_EXPR_IS_COPIED (loc) = 1;
6109 mo.type = MO_VAL_SET;
6111 log_and_return:
6112 if (dump_file && (dump_flags & TDF_DETAILS))
6113 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6114 VTI (bb)->mos.safe_push (mo);
6117 /* Arguments to the call. */
6118 static rtx call_arguments;
6120 /* Compute call_arguments. */
6122 static void
6123 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6125 rtx link, x, call;
6126 rtx prev, cur, next;
6127 rtx this_arg = NULL_RTX;
6128 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6129 tree obj_type_ref = NULL_TREE;
6130 CUMULATIVE_ARGS args_so_far_v;
6131 cumulative_args_t args_so_far;
6133 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6134 args_so_far = pack_cumulative_args (&args_so_far_v);
6135 call = get_call_rtx_from (insn);
6136 if (call)
6138 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6140 rtx symbol = XEXP (XEXP (call, 0), 0);
6141 if (SYMBOL_REF_DECL (symbol))
6142 fndecl = SYMBOL_REF_DECL (symbol);
6144 if (fndecl == NULL_TREE)
6145 fndecl = MEM_EXPR (XEXP (call, 0));
6146 if (fndecl
6147 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6148 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6149 fndecl = NULL_TREE;
6150 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6151 type = TREE_TYPE (fndecl);
6152 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6154 if (TREE_CODE (fndecl) == INDIRECT_REF
6155 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6156 obj_type_ref = TREE_OPERAND (fndecl, 0);
6157 fndecl = NULL_TREE;
6159 if (type)
6161 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6162 t = TREE_CHAIN (t))
6163 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6164 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6165 break;
6166 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6167 type = NULL;
6168 else
6170 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6171 link = CALL_INSN_FUNCTION_USAGE (insn);
6172 #ifndef PCC_STATIC_STRUCT_RETURN
6173 if (aggregate_value_p (TREE_TYPE (type), type)
6174 && targetm.calls.struct_value_rtx (type, 0) == 0)
6176 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6177 machine_mode mode = TYPE_MODE (struct_addr);
6178 rtx reg;
6179 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6180 nargs + 1);
6181 reg = targetm.calls.function_arg (args_so_far, mode,
6182 struct_addr, true);
6183 targetm.calls.function_arg_advance (args_so_far, mode,
6184 struct_addr, true);
6185 if (reg == NULL_RTX)
6187 for (; link; link = XEXP (link, 1))
6188 if (GET_CODE (XEXP (link, 0)) == USE
6189 && MEM_P (XEXP (XEXP (link, 0), 0)))
6191 link = XEXP (link, 1);
6192 break;
6196 else
6197 #endif
6198 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6199 nargs);
6200 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6202 machine_mode mode;
6203 t = TYPE_ARG_TYPES (type);
6204 mode = TYPE_MODE (TREE_VALUE (t));
6205 this_arg = targetm.calls.function_arg (args_so_far, mode,
6206 TREE_VALUE (t), true);
6207 if (this_arg && !REG_P (this_arg))
6208 this_arg = NULL_RTX;
6209 else if (this_arg == NULL_RTX)
6211 for (; link; link = XEXP (link, 1))
6212 if (GET_CODE (XEXP (link, 0)) == USE
6213 && MEM_P (XEXP (XEXP (link, 0), 0)))
6215 this_arg = XEXP (XEXP (link, 0), 0);
6216 break;
6223 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6225 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6226 if (GET_CODE (XEXP (link, 0)) == USE)
6228 rtx item = NULL_RTX;
6229 x = XEXP (XEXP (link, 0), 0);
6230 if (GET_MODE (link) == VOIDmode
6231 || GET_MODE (link) == BLKmode
6232 || (GET_MODE (link) != GET_MODE (x)
6233 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6234 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6235 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6236 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6237 /* Can't do anything for these, if the original type mode
6238 isn't known or can't be converted. */;
6239 else if (REG_P (x))
6241 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6242 if (val && cselib_preserved_value_p (val))
6243 item = val->val_rtx;
6244 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6245 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6247 machine_mode mode = GET_MODE (x);
6249 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6250 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6252 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6254 if (reg == NULL_RTX || !REG_P (reg))
6255 continue;
6256 val = cselib_lookup (reg, mode, 0, VOIDmode);
6257 if (val && cselib_preserved_value_p (val))
6259 item = val->val_rtx;
6260 break;
6265 else if (MEM_P (x))
6267 rtx mem = x;
6268 cselib_val *val;
6270 if (!frame_pointer_needed)
6272 struct adjust_mem_data amd;
6273 amd.mem_mode = VOIDmode;
6274 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6275 amd.side_effects = NULL;
6276 amd.store = true;
6277 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6278 &amd);
6279 gcc_assert (amd.side_effects == NULL_RTX);
6281 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6282 if (val && cselib_preserved_value_p (val))
6283 item = val->val_rtx;
6284 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6285 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6287 /* For non-integer stack arguments, also check whether they were
6288 initialized from integers. */
6289 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6290 if (imode != GET_MODE (mem) && imode != BLKmode)
6292 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6293 imode, 0, VOIDmode);
6294 if (val && cselib_preserved_value_p (val))
6295 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6296 imode);
6300 if (item)
6302 rtx x2 = x;
6303 if (GET_MODE (item) != GET_MODE (link))
6304 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6305 if (GET_MODE (x2) != GET_MODE (link))
6306 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6307 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6308 call_arguments
6309 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
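/* Each recorded argument is a CONCAT of the location used at the call
   site and the tracked value, roughly e.g.
   (concat:DI (reg:DI 5 di) (value:DI ...)), prepended to the
   CALL_ARGUMENTS EXPR_LIST.  The block below additionally handles
   integral arguments passed by reference: for those, the pointed-to
   value is recorded as well, as a (concat (mem ...) ...) entry.  */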
6311 if (t && t != void_list_node)
6313 tree argtype = TREE_VALUE (t);
6314 machine_mode mode = TYPE_MODE (argtype);
6315 rtx reg;
6316 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6318 argtype = build_pointer_type (argtype);
6319 mode = TYPE_MODE (argtype);
6321 reg = targetm.calls.function_arg (args_so_far, mode,
6322 argtype, true);
6323 if (TREE_CODE (argtype) == REFERENCE_TYPE
6324 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6325 && reg
6326 && REG_P (reg)
6327 && GET_MODE (reg) == mode
6328 && (GET_MODE_CLASS (mode) == MODE_INT
6329 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6330 && REG_P (x)
6331 && REGNO (x) == REGNO (reg)
6332 && GET_MODE (x) == mode
6333 && item)
6335 machine_mode indmode
6336 = TYPE_MODE (TREE_TYPE (argtype));
6337 rtx mem = gen_rtx_MEM (indmode, x);
6338 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6339 if (val && cselib_preserved_value_p (val))
6341 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6342 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6343 call_arguments);
6345 else
6347 struct elt_loc_list *l;
6348 tree initial;
6350 /* Try harder: when passing the address of a constant-pool
6351 integer, the value can easily be read back. */
6352 item = XEXP (item, 1);
6353 if (GET_CODE (item) == SUBREG)
6354 item = SUBREG_REG (item);
6355 gcc_assert (GET_CODE (item) == VALUE);
6356 val = CSELIB_VAL_PTR (item);
6357 for (l = val->locs; l; l = l->next)
6358 if (GET_CODE (l->loc) == SYMBOL_REF
6359 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6360 && SYMBOL_REF_DECL (l->loc)
6361 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6363 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6364 if (tree_fits_shwi_p (initial))
6366 item = GEN_INT (tree_to_shwi (initial));
6367 item = gen_rtx_CONCAT (indmode, mem, item);
6368 call_arguments
6369 = gen_rtx_EXPR_LIST (VOIDmode, item,
6370 call_arguments);
6372 break;
6376 targetm.calls.function_arg_advance (args_so_far, mode,
6377 argtype, true);
6378 t = TREE_CHAIN (t);
6382 /* Add debug arguments. */
6383 if (fndecl
6384 && TREE_CODE (fndecl) == FUNCTION_DECL
6385 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6387 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6388 if (debug_args)
6390 unsigned int ix;
6391 tree param;
6392 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6394 rtx item;
6395 tree dtemp = (**debug_args)[ix + 1];
6396 machine_mode mode = DECL_MODE (dtemp);
6397 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6398 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6399 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6400 call_arguments);
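/* Each optimized-away formal parameter with a remembered debug value
   is likewise recorded as a (concat (debug_parameter_ref PARAM) LOC)
   entry, so its value at the call site survives into the debug
   info.  */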
6405 /* Reverse the call_arguments chain, so the entries end up in argument order. */
6406 prev = NULL_RTX;
6407 for (cur = call_arguments; cur; cur = next)
6409 next = XEXP (cur, 1);
6410 XEXP (cur, 1) = prev;
6411 prev = cur;
6413 call_arguments = prev;
6415 x = get_call_rtx_from (insn);
6416 if (x)
6418 x = XEXP (XEXP (x, 0), 0);
6419 if (GET_CODE (x) == SYMBOL_REF)
6420 /* Don't record anything. */;
6421 else if (CONSTANT_P (x))
6423 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6424 pc_rtx, x);
6425 call_arguments
6426 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6428 else
6430 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6431 if (val && cselib_preserved_value_p (val))
6433 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6434 call_arguments
6435 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6439 if (this_arg)
6441 machine_mode mode
6442 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6443 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6444 HOST_WIDE_INT token
6445 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6446 if (token)
6447 clobbered = plus_constant (mode, clobbered,
6448 token * GET_MODE_SIZE (mode));
6449 clobbered = gen_rtx_MEM (mode, clobbered);
6450 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6451 call_arguments
6452 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
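/* At this point CALL_ARGUMENTS holds, in argument order, entries
   roughly of the forms:
     (concat (reg argreg) (value V))       -- register argument
     (concat (mem addr) (value V))         -- stack argument
     (concat pc (value V))                 -- non-constant call target
     (concat (clobber pc) (mem vtbl-slot)) -- virtual call target
   It is attached to the MO_CALL micro operation in add_with_sets
   below and later emitted alongside the call in the location
   notes.  */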
6456 /* Callback for cselib_record_sets_hook, which records the uses and
6457 stores in an insn as micro operations, after cselib_record_sets
6458 has analyzed the sets in the insn but before it modifies the
6459 stored values in its internal tables.  It may also be called
6460 directly rather than through the hook, when cselib isn't used in
6461 the first place, in which case SETS and N_SETS will be 0. */
6463 static void
6464 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6466 basic_block bb = BLOCK_FOR_INSN (insn);
6467 int n1, n2;
6468 struct count_use_info cui;
6469 micro_operation *mos;
6471 cselib_hook_called = true;
6473 cui.insn = insn;
6474 cui.bb = bb;
6475 cui.sets = sets;
6476 cui.n_sets = n_sets;
6478 n1 = VTI (bb)->mos.length ();
6479 cui.store_p = false;
6480 note_uses (&PATTERN (insn), add_uses_1, &cui);
6481 n2 = VTI (bb)->mos.length () - 1;
6482 mos = VTI (bb)->mos.address ();
6484 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6485 MO_VAL_LOC last. */
6486 while (n1 < n2)
6488 while (n1 < n2 && mos[n1].type == MO_USE)
6489 n1++;
6490 while (n1 < n2 && mos[n2].type != MO_USE)
6491 n2--;
6492 if (n1 < n2)
6494 micro_operation sw;
6496 sw = mos[n1];
6497 mos[n1] = mos[n2];
6498 mos[n2] = sw;
6502 n2 = VTI (bb)->mos.length () - 1;
6503 while (n1 < n2)
6505 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6506 n1++;
6507 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6508 n2--;
6509 if (n1 < n2)
6511 micro_operation sw;
6513 sw = mos[n1];
6514 mos[n1] = mos[n2];
6515 mos[n2] = sw;
6519 if (CALL_P (insn))
6521 micro_operation mo;
6523 mo.type = MO_CALL;
6524 mo.insn = insn;
6525 mo.u.loc = call_arguments;
6526 call_arguments = NULL_RTX;
6528 if (dump_file && (dump_flags & TDF_DETAILS))
6529 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6530 VTI (bb)->mos.safe_push (mo);
6533 n1 = VTI (bb)->mos.length ();
6534 /* This will record NEXT_INSN (insn), such that we can
6535 insert notes before it without worrying about any
6536 notes that MO_USEs might emit after the insn. */
6537 cui.store_p = true;
6538 note_stores (PATTERN (insn), add_stores, &cui);
6539 n2 = VTI (bb)->mos.length () - 1;
6540 mos = VTI (bb)->mos.address ();
6542 /* Order the MO_VAL_USEs first (note_stores does nothing
6543 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6544 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6545 while (n1 < n2)
6547 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6548 n1++;
6549 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6550 n2--;
6551 if (n1 < n2)
6553 micro_operation sw;
6555 sw = mos[n1];
6556 mos[n1] = mos[n2];
6557 mos[n2] = sw;
6561 n2 = VTI (bb)->mos.length () - 1;
6562 while (n1 < n2)
6564 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6565 n1++;
6566 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6567 n2--;
6568 if (n1 < n2)
6570 micro_operation sw;
6572 sw = mos[n1];
6573 mos[n1] = mos[n2];
6574 mos[n2] = sw;
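/* Return the initialization status of SRC, the source of an
   assignment, as recorded for its variable in dataflow set IN.
   Without -fvar-tracking-uninit everything is simply reported as
   initialized.  */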
6579 static enum var_init_status
6580 find_src_status (dataflow_set *in, rtx src)
6582 tree decl = NULL_TREE;
6583 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6585 if (! flag_var_tracking_uninit)
6586 status = VAR_INIT_STATUS_INITIALIZED;
6588 if (src && REG_P (src))
6589 decl = var_debug_decl (REG_EXPR (src));
6590 else if (src && MEM_P (src))
6591 decl = var_debug_decl (MEM_EXPR (src));
6593 if (src && decl)
6594 status = get_init_value (in, src, dv_from_decl (decl));
6596 return status;
6599 /* SRC is the source of an assignment. Use SET to try to find what
6600 was ultimately assigned to SRC. Return that value if known,
6601 otherwise return NULL_RTX. */
6603 static rtx
6604 find_src_set_src (dataflow_set *set, rtx src)
6606 tree decl = NULL_TREE; /* The variable being copied around. */
6607 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6608 variable var;
6609 location_chain nextp;
6610 int i;
6611 bool found;
6613 if (src && REG_P (src))
6614 decl = var_debug_decl (REG_EXPR (src));
6615 else if (src && MEM_P (src))
6616 decl = var_debug_decl (MEM_EXPR (src));
6618 if (src && decl)
6620 decl_or_value dv = dv_from_decl (decl);
6622 var = shared_hash_find (set->vars, dv);
6623 if (var)
6625 found = false;
6626 for (i = 0; i < var->n_var_parts && !found; i++)
6627 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6628 nextp = nextp->next)
6629 if (rtx_equal_p (nextp->loc, src))
6631 set_src = nextp->set_src;
6632 found = true;
6638 return set_src;
6641 /* Compute the changes of variable locations in the basic block BB. Return true if the OUT set changed. */
6643 static bool
6644 compute_bb_dataflow (basic_block bb)
6646 unsigned int i;
6647 micro_operation *mo;
6648 bool changed;
6649 dataflow_set old_out;
6650 dataflow_set *in = &VTI (bb)->in;
6651 dataflow_set *out = &VTI (bb)->out;
6653 dataflow_set_init (&old_out);
6654 dataflow_set_copy (&old_out, out);
6655 dataflow_set_copy (out, in);
6657 if (MAY_HAVE_DEBUG_INSNS)
6658 local_get_addr_cache = new hash_map<rtx, rtx>;
6660 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6662 rtx_insn *insn = mo->insn;
6664 switch (mo->type)
6666 case MO_CALL:
6667 dataflow_set_clear_at_call (out);
6668 break;
6670 case MO_USE:
6672 rtx loc = mo->u.loc;
6674 if (REG_P (loc))
6675 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6676 else if (MEM_P (loc))
6677 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6679 break;
6681 case MO_VAL_LOC:
6683 rtx loc = mo->u.loc;
6684 rtx val, vloc;
6685 tree var;
6687 if (GET_CODE (loc) == CONCAT)
6689 val = XEXP (loc, 0);
6690 vloc = XEXP (loc, 1);
6692 else
6694 val = NULL_RTX;
6695 vloc = loc;
6698 var = PAT_VAR_LOCATION_DECL (vloc);
6700 clobber_variable_part (out, NULL_RTX,
6701 dv_from_decl (var), 0, NULL_RTX);
6702 if (val)
6704 if (VAL_NEEDS_RESOLUTION (loc))
6705 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6706 set_variable_part (out, val, dv_from_decl (var), 0,
6707 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6708 INSERT);
6710 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6711 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6712 dv_from_decl (var), 0,
6713 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6714 INSERT);
6716 break;
6718 case MO_VAL_USE:
6720 rtx loc = mo->u.loc;
6721 rtx val, vloc, uloc;
6723 vloc = uloc = XEXP (loc, 1);
6724 val = XEXP (loc, 0);
6726 if (GET_CODE (val) == CONCAT)
6728 uloc = XEXP (val, 1);
6729 val = XEXP (val, 0);
6732 if (VAL_NEEDS_RESOLUTION (loc))
6733 val_resolve (out, val, vloc, insn);
6734 else
6735 val_store (out, val, uloc, insn, false);
6737 if (VAL_HOLDS_TRACK_EXPR (loc))
6739 if (GET_CODE (uloc) == REG)
6740 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6741 NULL);
6742 else if (GET_CODE (uloc) == MEM)
6743 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6744 NULL);
6747 break;
6749 case MO_VAL_SET:
6751 rtx loc = mo->u.loc;
6752 rtx val, vloc, uloc;
6753 rtx dstv, srcv;
6755 vloc = loc;
6756 uloc = XEXP (vloc, 1);
6757 val = XEXP (vloc, 0);
6758 vloc = uloc;
6760 if (GET_CODE (uloc) == SET)
6762 dstv = SET_DEST (uloc);
6763 srcv = SET_SRC (uloc);
6765 else
6767 dstv = uloc;
6768 srcv = NULL;
6771 if (GET_CODE (val) == CONCAT)
6773 dstv = vloc = XEXP (val, 1);
6774 val = XEXP (val, 0);
6777 if (GET_CODE (vloc) == SET)
6779 srcv = SET_SRC (vloc);
6781 gcc_assert (val != srcv);
6782 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6784 dstv = vloc = SET_DEST (vloc);
6786 if (VAL_NEEDS_RESOLUTION (loc))
6787 val_resolve (out, val, srcv, insn);
6789 else if (VAL_NEEDS_RESOLUTION (loc))
6791 gcc_assert (GET_CODE (uloc) == SET
6792 && GET_CODE (SET_SRC (uloc)) == REG);
6793 val_resolve (out, val, SET_SRC (uloc), insn);
6796 if (VAL_HOLDS_TRACK_EXPR (loc))
6798 if (VAL_EXPR_IS_CLOBBERED (loc))
6800 if (REG_P (uloc))
6801 var_reg_delete (out, uloc, true);
6802 else if (MEM_P (uloc))
6804 gcc_assert (MEM_P (dstv));
6805 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6806 var_mem_delete (out, dstv, true);
6809 else
6811 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6812 rtx src = NULL, dst = uloc;
6813 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6815 if (GET_CODE (uloc) == SET)
6817 src = SET_SRC (uloc);
6818 dst = SET_DEST (uloc);
6821 if (copied_p)
6823 if (flag_var_tracking_uninit)
6825 status = find_src_status (in, src);
6827 if (status == VAR_INIT_STATUS_UNKNOWN)
6828 status = find_src_status (out, src);
6831 src = find_src_set_src (in, src);
6834 if (REG_P (dst))
6835 var_reg_delete_and_set (out, dst, !copied_p,
6836 status, srcv);
6837 else if (MEM_P (dst))
6839 gcc_assert (MEM_P (dstv));
6840 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6841 var_mem_delete_and_set (out, dstv, !copied_p,
6842 status, srcv);
6846 else if (REG_P (uloc))
6847 var_regno_delete (out, REGNO (uloc));
6848 else if (MEM_P (uloc))
6850 gcc_checking_assert (GET_CODE (vloc) == MEM);
6851 gcc_checking_assert (dstv == vloc);
6852 if (dstv != vloc)
6853 clobber_overlapping_mems (out, vloc);
6856 val_store (out, val, dstv, insn, true);
6858 break;
6860 case MO_SET:
6862 rtx loc = mo->u.loc;
6863 rtx set_src = NULL;
6865 if (GET_CODE (loc) == SET)
6867 set_src = SET_SRC (loc);
6868 loc = SET_DEST (loc);
6871 if (REG_P (loc))
6872 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6873 set_src);
6874 else if (MEM_P (loc))
6875 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6876 set_src);
6878 break;
6880 case MO_COPY:
6882 rtx loc = mo->u.loc;
6883 enum var_init_status src_status;
6884 rtx set_src = NULL;
6886 if (GET_CODE (loc) == SET)
6888 set_src = SET_SRC (loc);
6889 loc = SET_DEST (loc);
6892 if (! flag_var_tracking_uninit)
6893 src_status = VAR_INIT_STATUS_INITIALIZED;
6894 else
6896 src_status = find_src_status (in, set_src);
6898 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6899 src_status = find_src_status (out, set_src);
6902 set_src = find_src_set_src (in, set_src);
6904 if (REG_P (loc))
6905 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6906 else if (MEM_P (loc))
6907 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6909 break;
6911 case MO_USE_NO_VAR:
6913 rtx loc = mo->u.loc;
6915 if (REG_P (loc))
6916 var_reg_delete (out, loc, false);
6917 else if (MEM_P (loc))
6918 var_mem_delete (out, loc, false);
6920 break;
6922 case MO_CLOBBER:
6924 rtx loc = mo->u.loc;
6926 if (REG_P (loc))
6927 var_reg_delete (out, loc, true);
6928 else if (MEM_P (loc))
6929 var_mem_delete (out, loc, true);
6931 break;
6933 case MO_ADJUST:
6934 out->stack_adjust += mo->u.adjust;
6935 break;
6939 if (MAY_HAVE_DEBUG_INSNS)
6941 delete local_get_addr_cache;
6942 local_get_addr_cache = NULL;
6944 dataflow_set_equiv_regs (out);
6945 shared_hash_htab (out->vars)
6946 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6947 shared_hash_htab (out->vars)
6948 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6949 #if ENABLE_CHECKING
6950 shared_hash_htab (out->vars)
6951 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6952 #endif
6954 changed = dataflow_set_different (&old_out, out);
6955 dataflow_set_destroy (&old_out);
6956 return changed;
6959 /* Find the locations of variables in the whole function. */
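/* The solver below runs in rounds: blocks are extracted from WORKLIST
   in BB_ORDER (reverse completion order) via a fibonacci heap; when a
   block's OUT set changes, each successor goes onto PENDING for the
   next round if it was already visited in this round, or back onto
   WORKLIST otherwise.  The combined size of all IN/OUT hash tables is
   capped by PARAM_MAX_VARTRACK_SIZE; exceeding the cap makes the
   analysis fail and return false, so that the pass can retry without
   -fvar-tracking-assignments or give up.  */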
6961 static bool
6962 vt_find_locations (void)
6964 fibheap_t worklist, pending, fibheap_swap;
6965 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6966 basic_block bb;
6967 edge e;
6968 int *bb_order;
6969 int *rc_order;
6970 int i;
6971 int htabsz = 0;
6972 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6973 bool success = true;
6975 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6976 /* Compute reverse completion order of depth first search of the CFG
6977 so that the data-flow runs faster. */
6978 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6979 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6980 pre_and_rev_post_order_compute (NULL, rc_order, false);
6981 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6982 bb_order[rc_order[i]] = i;
6983 free (rc_order);
6985 worklist = fibheap_new ();
6986 pending = fibheap_new ();
6987 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
6988 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
6989 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
6990 bitmap_clear (in_worklist);
6992 FOR_EACH_BB_FN (bb, cfun)
6993 fibheap_insert (pending, bb_order[bb->index], bb);
6994 bitmap_ones (in_pending);
6996 while (success && !fibheap_empty (pending))
6998 fibheap_swap = pending;
6999 pending = worklist;
7000 worklist = fibheap_swap;
7001 sbitmap_swap = in_pending;
7002 in_pending = in_worklist;
7003 in_worklist = sbitmap_swap;
7005 bitmap_clear (visited);
7007 while (!fibheap_empty (worklist))
7009 bb = (basic_block) fibheap_extract_min (worklist);
7010 bitmap_clear_bit (in_worklist, bb->index);
7011 gcc_assert (!bitmap_bit_p (visited, bb->index));
7012 if (!bitmap_bit_p (visited, bb->index))
7014 bool changed;
7015 edge_iterator ei;
7016 int oldinsz, oldoutsz;
7018 bitmap_set_bit (visited, bb->index);
7020 if (VTI (bb)->in.vars)
7022 htabsz
7023 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7024 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7025 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7026 oldoutsz
7027 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7029 else
7030 oldinsz = oldoutsz = 0;
7032 if (MAY_HAVE_DEBUG_INSNS)
7034 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7035 bool first = true, adjust = false;
7037 /* Calculate the IN set as the intersection of
7038 predecessor OUT sets. */
7040 dataflow_set_clear (in);
7041 dst_can_be_shared = true;
7043 FOR_EACH_EDGE (e, ei, bb->preds)
7044 if (!VTI (e->src)->flooded)
7045 gcc_assert (bb_order[bb->index]
7046 <= bb_order[e->src->index]);
7047 else if (first)
7049 dataflow_set_copy (in, &VTI (e->src)->out);
7050 first_out = &VTI (e->src)->out;
7051 first = false;
7053 else
7055 dataflow_set_merge (in, &VTI (e->src)->out);
7056 adjust = true;
7059 if (adjust)
7061 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7062 #if ENABLE_CHECKING
7063 /* Merge and merge_adjust should keep entries in
7064 canonical order. */
7065 shared_hash_htab (in->vars)
7066 ->traverse <dataflow_set *,
7067 canonicalize_loc_order_check> (in);
7068 #endif
7069 if (dst_can_be_shared)
7071 shared_hash_destroy (in->vars);
7072 in->vars = shared_hash_copy (first_out->vars);
7076 VTI (bb)->flooded = true;
7078 else
7080 /* Calculate the IN set as union of predecessor OUT sets. */
7081 dataflow_set_clear (&VTI (bb)->in);
7082 FOR_EACH_EDGE (e, ei, bb->preds)
7083 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7086 changed = compute_bb_dataflow (bb);
7087 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7088 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7090 if (htabmax && htabsz > htabmax)
7092 if (MAY_HAVE_DEBUG_INSNS)
7093 inform (DECL_SOURCE_LOCATION (cfun->decl),
7094 "variable tracking size limit exceeded with "
7095 "-fvar-tracking-assignments, retrying without");
7096 else
7097 inform (DECL_SOURCE_LOCATION (cfun->decl),
7098 "variable tracking size limit exceeded");
7099 success = false;
7100 break;
7103 if (changed)
7105 FOR_EACH_EDGE (e, ei, bb->succs)
7107 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7108 continue;
7110 if (bitmap_bit_p (visited, e->dest->index))
7112 if (!bitmap_bit_p (in_pending, e->dest->index))
7114 /* Send E->DEST to next round. */
7115 bitmap_set_bit (in_pending, e->dest->index);
7116 fibheap_insert (pending,
7117 bb_order[e->dest->index],
7118 e->dest);
7121 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7123 /* Add E->DEST to current round. */
7124 bitmap_set_bit (in_worklist, e->dest->index);
7125 fibheap_insert (worklist, bb_order[e->dest->index],
7126 e->dest);
7131 if (dump_file)
7132 fprintf (dump_file,
7133 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7134 bb->index,
7135 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7136 oldinsz,
7137 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7138 oldoutsz,
7139 (int)worklist->nodes, (int)pending->nodes, htabsz);
7141 if (dump_file && (dump_flags & TDF_DETAILS))
7143 fprintf (dump_file, "BB %i IN:\n", bb->index);
7144 dump_dataflow_set (&VTI (bb)->in);
7145 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7146 dump_dataflow_set (&VTI (bb)->out);
7152 if (success && MAY_HAVE_DEBUG_INSNS)
7153 FOR_EACH_BB_FN (bb, cfun)
7154 gcc_assert (VTI (bb)->flooded);
7156 free (bb_order);
7157 fibheap_delete (worklist);
7158 fibheap_delete (pending);
7159 sbitmap_free (visited);
7160 sbitmap_free (in_worklist);
7161 sbitmap_free (in_pending);
7163 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7164 return success;
7167 /* Print the content of the LIST to dump file. */
7169 static void
7170 dump_attrs_list (attrs list)
7172 for (; list; list = list->next)
7174 if (dv_is_decl_p (list->dv))
7175 print_mem_expr (dump_file, dv_as_decl (list->dv));
7176 else
7177 print_rtl_single (dump_file, dv_as_value (list->dv));
7178 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7180 fprintf (dump_file, "\n");
7183 /* Print the information about variable *SLOT to dump file. */
7186 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7188 variable var = *slot;
7190 dump_var (var);
7192 /* Continue traversing the hash table. */
7193 return 1;
7196 /* Print the information about variable VAR to dump file. */
7198 static void
7199 dump_var (variable var)
7201 int i;
7202 location_chain node;
7204 if (dv_is_decl_p (var->dv))
7206 const_tree decl = dv_as_decl (var->dv);
7208 if (DECL_NAME (decl))
7210 fprintf (dump_file, " name: %s",
7211 IDENTIFIER_POINTER (DECL_NAME (decl)));
7212 if (dump_flags & TDF_UID)
7213 fprintf (dump_file, "D.%u", DECL_UID (decl));
7215 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7216 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7217 else
7218 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7219 fprintf (dump_file, "\n");
7221 else
7223 fputc (' ', dump_file);
7224 print_rtl_single (dump_file, dv_as_value (var->dv));
7227 for (i = 0; i < var->n_var_parts; i++)
7229 fprintf (dump_file, " offset %ld\n",
7230 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7231 for (node = var->var_part[i].loc_chain; node; node = node->next)
7233 fprintf (dump_file, " ");
7234 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7235 fprintf (dump_file, "[uninit]");
7236 print_rtl_single (dump_file, node->loc);
7241 /* Print the information about variables from hash table VARS to dump file. */
7243 static void
7244 dump_vars (variable_table_type *vars)
7246 if (vars->elements () > 0)
7248 fprintf (dump_file, "Variables:\n");
7249 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7253 /* Print the dataflow set SET to dump file. */
7255 static void
7256 dump_dataflow_set (dataflow_set *set)
7258 int i;
7260 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7261 set->stack_adjust);
7262 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7264 if (set->regs[i])
7266 fprintf (dump_file, "Reg %d:", i);
7267 dump_attrs_list (set->regs[i]);
7270 dump_vars (shared_hash_htab (set->vars));
7271 fprintf (dump_file, "\n");
7274 /* Print the IN and OUT sets for each basic block to dump file. */
7276 static void
7277 dump_dataflow_sets (void)
7279 basic_block bb;
7281 FOR_EACH_BB_FN (bb, cfun)
7283 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7284 fprintf (dump_file, "IN:\n");
7285 dump_dataflow_set (&VTI (bb)->in);
7286 fprintf (dump_file, "OUT:\n");
7287 dump_dataflow_set (&VTI (bb)->out);
7291 /* Return the variable for DV in dropped_values, inserting one if
7292 requested with INSERT. */
7294 static inline variable
7295 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7297 variable_def **slot;
7298 variable empty_var;
7299 onepart_enum_t onepart;
7301 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7303 if (!slot)
7304 return NULL;
7306 if (*slot)
7307 return *slot;
7309 gcc_checking_assert (insert == INSERT);
7311 onepart = dv_onepart_p (dv);
7313 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7315 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7316 empty_var->dv = dv;
7317 empty_var->refcount = 1;
7318 empty_var->n_var_parts = 0;
7319 empty_var->onepart = onepart;
7320 empty_var->in_changed_variables = false;
7321 empty_var->var_part[0].loc_chain = NULL;
7322 empty_var->var_part[0].cur_loc = NULL;
7323 VAR_LOC_1PAUX (empty_var) = NULL;
7324 set_dv_changed (dv, true);
7326 *slot = empty_var;
7328 return empty_var;
7331 /* Recover the one-part aux from dropped_values. */
7333 static struct onepart_aux *
7334 recover_dropped_1paux (variable var)
7336 variable dvar;
7338 gcc_checking_assert (var->onepart);
7340 if (VAR_LOC_1PAUX (var))
7341 return VAR_LOC_1PAUX (var);
7343 if (var->onepart == ONEPART_VDECL)
7344 return NULL;
7346 dvar = variable_from_dropped (var->dv, NO_INSERT);
7348 if (!dvar)
7349 return NULL;
7351 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7352 VAR_LOC_1PAUX (dvar) = NULL;
7354 return VAR_LOC_1PAUX (var);
7357 /* Add variable VAR to the hash table of changed variables and,
7358 if it has no locations, delete it from SET's hash table. */
7360 static void
7361 variable_was_changed (variable var, dataflow_set *set)
7363 hashval_t hash = dv_htab_hash (var->dv);
7365 if (emit_notes)
7367 variable_def **slot;
7369 /* Remember this decl or VALUE has been added to changed_variables. */
7370 set_dv_changed (var->dv, true);
7372 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7374 if (*slot)
7376 variable old_var = *slot;
7377 gcc_assert (old_var->in_changed_variables);
7378 old_var->in_changed_variables = false;
7379 if (var != old_var && var->onepart)
7381 /* Restore the auxiliary info from an empty variable
7382 previously created for changed_variables, so it is
7383 not lost. */
7384 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7385 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7386 VAR_LOC_1PAUX (old_var) = NULL;
7388 variable_htab_free (*slot);
7391 if (set && var->n_var_parts == 0)
7393 onepart_enum_t onepart = var->onepart;
7394 variable empty_var = NULL;
7395 variable_def **dslot = NULL;
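/* When a one-part VALUE or DEBUG_EXPR variable loses its last
   location, park an empty shell for it in DROPPED_VALUES so that its
   auxiliary data (back-links, expansion depth) can be recovered by
   recover_dropped_1paux if the variable reappears.  */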
7397 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7399 dslot = dropped_values->find_slot_with_hash (var->dv,
7400 dv_htab_hash (var->dv),
7401 INSERT);
7402 empty_var = *dslot;
7404 if (empty_var)
7406 gcc_checking_assert (!empty_var->in_changed_variables);
7407 if (!VAR_LOC_1PAUX (var))
7409 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7410 VAR_LOC_1PAUX (empty_var) = NULL;
7412 else
7413 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7417 if (!empty_var)
7419 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7420 empty_var->dv = var->dv;
7421 empty_var->refcount = 1;
7422 empty_var->n_var_parts = 0;
7423 empty_var->onepart = onepart;
7424 if (dslot)
7426 empty_var->refcount++;
7427 *dslot = empty_var;
7430 else
7431 empty_var->refcount++;
7432 empty_var->in_changed_variables = true;
7433 *slot = empty_var;
7434 if (onepart)
7436 empty_var->var_part[0].loc_chain = NULL;
7437 empty_var->var_part[0].cur_loc = NULL;
7438 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7439 VAR_LOC_1PAUX (var) = NULL;
7441 goto drop_var;
7443 else
7445 if (var->onepart && !VAR_LOC_1PAUX (var))
7446 recover_dropped_1paux (var);
7447 var->refcount++;
7448 var->in_changed_variables = true;
7449 *slot = var;
7452 else
7454 gcc_assert (set);
7455 if (var->n_var_parts == 0)
7457 variable_def **slot;
7459 drop_var:
7460 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7461 if (slot)
7463 if (shared_hash_shared (set->vars))
7464 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7465 NO_INSERT);
7466 shared_hash_htab (set->vars)->clear_slot (slot);
7472 /* Look for the index in VAR->var_part corresponding to OFFSET.
7473 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7474 referenced int will be set to the index that the part has or should
7475 have, if it should be inserted. */
7477 static inline int
7478 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7479 int *insertion_point)
7481 int pos, low, high;
7483 if (var->onepart)
7485 if (offset != 0)
7486 return -1;
7488 if (insertion_point)
7489 *insertion_point = 0;
7491 return var->n_var_parts - 1;
7494 /* Find the location part. */
7495 low = 0;
7496 high = var->n_var_parts;
7497 while (low != high)
7499 pos = (low + high) / 2;
7500 if (VAR_PART_OFFSET (var, pos) < offset)
7501 low = pos + 1;
7502 else
7503 high = pos;
7505 pos = low;
7507 if (insertion_point)
7508 *insertion_point = pos;
7510 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7511 return pos;
7513 return -1;
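/* Insert LOC, with initialization status INITIALIZED and assignment
   source SET_SRC, into the location chain of the variable part
   (DV, OFFSET) whose SET->vars slot is SLOT: at the front for
   multi-part variables, at its canonical position for one-part ones.
   Unshares a shared variable first, and returns the (possibly
   relocated) slot.  */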
7516 static variable_def **
7517 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7518 decl_or_value dv, HOST_WIDE_INT offset,
7519 enum var_init_status initialized, rtx set_src)
7521 int pos;
7522 location_chain node, next;
7523 location_chain *nextp;
7524 variable var;
7525 onepart_enum_t onepart;
7527 var = *slot;
7529 if (var)
7530 onepart = var->onepart;
7531 else
7532 onepart = dv_onepart_p (dv);
7534 gcc_checking_assert (offset == 0 || !onepart);
7535 gcc_checking_assert (loc != dv_as_opaque (dv));
7537 if (! flag_var_tracking_uninit)
7538 initialized = VAR_INIT_STATUS_INITIALIZED;
7540 if (!var)
7542 /* Create new variable information. */
7543 var = (variable) pool_alloc (onepart_pool (onepart));
7544 var->dv = dv;
7545 var->refcount = 1;
7546 var->n_var_parts = 1;
7547 var->onepart = onepart;
7548 var->in_changed_variables = false;
7549 if (var->onepart)
7550 VAR_LOC_1PAUX (var) = NULL;
7551 else
7552 VAR_PART_OFFSET (var, 0) = offset;
7553 var->var_part[0].loc_chain = NULL;
7554 var->var_part[0].cur_loc = NULL;
7555 *slot = var;
7556 pos = 0;
7557 nextp = &var->var_part[0].loc_chain;
7559 else if (onepart)
7561 int r = -1, c = 0;
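/* In the scans below, R becomes 0 if LOC is already present in the
   chain, 1 once the canonical insertion point has been found, and C
   counts the nodes that precede that point.  */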
7563 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7565 pos = 0;
7567 if (GET_CODE (loc) == VALUE)
7569 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7570 nextp = &node->next)
7571 if (GET_CODE (node->loc) == VALUE)
7573 if (node->loc == loc)
7575 r = 0;
7576 break;
7578 if (canon_value_cmp (node->loc, loc))
7579 c++;
7580 else
7582 r = 1;
7583 break;
7586 else if (REG_P (node->loc) || MEM_P (node->loc))
7587 c++;
7588 else
7590 r = 1;
7591 break;
7594 else if (REG_P (loc))
7596 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7597 nextp = &node->next)
7598 if (REG_P (node->loc))
7600 if (REGNO (node->loc) < REGNO (loc))
7601 c++;
7602 else
7604 if (REGNO (node->loc) == REGNO (loc))
7605 r = 0;
7606 else
7607 r = 1;
7608 break;
7611 else
7613 r = 1;
7614 break;
7617 else if (MEM_P (loc))
7619 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7620 nextp = &node->next)
7621 if (REG_P (node->loc))
7622 c++;
7623 else if (MEM_P (node->loc))
7625 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7626 break;
7627 else
7628 c++;
7630 else
7632 r = 1;
7633 break;
7636 else
7637 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7638 nextp = &node->next)
7639 if ((r = loc_cmp (node->loc, loc)) >= 0)
7640 break;
7641 else
7642 c++;
7644 if (r == 0)
7645 return slot;
7647 if (shared_var_p (var, set->vars))
7649 slot = unshare_variable (set, slot, var, initialized);
7650 var = *slot;
7651 for (nextp = &var->var_part[0].loc_chain; c;
7652 nextp = &(*nextp)->next)
7653 c--;
7654 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7657 else
7659 int inspos = 0;
7661 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7663 pos = find_variable_location_part (var, offset, &inspos);
7665 if (pos >= 0)
7667 node = var->var_part[pos].loc_chain;
7669 if (node
7670 && ((REG_P (node->loc) && REG_P (loc)
7671 && REGNO (node->loc) == REGNO (loc))
7672 || rtx_equal_p (node->loc, loc)))
7674 /* LOC is at the beginning of the chain, so we have nothing
7675 to do. */
7676 if (node->init < initialized)
7677 node->init = initialized;
7678 if (set_src != NULL)
7679 node->set_src = set_src;
7681 return slot;
7683 else
7685 /* We have to make a copy of a shared variable. */
7686 if (shared_var_p (var, set->vars))
7688 slot = unshare_variable (set, slot, var, initialized);
7689 var = *slot;
7693 else
7695 /* We have not found the location part, so a new one will be created. */
7697 /* We have to make a copy of the shared variable. */
7698 if (shared_var_p (var, set->vars))
7700 slot = unshare_variable (set, slot, var, initialized);
7701 var = *slot;
7704 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7705 thus there are at most MAX_VAR_PARTS different offsets. */
7706 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7707 && (!var->n_var_parts || !onepart));
7709 /* We have to move the elements of the array starting at index
7710 INSPOS to the next position. */
7711 for (pos = var->n_var_parts; pos > inspos; pos--)
7712 var->var_part[pos] = var->var_part[pos - 1];
7714 var->n_var_parts++;
7715 gcc_checking_assert (!onepart);
7716 VAR_PART_OFFSET (var, pos) = offset;
7717 var->var_part[pos].loc_chain = NULL;
7718 var->var_part[pos].cur_loc = NULL;
7721 /* Delete the location from the list. */
7722 nextp = &var->var_part[pos].loc_chain;
7723 for (node = var->var_part[pos].loc_chain; node; node = next)
7725 next = node->next;
7726 if ((REG_P (node->loc) && REG_P (loc)
7727 && REGNO (node->loc) == REGNO (loc))
7728 || rtx_equal_p (node->loc, loc))
7730 /* Save these values, to assign to the new node, before
7731 deleting this one. */
7732 if (node->init > initialized)
7733 initialized = node->init;
7734 if (node->set_src != NULL && set_src == NULL)
7735 set_src = node->set_src;
7736 if (var->var_part[pos].cur_loc == node->loc)
7737 var->var_part[pos].cur_loc = NULL;
7738 pool_free (loc_chain_pool, node);
7739 *nextp = next;
7740 break;
7742 else
7743 nextp = &node->next;
7746 nextp = &var->var_part[pos].loc_chain;
7749 /* Add the location to the beginning. */
7750 node = (location_chain) pool_alloc (loc_chain_pool);
7751 node->loc = loc;
7752 node->init = initialized;
7753 node->set_src = set_src;
7754 node->next = *nextp;
7755 *nextp = node;
7757 /* If no location for this part has been emitted yet, mark the variable as changed so a note will be emitted. */
7758 if (var->var_part[pos].cur_loc == NULL)
7759 variable_was_changed (var, set);
7761 return slot;
7764 /* Set the part of variable's location in the dataflow set SET. The
7765 variable part is specified by variable's declaration in DV and
7766 offset OFFSET and the part's location by LOC. IOPT should be
7767 NO_INSERT if the variable is known to be in SET already and the
7768 variable hash table must not be resized, and INSERT otherwise. */
7770 static void
7771 set_variable_part (dataflow_set *set, rtx loc,
7772 decl_or_value dv, HOST_WIDE_INT offset,
7773 enum var_init_status initialized, rtx set_src,
7774 enum insert_option iopt)
7776 variable_def **slot;
7778 if (iopt == NO_INSERT)
7779 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7780 else
7782 slot = shared_hash_find_slot (set->vars, dv);
7783 if (!slot)
7784 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7786 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7789 /* Remove all recorded register locations for the given variable part
7790 from dataflow set SET, except for those that are identical to LOC.
7791 The variable part is specified by its SET->vars slot SLOT and
7792 offset OFFSET. */
7794 static variable_def **
7795 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7796 HOST_WIDE_INT offset, rtx set_src)
7798 variable var = *slot;
7799 int pos = find_variable_location_part (var, offset, NULL);
7801 if (pos >= 0)
7803 location_chain node, next;
7805 /* Remove the register locations from the dataflow set. */
7806 next = var->var_part[pos].loc_chain;
7807 for (node = next; node; node = next)
7809 next = node->next;
7810 if (node->loc != loc
7811 && (!flag_var_tracking_uninit
7812 || !set_src
7813 || MEM_P (set_src)
7814 || !rtx_equal_p (set_src, node->set_src)))
7816 if (REG_P (node->loc))
7818 attrs anode, anext;
7819 attrs *anextp;
7821 /* Remove the variable part from the register's
7822 list, but preserve any other variable parts
7823 that might be regarded as live in that same
7824 register. */
7825 anextp = &set->regs[REGNO (node->loc)];
7826 for (anode = *anextp; anode; anode = anext)
7828 anext = anode->next;
7829 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7830 && anode->offset == offset)
7832 pool_free (attrs_pool, anode);
7833 *anextp = anext;
7835 else
7836 anextp = &anode->next;
7840 slot = delete_slot_part (set, node->loc, slot, offset);
7845 return slot;
7848 /* Remove all recorded register locations for the given variable part
7849 from dataflow set SET, except for those that are identical to LOC.
7850 The variable part is specified by the variable's declaration or value
7851 DV and offset OFFSET. */
7853 static void
7854 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7855 HOST_WIDE_INT offset, rtx set_src)
7857 variable_def **slot;
7859 if (!dv_as_opaque (dv)
7860 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7861 return;
7863 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7864 if (!slot)
7865 return;
7867 clobber_slot_part (set, loc, slot, offset, set_src);
7870 /* Delete the part of variable's location from dataflow set SET. The
7871 variable part is specified by its SET->vars slot SLOT and offset
7872 OFFSET and the part's location by LOC. */
7874 static variable_def **
7875 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7876 HOST_WIDE_INT offset)
7878 variable var = *slot;
7879 int pos = find_variable_location_part (var, offset, NULL);
7881 if (pos >= 0)
7883 location_chain node, next;
7884 location_chain *nextp;
7885 bool changed;
7886 rtx cur_loc;
7888 if (shared_var_p (var, set->vars))
7890 /* If the variable contains the location part we have to
7891 make a copy of the variable. */
7892 for (node = var->var_part[pos].loc_chain; node;
7893 node = node->next)
7895 if ((REG_P (node->loc) && REG_P (loc)
7896 && REGNO (node->loc) == REGNO (loc))
7897 || rtx_equal_p (node->loc, loc))
7899 slot = unshare_variable (set, slot, var,
7900 VAR_INIT_STATUS_UNKNOWN);
7901 var = *slot;
7902 break;
7907 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7908 cur_loc = VAR_LOC_FROM (var);
7909 else
7910 cur_loc = var->var_part[pos].cur_loc;
7912 /* Delete the location part. */
7913 changed = false;
7914 nextp = &var->var_part[pos].loc_chain;
7915 for (node = *nextp; node; node = next)
7917 next = node->next;
7918 if ((REG_P (node->loc) && REG_P (loc)
7919 && REGNO (node->loc) == REGNO (loc))
7920 || rtx_equal_p (node->loc, loc))
7922 /* If we have deleted the location which was last emitted
7923 we have to emit new location so add the variable to set
7924 of changed variables. */
7925 if (cur_loc == node->loc)
7927 changed = true;
7928 var->var_part[pos].cur_loc = NULL;
7929 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7930 VAR_LOC_FROM (var) = NULL;
7932 pool_free (loc_chain_pool, node);
7933 *nextp = next;
7934 break;
7936 else
7937 nextp = &node->next;
7940 if (var->var_part[pos].loc_chain == NULL)
7942 changed = true;
7943 var->n_var_parts--;
7944 while (pos < var->n_var_parts)
7946 var->var_part[pos] = var->var_part[pos + 1];
7947 pos++;
7950 if (changed)
7951 variable_was_changed (var, set);
7954 return slot;
7957 /* Delete the part of variable's location from dataflow set SET. The
7958 variable part is specified by variable's declaration or value DV
7959 and offset OFFSET and the part's location by LOC. */
7961 static void
7962 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7963 HOST_WIDE_INT offset)
7965 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7966 if (!slot)
7967 return;
7969 delete_slot_part (set, loc, slot, offset);
7973 /* Structure for passing some other parameters to function
7974 vt_expand_loc_callback. */
7975 struct expand_loc_callback_data
7977 /* The variables and values active at this point. */
7978 variable_table_type *vars;
7980 /* Stack of values and debug_exprs under expansion, and their
7981 children. */
7982 auto_vec<rtx, 4> expanding;
7984 /* Stack of values and debug_exprs whose expansion hit recursion
7985 cycles. They will have VALUE_RECURSED_INTO marked when added to
7986 this list. This flag will be cleared if any of its dependencies
7987 resolves to a valid location. So, if the flag remains set at the
7988 end of the search, we know no valid location for this one can
7989 possibly exist. */
7990 auto_vec<rtx, 4> pending;
7992 /* The maximum depth among the sub-expressions under expansion.
7993 Zero indicates no expansion so far. */
7994 expand_depth depth;
7997 /* Allocate the one-part auxiliary data structure for VAR, with enough
7998 room for COUNT dependencies. */
8000 static void
8001 loc_exp_dep_alloc (variable var, int count)
8003 size_t allocsize;
8005 gcc_checking_assert (var->onepart);
8007 /* We can be called with COUNT == 0 to allocate the data structure
8008 without any dependencies, e.g. for the backlinks only. However,
8009 if we are specifying a COUNT, then the dependency list must have
8010 been emptied before. It would be possible to adjust pointers or
8011 force it empty here, but this is better done at an earlier point
8012 in the algorithm, so we instead leave an assertion to catch
8013 errors. */
8014 gcc_checking_assert (!count
8015 || VAR_LOC_DEP_VEC (var) == NULL
8016 || VAR_LOC_DEP_VEC (var)->is_empty ());
8018 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8019 return;
8021 allocsize = offsetof (struct onepart_aux, deps)
8022 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8024 if (VAR_LOC_1PAUX (var))
8026 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8027 VAR_LOC_1PAUX (var), allocsize);
8028 /* If the reallocation moves the onepaux structure, the
8029 back-pointer to BACKLINKS in the first list member will still
8030 point to its old location. Adjust it. */
8031 if (VAR_LOC_DEP_LST (var))
8032 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8034 else
8036 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8037 *VAR_LOC_DEP_LSTP (var) = NULL;
8038 VAR_LOC_FROM (var) = NULL;
8039 VAR_LOC_DEPTH (var).complexity = 0;
8040 VAR_LOC_DEPTH (var).entryvals = 0;
8042 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8045 /* Remove all entries from the vector of active dependencies of VAR,
8046 removing them from the back-links lists too. */
8048 static void
8049 loc_exp_dep_clear (variable var)
8051 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8053 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8054 if (led->next)
8055 led->next->pprev = led->pprev;
8056 if (led->pprev)
8057 *led->pprev = led->next;
8058 VAR_LOC_DEP_VEC (var)->pop ();
8062 /* Insert an active dependency from VAR on X to the vector of
8063 dependencies, and add the corresponding back-link to X's list of
8064 back-links in VARS. */
8066 static void
8067 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8069 decl_or_value dv;
8070 variable xvar;
8071 loc_exp_dep *led;
8073 dv = dv_from_rtx (x);
8075 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8076 an additional lookup? */
8077 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8079 if (!xvar)
8081 xvar = variable_from_dropped (dv, NO_INSERT);
8082 gcc_checking_assert (xvar);
8085 /* No point in adding the same backlink more than once. This may
8086 arise if, say, the same value appears in two complex expressions in
8087 the same loc_list, or even more than once in a single
8088 expression. */
8089 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8090 return;
8092 if (var->onepart == NOT_ONEPART)
8093 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8094 else
8096 loc_exp_dep empty;
8097 memset (&empty, 0, sizeof (empty));
8098 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8099 led = &VAR_LOC_DEP_VEC (var)->last ();
8101 led->dv = var->dv;
8102 led->value = x;
8104 loc_exp_dep_alloc (xvar, 0);
8105 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8106 led->next = *led->pprev;
8107 if (led->next)
8108 led->next->pprev = &led->next;
8109 *led->pprev = led;
8112 /* Create active dependencies of VAR on COUNT values starting at
8113 VALUE, and corresponding back-links to the entries in VARS. Return
8114 true if we found any pending-recursion results. */
8116 static bool
8117 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8118 variable_table_type *vars)
8120 bool pending_recursion = false;
8122 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8123 || VAR_LOC_DEP_VEC (var)->is_empty ());
8125 /* Set up all dependencies from last_child (as set up at the end of
8126 the caller's expansion loop) to the end. */
8127 loc_exp_dep_alloc (var, count);
8129 while (count--)
8131 rtx x = *value++;
8133 if (!pending_recursion)
8134 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8136 loc_exp_insert_dep (var, x, vars);
8139 return pending_recursion;
8142 /* Notify the back-links of IVAR that are pending recursion that we
8143 have found a non-NIL value for it, so they are cleared for another
8144 attempt to compute a current location. */
8146 static void
8147 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8149 loc_exp_dep *led, *next;
8151 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8153 decl_or_value dv = led->dv;
8154 variable var;
8156 next = led->next;
8158 if (dv_is_value_p (dv))
8160 rtx value = dv_as_value (dv);
8162 /* If we have already resolved it, leave it alone. */
8163 if (!VALUE_RECURSED_INTO (value))
8164 continue;
8166 /* Check that VALUE_RECURSED_INTO, true from the test above,
8167 implies NO_LOC_P. */
8168 gcc_checking_assert (NO_LOC_P (value));
8170 /* We won't notify variables that are being expanded,
8171 because their dependency list is cleared before
8172 recursing. */
8173 NO_LOC_P (value) = false;
8174 VALUE_RECURSED_INTO (value) = false;
8176 gcc_checking_assert (dv_changed_p (dv));
8178 else
8180 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8181 if (!dv_changed_p (dv))
8182 continue;
8185 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8187 if (!var)
8188 var = variable_from_dropped (dv, NO_INSERT);
8190 if (var)
8191 notify_dependents_of_resolved_value (var, vars);
8193 if (next)
8194 next->pprev = led->pprev;
8195 if (led->pprev)
8196 *led->pprev = next;
8197 led->next = NULL;
8198 led->pprev = NULL;
8202 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8203 int max_depth, void *data);
8205 /* Return the combined depth, when one sub-expression evaluated to
8206 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8208 static inline expand_depth
8209 update_depth (expand_depth saved_depth, expand_depth best_depth)
8211 /* If we didn't find anything, stick with what we had. */
8212 if (!best_depth.complexity)
8213 return saved_depth;
8215 /* If we hadn't found anything before, use the depth of the current
8216 expression. Do NOT add one extra level, we want to compute the
8217 maximum depth among sub-expressions. We'll increment it later,
8218 if appropriate. */
8219 if (!saved_depth.complexity)
8220 return best_depth;
8222 /* Combine the entryval count so that regardless of which one we
8223 return, the entryval count is accurate. */
8224 best_depth.entryvals = saved_depth.entryvals
8225 = best_depth.entryvals + saved_depth.entryvals;
8227 if (saved_depth.complexity < best_depth.complexity)
8228 return best_depth;
8229 else
8230 return saved_depth;
8233 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8234 DATA for cselib expand callback. If PENDRECP is given, indicate in
8235 it whether any sub-expression couldn't be fully evaluated because
8236 it is pending recursion resolution. */
8238 static inline rtx
8239 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8241 struct expand_loc_callback_data *elcd
8242 = (struct expand_loc_callback_data *) data;
8243 location_chain loc, next;
8244 rtx result = NULL;
8245 int first_child, result_first_child, last_child;
8246 bool pending_recursion;
8247 rtx loc_from = NULL;
8248 struct elt_loc_list *cloc = NULL;
8249 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8250 int wanted_entryvals, found_entryvals = 0;
8252 /* Clear all backlinks pointing at this, so that we're not notified
8253 while we're active. */
8254 loc_exp_dep_clear (var);
8256 retry:
8257 if (var->onepart == ONEPART_VALUE)
8259 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8261 gcc_checking_assert (cselib_preserved_value_p (val));
8263 cloc = val->locs;
8266 first_child = result_first_child = last_child
8267 = elcd->expanding.length ();
8269 wanted_entryvals = found_entryvals;
8271 /* Attempt to expand each available location in turn. */
8272 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8273 loc || cloc; loc = next)
8275 result_first_child = last_child;
8277 if (!loc)
8279 loc_from = cloc->loc;
8280 next = loc;
8281 cloc = cloc->next;
8282 if (unsuitable_loc (loc_from))
8283 continue;
8285 else
8287 loc_from = loc->loc;
8288 next = loc->next;
8291 gcc_checking_assert (!unsuitable_loc (loc_from));
8293 elcd->depth.complexity = elcd->depth.entryvals = 0;
8294 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8295 vt_expand_loc_callback, data);
8296 last_child = elcd->expanding.length ();
8298 if (result)
8300 depth = elcd->depth;
8302 gcc_checking_assert (depth.complexity
8303 || result_first_child == last_child);
8305 if (last_child - result_first_child != 1)
8307 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8308 depth.entryvals++;
8309 depth.complexity++;
8312 if (depth.complexity <= EXPR_USE_DEPTH)
8314 if (depth.entryvals <= wanted_entryvals)
8315 break;
8316 else if (!found_entryvals || depth.entryvals < found_entryvals)
8317 found_entryvals = depth.entryvals;
8320 result = NULL;
8323 /* Set it up in case we leave the loop. */
8324 depth.complexity = depth.entryvals = 0;
8325 loc_from = NULL;
8326 result_first_child = first_child;
8329 if (!loc_from && wanted_entryvals < found_entryvals)
8331 /* We found entries with ENTRY_VALUEs and skipped them. Since
8332 we could not find any expansions without ENTRY_VALUEs, but we
8333 found at least one with them, go back and get an entry with
8334 the minimum ENTRY_VALUE count that we found. We could
8335 avoid looping, but since each sub-loc is already resolved,
8336 the re-expansion should be trivial. ??? Should we record all
8337 attempted locs as dependencies, so that we retry the
8338 expansion should any of them change, in the hope it can give
8339 us a new entry without an ENTRY_VALUE? */
8340 elcd->expanding.truncate (first_child);
8341 goto retry;
8344 /* Register all encountered dependencies as active. */
8345 pending_recursion = loc_exp_dep_set
8346 (var, result, elcd->expanding.address () + result_first_child,
8347 last_child - result_first_child, elcd->vars);
8349 elcd->expanding.truncate (first_child);
8351 /* Record where the expansion came from. */
8352 gcc_checking_assert (!result || !pending_recursion);
8353 VAR_LOC_FROM (var) = loc_from;
8354 VAR_LOC_DEPTH (var) = depth;
8356 gcc_checking_assert (!depth.complexity == !result);
8358 elcd->depth = update_depth (saved_depth, depth);
8360 /* Indicate whether any of the dependencies are pending recursion
8361 resolution. */
8362 if (pendrecp)
8363 *pendrecp = pending_recursion;
8365 if (!pendrecp || !pending_recursion)
8366 var->var_part[0].cur_loc = result;
8368 return result;
8371 /* Callback for cselib_expand_value, that looks for expressions
8372 holding the value in the var-tracking hash tables. Return X for
8373 standard processing, anything else is to be used as-is. */
8375 static rtx
8376 vt_expand_loc_callback (rtx x, bitmap regs,
8377 int max_depth ATTRIBUTE_UNUSED,
8378 void *data)
8380 struct expand_loc_callback_data *elcd
8381 = (struct expand_loc_callback_data *) data;
8382 decl_or_value dv;
8383 variable var;
8384 rtx result, subreg;
8385 bool pending_recursion = false;
8386 bool from_empty = false;
8388 switch (GET_CODE (x))
8390 case SUBREG:
8391 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8392 EXPR_DEPTH,
8393 vt_expand_loc_callback, data);
8395 if (!subreg)
8396 return NULL;
8398 result = simplify_gen_subreg (GET_MODE (x), subreg,
8399 GET_MODE (SUBREG_REG (x)),
8400 SUBREG_BYTE (x));
8402 /* Invalid SUBREGs are ok in debug info. ??? We could try
8403 alternate expansions for the VALUE as well. */
8404 if (!result)
8405 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8407 return result;
8409 case DEBUG_EXPR:
8410 case VALUE:
8411 dv = dv_from_rtx (x);
8412 break;
8414 default:
8415 return x;
8418 elcd->expanding.safe_push (x);
8420 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8421 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8423 if (NO_LOC_P (x))
8425 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8426 return NULL;
8429 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8431 if (!var)
8433 from_empty = true;
8434 var = variable_from_dropped (dv, INSERT);
8437 gcc_checking_assert (var);
8439 if (!dv_changed_p (dv))
8441 gcc_checking_assert (!NO_LOC_P (x));
8442 gcc_checking_assert (var->var_part[0].cur_loc);
8443 gcc_checking_assert (VAR_LOC_1PAUX (var));
8444 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8446 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8448 return var->var_part[0].cur_loc;
8451 VALUE_RECURSED_INTO (x) = true;
8452 /* This is tentative, but it makes some tests simpler. */
8453 NO_LOC_P (x) = true;
8455 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8457 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8459 if (pending_recursion)
8461 gcc_checking_assert (!result);
8462 elcd->pending.safe_push (x);
8464 else
8466 NO_LOC_P (x) = !result;
8467 VALUE_RECURSED_INTO (x) = false;
8468 set_dv_changed (dv, false);
8470 if (result)
8471 notify_dependents_of_resolved_value (var, elcd->vars);
8474 return result;
8477 /* While expanding variables, we may encounter recursion cycles
8478 because of mutual (possibly indirect) dependencies between two
8479 particular variables (or values), say A and B. If we're trying to
8480 expand A when we get to B, which in turn attempts to expand A, if
8481 we can't find any other expansion for B, we'll add B to this
8482 pending-recursion stack, and tentatively return NULL for its
8483 location. This tentative value will be used for any other
8484 occurrences of B, unless A gets some other location, in which case
8485 it will notify B that it is worth another try at computing a
8486 location for it, and it will use the location computed for A then.
8487 At the end of the expansion, the tentative NULL locations become
8488 final for all members of PENDING that didn't get a notification.
8489 This function performs this finalization of NULL locations. */
8491 static void
8492 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8494 while (!pending->is_empty ())
8496 rtx x = pending->pop ();
8497 decl_or_value dv;
8499 if (!VALUE_RECURSED_INTO (x))
8500 continue;
8502 gcc_checking_assert (NO_LOC_P (x));
8503 VALUE_RECURSED_INTO (x) = false;
8504 dv = dv_from_rtx (x);
8505 gcc_checking_assert (dv_changed_p (dv));
8506 set_dv_changed (dv, false);
8510 /* Initialize expand_loc_callback_data D with variable hash table V.
8511 It must be a macro because of alloca (vec stack). */
8512 #define INIT_ELCD(d, v) \
8513 do \
8515 (d).vars = (v); \
8516 (d).depth.complexity = (d).depth.entryvals = 0; \
8518 while (0)
8519 /* Finalize expand_loc_callback_data D, resolved to location L. */
8520 #define FINI_ELCD(d, l) \
8521 do \
8523 resolve_expansions_pending_recursion (&(d).pending); \
8524 (d).pending.release (); \
8525 (d).expanding.release (); \
8527 if ((l) && MEM_P (l)) \
8528 (l) = targetm.delegitimize_address (l); \
8530 while (0)
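/* INIT_ELCD and FINI_ELCD are meant to bracket a single expansion,
   as in vt_expand_loc and vt_expand_1pvar below.  */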
8532 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8533 equivalences in VARS, updating their CUR_LOCs in the process. */
8535 static rtx
8536 vt_expand_loc (rtx loc, variable_table_type *vars)
8538 struct expand_loc_callback_data data;
8539 rtx result;
8541 if (!MAY_HAVE_DEBUG_INSNS)
8542 return loc;
8544 INIT_ELCD (data, vars);
8546 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8547 vt_expand_loc_callback, &data);
8549 FINI_ELCD (data, result);
8551 return result;
8554 /* Expand the one-part VARiable to a location, using the equivalences
8555 in VARS, updating their CUR_LOCs in the process. */
8557 static rtx
8558 vt_expand_1pvar (variable var, variable_table_type *vars)
8560 struct expand_loc_callback_data data;
8561 rtx loc;
8563 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8565 if (!dv_changed_p (var->dv))
8566 return var->var_part[0].cur_loc;
8568 INIT_ELCD (data, vars);
8570 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8572 gcc_checking_assert (data.expanding.is_empty ());
8574 FINI_ELCD (data, loc);
8576 return loc;
8579 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8580 additional parameters: WHERE specifies whether the note shall be emitted
8581 before or after instruction INSN. */
8583 static int
8584 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)

8586 variable var = *varp;
8587 rtx_insn *insn = data->insn;
8588 enum emit_note_where where = data->where;
8589 variable_table_type *vars = data->vars;
8590 rtx_note *note;
8591 rtx note_vl;
8592 int i, j, n_var_parts;
8593 bool complete;
8594 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8595 HOST_WIDE_INT last_limit;
8596 tree type_size_unit;
8597 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8598 rtx loc[MAX_VAR_PARTS];
8599 tree decl;
8600 location_chain lc;
8602 gcc_checking_assert (var->onepart == NOT_ONEPART
8603 || var->onepart == ONEPART_VDECL);
8605 decl = dv_as_decl (var->dv);
8607 complete = true;
8608 last_limit = 0;
8609 n_var_parts = 0;
8610 if (!var->onepart)
8611 for (i = 0; i < var->n_var_parts; i++)
8612 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8613 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
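/* Walk the parts of the variable in offset order, expanding the
   current location of each part and merging adjacent parts into
   wider locations where possible.  */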
8614 for (i = 0; i < var->n_var_parts; i++)
8616 machine_mode mode, wider_mode;
8617 rtx loc2;
8618 HOST_WIDE_INT offset;
8620 if (i == 0 && var->onepart)
8622 gcc_checking_assert (var->n_var_parts == 1);
8623 offset = 0;
8624 initialized = VAR_INIT_STATUS_INITIALIZED;
8625 loc2 = vt_expand_1pvar (var, vars);
8627 else
8629 if (last_limit < VAR_PART_OFFSET (var, i))
8631 complete = false;
8632 break;
8634 else if (last_limit > VAR_PART_OFFSET (var, i))
8635 continue;
8636 offset = VAR_PART_OFFSET (var, i);
8637 loc2 = var->var_part[i].cur_loc;
8638 if (loc2 && GET_CODE (loc2) == MEM
8639 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8641 rtx depval = XEXP (loc2, 0);
8643 loc2 = vt_expand_loc (loc2, vars);
8645 if (loc2)
8646 loc_exp_insert_dep (var, depval, vars);
8648 if (!loc2)
8650 complete = false;
8651 continue;
8653 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8654 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8655 if (var->var_part[i].cur_loc == lc->loc)
8657 initialized = lc->init;
8658 break;
8660 gcc_assert (lc);
8663 offsets[n_var_parts] = offset;
8664 if (!loc2)
8666 complete = false;
8667 continue;
8669 loc[n_var_parts] = loc2;
8670 mode = GET_MODE (var->var_part[i].cur_loc);
8671 if (mode == VOIDmode && var->onepart)
8672 mode = DECL_MODE (decl);
8673 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8675 /* Attempt to merge adjacent registers or memory. */
8676 wider_mode = GET_MODE_WIDER_MODE (mode);
8677 for (j = i + 1; j < var->n_var_parts; j++)
8678 if (last_limit <= VAR_PART_OFFSET (var, j))
8679 break;
8680 if (j < var->n_var_parts
8681 && wider_mode != VOIDmode
8682 && var->var_part[j].cur_loc
8683 && mode == GET_MODE (var->var_part[j].cur_loc)
8684 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8685 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8686 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8687 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8689 rtx new_loc = NULL;
8691 if (REG_P (loc[n_var_parts])
8692 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8693 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8694 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8695 == REGNO (loc2))
8697 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8698 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8699 mode, 0);
8700 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8701 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8702 if (new_loc)
8704 if (!REG_P (new_loc)
8705 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8706 new_loc = NULL;
8707 else
8708 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8711 else if (MEM_P (loc[n_var_parts])
8712 && GET_CODE (XEXP (loc2, 0)) == PLUS
8713 && REG_P (XEXP (XEXP (loc2, 0), 0))
8714 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8716 if ((REG_P (XEXP (loc[n_var_parts], 0))
8717 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8718 XEXP (XEXP (loc2, 0), 0))
8719 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8720 == GET_MODE_SIZE (mode))
8721 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8722 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8723 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8724 XEXP (XEXP (loc2, 0), 0))
8725 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8726 + GET_MODE_SIZE (mode)
8727 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8728 new_loc = adjust_address_nv (loc[n_var_parts],
8729 wider_mode, 0);
8732 if (new_loc)
8734 loc[n_var_parts] = new_loc;
8735 mode = wider_mode;
8736 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8737 i = j;
8740 ++n_var_parts;
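/* The location is complete only if the parts seen above cover the
   whole of the declared type; otherwise a VAR_LOCATION with a null
   location is generated below, marking the variable as not
   available.  */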
8742 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8743 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8744 complete = false;
8746 if (! flag_var_tracking_uninit)
8747 initialized = VAR_INIT_STATUS_INITIALIZED;
8749 note_vl = NULL_RTX;
8750 if (!complete)
8751 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8752 else if (n_var_parts == 1)
8754 rtx expr_list;
8756 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8757 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8758 else
8759 expr_list = loc[0];
8761 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8763 else if (n_var_parts)
8765 rtx parallel;
8767 for (i = 0; i < n_var_parts; i++)
8768 loc[i]
8769 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8771 parallel = gen_rtx_PARALLEL (VOIDmode,
8772 gen_rtvec_v (n_var_parts, loc));
8773 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8774 parallel, initialized);
8777 if (where != EMIT_NOTE_BEFORE_INSN)
8779 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8780 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8781 NOTE_DURING_CALL_P (note) = true;
8783 else
8785 /* Make sure that the call related notes come first. */
8786 while (NEXT_INSN (insn)
8787 && NOTE_P (insn)
8788 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8789 && NOTE_DURING_CALL_P (insn))
8790 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8791 insn = NEXT_INSN (insn);
8792 if (NOTE_P (insn)
8793 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8794 && NOTE_DURING_CALL_P (insn))
8795 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8796 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8797 else
8798 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8800 NOTE_VAR_LOCATION (note) = note_vl;
8802 set_dv_changed (var->dv, false);
8803 gcc_assert (var->in_changed_variables);
8804 var->in_changed_variables = false;
8805 changed_variables->clear_slot (varp);
8807 /* Continue traversing the hash table. */
8808 return 1;
8811 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK (a
8812 stack of RTX values) entries that aren't user variables.  */
8814 static int
8815 var_track_values_to_stack (variable_def **slot,
8816 vec<rtx, va_heap> *changed_values_stack)
8818 variable var = *slot;
8820 if (var->onepart == ONEPART_VALUE)
8821 changed_values_stack->safe_push (dv_as_value (var->dv));
8822 else if (var->onepart == ONEPART_DEXPR)
8823 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8825 return 1;
8828 /* Remove from changed_variables the entry whose DV corresponds to
8829 value or debug_expr VAL. */
8830 static void
8831 remove_value_from_changed_variables (rtx val)
8833 decl_or_value dv = dv_from_rtx (val);
8834 variable_def **slot;
8835 variable var;
8837 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8838 NO_INSERT);
8839 var = *slot;
8840 var->in_changed_variables = false;
8841 changed_variables->clear_slot (slot);
8844 /* If VAL (a value or debug_expr) has backlinks to variables actively
8845 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8846 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8847 have dependencies of their own to notify. */
8849 static void
8850 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8851 vec<rtx, va_heap> *changed_values_stack)
8853 variable_def **slot;
8854 variable var;
8855 loc_exp_dep *led;
8856 decl_or_value dv = dv_from_rtx (val);
8858 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8859 NO_INSERT);
8860 if (!slot)
8861 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8862 if (!slot)
8863 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8864 NO_INSERT);
8865 var = *slot;
8867 while ((led = VAR_LOC_DEP_LST (var)))
8869 decl_or_value ldv = led->dv;
8870 variable ivar;
8872 /* Deactivate and remove the backlink, as it was "used up".  It
8873 makes no sense to attempt to notify the same entity again:
8874 either it will be recomputed and re-register an active
8875 dependency, or it will still have the changed mark. */
8876 if (led->next)
8877 led->next->pprev = led->pprev;
8878 if (led->pprev)
8879 *led->pprev = led->next;
8880 led->next = NULL;
8881 led->pprev = NULL;
8883 if (dv_changed_p (ldv))
8884 continue;
8886 switch (dv_onepart_p (ldv))
8888 case ONEPART_VALUE:
8889 case ONEPART_DEXPR:
8890 set_dv_changed (ldv, true);
8891 changed_values_stack->safe_push (dv_as_rtx (ldv));
8892 break;
8894 case ONEPART_VDECL:
8895 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8896 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8897 variable_was_changed (ivar, NULL);
8898 break;
8900 case NOT_ONEPART:
8901 pool_free (loc_exp_dep_pool, led);
8902 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8903 if (ivar)
8905 int i = ivar->n_var_parts;
8906 while (i--)
8908 rtx loc = ivar->var_part[i].cur_loc;
8910 if (loc && GET_CODE (loc) == MEM
8911 && XEXP (loc, 0) == val)
8913 variable_was_changed (ivar, NULL);
8914 break;
8918 break;
8920 default:
8921 gcc_unreachable ();
8926 /* Take out of changed_variables any entries that don't refer to user
8927 variables. Back-propagate change notifications from values and
8928 debug_exprs to their active dependencies in HTAB or in
8929 CHANGED_VARIABLES. */
8931 static void
8932 process_changed_values (variable_table_type *htab)
8934 int i, n;
8935 rtx val;
8936 auto_vec<rtx, 20> changed_values_stack;
8938 /* Move values from changed_variables to changed_values_stack. */
8939 changed_variables
8940 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8941 (&changed_values_stack);
8943 /* Back-propagate change notifications in values while popping
8944 them from the stack. */
8945 for (n = i = changed_values_stack.length ();
8946 i > 0; i = changed_values_stack.length ())
8948 val = changed_values_stack.pop ();
8949 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8951 /* This condition will hold when visiting each of the entries
8952 originally in changed_variables. We can't remove them
8953 earlier because this could drop the backlinks before we got a
8954 chance to use them. */
8955 if (i == n)
8957 remove_value_from_changed_variables (val);
8958 n--;
8963 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8964 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8965 the notes shall be emitted before or after instruction INSN.  */
8967 static void
8968 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8969 shared_hash vars)
8971 emit_note_data data;
8972 variable_table_type *htab = shared_hash_htab (vars);
8974 if (!changed_variables->elements ())
8975 return;
8977 if (MAY_HAVE_DEBUG_INSNS)
8978 process_changed_values (htab);
8980 data.insn = insn;
8981 data.where = where;
8982 data.vars = htab;
8984 changed_variables
8985 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8988 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8989 same variable in hash table DATA or is not there at all. */
8991 static int
8992 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
8994 variable old_var, new_var;
8996 old_var = *slot;
8997 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
8999 if (!new_var)
9001 /* Variable has disappeared. */
9002 variable empty_var = NULL;
9004 if (old_var->onepart == ONEPART_VALUE
9005 || old_var->onepart == ONEPART_DEXPR)
9007 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9008 if (empty_var)
9010 gcc_checking_assert (!empty_var->in_changed_variables);
9011 if (!VAR_LOC_1PAUX (old_var))
9013 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9014 VAR_LOC_1PAUX (empty_var) = NULL;
9016 else
9017 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9021 if (!empty_var)
9023 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9024 empty_var->dv = old_var->dv;
9025 empty_var->refcount = 0;
9026 empty_var->n_var_parts = 0;
9027 empty_var->onepart = old_var->onepart;
9028 empty_var->in_changed_variables = false;
9031 if (empty_var->onepart)
9033 /* Propagate the auxiliary data to (ultimately)
9034 changed_variables. */
9035 empty_var->var_part[0].loc_chain = NULL;
9036 empty_var->var_part[0].cur_loc = NULL;
9037 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9038 VAR_LOC_1PAUX (old_var) = NULL;
9040 variable_was_changed (empty_var, NULL);
9041 /* Continue traversing the hash table. */
9042 return 1;
9044 /* Update cur_loc and one-part auxiliary data, before new_var goes
9045 through variable_was_changed. */
9046 if (old_var != new_var && new_var->onepart)
9048 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9049 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9050 VAR_LOC_1PAUX (old_var) = NULL;
9051 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9053 if (variable_different_p (old_var, new_var))
9054 variable_was_changed (new_var, NULL);
9056 /* Continue traversing the hash table. */
9057 return 1;
9060 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9061 table DATA. */
9063 static int
9064 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9066 variable old_var, new_var;
9068 new_var = *slot;
9069 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9070 if (!old_var)
9072 int i;
9073 for (i = 0; i < new_var->n_var_parts; i++)
9074 new_var->var_part[i].cur_loc = NULL;
9075 variable_was_changed (new_var, NULL);
9078 /* Continue traversing the hash table. */
9079 return 1;
9082 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9083 NEW_SET. */
9085 static void
9086 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9087 dataflow_set *new_set)
9089 shared_hash_htab (old_set->vars)
9090 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9091 (shared_hash_htab (new_set->vars));
9092 shared_hash_htab (new_set->vars)
9093 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9094 (shared_hash_htab (old_set->vars));
9095 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9098 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9100 static rtx_insn *
9101 next_non_note_insn_var_location (rtx_insn *insn)
9103 while (insn)
9105 insn = NEXT_INSN (insn);
9106 if (insn == 0
9107 || !NOTE_P (insn)
9108 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9109 break;
9112 return insn;
9115 /* Emit the notes for changes of location parts in the basic block BB. */
9117 static void
9118 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9120 unsigned int i;
9121 micro_operation *mo;
9123 dataflow_set_clear (set);
9124 dataflow_set_copy (set, &VTI (bb)->in);
9126 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9128 rtx_insn *insn = mo->insn;
9129 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9131 switch (mo->type)
9133 case MO_CALL:
9134 dataflow_set_clear_at_call (set);
9135 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
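/* Rewrite the recorded locations of the call's arguments in terms
   of the current variable tables, dropping any argument whose
   location can no longer be expressed, then emit the
   NOTE_INSN_CALL_ARG_LOCATION note after the call.  */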
9137 rtx arguments = mo->u.loc, *p = &arguments;
9138 rtx_note *note;
9139 while (*p)
9141 XEXP (XEXP (*p, 0), 1)
9142 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9143 shared_hash_htab (set->vars));
9144 /* If expansion is successful, keep it in the list. */
9145 if (XEXP (XEXP (*p, 0), 1))
9146 p = &XEXP (*p, 1);
9147 /* Otherwise, if the following item is data_value for it,
9148 drop it too.  */
9149 else if (XEXP (*p, 1)
9150 && REG_P (XEXP (XEXP (*p, 0), 0))
9151 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9152 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9153 0))
9154 && REGNO (XEXP (XEXP (*p, 0), 0))
9155 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9156 0), 0)))
9157 *p = XEXP (XEXP (*p, 1), 1);
9158 /* Just drop this item. */
9159 else
9160 *p = XEXP (*p, 1);
9162 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9163 NOTE_VAR_LOCATION (note) = arguments;
9165 break;
9167 case MO_USE:
9169 rtx loc = mo->u.loc;
9171 if (REG_P (loc))
9172 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9173 else
9174 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9176 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9178 break;
9180 case MO_VAL_LOC:
9182 rtx loc = mo->u.loc;
9183 rtx val, vloc;
9184 tree var;
9186 if (GET_CODE (loc) == CONCAT)
9188 val = XEXP (loc, 0);
9189 vloc = XEXP (loc, 1);
9191 else
9193 val = NULL_RTX;
9194 vloc = loc;
9197 var = PAT_VAR_LOCATION_DECL (vloc);
9199 clobber_variable_part (set, NULL_RTX,
9200 dv_from_decl (var), 0, NULL_RTX);
9201 if (val)
9203 if (VAL_NEEDS_RESOLUTION (loc))
9204 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9205 set_variable_part (set, val, dv_from_decl (var), 0,
9206 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9207 INSERT);
9209 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9210 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9211 dv_from_decl (var), 0,
9212 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9213 INSERT);
9215 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9217 break;
9219 case MO_VAL_USE:
9221 rtx loc = mo->u.loc;
9222 rtx val, vloc, uloc;
9224 vloc = uloc = XEXP (loc, 1);
9225 val = XEXP (loc, 0);
9227 if (GET_CODE (val) == CONCAT)
9229 uloc = XEXP (val, 1);
9230 val = XEXP (val, 0);
9233 if (VAL_NEEDS_RESOLUTION (loc))
9234 val_resolve (set, val, vloc, insn);
9235 else
9236 val_store (set, val, uloc, insn, false);
9238 if (VAL_HOLDS_TRACK_EXPR (loc))
9240 if (GET_CODE (uloc) == REG)
9241 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9242 NULL);
9243 else if (GET_CODE (uloc) == MEM)
9244 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9245 NULL);
9248 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9250 break;
9252 case MO_VAL_SET:
9254 rtx loc = mo->u.loc;
9255 rtx val, vloc, uloc;
9256 rtx dstv, srcv;
9258 vloc = loc;
9259 uloc = XEXP (vloc, 1);
9260 val = XEXP (vloc, 0);
9261 vloc = uloc;
9263 if (GET_CODE (uloc) == SET)
9265 dstv = SET_DEST (uloc);
9266 srcv = SET_SRC (uloc);
9268 else
9270 dstv = uloc;
9271 srcv = NULL;
9274 if (GET_CODE (val) == CONCAT)
9276 dstv = vloc = XEXP (val, 1);
9277 val = XEXP (val, 0);
9280 if (GET_CODE (vloc) == SET)
9282 srcv = SET_SRC (vloc);
9284 gcc_assert (val != srcv);
9285 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9287 dstv = vloc = SET_DEST (vloc);
9289 if (VAL_NEEDS_RESOLUTION (loc))
9290 val_resolve (set, val, srcv, insn);
9292 else if (VAL_NEEDS_RESOLUTION (loc))
9294 gcc_assert (GET_CODE (uloc) == SET
9295 && GET_CODE (SET_SRC (uloc)) == REG);
9296 val_resolve (set, val, SET_SRC (uloc), insn);
9299 if (VAL_HOLDS_TRACK_EXPR (loc))
9301 if (VAL_EXPR_IS_CLOBBERED (loc))
9303 if (REG_P (uloc))
9304 var_reg_delete (set, uloc, true);
9305 else if (MEM_P (uloc))
9307 gcc_assert (MEM_P (dstv));
9308 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9309 var_mem_delete (set, dstv, true);
9312 else
9314 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9315 rtx src = NULL, dst = uloc;
9316 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9318 if (GET_CODE (uloc) == SET)
9320 src = SET_SRC (uloc);
9321 dst = SET_DEST (uloc);
9324 if (copied_p)
9326 status = find_src_status (set, src);
9328 src = find_src_set_src (set, src);
9331 if (REG_P (dst))
9332 var_reg_delete_and_set (set, dst, !copied_p,
9333 status, srcv);
9334 else if (MEM_P (dst))
9336 gcc_assert (MEM_P (dstv));
9337 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9338 var_mem_delete_and_set (set, dstv, !copied_p,
9339 status, srcv);
9343 else if (REG_P (uloc))
9344 var_regno_delete (set, REGNO (uloc));
9345 else if (MEM_P (uloc))
9347 gcc_checking_assert (GET_CODE (vloc) == MEM);
9348 gcc_checking_assert (vloc == dstv);
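/* gcc_checking_assert compiles to nothing unless checking is
   enabled, so the test below still guards release builds.  */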
9349 if (vloc != dstv)
9350 clobber_overlapping_mems (set, vloc);
9353 val_store (set, val, dstv, insn, true);
9355 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9356 set->vars);
9358 break;
9360 case MO_SET:
9362 rtx loc = mo->u.loc;
9363 rtx set_src = NULL;
9365 if (GET_CODE (loc) == SET)
9367 set_src = SET_SRC (loc);
9368 loc = SET_DEST (loc);
9371 if (REG_P (loc))
9372 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9373 set_src);
9374 else
9375 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9376 set_src);
9378 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9379 set->vars);
9381 break;
9383 case MO_COPY:
9385 rtx loc = mo->u.loc;
9386 enum var_init_status src_status;
9387 rtx set_src = NULL;
9389 if (GET_CODE (loc) == SET)
9391 set_src = SET_SRC (loc);
9392 loc = SET_DEST (loc);
9395 src_status = find_src_status (set, set_src);
9396 set_src = find_src_set_src (set, set_src);
9398 if (REG_P (loc))
9399 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9400 else
9401 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9403 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9404 set->vars);
9406 break;
9408 case MO_USE_NO_VAR:
9410 rtx loc = mo->u.loc;
9412 if (REG_P (loc))
9413 var_reg_delete (set, loc, false);
9414 else
9415 var_mem_delete (set, loc, false);
9417 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9419 break;
9421 case MO_CLOBBER:
9423 rtx loc = mo->u.loc;
9425 if (REG_P (loc))
9426 var_reg_delete (set, loc, true);
9427 else
9428 var_mem_delete (set, loc, true);
9430 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9431 set->vars);
9433 break;
9435 case MO_ADJUST:
9436 set->stack_adjust += mo->u.adjust;
9437 break;
9442 /* Emit notes for the whole function. */
9444 static void
9445 vt_emit_notes (void)
9447 basic_block bb;
9448 dataflow_set cur;
9450 gcc_assert (!changed_variables->elements ());
9452 /* Free memory occupied by the out hash tables, as they aren't used
9453 anymore. */
9454 FOR_EACH_BB_FN (bb, cfun)
9455 dataflow_set_clear (&VTI (bb)->out);
9457 /* Enable emitting notes by functions (mainly by set_variable_part and
9458 delete_variable_part). */
9459 emit_notes = true;
9461 if (MAY_HAVE_DEBUG_INSNS)
9463 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9464 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9465 sizeof (loc_exp_dep), 64);
9468 dataflow_set_init (&cur);
9470 FOR_EACH_BB_FN (bb, cfun)
9472 /* Emit the notes for changes of variable locations between two
9473 subsequent basic blocks. */
9474 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9476 if (MAY_HAVE_DEBUG_INSNS)
9477 local_get_addr_cache = new hash_map<rtx, rtx>;
9479 /* Emit the notes for the changes in the basic block itself. */
9480 emit_notes_in_bb (bb, &cur);
9482 if (MAY_HAVE_DEBUG_INSNS)
9483 delete local_get_addr_cache;
9484 local_get_addr_cache = NULL;
9486 /* Free memory occupied by the in hash table, we won't need it
9487 again. */
9488 dataflow_set_clear (&VTI (bb)->in);
9490 #ifdef ENABLE_CHECKING
9491 shared_hash_htab (cur.vars)
9492 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9493 (shared_hash_htab (empty_shared_hash));
9494 #endif
9495 dataflow_set_destroy (&cur);
9497 if (MAY_HAVE_DEBUG_INSNS)
9498 delete dropped_values;
9499 dropped_values = NULL;
9501 emit_notes = false;
9504 /* If there is a declaration and offset associated with register/memory RTL
9505 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9507 static bool
9508 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9510 if (REG_P (rtl))
9512 if (REG_ATTRS (rtl))
9514 *declp = REG_EXPR (rtl);
9515 *offsetp = REG_OFFSET (rtl);
9516 return true;
9519 else if (GET_CODE (rtl) == PARALLEL)
9521 tree decl = NULL_TREE;
9522 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9523 int len = XVECLEN (rtl, 0), i;
9525 for (i = 0; i < len; i++)
9527 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9528 if (!REG_P (reg) || !REG_ATTRS (reg))
9529 break;
9530 if (!decl)
9531 decl = REG_EXPR (reg);
9532 if (REG_EXPR (reg) != decl)
9533 break;
9534 if (REG_OFFSET (reg) < offset)
9535 offset = REG_OFFSET (reg);
9538 if (i == len)
9540 *declp = decl;
9541 *offsetp = offset;
9542 return true;
9545 else if (MEM_P (rtl))
9547 if (MEM_ATTRS (rtl))
9549 *declp = MEM_EXPR (rtl);
9550 *offsetp = INT_MEM_OFFSET (rtl);
9551 return true;
9554 return false;
9557 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9558 of VAL. */
9560 static void
9561 record_entry_value (cselib_val *val, rtx rtl)
9563 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9565 ENTRY_VALUE_EXP (ev) = rtl;
9567 cselib_add_permanent_equiv (val, ev, get_insns ());
9570 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9572 static void
9573 vt_add_function_parameter (tree parm)
9575 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9576 rtx incoming = DECL_INCOMING_RTL (parm);
9577 tree decl;
9578 machine_mode mode;
9579 HOST_WIDE_INT offset;
9580 dataflow_set *out;
9581 decl_or_value dv;
9583 if (TREE_CODE (parm) != PARM_DECL)
9584 return;
9586 if (!decl_rtl || !incoming)
9587 return;
9589 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9590 return;
9592 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9593 rewrite the incoming location of parameters passed on the stack
9594 into MEMs based on the argument pointer, so that incoming doesn't
9595 depend on a pseudo. */
9596 if (MEM_P (incoming)
9597 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9598 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9599 && XEXP (XEXP (incoming, 0), 0)
9600 == crtl->args.internal_arg_pointer
9601 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9603 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9604 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9605 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9606 incoming
9607 = replace_equiv_address_nv (incoming,
9608 plus_constant (Pmode,
9609 arg_pointer_rtx, off));
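/* For instance (a made-up layout, for illustration only): with a
   FIRST_PARM_OFFSET of 8, an incoming (mem (plus (reg pseudo)
   (const_int 16))) is rewritten here to (mem (plus (reg argp)
   (const_int 8))), removing the dependency on the pseudo.  */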
9612 #ifdef HAVE_window_save
9613 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9614 If the target machine has an explicit window save instruction, the
9615 actual entry value is the corresponding OUTGOING_REGNO instead. */
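/* This is notably the case for SPARC register windows, where an
   incoming %i register corresponds to the caller's %o register.  */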
9616 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9618 if (REG_P (incoming)
9619 && HARD_REGISTER_P (incoming)
9620 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9622 parm_reg_t p;
9623 p.incoming = incoming;
9624 incoming
9625 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9626 OUTGOING_REGNO (REGNO (incoming)), 0);
9627 p.outgoing = incoming;
9628 vec_safe_push (windowed_parm_regs, p);
9630 else if (GET_CODE (incoming) == PARALLEL)
9632 rtx outgoing
9633 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9634 int i;
9636 for (i = 0; i < XVECLEN (incoming, 0); i++)
9638 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9639 parm_reg_t p;
9640 p.incoming = reg;
9641 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9642 OUTGOING_REGNO (REGNO (reg)), 0);
9643 p.outgoing = reg;
9644 XVECEXP (outgoing, 0, i)
9645 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9646 XEXP (XVECEXP (incoming, 0, i), 1));
9647 vec_safe_push (windowed_parm_regs, p);
9650 incoming = outgoing;
9652 else if (MEM_P (incoming)
9653 && REG_P (XEXP (incoming, 0))
9654 && HARD_REGISTER_P (XEXP (incoming, 0)))
9656 rtx reg = XEXP (incoming, 0);
9657 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9659 parm_reg_t p;
9660 p.incoming = reg;
9661 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9662 p.outgoing = reg;
9663 vec_safe_push (windowed_parm_regs, p);
9664 incoming = replace_equiv_address_nv (incoming, reg);
9668 #endif
9670 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9672 if (MEM_P (incoming))
9674 /* This means the argument is passed by invisible reference.  */
9675 offset = 0;
9676 decl = parm;
9678 else
9680 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9681 return;
9682 offset += byte_lowpart_offset (GET_MODE (incoming),
9683 GET_MODE (decl_rtl));
9687 if (!decl)
9688 return;
9690 if (parm != decl)
9692 /* If that DECL_RTL wasn't a pseudo that got spilled to
9693 memory, bail out. Otherwise, the spill slot sharing code
9694 will force the memory to reference spill_slot_decl (%sfp),
9695 so we don't match above. That's ok, the pseudo must have
9696 referenced the entire parameter, so just reset OFFSET. */
9697 if (decl != get_spill_slot_decl (false))
9698 return;
9699 offset = 0;
9702 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9703 return;
9705 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9707 dv = dv_from_decl (parm);
9709 if (target_for_debug_bind (parm)
9710 /* We can't deal with these right now, because this kind of
9711 variable is single-part. ??? We could handle parallels
9712 that describe multiple locations for the same single
9713 value, but ATM we don't. */
9714 && GET_CODE (incoming) != PARALLEL)
9716 cselib_val *val;
9717 rtx lowpart;
9719 /* ??? We shouldn't ever hit this, but it may happen because
9720 arguments passed by invisible reference aren't dealt with
9721 above: incoming-rtl will have Pmode rather than the
9722 expected mode for the type. */
9723 if (offset)
9724 return;
9726 lowpart = var_lowpart (mode, incoming);
9727 if (!lowpart)
9728 return;
9730 val = cselib_lookup_from_insn (lowpart, mode, true,
9731 VOIDmode, get_insns ());
9733 /* ??? Float-typed values in memory are not handled by
9734 cselib. */
9735 if (val)
9737 preserve_value (val);
9738 set_variable_part (out, val->val_rtx, dv, offset,
9739 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9740 dv = dv_from_value (val->val_rtx);
9743 if (MEM_P (incoming))
9745 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9746 VOIDmode, get_insns ());
9747 if (val)
9749 preserve_value (val);
9750 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9755 if (REG_P (incoming))
9757 incoming = var_lowpart (mode, incoming);
9758 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9759 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9760 incoming);
9761 set_variable_part (out, incoming, dv, offset,
9762 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9763 if (dv_is_value_p (dv))
9765 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9766 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9767 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9769 machine_mode indmode
9770 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9771 rtx mem = gen_rtx_MEM (indmode, incoming);
9772 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9773 VOIDmode,
9774 get_insns ());
9775 if (val)
9777 preserve_value (val);
9778 record_entry_value (val, mem);
9779 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9780 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9785 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9787 int i;
9789 for (i = 0; i < XVECLEN (incoming, 0); i++)
9791 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9792 offset = REG_OFFSET (reg);
9793 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9794 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9795 set_variable_part (out, reg, dv, offset,
9796 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9799 else if (MEM_P (incoming))
9801 incoming = var_lowpart (mode, incoming);
9802 set_variable_part (out, incoming, dv, offset,
9803 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9807 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9809 static void
9810 vt_add_function_parameters (void)
9812 tree parm;
9814 for (parm = DECL_ARGUMENTS (current_function_decl);
9815 parm; parm = DECL_CHAIN (parm))
9816 vt_add_function_parameter (parm);
9818 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9820 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9822 if (TREE_CODE (vexpr) == INDIRECT_REF)
9823 vexpr = TREE_OPERAND (vexpr, 0);
9825 if (TREE_CODE (vexpr) == PARM_DECL
9826 && DECL_ARTIFICIAL (vexpr)
9827 && !DECL_IGNORED_P (vexpr)
9828 && DECL_NAMELESS (vexpr))
9829 vt_add_function_parameter (vexpr);
9833 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9834 ensure it isn't flushed during cselib_reset_table.
9835 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9836 has been eliminated. */
9838 static void
9839 vt_init_cfa_base (void)
9841 cselib_val *val;
9843 #ifdef FRAME_POINTER_CFA_OFFSET
9844 cfa_base_rtx = frame_pointer_rtx;
9845 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9846 #else
9847 cfa_base_rtx = arg_pointer_rtx;
9848 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9849 #endif
9850 if (cfa_base_rtx == hard_frame_pointer_rtx
9851 || !fixed_regs[REGNO (cfa_base_rtx)])
9853 cfa_base_rtx = NULL_RTX;
9854 return;
9856 if (!MAY_HAVE_DEBUG_INSNS)
9857 return;
9859 /* Tell alias analysis that cfa_base_rtx should share
9860 find_base_term value with stack pointer or hard frame pointer. */
9861 if (!frame_pointer_needed)
9862 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9863 else if (!crtl->stack_realign_tried)
9864 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9866 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9867 VOIDmode, get_insns ());
9868 preserve_value (val);
9869 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9872 /* Allocate and initialize the data structures for variable tracking
9873 and parse the RTL to get the micro operations. */
9875 static bool
9876 vt_initialize (void)
9878 basic_block bb;
9879 HOST_WIDE_INT fp_cfa_offset = -1;
9881 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9883 attrs_pool = create_alloc_pool ("attrs_def pool",
9884 sizeof (struct attrs_def), 1024);
9885 var_pool = create_alloc_pool ("variable_def pool",
9886 sizeof (struct variable_def)
9887 + (MAX_VAR_PARTS - 1)
9888 * sizeof (((variable)NULL)->var_part[0]), 64);
9889 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9890 sizeof (struct location_chain_def),
9891 1024);
9892 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9893 sizeof (struct shared_hash_def), 256);
9894 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9895 empty_shared_hash->refcount = 1;
9896 empty_shared_hash->htab = new variable_table_type (1);
9897 changed_variables = new variable_table_type (10);
9899 /* Init the IN and OUT sets. */
9900 FOR_ALL_BB_FN (bb, cfun)
9902 VTI (bb)->visited = false;
9903 VTI (bb)->flooded = false;
9904 dataflow_set_init (&VTI (bb)->in);
9905 dataflow_set_init (&VTI (bb)->out);
9906 VTI (bb)->permp = NULL;
9909 if (MAY_HAVE_DEBUG_INSNS)
9911 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9912 scratch_regs = BITMAP_ALLOC (NULL);
9913 valvar_pool = create_alloc_pool ("small variable_def pool",
9914 sizeof (struct variable_def), 256);
9915 preserved_values.create (256);
9916 global_get_addr_cache = new hash_map<rtx, rtx>;
9918 else
9920 scratch_regs = NULL;
9921 valvar_pool = NULL;
9922 global_get_addr_cache = NULL;
9925 if (MAY_HAVE_DEBUG_INSNS)
9927 rtx reg, expr;
9928 int ofst;
9929 cselib_val *val;
9931 #ifdef FRAME_POINTER_CFA_OFFSET
9932 reg = frame_pointer_rtx;
9933 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9934 #else
9935 reg = arg_pointer_rtx;
9936 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9937 #endif
9939 ofst -= INCOMING_FRAME_SP_OFFSET;
9941 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9942 VOIDmode, get_insns ());
9943 preserve_value (val);
9944 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9945 cselib_preserve_cfa_base_value (val, REGNO (reg));
9946 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9947 stack_pointer_rtx, -ofst);
9948 cselib_add_permanent_equiv (val, expr, get_insns ());
9950 if (ofst)
9952 val = cselib_lookup_from_insn (stack_pointer_rtx,
9953 GET_MODE (stack_pointer_rtx), 1,
9954 VOIDmode, get_insns ());
9955 preserve_value (val);
9956 expr = plus_constant (GET_MODE (reg), reg, ofst);
9957 cselib_add_permanent_equiv (val, expr, get_insns ());
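/* At this point the CFA base register and the stack pointer are
   recorded as permanent equivalences of each other (modulo OFST),
   so cselib can canonicalize addresses based on either one.  */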
9961 /* In order to factor out the adjustments made to the stack pointer or to
9962 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9963 instead of individual location lists, we're going to rewrite MEMs based
9964 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9965 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9966 resp. arg_pointer_rtx. We can do this either when there is no frame
9967 pointer in the function and stack adjustments are consistent for all
9968 basic blocks or when there is a frame pointer and no stack realignment.
9969 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9970 has been eliminated. */
9971 if (!frame_pointer_needed)
9973 rtx reg, elim;
9975 if (!vt_stack_adjustments ())
9976 return false;
9978 #ifdef FRAME_POINTER_CFA_OFFSET
9979 reg = frame_pointer_rtx;
9980 #else
9981 reg = arg_pointer_rtx;
9982 #endif
9983 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9984 if (elim != reg)
9986 if (GET_CODE (elim) == PLUS)
9987 elim = XEXP (elim, 0);
9988 if (elim == stack_pointer_rtx)
9989 vt_init_cfa_base ();
9992 else if (!crtl->stack_realign_tried)
9994 rtx reg, elim;
9996 #ifdef FRAME_POINTER_CFA_OFFSET
9997 reg = frame_pointer_rtx;
9998 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9999 #else
10000 reg = arg_pointer_rtx;
10001 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10002 #endif
10003 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10004 if (elim != reg)
10006 if (GET_CODE (elim) == PLUS)
10008 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10009 elim = XEXP (elim, 0);
10011 if (elim != hard_frame_pointer_rtx)
10012 fp_cfa_offset = -1;
10014 else
10015 fp_cfa_offset = -1;
10018 /* If the stack is realigned and a DRAP register is used, we're going to
10019 rewrite MEMs based on it representing incoming locations of parameters
10020 passed on the stack into MEMs based on the argument pointer. Although
10021 we aren't going to rewrite other MEMs, we still need to initialize the
10022 virtual CFA pointer in order to ensure that the argument pointer will
10023 be seen as a constant throughout the function.
10025 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10026 else if (stack_realign_drap)
10028 rtx reg, elim;
10030 #ifdef FRAME_POINTER_CFA_OFFSET
10031 reg = frame_pointer_rtx;
10032 #else
10033 reg = arg_pointer_rtx;
10034 #endif
10035 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10036 if (elim != reg)
10038 if (GET_CODE (elim) == PLUS)
10039 elim = XEXP (elim, 0);
10040 if (elim == hard_frame_pointer_rtx)
10041 vt_init_cfa_base ();
10045 hard_frame_pointer_adjustment = -1;
10047 vt_add_function_parameters ();
10049 FOR_EACH_BB_FN (bb, cfun)
10051 rtx_insn *insn;
10052 HOST_WIDE_INT pre, post = 0;
10053 basic_block first_bb, last_bb;
10055 if (MAY_HAVE_DEBUG_INSNS)
10057 cselib_record_sets_hook = add_with_sets;
10058 if (dump_file && (dump_flags & TDF_DETAILS))
10059 fprintf (dump_file, "first value: %i\n",
10060 cselib_get_next_uid ());
10063 first_bb = bb;
10064 for (;;)
10066 edge e;
10067 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10068 || ! single_pred_p (bb->next_bb))
10069 break;
10070 e = find_edge (bb, bb->next_bb);
10071 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10072 break;
10073 bb = bb->next_bb;
10075 last_bb = bb;
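/* [FIRST_BB, LAST_BB] now delimits a maximal chain of basic blocks
   linked by single-predecessor fallthru edges; scanning them as one
   unit lets the cselib state carry across the whole chain.  */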
10077 /* Add the micro-operations to the vector. */
10078 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10080 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10081 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10082 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10083 insn = NEXT_INSN (insn))
10085 if (INSN_P (insn))
10087 if (!frame_pointer_needed)
10089 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10090 if (pre)
10092 micro_operation mo;
10093 mo.type = MO_ADJUST;
10094 mo.u.adjust = pre;
10095 mo.insn = insn;
10096 if (dump_file && (dump_flags & TDF_DETAILS))
10097 log_op_type (PATTERN (insn), bb, insn,
10098 MO_ADJUST, dump_file);
10099 VTI (bb)->mos.safe_push (mo);
10100 VTI (bb)->out.stack_adjust += pre;
10104 cselib_hook_called = false;
10105 adjust_insn (bb, insn);
10106 if (MAY_HAVE_DEBUG_INSNS)
10108 if (CALL_P (insn))
10109 prepare_call_arguments (bb, insn);
10110 cselib_process_insn (insn);
10111 if (dump_file && (dump_flags & TDF_DETAILS))
10113 print_rtl_single (dump_file, insn);
10114 dump_cselib_table (dump_file);
10117 if (!cselib_hook_called)
10118 add_with_sets (insn, 0, 0);
10119 cancel_changes (0);
10121 if (!frame_pointer_needed && post)
10123 micro_operation mo;
10124 mo.type = MO_ADJUST;
10125 mo.u.adjust = post;
10126 mo.insn = insn;
10127 if (dump_file && (dump_flags & TDF_DETAILS))
10128 log_op_type (PATTERN (insn), bb, insn,
10129 MO_ADJUST, dump_file);
10130 VTI (bb)->mos.safe_push (mo);
10131 VTI (bb)->out.stack_adjust += post;
10134 if (fp_cfa_offset != -1
10135 && hard_frame_pointer_adjustment == -1
10136 && fp_setter_insn (insn))
10138 vt_init_cfa_base ();
10139 hard_frame_pointer_adjustment = fp_cfa_offset;
10140 /* Disassociate sp from fp now. */
10141 if (MAY_HAVE_DEBUG_INSNS)
10143 cselib_val *v;
10144 cselib_invalidate_rtx (stack_pointer_rtx);
10145 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10146 VOIDmode);
10147 if (v && !cselib_preserved_value_p (v))
10149 cselib_set_value_sp_based (v);
10150 preserve_value (v);
10156 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10159 bb = last_bb;
10161 if (MAY_HAVE_DEBUG_INSNS)
10163 cselib_preserve_only_values ();
10164 cselib_reset_table (cselib_get_next_uid ());
10165 cselib_record_sets_hook = NULL;
10169 hard_frame_pointer_adjustment = -1;
10170 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10171 cfa_base_rtx = NULL_RTX;
10172 return true;
10175 /* This is *not* reset after each function. It gives each
10176 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10177 a unique label number. */
10179 static int debug_label_num = 1;
10181 /* Get rid of all debug insns from the insn stream. */
10183 static void
10184 delete_debug_insns (void)
10186 basic_block bb;
10187 rtx_insn *insn, *next;
10189 if (!MAY_HAVE_DEBUG_INSNS)
10190 return;
10192 FOR_EACH_BB_FN (bb, cfun)
10194 FOR_BB_INSNS_SAFE (bb, insn, next)
10195 if (DEBUG_INSN_P (insn))
10197 tree decl = INSN_VAR_LOCATION_DECL (insn);
10198 if (TREE_CODE (decl) == LABEL_DECL
10199 && DECL_NAME (decl)
10200 && !DECL_RTL_SET_P (decl))
10202 PUT_CODE (insn, NOTE);
10203 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10204 NOTE_DELETED_LABEL_NAME (insn)
10205 = IDENTIFIER_POINTER (DECL_NAME (decl));
10206 SET_DECL_RTL (decl, insn);
10207 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10209 else
10210 delete_insn (insn);
10215 /* Run a fast, BB-local only version of var tracking, to take care of
10216 information that we don't do global analysis on, such that not all
10217 information is lost. If SKIPPED holds, we're skipping the global
10218 pass entirely, so we should try to use information it would have
10219 handled as well.  */
10221 static void
10222 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10224 /* ??? Just skip it all for now. */
10225 delete_debug_insns ();
10228 /* Free the data structures needed for variable tracking. */
10230 static void
10231 vt_finalize (void)
10233 basic_block bb;
10235 FOR_EACH_BB_FN (bb, cfun)
10237 VTI (bb)->mos.release ();
10240 FOR_ALL_BB_FN (bb, cfun)
10242 dataflow_set_destroy (&VTI (bb)->in);
10243 dataflow_set_destroy (&VTI (bb)->out);
10244 if (VTI (bb)->permp)
10246 dataflow_set_destroy (VTI (bb)->permp);
10247 XDELETE (VTI (bb)->permp);
10250 free_aux_for_blocks ();
10251 delete empty_shared_hash->htab;
10252 empty_shared_hash->htab = NULL;
10253 delete changed_variables;
10254 changed_variables = NULL;
10255 free_alloc_pool (attrs_pool);
10256 free_alloc_pool (var_pool);
10257 free_alloc_pool (loc_chain_pool);
10258 free_alloc_pool (shared_hash_pool);
10260 if (MAY_HAVE_DEBUG_INSNS)
10262 if (global_get_addr_cache)
10263 delete global_get_addr_cache;
10264 global_get_addr_cache = NULL;
10265 if (loc_exp_dep_pool)
10266 free_alloc_pool (loc_exp_dep_pool);
10267 loc_exp_dep_pool = NULL;
10268 free_alloc_pool (valvar_pool);
10269 preserved_values.release ();
10270 cselib_finish ();
10271 BITMAP_FREE (scratch_regs);
10272 scratch_regs = NULL;
10275 #ifdef HAVE_window_save
10276 vec_free (windowed_parm_regs);
10277 #endif
10279 if (vui_vec)
10280 XDELETEVEC (vui_vec);
10281 vui_vec = NULL;
10282 vui_allocated = 0;
10285 /* The entry point to variable tracking pass. */
10287 static inline unsigned int
10288 variable_tracking_main_1 (void)
10290 bool success;
10292 if (flag_var_tracking_assignments < 0)
10294 delete_debug_insns ();
10295 return 0;
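/* Crude cutoff for huge, densely connected functions, for which the
   iterative dataflow propagation would be too slow: punt to the
   BB-local-only variant.  */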
10298 if (n_basic_blocks_for_fn (cfun) > 500 &&
10299 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10301 vt_debug_insns_local (true);
10302 return 0;
10305 mark_dfs_back_edges ();
10306 if (!vt_initialize ())
10308 vt_finalize ();
10309 vt_debug_insns_local (true);
10310 return 0;
10313 success = vt_find_locations ();
10315 if (!success && flag_var_tracking_assignments > 0)
10317 vt_finalize ();
10319 delete_debug_insns ();
10321 /* This is later restored by our caller. */
10322 flag_var_tracking_assignments = 0;
10324 success = vt_initialize ();
10325 gcc_assert (success);
10327 success = vt_find_locations ();
10330 if (!success)
10332 vt_finalize ();
10333 vt_debug_insns_local (false);
10334 return 0;
10337 if (dump_file && (dump_flags & TDF_DETAILS))
10339 dump_dataflow_sets ();
10340 dump_reg_info (dump_file);
10341 dump_flow_info (dump_file, dump_flags);
10344 timevar_push (TV_VAR_TRACKING_EMIT);
10345 vt_emit_notes ();
10346 timevar_pop (TV_VAR_TRACKING_EMIT);
10348 vt_finalize ();
10349 vt_debug_insns_local (false);
10350 return 0;
10353 unsigned int
10354 variable_tracking_main (void)
10356 unsigned int ret;
10357 int save = flag_var_tracking_assignments;
10359 ret = variable_tracking_main_1 ();
10361 flag_var_tracking_assignments = save;
10363 return ret;
10366 namespace {
10368 const pass_data pass_data_variable_tracking =
10370 RTL_PASS, /* type */
10371 "vartrack", /* name */
10372 OPTGROUP_NONE, /* optinfo_flags */
10373 TV_VAR_TRACKING, /* tv_id */
10374 0, /* properties_required */
10375 0, /* properties_provided */
10376 0, /* properties_destroyed */
10377 0, /* todo_flags_start */
10378 0, /* todo_flags_finish */
10381 class pass_variable_tracking : public rtl_opt_pass
10383 public:
10384 pass_variable_tracking (gcc::context *ctxt)
10385 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10388 /* opt_pass methods: */
10389 virtual bool gate (function *)
10391 return (flag_var_tracking && !targetm.delay_vartrack);
10394 virtual unsigned int execute (function *)
10396 return variable_tracking_main ();
10399 }; // class pass_variable_tracking
10401 } // anon namespace
10403 rtl_opt_pass *
10404 make_pass_variable_tracking (gcc::context *ctxt)
10406 return new pass_variable_tracking (ctxt);