* config.gcc: Remove MASK_JUMP_IN_DELAY from target_cpu_default2.
[official-gcc.git] / gcc / var-tracking.c
blob52b7344458086b40bfa6c468b460e7f8e320eb72
1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and for each physical register a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
53 register.
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
59 register in CODE:
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such a note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "varasm.h"
95 #include "stor-layout.h"
96 #include "hash-map.h"
97 #include "hash-table.h"
98 #include "basic-block.h"
99 #include "tm_p.h"
100 #include "hard-reg-set.h"
101 #include "flags.h"
102 #include "insn-config.h"
103 #include "reload.h"
104 #include "sbitmap.h"
105 #include "alloc-pool.h"
106 #include "fibheap.h"
107 #include "regs.h"
108 #include "expr.h"
109 #include "tree-pass.h"
110 #include "bitmap.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "target.h"
115 #include "params.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "recog.h"
119 #include "tm_p.h"
120 #include "alias.h"
121 #include "rtl-iter.h"
123 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
124 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE, /* Use location which is associated with a value. */
137 MO_VAL_LOC, /* Use location which appears in a debug insn. */
138 MO_VAL_SET, /* Set location associated with a value. */
139 MO_SET, /* Set location. */
140 MO_COPY, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER, /* Clobber location. */
143 MO_CALL, /* Call insn. */
144 MO_ADJUST /* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name[] = {
150 "MO_USE",
151 "MO_USE_NO_VAR",
152 "MO_VAL_USE",
153 "MO_VAL_LOC",
154 "MO_VAL_SET",
155 "MO_SET",
156 "MO_COPY",
157 "MO_CLOBBER",
158 "MO_CALL",
159 "MO_ADJUST"
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
165 enum emit_note_where
167 EMIT_NOTE_BEFORE_INSN,
168 EMIT_NOTE_AFTER_INSN,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
182 and MO_CLOBBER. */
183 rtx_insn *insn;
185 union {
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
192 rtx loc;
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust;
196 } u;
197 } micro_operation;
200 /* A declaration of a variable, or an RTL value being handled like a
201 declaration. */
202 typedef void *decl_or_value;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
205 static inline bool
206 dv_is_decl_p (decl_or_value dv)
208 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
211 /* Return true if a decl_or_value is a VALUE rtl. */
212 static inline bool
213 dv_is_value_p (decl_or_value dv)
215 return dv && !dv_is_decl_p (dv);
218 /* Return the decl in the decl_or_value. */
219 static inline tree
220 dv_as_decl (decl_or_value dv)
222 gcc_checking_assert (dv_is_decl_p (dv));
223 return (tree) dv;
226 /* Return the value in the decl_or_value. */
227 static inline rtx
228 dv_as_value (decl_or_value dv)
230 gcc_checking_assert (dv_is_value_p (dv));
231 return (rtx)dv;
234 /* Return the opaque pointer in the decl_or_value. */
235 static inline void *
236 dv_as_opaque (decl_or_value dv)
238 return dv;
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 chain is the best data structure. */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def *next;
251 /* The rtx of register. */
252 rtx loc;
254 /* The declaration corresponding to LOC. */
255 decl_or_value dv;
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset;
259 } *attrs;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def *next;
267 /* The location (REG, MEM or VALUE). */
268 rtx loc;
270 /* The "value" stored in this location. */
271 rtx set_src;
273 /* Initialized? */
274 enum var_init_status init;
275 } *location_chain;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE' s linked-list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
284 decl_or_value dv;
285 /* The dependency VALUE or DECL_DEBUG. */
286 rtx value;
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s *next;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s **pprev;
292 } loc_exp_dep;
295 /* This data structure holds information about the depth of a variable
296 expansion. */
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
301 operand. */
302 int complexity;
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
305 int entryvals;
306 } expand_depth;
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
310 struct onepart_aux
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep *backlinks;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
324 rtx from;
325 /* The depth of the cur_loc expression. */
326 expand_depth depth;
327 /* Dependencies actively used when expand FROM into cur_loc. */
328 vec<loc_exp_dep, va_heap, vl_embed> deps;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain;
337 /* Location which was last emitted to location list. */
338 rtx cur_loc;
340 union variable_aux
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux *onepaux;
347 } aux;
348 } variable_part;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
354 variables. */
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
358 NOT_ONEPART = 0,
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
360 ONEPART_VDECL = 1,
361 /* A DEBUG_EXPR_DECL. */
362 ONEPART_DEXPR = 2,
363 /* A VALUE. */
364 ONEPART_VALUE = 3
365 } onepart_enum_t;
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
372 decl_or_value dv;
374 /* Reference count. */
375 int refcount;
377 /* Number of variable parts. */
378 char n_var_parts;
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables;
387 /* The variable parts. */
388 variable_part var_part[1];
389 } *variable;
390 typedef const struct variable_def *const_variable;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
401 variable. */
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
414 #else
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
417 #endif
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
421 allocated. */
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
424 : NULL)
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
427 : NULL)
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
432 : NULL)
436 typedef unsigned int dvuid;
438 /* Return the uid of DV. */
440 static inline dvuid
441 dv_uid (decl_or_value dv)
443 if (dv_is_value_p (dv))
444 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
445 else
446 return DECL_UID (dv_as_decl (dv));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid)
454 return uid;
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv)
462 return dv_uid2hash (dv_uid (dv));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type;
472 typedef void compare_type;
473 static inline hashval_t hash (const value_type *);
474 static inline bool equal (const value_type *, const compare_type *);
475 static inline void remove (value_type *);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
481 inline hashval_t
482 variable_hasher::hash (const value_type *v)
484 return dv_htab_hash (v->dv);
487 /* Compare the declaration of variable X with declaration Y. */
489 inline bool
490 variable_hasher::equal (const value_type *v, const compare_type *y)
492 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
494 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
499 inline void
500 variable_hasher::remove (value_type *var)
502 variable_htab_free (var);
505 typedef hash_table<variable_hasher> variable_table_type;
506 typedef variable_table_type::iterator variable_iterator_type;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
513 rtx_insn *insn;
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where;
518 /* The variables and values active at this point. */
519 variable_table_type *vars;
520 } emit_note_data;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
527 int refcount;
529 /* Actual hash table. */
530 variable_table_type *htab;
531 } *shared_hash;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs[FIRST_PSEUDO_REGISTER];
542 /* Variable locations. */
543 shared_hash vars;
545 /* Vars that is being traversed. */
546 shared_hash traversed_vars;
547 } dataflow_set;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec<micro_operation> mos;
556 /* The IN and OUT set for dataflow analysis. */
557 dataflow_set in;
558 dataflow_set out;
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
564 dataflow_set *permp;
566 /* Has the block been visited in DFS? */
567 bool visited;
569 /* Has the block been flooded in VTA? */
570 bool flooded;
572 } *variable_tracking_info;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool;
592 /* Changed variables, notes will be emitted for them. */
593 static variable_table_type *changed_variables;
595 /* Shall notes be emitted? */
596 static bool emit_notes;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type *dropped_values;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs = NULL;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg {
611 rtx outgoing;
612 rtx incoming;
613 } parm_reg_t;
616 /* Vector of windowed parameter registers, if any. */
617 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
618 #endif
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
625 HOST_WIDE_INT *);
626 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
627 HOST_WIDE_INT *);
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs *);
631 static void attrs_list_clear (attrs *);
632 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
633 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
634 static void attrs_list_copy (attrs *, attrs);
635 static void attrs_list_union (attrs *, attrs);
637 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
638 variable var, enum var_init_status);
639 static void vars_copy (variable_table_type *, variable_table_type *);
640 static tree var_debug_decl (tree);
641 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
642 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
643 enum var_init_status, rtx);
644 static void var_reg_delete (dataflow_set *, rtx, bool);
645 static void var_regno_delete (dataflow_set *, int);
646 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
647 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
648 enum var_init_status, rtx);
649 static void var_mem_delete (dataflow_set *, rtx, bool);
651 static void dataflow_set_init (dataflow_set *);
652 static void dataflow_set_clear (dataflow_set *);
653 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set *, dataflow_set *);
656 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
657 static bool canon_value_cmp (rtx, rtx);
658 static int loc_cmp (rtx, rtx);
659 static bool variable_part_different_p (variable_part *, variable_part *);
660 static bool onepart_variable_different_p (variable, variable);
661 static bool variable_different_p (variable, variable);
662 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
663 static void dataflow_set_destroy (dataflow_set *);
665 static bool contains_symbol_ref (rtx);
666 static bool track_expr_p (tree, bool);
667 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
668 static void add_uses_1 (rtx *, void *);
669 static void add_stores (rtx, const_rtx, void *);
670 static bool compute_bb_dataflow (basic_block);
671 static bool vt_find_locations (void);
673 static void dump_attrs_list (attrs);
674 static void dump_var (variable);
675 static void dump_vars (variable_table_type *);
676 static void dump_dataflow_set (dataflow_set *);
677 static void dump_dataflow_sets (void);
679 static void set_dv_changed (decl_or_value, bool);
680 static void variable_was_changed (variable, dataflow_set *);
681 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
682 decl_or_value, HOST_WIDE_INT,
683 enum var_init_status, rtx);
684 static void set_variable_part (dataflow_set *, rtx,
685 decl_or_value, HOST_WIDE_INT,
686 enum var_init_status, rtx, enum insert_option);
687 static variable_def **clobber_slot_part (dataflow_set *, rtx,
688 variable_def **, HOST_WIDE_INT, rtx);
689 static void clobber_variable_part (dataflow_set *, rtx,
690 decl_or_value, HOST_WIDE_INT, rtx);
691 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
692 HOST_WIDE_INT);
693 static void delete_variable_part (dataflow_set *, rtx,
694 decl_or_value, HOST_WIDE_INT);
695 static void emit_notes_in_bb (basic_block, dataflow_set *);
696 static void vt_emit_notes (void);
698 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
699 static void vt_add_function_parameters (void);
700 static bool vt_initialize (void);
701 static void vt_finalize (void);
703 /* Given a SET, calculate the amount of stack adjustment it contains
704 PRE- and POST-modifying stack pointer.
705 This function is similar to stack_adjust_offset. */
707 static void
708 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
709 HOST_WIDE_INT *post)
711 rtx src = SET_SRC (pattern);
712 rtx dest = SET_DEST (pattern);
713 enum rtx_code code;
715 if (dest == stack_pointer_rtx)
717 /* (set (reg sp) (plus (reg sp) (const_int))) */
718 code = GET_CODE (src);
719 if (! (code == PLUS || code == MINUS)
720 || XEXP (src, 0) != stack_pointer_rtx
721 || !CONST_INT_P (XEXP (src, 1)))
722 return;
724 if (code == MINUS)
725 *post += INTVAL (XEXP (src, 1));
726 else
727 *post -= INTVAL (XEXP (src, 1));
729 else if (MEM_P (dest))
731 /* (set (mem (pre_dec (reg sp))) (foo)) */
732 src = XEXP (dest, 0);
733 code = GET_CODE (src);
735 switch (code)
737 case PRE_MODIFY:
738 case POST_MODIFY:
739 if (XEXP (src, 0) == stack_pointer_rtx)
741 rtx val = XEXP (XEXP (src, 1), 1);
742 /* We handle only adjustments by constant amount. */
743 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
744 CONST_INT_P (val));
746 if (code == PRE_MODIFY)
747 *pre -= INTVAL (val);
748 else
749 *post -= INTVAL (val);
750 break;
752 return;
754 case PRE_DEC:
755 if (XEXP (src, 0) == stack_pointer_rtx)
757 *pre += GET_MODE_SIZE (GET_MODE (dest));
758 break;
760 return;
762 case POST_DEC:
763 if (XEXP (src, 0) == stack_pointer_rtx)
765 *post += GET_MODE_SIZE (GET_MODE (dest));
766 break;
768 return;
770 case PRE_INC:
771 if (XEXP (src, 0) == stack_pointer_rtx)
773 *pre -= GET_MODE_SIZE (GET_MODE (dest));
774 break;
776 return;
778 case POST_INC:
779 if (XEXP (src, 0) == stack_pointer_rtx)
781 *post -= GET_MODE_SIZE (GET_MODE (dest));
782 break;
784 return;
786 default:
787 return;
792 /* Given an INSN, calculate the amount of stack adjustment it contains
793 PRE- and POST-modifying stack pointer. */
795 static void
796 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
797 HOST_WIDE_INT *post)
799 rtx pattern;
801 *pre = 0;
802 *post = 0;
804 pattern = PATTERN (insn);
805 if (RTX_FRAME_RELATED_P (insn))
807 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
808 if (expr)
809 pattern = XEXP (expr, 0);
812 if (GET_CODE (pattern) == SET)
813 stack_adjust_offset_pre_post (pattern, pre, post);
814 else if (GET_CODE (pattern) == PARALLEL
815 || GET_CODE (pattern) == SEQUENCE)
817 int i;
819 /* There may be stack adjustments inside compound insns. Search
820 for them. */
821 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
822 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
823 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
827 /* Compute stack adjustments for all blocks by traversing DFS tree.
828 Return true when the adjustments on all incoming edges are consistent.
829 Heavily borrowed from pre_and_rev_post_order_compute. */
831 static bool
832 vt_stack_adjustments (void)
834 edge_iterator *stack;
835 int sp;
837 /* Initialize entry block. */
838 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
839 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust =
840 INCOMING_FRAME_SP_OFFSET;
841 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust =
842 INCOMING_FRAME_SP_OFFSET;
844 /* Allocate stack for back-tracking up CFG. */
845 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
846 sp = 0;
848 /* Push the first edge on to the stack. */
849 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
851 while (sp)
853 edge_iterator ei;
854 basic_block src;
855 basic_block dest;
857 /* Look at the edge on the top of the stack. */
858 ei = stack[sp - 1];
859 src = ei_edge (ei)->src;
860 dest = ei_edge (ei)->dest;
862 /* Check if the edge destination has been visited yet. */
863 if (!VTI (dest)->visited)
865 rtx_insn *insn;
866 HOST_WIDE_INT pre, post, offset;
867 VTI (dest)->visited = true;
868 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
870 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
871 for (insn = BB_HEAD (dest);
872 insn != NEXT_INSN (BB_END (dest));
873 insn = NEXT_INSN (insn))
874 if (INSN_P (insn))
876 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
877 offset += pre + post;
880 VTI (dest)->out.stack_adjust = offset;
882 if (EDGE_COUNT (dest->succs) > 0)
883 /* Since the DEST node has been visited for the first
884 time, check its successors. */
885 stack[sp++] = ei_start (dest->succs);
887 else
889 /* We can end up with different stack adjustments for the exit block
890 of a shrink-wrapped function if stack_adjust_offset_pre_post
891 doesn't understand the rtx pattern used to restore the stack
892 pointer in the epilogue. For example, on s390(x), the stack
893 pointer is often restored via a load-multiple instruction
894 and so no stack_adjust offset is recorded for it. This means
895 that the stack offset at the end of the epilogue block is the
896 the same as the offset before the epilogue, whereas other paths
897 to the exit block will have the correct stack_adjust.
899 It is safe to ignore these differences because (a) we never
900 use the stack_adjust for the exit block in this pass and
901 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
902 function are correct.
904 We must check whether the adjustments on other edges are
905 the same though. */
906 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
907 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
909 free (stack);
910 return false;
913 if (! ei_one_before_end_p (ei))
914 /* Go to the next edge. */
915 ei_next (&stack[sp - 1]);
916 else
917 /* Return to previous level if there are no more edges. */
918 sp--;
922 free (stack);
923 return true;
926 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
927 hard_frame_pointer_rtx is being mapped to it and offset for it. */
928 static rtx cfa_base_rtx;
929 static HOST_WIDE_INT cfa_base_offset;
931 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
932 or hard_frame_pointer_rtx. */
934 static inline rtx
935 compute_cfa_pointer (HOST_WIDE_INT adjustment)
937 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
940 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
941 or -1 if the replacement shouldn't be done. */
942 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
944 /* Data for adjust_mems callback. */
946 struct adjust_mem_data
948 bool store;
949 enum machine_mode mem_mode;
950 HOST_WIDE_INT stack_adjust;
951 rtx_expr_list *side_effects;
954 /* Helper for adjust_mems. Return true if X is suitable for
955 transformation of wider mode arithmetics to narrower mode. */
957 static bool
958 use_narrower_mode_test (rtx x, const_rtx subreg)
960 subrtx_var_iterator::array_type array;
961 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
963 rtx x = *iter;
964 if (CONSTANT_P (x))
965 iter.skip_subrtxes ();
966 else
967 switch (GET_CODE (x))
969 case REG:
970 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
971 return false;
972 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
973 subreg_lowpart_offset (GET_MODE (subreg),
974 GET_MODE (x))))
975 return false;
976 break;
977 case PLUS:
978 case MINUS:
979 case MULT:
980 break;
981 case ASHIFT:
982 iter.substitute (XEXP (x, 0));
983 break;
984 default:
985 return false;
988 return true;
991 /* Transform X into narrower mode MODE from wider mode WMODE. */
993 static rtx
994 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
996 rtx op0, op1;
997 if (CONSTANT_P (x))
998 return lowpart_subreg (mode, x, wmode);
999 switch (GET_CODE (x))
1001 case REG:
1002 return lowpart_subreg (mode, x, wmode);
1003 case PLUS:
1004 case MINUS:
1005 case MULT:
1006 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1007 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
1008 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
1009 case ASHIFT:
1010 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
1011 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
1012 default:
1013 gcc_unreachable ();
1017 /* Helper function for adjusting used MEMs. */
1019 static rtx
1020 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1022 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1023 rtx mem, addr = loc, tem;
1024 enum machine_mode mem_mode_save;
1025 bool store_save;
1026 switch (GET_CODE (loc))
1028 case REG:
1029 /* Don't do any sp or fp replacements outside of MEM addresses
1030 on the LHS. */
1031 if (amd->mem_mode == VOIDmode && amd->store)
1032 return loc;
1033 if (loc == stack_pointer_rtx
1034 && !frame_pointer_needed
1035 && cfa_base_rtx)
1036 return compute_cfa_pointer (amd->stack_adjust);
1037 else if (loc == hard_frame_pointer_rtx
1038 && frame_pointer_needed
1039 && hard_frame_pointer_adjustment != -1
1040 && cfa_base_rtx)
1041 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1042 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1043 return loc;
1044 case MEM:
1045 mem = loc;
1046 if (!amd->store)
1048 mem = targetm.delegitimize_address (mem);
1049 if (mem != loc && !MEM_P (mem))
1050 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1053 addr = XEXP (mem, 0);
1054 mem_mode_save = amd->mem_mode;
1055 amd->mem_mode = GET_MODE (mem);
1056 store_save = amd->store;
1057 amd->store = false;
1058 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1059 amd->store = store_save;
1060 amd->mem_mode = mem_mode_save;
1061 if (mem == loc)
1062 addr = targetm.delegitimize_address (addr);
1063 if (addr != XEXP (mem, 0))
1064 mem = replace_equiv_address_nv (mem, addr);
1065 if (!amd->store)
1066 mem = avoid_constant_pool_reference (mem);
1067 return mem;
1068 case PRE_INC:
1069 case PRE_DEC:
1070 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1071 gen_int_mode (GET_CODE (loc) == PRE_INC
1072 ? GET_MODE_SIZE (amd->mem_mode)
1073 : -GET_MODE_SIZE (amd->mem_mode),
1074 GET_MODE (loc)));
1075 case POST_INC:
1076 case POST_DEC:
1077 if (addr == loc)
1078 addr = XEXP (loc, 0);
1079 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1080 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1081 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1082 gen_int_mode ((GET_CODE (loc) == PRE_INC
1083 || GET_CODE (loc) == POST_INC)
1084 ? GET_MODE_SIZE (amd->mem_mode)
1085 : -GET_MODE_SIZE (amd->mem_mode),
1086 GET_MODE (loc)));
1087 store_save = amd->store;
1088 amd->store = false;
1089 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
1090 amd->store = store_save;
1091 amd->side_effects = alloc_EXPR_LIST (0,
1092 gen_rtx_SET (VOIDmode,
1093 XEXP (loc, 0), tem),
1094 amd->side_effects);
1095 return addr;
1096 case PRE_MODIFY:
1097 addr = XEXP (loc, 1);
1098 case POST_MODIFY:
1099 if (addr == loc)
1100 addr = XEXP (loc, 0);
1101 gcc_assert (amd->mem_mode != VOIDmode);
1102 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1103 store_save = amd->store;
1104 amd->store = false;
1105 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
1106 adjust_mems, data);
1107 amd->store = store_save;
1108 amd->side_effects = alloc_EXPR_LIST (0,
1109 gen_rtx_SET (VOIDmode,
1110 XEXP (loc, 0), tem),
1111 amd->side_effects);
1112 return addr;
1113 case SUBREG:
1114 /* First try without delegitimization of whole MEMs and
1115 avoid_constant_pool_reference, which is more likely to succeed. */
1116 store_save = amd->store;
1117 amd->store = true;
1118 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1119 data);
1120 amd->store = store_save;
1121 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1122 if (mem == SUBREG_REG (loc))
1124 tem = loc;
1125 goto finish_subreg;
1127 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1128 GET_MODE (SUBREG_REG (loc)),
1129 SUBREG_BYTE (loc));
1130 if (tem)
1131 goto finish_subreg;
1132 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1133 GET_MODE (SUBREG_REG (loc)),
1134 SUBREG_BYTE (loc));
1135 if (tem == NULL_RTX)
1136 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1137 finish_subreg:
1138 if (MAY_HAVE_DEBUG_INSNS
1139 && GET_CODE (tem) == SUBREG
1140 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1141 || GET_CODE (SUBREG_REG (tem)) == MINUS
1142 || GET_CODE (SUBREG_REG (tem)) == MULT
1143 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1144 && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1145 || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
1146 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1147 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
1148 && GET_MODE_PRECISION (GET_MODE (tem))
1149 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
1150 && subreg_lowpart_p (tem)
1151 && use_narrower_mode_test (SUBREG_REG (tem), tem))
1152 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1153 GET_MODE (SUBREG_REG (tem)));
1154 return tem;
1155 case ASM_OPERANDS:
1156 /* Don't do any replacements in second and following
1157 ASM_OPERANDS of inline-asm with multiple sets.
1158 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1159 and ASM_OPERANDS_LABEL_VEC need to be equal between
1160 all the ASM_OPERANDs in the insn and adjust_insn will
1161 fix this up. */
1162 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1163 return loc;
1164 break;
1165 default:
1166 break;
1168 return NULL_RTX;
1171 /* Helper function for replacement of uses. */
1173 static void
1174 adjust_mem_uses (rtx *x, void *data)
1176 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1177 if (new_x != *x)
1178 validate_change (NULL_RTX, x, new_x, true);
1181 /* Helper function for replacement of stores. */
1183 static void
1184 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1186 if (MEM_P (loc))
1188 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1189 adjust_mems, data);
1190 if (new_dest != SET_DEST (expr))
1192 rtx xexpr = CONST_CAST_RTX (expr);
1193 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1198 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1199 replace them with their value in the insn and add the side-effects
1200 as other sets to the insn. */
1202 static void
1203 adjust_insn (basic_block bb, rtx_insn *insn)
1205 struct adjust_mem_data amd;
1206 rtx set;
1208 #ifdef HAVE_window_save
1209 /* If the target machine has an explicit window save instruction, the
1210 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1211 if (RTX_FRAME_RELATED_P (insn)
1212 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1214 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1215 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1216 parm_reg_t *p;
1218 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1220 XVECEXP (rtl, 0, i * 2)
1221 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1222 /* Do not clobber the attached DECL, but only the REG. */
1223 XVECEXP (rtl, 0, i * 2 + 1)
1224 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1225 gen_raw_REG (GET_MODE (p->outgoing),
1226 REGNO (p->outgoing)));
1229 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1230 return;
1232 #endif
1234 amd.mem_mode = VOIDmode;
1235 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1236 amd.side_effects = NULL;
1238 amd.store = true;
1239 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1241 amd.store = false;
1242 if (GET_CODE (PATTERN (insn)) == PARALLEL
1243 && asm_noperands (PATTERN (insn)) > 0
1244 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1246 rtx body, set0;
1247 int i;
1249 /* inline-asm with multiple sets is tiny bit more complicated,
1250 because the 3 vectors in ASM_OPERANDS need to be shared between
1251 all ASM_OPERANDS in the instruction. adjust_mems will
1252 not touch ASM_OPERANDS other than the first one, asm_noperands
1253 test above needs to be called before that (otherwise it would fail)
1254 and afterwards this code fixes it up. */
1255 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1256 body = PATTERN (insn);
1257 set0 = XVECEXP (body, 0, 0);
1258 gcc_checking_assert (GET_CODE (set0) == SET
1259 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1260 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1261 for (i = 1; i < XVECLEN (body, 0); i++)
1262 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1263 break;
1264 else
1266 set = XVECEXP (body, 0, i);
1267 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1268 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1269 == i);
1270 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1271 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1272 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1273 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1274 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1275 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1277 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1278 ASM_OPERANDS_INPUT_VEC (newsrc)
1279 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1280 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1281 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1282 ASM_OPERANDS_LABEL_VEC (newsrc)
1283 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1284 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1288 else
1289 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1291 /* For read-only MEMs containing some constant, prefer those
1292 constants. */
1293 set = single_set (insn);
1294 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1296 rtx note = find_reg_equal_equiv_note (insn);
1298 if (note && CONSTANT_P (XEXP (note, 0)))
1299 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1302 if (amd.side_effects)
1304 rtx *pat, new_pat, s;
1305 int i, oldn, newn;
1307 pat = &PATTERN (insn);
1308 if (GET_CODE (*pat) == COND_EXEC)
1309 pat = &COND_EXEC_CODE (*pat);
1310 if (GET_CODE (*pat) == PARALLEL)
1311 oldn = XVECLEN (*pat, 0);
1312 else
1313 oldn = 1;
1314 for (s = amd.side_effects, newn = 0; s; newn++)
1315 s = XEXP (s, 1);
1316 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1317 if (GET_CODE (*pat) == PARALLEL)
1318 for (i = 0; i < oldn; i++)
1319 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1320 else
1321 XVECEXP (new_pat, 0, 0) = *pat;
1322 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1323 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1324 free_EXPR_LIST_list (&amd.side_effects);
1325 validate_change (NULL_RTX, pat, new_pat, true);
1329 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1330 static inline rtx
1331 dv_as_rtx (decl_or_value dv)
1333 tree decl;
1335 if (dv_is_value_p (dv))
1336 return dv_as_value (dv);
1338 decl = dv_as_decl (dv);
1340 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1341 return DECL_RTL_KNOWN_SET (decl);
1344 /* Return nonzero if a decl_or_value must not have more than one
1345 variable part. The returned value discriminates among various
1346 kinds of one-part DVs ccording to enum onepart_enum. */
1347 static inline onepart_enum_t
1348 dv_onepart_p (decl_or_value dv)
1350 tree decl;
1352 if (!MAY_HAVE_DEBUG_INSNS)
1353 return NOT_ONEPART;
1355 if (dv_is_value_p (dv))
1356 return ONEPART_VALUE;
1358 decl = dv_as_decl (dv);
1360 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1361 return ONEPART_DEXPR;
1363 if (target_for_debug_bind (decl) != NULL_TREE)
1364 return ONEPART_VDECL;
1366 return NOT_ONEPART;
1369 /* Return the variable pool to be used for a dv of type ONEPART. */
1370 static inline alloc_pool
1371 onepart_pool (onepart_enum_t onepart)
1373 return onepart ? valvar_pool : var_pool;
1376 /* Build a decl_or_value out of a decl. */
1377 static inline decl_or_value
1378 dv_from_decl (tree decl)
1380 decl_or_value dv;
1381 dv = decl;
1382 gcc_checking_assert (dv_is_decl_p (dv));
1383 return dv;
1386 /* Build a decl_or_value out of a value. */
1387 static inline decl_or_value
1388 dv_from_value (rtx value)
1390 decl_or_value dv;
1391 dv = value;
1392 gcc_checking_assert (dv_is_value_p (dv));
1393 return dv;
1396 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1397 static inline decl_or_value
1398 dv_from_rtx (rtx x)
1400 decl_or_value dv;
1402 switch (GET_CODE (x))
1404 case DEBUG_EXPR:
1405 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1406 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1407 break;
1409 case VALUE:
1410 dv = dv_from_value (x);
1411 break;
1413 default:
1414 gcc_unreachable ();
1417 return dv;
1420 extern void debug_dv (decl_or_value dv);
1422 DEBUG_FUNCTION void
1423 debug_dv (decl_or_value dv)
1425 if (dv_is_value_p (dv))
1426 debug_rtx (dv_as_value (dv));
1427 else
1428 debug_generic_stmt (dv_as_decl (dv));
1431 static void loc_exp_dep_clear (variable var);
1433 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1435 static void
1436 variable_htab_free (void *elem)
1438 int i;
1439 variable var = (variable) elem;
1440 location_chain node, next;
1442 gcc_checking_assert (var->refcount > 0);
1444 var->refcount--;
1445 if (var->refcount > 0)
1446 return;
1448 for (i = 0; i < var->n_var_parts; i++)
1450 for (node = var->var_part[i].loc_chain; node; node = next)
1452 next = node->next;
1453 pool_free (loc_chain_pool, node);
1455 var->var_part[i].loc_chain = NULL;
1457 if (var->onepart && VAR_LOC_1PAUX (var))
1459 loc_exp_dep_clear (var);
1460 if (VAR_LOC_DEP_LST (var))
1461 VAR_LOC_DEP_LST (var)->pprev = NULL;
1462 XDELETE (VAR_LOC_1PAUX (var));
1463 /* These may be reused across functions, so reset
1464 e.g. NO_LOC_P. */
1465 if (var->onepart == ONEPART_DEXPR)
1466 set_dv_changed (var->dv, true);
1468 pool_free (onepart_pool (var->onepart), var);
1471 /* Initialize the set (array) SET of attrs to empty lists. */
1473 static void
1474 init_attrs_list_set (attrs *set)
1476 int i;
1478 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1479 set[i] = NULL;
1482 /* Make the list *LISTP empty. */
1484 static void
1485 attrs_list_clear (attrs *listp)
1487 attrs list, next;
1489 for (list = *listp; list; list = next)
1491 next = list->next;
1492 pool_free (attrs_pool, list);
1494 *listp = NULL;
1497 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1499 static attrs
1500 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1502 for (; list; list = list->next)
1503 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1504 return list;
1505 return NULL;
1508 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1510 static void
1511 attrs_list_insert (attrs *listp, decl_or_value dv,
1512 HOST_WIDE_INT offset, rtx loc)
1514 attrs list;
1516 list = (attrs) pool_alloc (attrs_pool);
1517 list->loc = loc;
1518 list->dv = dv;
1519 list->offset = offset;
1520 list->next = *listp;
1521 *listp = list;
1524 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1526 static void
1527 attrs_list_copy (attrs *dstp, attrs src)
1529 attrs n;
1531 attrs_list_clear (dstp);
1532 for (; src; src = src->next)
1534 n = (attrs) pool_alloc (attrs_pool);
1535 n->loc = src->loc;
1536 n->dv = src->dv;
1537 n->offset = src->offset;
1538 n->next = *dstp;
1539 *dstp = n;
1543 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1545 static void
1546 attrs_list_union (attrs *dstp, attrs src)
1548 for (; src; src = src->next)
1550 if (!attrs_list_member (*dstp, src->dv, src->offset))
1551 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1555 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1556 *DSTP. */
1558 static void
1559 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1561 gcc_assert (!*dstp);
1562 for (; src; src = src->next)
1564 if (!dv_onepart_p (src->dv))
1565 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1567 for (src = src2; src; src = src->next)
1569 if (!dv_onepart_p (src->dv)
1570 && !attrs_list_member (*dstp, src->dv, src->offset))
1571 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1575 /* Shared hashtable support. */
1577 /* Return true if VARS is shared. */
1579 static inline bool
1580 shared_hash_shared (shared_hash vars)
1582 return vars->refcount > 1;
1585 /* Return the hash table for VARS. */
1587 static inline variable_table_type *
1588 shared_hash_htab (shared_hash vars)
1590 return vars->htab;
1593 /* Return true if VAR is shared, or maybe because VARS is shared. */
1595 static inline bool
1596 shared_var_p (variable var, shared_hash vars)
1598 /* Don't count an entry in the changed_variables table as a duplicate. */
1599 return ((var->refcount > 1 + (int) var->in_changed_variables)
1600 || shared_hash_shared (vars));
1603 /* Copy variables into a new hash table. */
1605 static shared_hash
1606 shared_hash_unshare (shared_hash vars)
1608 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1609 gcc_assert (vars->refcount > 1);
1610 new_vars->refcount = 1;
1611 new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
1612 vars_copy (new_vars->htab, vars->htab);
1613 vars->refcount--;
1614 return new_vars;
1617 /* Increment reference counter on VARS and return it. */
1619 static inline shared_hash
1620 shared_hash_copy (shared_hash vars)
1622 vars->refcount++;
1623 return vars;
1626 /* Decrement reference counter and destroy hash table if not shared
1627 anymore. */
1629 static void
1630 shared_hash_destroy (shared_hash vars)
1632 gcc_checking_assert (vars->refcount > 0);
1633 if (--vars->refcount == 0)
1635 delete vars->htab;
1636 pool_free (shared_hash_pool, vars);
1640 /* Unshare *PVARS if shared and return slot for DV. If INS is
1641 INSERT, insert it if not already present. */
1643 static inline variable_def **
1644 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1645 hashval_t dvhash, enum insert_option ins)
1647 if (shared_hash_shared (*pvars))
1648 *pvars = shared_hash_unshare (*pvars);
1649 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
1652 static inline variable_def **
1653 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1654 enum insert_option ins)
1656 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1659 /* Return slot for DV, if it is already present in the hash table.
1660 If it is not present, insert it only VARS is not shared, otherwise
1661 return NULL. */
1663 static inline variable_def **
1664 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1666 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
1667 shared_hash_shared (vars)
1668 ? NO_INSERT : INSERT);
1671 static inline variable_def **
1672 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1674 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1677 /* Return slot for DV only if it is already present in the hash table. */
1679 static inline variable_def **
1680 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1681 hashval_t dvhash)
1683 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
1686 static inline variable_def **
1687 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1689 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1692 /* Return variable for DV or NULL if not already present in the hash
1693 table. */
1695 static inline variable
1696 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1698 return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
1701 static inline variable
1702 shared_hash_find (shared_hash vars, decl_or_value dv)
1704 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1707 /* Return true if TVAL is better than CVAL as a canonival value. We
1708 choose lowest-numbered VALUEs, using the RTX address as a
1709 tie-breaker. The idea is to arrange them into a star topology,
1710 such that all of them are at most one step away from the canonical
1711 value, and the canonical value has backlinks to all of them, in
1712 addition to all the actual locations. We don't enforce this
1713 topology throughout the entire dataflow analysis, though.
1716 static inline bool
1717 canon_value_cmp (rtx tval, rtx cval)
1719 return !cval
1720 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1723 static bool dst_can_be_shared;
1725 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1727 static variable_def **
1728 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1729 enum var_init_status initialized)
1731 variable new_var;
1732 int i;
1734 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1735 new_var->dv = var->dv;
1736 new_var->refcount = 1;
1737 var->refcount--;
1738 new_var->n_var_parts = var->n_var_parts;
1739 new_var->onepart = var->onepart;
1740 new_var->in_changed_variables = false;
1742 if (! flag_var_tracking_uninit)
1743 initialized = VAR_INIT_STATUS_INITIALIZED;
1745 for (i = 0; i < var->n_var_parts; i++)
1747 location_chain node;
1748 location_chain *nextp;
1750 if (i == 0 && var->onepart)
1752 /* One-part auxiliary data is only used while emitting
1753 notes, so propagate it to the new variable in the active
1754 dataflow set. If we're not emitting notes, this will be
1755 a no-op. */
1756 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1757 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1758 VAR_LOC_1PAUX (var) = NULL;
1760 else
1761 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1762 nextp = &new_var->var_part[i].loc_chain;
1763 for (node = var->var_part[i].loc_chain; node; node = node->next)
1765 location_chain new_lc;
1767 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1768 new_lc->next = NULL;
1769 if (node->init > initialized)
1770 new_lc->init = node->init;
1771 else
1772 new_lc->init = initialized;
1773 if (node->set_src && !(MEM_P (node->set_src)))
1774 new_lc->set_src = node->set_src;
1775 else
1776 new_lc->set_src = NULL;
1777 new_lc->loc = node->loc;
1779 *nextp = new_lc;
1780 nextp = &new_lc->next;
1783 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1786 dst_can_be_shared = false;
1787 if (shared_hash_shared (set->vars))
1788 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1789 else if (set->traversed_vars && set->vars != set->traversed_vars)
1790 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1791 *slot = new_var;
1792 if (var->in_changed_variables)
1794 variable_def **cslot
1795 = changed_variables->find_slot_with_hash (var->dv,
1796 dv_htab_hash (var->dv),
1797 NO_INSERT);
1798 gcc_assert (*cslot == (void *) var);
1799 var->in_changed_variables = false;
1800 variable_htab_free (var);
1801 *cslot = new_var;
1802 new_var->in_changed_variables = true;
1804 return slot;
1807 /* Copy all variables from hash table SRC to hash table DST. */
1809 static void
1810 vars_copy (variable_table_type *dst, variable_table_type *src)
1812 variable_iterator_type hi;
1813 variable var;
1815 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1817 variable_def **dstp;
1818 var->refcount++;
1819 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1820 INSERT);
1821 *dstp = var;
1825 /* Map a decl to its main debug decl. */
1827 static inline tree
1828 var_debug_decl (tree decl)
1830 if (decl && TREE_CODE (decl) == VAR_DECL
1831 && DECL_HAS_DEBUG_EXPR_P (decl))
1833 tree debugdecl = DECL_DEBUG_EXPR (decl);
1834 if (DECL_P (debugdecl))
1835 decl = debugdecl;
1838 return decl;
1841 /* Set the register LOC to contain DV, OFFSET. */
1843 static void
1844 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1845 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1846 enum insert_option iopt)
1848 attrs node;
1849 bool decl_p = dv_is_decl_p (dv);
1851 if (decl_p)
1852 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1854 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1855 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1856 && node->offset == offset)
1857 break;
1858 if (!node)
1859 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1860 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1863 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1865 static void
1866 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1867 rtx set_src)
1869 tree decl = REG_EXPR (loc);
1870 HOST_WIDE_INT offset = REG_OFFSET (loc);
1872 var_reg_decl_set (set, loc, initialized,
1873 dv_from_decl (decl), offset, set_src, INSERT);
1876 static enum var_init_status
1877 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1879 variable var;
1880 int i;
1881 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1883 if (! flag_var_tracking_uninit)
1884 return VAR_INIT_STATUS_INITIALIZED;
1886 var = shared_hash_find (set->vars, dv);
1887 if (var)
1889 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1891 location_chain nextp;
1892 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1893 if (rtx_equal_p (nextp->loc, loc))
1895 ret_val = nextp->init;
1896 break;
1901 return ret_val;
1904 /* Delete current content of register LOC in dataflow set SET and set
1905 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1906 MODIFY is true, any other live copies of the same variable part are
1907 also deleted from the dataflow set, otherwise the variable part is
1908 assumed to be copied from another location holding the same
1909 part. */
1911 static void
1912 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1913 enum var_init_status initialized, rtx set_src)
1915 tree decl = REG_EXPR (loc);
1916 HOST_WIDE_INT offset = REG_OFFSET (loc);
1917 attrs node, next;
1918 attrs *nextp;
1920 decl = var_debug_decl (decl);
1922 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1923 initialized = get_init_value (set, loc, dv_from_decl (decl));
1925 nextp = &set->regs[REGNO (loc)];
1926 for (node = *nextp; node; node = next)
1928 next = node->next;
1929 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1931 delete_variable_part (set, node->loc, node->dv, node->offset);
1932 pool_free (attrs_pool, node);
1933 *nextp = next;
1935 else
1937 node->loc = loc;
1938 nextp = &node->next;
1941 if (modify)
1942 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1943 var_reg_set (set, loc, initialized, set_src);
1946 /* Delete the association of register LOC in dataflow set SET with any
1947 variables that aren't onepart. If CLOBBER is true, also delete any
1948 other live copies of the same variable part, and delete the
1949 association with onepart dvs too. */
1951 static void
1952 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1954 attrs *nextp = &set->regs[REGNO (loc)];
1955 attrs node, next;
1957 if (clobber)
1959 tree decl = REG_EXPR (loc);
1960 HOST_WIDE_INT offset = REG_OFFSET (loc);
1962 decl = var_debug_decl (decl);
1964 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1967 for (node = *nextp; node; node = next)
1969 next = node->next;
1970 if (clobber || !dv_onepart_p (node->dv))
1972 delete_variable_part (set, node->loc, node->dv, node->offset);
1973 pool_free (attrs_pool, node);
1974 *nextp = next;
1976 else
1977 nextp = &node->next;
1981 /* Delete content of register with number REGNO in dataflow set SET. */
1983 static void
1984 var_regno_delete (dataflow_set *set, int regno)
1986 attrs *reg = &set->regs[regno];
1987 attrs node, next;
1989 for (node = *reg; node; node = next)
1991 next = node->next;
1992 delete_variable_part (set, node->loc, node->dv, node->offset);
1993 pool_free (attrs_pool, node);
1995 *reg = NULL;
1998 /* Return true if I is the negated value of a power of two. */
1999 static bool
2000 negative_power_of_two_p (HOST_WIDE_INT i)
2002 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2003 return x == (x & -x);
2006 /* Strip constant offsets and alignments off of LOC. Return the base
2007 expression. */
2009 static rtx
2010 vt_get_canonicalize_base (rtx loc)
2012 while ((GET_CODE (loc) == PLUS
2013 || GET_CODE (loc) == AND)
2014 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2015 && (GET_CODE (loc) != AND
2016 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2017 loc = XEXP (loc, 0);
2019 return loc;
2022 /* This caches canonicalized addresses for VALUEs, computed using
2023 information in the global cselib table. */
2024 static hash_map<rtx, rtx> *global_get_addr_cache;
2026 /* This caches canonicalized addresses for VALUEs, computed using
2027 information from the global cache and information pertaining to a
2028 basic block being analyzed. */
2029 static hash_map<rtx, rtx> *local_get_addr_cache;
2031 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2033 /* Return the canonical address for LOC, that must be a VALUE, using a
2034 cached global equivalence or computing it and storing it in the
2035 global cache. */
2037 static rtx
2038 get_addr_from_global_cache (rtx const loc)
2040 rtx x;
2042 gcc_checking_assert (GET_CODE (loc) == VALUE);
2044 bool existed;
2045 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2046 if (existed)
2047 return *slot;
2049 x = canon_rtx (get_addr (loc));
2051 /* Tentative, avoiding infinite recursion. */
2052 *slot = x;
2054 if (x != loc)
2056 rtx nx = vt_canonicalize_addr (NULL, x);
2057 if (nx != x)
2059 /* The table may have moved during recursion, recompute
2060 SLOT. */
2061 *global_get_addr_cache->get (loc) = x = nx;
2065 return x;
2068 /* Return the canonical address for LOC, that must be a VALUE, using a
2069 cached local equivalence or computing it and storing it in the
2070 local cache. */
2072 static rtx
2073 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2075 rtx x;
2076 decl_or_value dv;
2077 variable var;
2078 location_chain l;
2080 gcc_checking_assert (GET_CODE (loc) == VALUE);
2082 bool existed;
2083 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2084 if (existed)
2085 return *slot;
2087 x = get_addr_from_global_cache (loc);
2089 /* Tentative, avoiding infinite recursion. */
2090 *slot = x;
2092 /* Recurse to cache local expansion of X, or if we need to search
2093 for a VALUE in the expansion. */
2094 if (x != loc)
2096 rtx nx = vt_canonicalize_addr (set, x);
2097 if (nx != x)
2099 slot = local_get_addr_cache->get (loc);
2100 *slot = x = nx;
2102 return x;
2105 dv = dv_from_rtx (x);
2106 var = shared_hash_find (set->vars, dv);
2107 if (!var)
2108 return x;
2110 /* Look for an improved equivalent expression. */
2111 for (l = var->var_part[0].loc_chain; l; l = l->next)
2113 rtx base = vt_get_canonicalize_base (l->loc);
2114 if (GET_CODE (base) == VALUE
2115 && canon_value_cmp (base, loc))
2117 rtx nx = vt_canonicalize_addr (set, l->loc);
2118 if (x != nx)
2120 slot = local_get_addr_cache->get (loc);
2121 *slot = x = nx;
2123 break;
2127 return x;
2130 /* Canonicalize LOC using equivalences from SET in addition to those
2131 in the cselib static table. It expects a VALUE-based expression,
2132 and it will only substitute VALUEs with other VALUEs or
2133 function-global equivalences, so that, if two addresses have base
2134 VALUEs that are locally or globally related in ways that
2135 memrefs_conflict_p cares about, they will both canonicalize to
2136 expressions that have the same base VALUE.
2138 The use of VALUEs as canonical base addresses enables the canonical
2139 RTXs to remain unchanged globally, if they resolve to a constant,
2140 or throughout a basic block otherwise, so that they can be cached
2141 and the cache needs not be invalidated when REGs, MEMs or such
2142 change. */
2144 static rtx
2145 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2147 HOST_WIDE_INT ofst = 0;
2148 enum machine_mode mode = GET_MODE (oloc);
2149 rtx loc = oloc;
2150 rtx x;
2151 bool retry = true;
2153 while (retry)
2155 while (GET_CODE (loc) == PLUS
2156 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2158 ofst += INTVAL (XEXP (loc, 1));
2159 loc = XEXP (loc, 0);
2162 /* Alignment operations can't normally be combined, so just
2163 canonicalize the base and we're done. We'll normally have
2164 only one stack alignment anyway. */
2165 if (GET_CODE (loc) == AND
2166 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2167 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2169 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2170 if (x != XEXP (loc, 0))
2171 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2172 retry = false;
2175 if (GET_CODE (loc) == VALUE)
2177 if (set)
2178 loc = get_addr_from_local_cache (set, loc);
2179 else
2180 loc = get_addr_from_global_cache (loc);
2182 /* Consolidate plus_constants. */
2183 while (ofst && GET_CODE (loc) == PLUS
2184 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2186 ofst += INTVAL (XEXP (loc, 1));
2187 loc = XEXP (loc, 0);
2190 retry = false;
2192 else
2194 x = canon_rtx (loc);
2195 if (retry)
2196 retry = (x != loc);
2197 loc = x;
2201 /* Add OFST back in. */
2202 if (ofst)
2204 /* Don't build new RTL if we can help it. */
2205 if (GET_CODE (oloc) == PLUS
2206 && XEXP (oloc, 0) == loc
2207 && INTVAL (XEXP (oloc, 1)) == ofst)
2208 return oloc;
2210 loc = plus_constant (mode, loc, ofst);
2213 return loc;
2216 /* Return true iff there's a true dependence between MLOC and LOC.
2217 MADDR must be a canonicalized version of MLOC's address. */
2219 static inline bool
2220 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2222 if (GET_CODE (loc) != MEM)
2223 return false;
2225 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2226 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2227 return false;
2229 return true;
2232 /* Hold parameters for the hashtab traversal function
2233 drop_overlapping_mem_locs, see below. */
2235 struct overlapping_mems
2237 dataflow_set *set;
2238 rtx loc, addr;
2241 /* Remove all MEMs that overlap with COMS->LOC from the location list
2242 of a hash table entry for a value. COMS->ADDR must be a
2243 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2244 canonicalized itself. */
2247 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2249 dataflow_set *set = coms->set;
2250 rtx mloc = coms->loc, addr = coms->addr;
2251 variable var = *slot;
2253 if (var->onepart == ONEPART_VALUE)
2255 location_chain loc, *locp;
2256 bool changed = false;
2257 rtx cur_loc;
2259 gcc_assert (var->n_var_parts == 1);
2261 if (shared_var_p (var, set->vars))
2263 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2264 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2265 break;
2267 if (!loc)
2268 return 1;
2270 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2271 var = *slot;
2272 gcc_assert (var->n_var_parts == 1);
2275 if (VAR_LOC_1PAUX (var))
2276 cur_loc = VAR_LOC_FROM (var);
2277 else
2278 cur_loc = var->var_part[0].cur_loc;
2280 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2281 loc; loc = *locp)
2283 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2285 locp = &loc->next;
2286 continue;
2289 *locp = loc->next;
2290 /* If we have deleted the location which was last emitted
2291 we have to emit new location so add the variable to set
2292 of changed variables. */
2293 if (cur_loc == loc->loc)
2295 changed = true;
2296 var->var_part[0].cur_loc = NULL;
2297 if (VAR_LOC_1PAUX (var))
2298 VAR_LOC_FROM (var) = NULL;
2300 pool_free (loc_chain_pool, loc);
2303 if (!var->var_part[0].loc_chain)
2305 var->n_var_parts--;
2306 changed = true;
2308 if (changed)
2309 variable_was_changed (var, set);
2312 return 1;
2315 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2317 static void
2318 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2320 struct overlapping_mems coms;
2322 gcc_checking_assert (GET_CODE (loc) == MEM);
2324 coms.set = set;
2325 coms.loc = canon_rtx (loc);
2326 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2328 set->traversed_vars = set->vars;
2329 shared_hash_htab (set->vars)
2330 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2331 set->traversed_vars = NULL;
2334 /* Set the location of DV, OFFSET as the MEM LOC. */
2336 static void
2337 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2338 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2339 enum insert_option iopt)
2341 if (dv_is_decl_p (dv))
2342 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2344 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2347 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2348 SET to LOC.
2349 Adjust the address first if it is stack pointer based. */
2351 static void
2352 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2353 rtx set_src)
2355 tree decl = MEM_EXPR (loc);
2356 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2358 var_mem_decl_set (set, loc, initialized,
2359 dv_from_decl (decl), offset, set_src, INSERT);
2362 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2363 dataflow set SET to LOC. If MODIFY is true, any other live copies
2364 of the same variable part are also deleted from the dataflow set,
2365 otherwise the variable part is assumed to be copied from another
2366 location holding the same part.
2367 Adjust the address first if it is stack pointer based. */
2369 static void
2370 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2371 enum var_init_status initialized, rtx set_src)
2373 tree decl = MEM_EXPR (loc);
2374 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2376 clobber_overlapping_mems (set, loc);
2377 decl = var_debug_decl (decl);
2379 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2380 initialized = get_init_value (set, loc, dv_from_decl (decl));
2382 if (modify)
2383 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2384 var_mem_set (set, loc, initialized, set_src);
2387 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2388 true, also delete any other live copies of the same variable part.
2389 Adjust the address first if it is stack pointer based. */
2391 static void
2392 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2394 tree decl = MEM_EXPR (loc);
2395 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2397 clobber_overlapping_mems (set, loc);
2398 decl = var_debug_decl (decl);
2399 if (clobber)
2400 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2401 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2404 /* Return true if LOC should not be expanded for location expressions,
2405 or used in them. */
2407 static inline bool
2408 unsuitable_loc (rtx loc)
2410 switch (GET_CODE (loc))
2412 case PC:
2413 case SCRATCH:
2414 case CC0:
2415 case ASM_INPUT:
2416 case ASM_OPERANDS:
2417 return true;
2419 default:
2420 return false;
2424 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2425 bound to it. */
2427 static inline void
2428 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2430 if (REG_P (loc))
2432 if (modified)
2433 var_regno_delete (set, REGNO (loc));
2434 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2435 dv_from_value (val), 0, NULL_RTX, INSERT);
2437 else if (MEM_P (loc))
2439 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2441 if (modified)
2442 clobber_overlapping_mems (set, loc);
2444 if (l && GET_CODE (l->loc) == VALUE)
2445 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2447 /* If this MEM is a global constant, we don't need it in the
2448 dynamic tables. ??? We should test this before emitting the
2449 micro-op in the first place. */
2450 while (l)
2451 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2452 break;
2453 else
2454 l = l->next;
2456 if (!l)
2457 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2458 dv_from_value (val), 0, NULL_RTX, INSERT);
2460 else
2462 /* Other kinds of equivalences are necessarily static, at least
2463 so long as we do not perform substitutions while merging
2464 expressions. */
2465 gcc_unreachable ();
2466 set_variable_part (set, loc, dv_from_value (val), 0,
2467 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2471 /* Bind a value to a location it was just stored in. If MODIFIED
2472 holds, assume the location was modified, detaching it from any
2473 values bound to it. */
2475 static void
2476 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2477 bool modified)
2479 cselib_val *v = CSELIB_VAL_PTR (val);
2481 gcc_assert (cselib_preserved_value_p (v));
2483 if (dump_file)
2485 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2486 print_inline_rtx (dump_file, loc, 0);
2487 fprintf (dump_file, " evaluates to ");
2488 print_inline_rtx (dump_file, val, 0);
2489 if (v->locs)
2491 struct elt_loc_list *l;
2492 for (l = v->locs; l; l = l->next)
2494 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2495 print_inline_rtx (dump_file, l->loc, 0);
2498 fprintf (dump_file, "\n");
2501 gcc_checking_assert (!unsuitable_loc (loc));
2503 val_bind (set, val, loc, modified);
2506 /* Clear (canonical address) slots that reference X. */
2508 bool
2509 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2511 if (vt_get_canonicalize_base (*slot) == x)
2512 *slot = NULL;
2513 return true;
2516 /* Reset this node, detaching all its equivalences. Return the slot
2517 in the variable hash table that holds dv, if there is one. */
2519 static void
2520 val_reset (dataflow_set *set, decl_or_value dv)
2522 variable var = shared_hash_find (set->vars, dv) ;
2523 location_chain node;
2524 rtx cval;
2526 if (!var || !var->n_var_parts)
2527 return;
2529 gcc_assert (var->n_var_parts == 1);
2531 if (var->onepart == ONEPART_VALUE)
2533 rtx x = dv_as_value (dv);
2535 /* Relationships in the global cache don't change, so reset the
2536 local cache entry only. */
2537 rtx *slot = local_get_addr_cache->get (x);
2538 if (slot)
2540 /* If the value resolved back to itself, odds are that other
2541 values may have cached it too. These entries now refer
2542 to the old X, so detach them too. Entries that used the
2543 old X but resolved to something else remain ok as long as
2544 that something else isn't also reset. */
2545 if (*slot == x)
2546 local_get_addr_cache
2547 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2548 *slot = NULL;
2552 cval = NULL;
2553 for (node = var->var_part[0].loc_chain; node; node = node->next)
2554 if (GET_CODE (node->loc) == VALUE
2555 && canon_value_cmp (node->loc, cval))
2556 cval = node->loc;
2558 for (node = var->var_part[0].loc_chain; node; node = node->next)
2559 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2561 /* Redirect the equivalence link to the new canonical
2562 value, or simply remove it if it would point at
2563 itself. */
2564 if (cval)
2565 set_variable_part (set, cval, dv_from_value (node->loc),
2566 0, node->init, node->set_src, NO_INSERT);
2567 delete_variable_part (set, dv_as_value (dv),
2568 dv_from_value (node->loc), 0);
2571 if (cval)
2573 decl_or_value cdv = dv_from_value (cval);
2575 /* Keep the remaining values connected, accummulating links
2576 in the canonical value. */
2577 for (node = var->var_part[0].loc_chain; node; node = node->next)
2579 if (node->loc == cval)
2580 continue;
2581 else if (GET_CODE (node->loc) == REG)
2582 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2583 node->set_src, NO_INSERT);
2584 else if (GET_CODE (node->loc) == MEM)
2585 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2586 node->set_src, NO_INSERT);
2587 else
2588 set_variable_part (set, node->loc, cdv, 0,
2589 node->init, node->set_src, NO_INSERT);
2593 /* We remove this last, to make sure that the canonical value is not
2594 removed to the point of requiring reinsertion. */
2595 if (cval)
2596 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2598 clobber_variable_part (set, NULL, dv, 0, NULL);
2601 /* Find the values in a given location and map the val to another
2602 value, if it is unique, or add the location as one holding the
2603 value. */
2605 static void
2606 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2608 decl_or_value dv = dv_from_value (val);
2610 if (dump_file && (dump_flags & TDF_DETAILS))
2612 if (insn)
2613 fprintf (dump_file, "%i: ", INSN_UID (insn));
2614 else
2615 fprintf (dump_file, "head: ");
2616 print_inline_rtx (dump_file, val, 0);
2617 fputs (" is at ", dump_file);
2618 print_inline_rtx (dump_file, loc, 0);
2619 fputc ('\n', dump_file);
2622 val_reset (set, dv);
2624 gcc_checking_assert (!unsuitable_loc (loc));
2626 if (REG_P (loc))
2628 attrs node, found = NULL;
2630 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2631 if (dv_is_value_p (node->dv)
2632 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2634 found = node;
2636 /* Map incoming equivalences. ??? Wouldn't it be nice if
2637 we just started sharing the location lists? Maybe a
2638 circular list ending at the value itself or some
2639 such. */
2640 set_variable_part (set, dv_as_value (node->dv),
2641 dv_from_value (val), node->offset,
2642 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2643 set_variable_part (set, val, node->dv, node->offset,
2644 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2647 /* If we didn't find any equivalence, we need to remember that
2648 this value is held in the named register. */
2649 if (found)
2650 return;
2652 /* ??? Attempt to find and merge equivalent MEMs or other
2653 expressions too. */
2655 val_bind (set, val, loc, false);
2658 /* Initialize dataflow set SET to be empty.
2659 VARS_SIZE is the initial size of hash table VARS. */
2661 static void
2662 dataflow_set_init (dataflow_set *set)
2664 init_attrs_list_set (set->regs);
2665 set->vars = shared_hash_copy (empty_shared_hash);
2666 set->stack_adjust = 0;
2667 set->traversed_vars = NULL;
2670 /* Delete the contents of dataflow set SET. */
2672 static void
2673 dataflow_set_clear (dataflow_set *set)
2675 int i;
2677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2678 attrs_list_clear (&set->regs[i]);
2680 shared_hash_destroy (set->vars);
2681 set->vars = shared_hash_copy (empty_shared_hash);
2684 /* Copy the contents of dataflow set SRC to DST. */
2686 static void
2687 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2689 int i;
2691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2692 attrs_list_copy (&dst->regs[i], src->regs[i]);
2694 shared_hash_destroy (dst->vars);
2695 dst->vars = shared_hash_copy (src->vars);
2696 dst->stack_adjust = src->stack_adjust;
2699 /* Information for merging lists of locations for a given offset of variable.
2701 struct variable_union_info
2703 /* Node of the location chain. */
2704 location_chain lc;
2706 /* The sum of positions in the input chains. */
2707 int pos;
2709 /* The position in the chain of DST dataflow set. */
2710 int pos_dst;
2713 /* Buffer for location list sorting and its allocated size. */
2714 static struct variable_union_info *vui_vec;
2715 static int vui_allocated;
2717 /* Compare function for qsort, order the structures by POS element. */
2719 static int
2720 variable_union_info_cmp_pos (const void *n1, const void *n2)
2722 const struct variable_union_info *const i1 =
2723 (const struct variable_union_info *) n1;
2724 const struct variable_union_info *const i2 =
2725 ( const struct variable_union_info *) n2;
2727 if (i1->pos != i2->pos)
2728 return i1->pos - i2->pos;
2730 return (i1->pos_dst - i2->pos_dst);
2733 /* Compute union of location parts of variable *SLOT and the same variable
2734 from hash table DATA. Compute "sorted" union of the location chains
2735 for common offsets, i.e. the locations of a variable part are sorted by
2736 a priority where the priority is the sum of the positions in the 2 chains
2737 (if a location is only in one list the position in the second list is
2738 defined to be larger than the length of the chains).
2739 When we are updating the location parts the newest location is in the
2740 beginning of the chain, so when we do the described "sorted" union
2741 we keep the newest locations in the beginning. */
2743 static int
2744 variable_union (variable src, dataflow_set *set)
2746 variable dst;
2747 variable_def **dstp;
2748 int i, j, k;
2750 dstp = shared_hash_find_slot (set->vars, src->dv);
2751 if (!dstp || !*dstp)
2753 src->refcount++;
2755 dst_can_be_shared = false;
2756 if (!dstp)
2757 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2759 *dstp = src;
2761 /* Continue traversing the hash table. */
2762 return 1;
2764 else
2765 dst = *dstp;
2767 gcc_assert (src->n_var_parts);
2768 gcc_checking_assert (src->onepart == dst->onepart);
2770 /* We can combine one-part variables very efficiently, because their
2771 entries are in canonical order. */
2772 if (src->onepart)
2774 location_chain *nodep, dnode, snode;
2776 gcc_assert (src->n_var_parts == 1
2777 && dst->n_var_parts == 1);
2779 snode = src->var_part[0].loc_chain;
2780 gcc_assert (snode);
2782 restart_onepart_unshared:
2783 nodep = &dst->var_part[0].loc_chain;
2784 dnode = *nodep;
2785 gcc_assert (dnode);
2787 while (snode)
2789 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2791 if (r > 0)
2793 location_chain nnode;
2795 if (shared_var_p (dst, set->vars))
2797 dstp = unshare_variable (set, dstp, dst,
2798 VAR_INIT_STATUS_INITIALIZED);
2799 dst = *dstp;
2800 goto restart_onepart_unshared;
2803 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2804 nnode->loc = snode->loc;
2805 nnode->init = snode->init;
2806 if (!snode->set_src || MEM_P (snode->set_src))
2807 nnode->set_src = NULL;
2808 else
2809 nnode->set_src = snode->set_src;
2810 nnode->next = dnode;
2811 dnode = nnode;
2813 else if (r == 0)
2814 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2816 if (r >= 0)
2817 snode = snode->next;
2819 nodep = &dnode->next;
2820 dnode = *nodep;
2823 return 1;
2826 gcc_checking_assert (!src->onepart);
2828 /* Count the number of location parts, result is K. */
2829 for (i = 0, j = 0, k = 0;
2830 i < src->n_var_parts && j < dst->n_var_parts; k++)
2832 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2834 i++;
2835 j++;
2837 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2838 i++;
2839 else
2840 j++;
2842 k += src->n_var_parts - i;
2843 k += dst->n_var_parts - j;
2845 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2846 thus there are at most MAX_VAR_PARTS different offsets. */
2847 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2849 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2851 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2852 dst = *dstp;
2855 i = src->n_var_parts - 1;
2856 j = dst->n_var_parts - 1;
2857 dst->n_var_parts = k;
2859 for (k--; k >= 0; k--)
2861 location_chain node, node2;
2863 if (i >= 0 && j >= 0
2864 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2866 /* Compute the "sorted" union of the chains, i.e. the locations which
2867 are in both chains go first, they are sorted by the sum of
2868 positions in the chains. */
2869 int dst_l, src_l;
2870 int ii, jj, n;
2871 struct variable_union_info *vui;
2873 /* If DST is shared compare the location chains.
2874 If they are different we will modify the chain in DST with
2875 high probability so make a copy of DST. */
2876 if (shared_var_p (dst, set->vars))
2878 for (node = src->var_part[i].loc_chain,
2879 node2 = dst->var_part[j].loc_chain; node && node2;
2880 node = node->next, node2 = node2->next)
2882 if (!((REG_P (node2->loc)
2883 && REG_P (node->loc)
2884 && REGNO (node2->loc) == REGNO (node->loc))
2885 || rtx_equal_p (node2->loc, node->loc)))
2887 if (node2->init < node->init)
2888 node2->init = node->init;
2889 break;
2892 if (node || node2)
2894 dstp = unshare_variable (set, dstp, dst,
2895 VAR_INIT_STATUS_UNKNOWN);
2896 dst = (variable)*dstp;
2900 src_l = 0;
2901 for (node = src->var_part[i].loc_chain; node; node = node->next)
2902 src_l++;
2903 dst_l = 0;
2904 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2905 dst_l++;
2907 if (dst_l == 1)
2909 /* The most common case, much simpler, no qsort is needed. */
2910 location_chain dstnode = dst->var_part[j].loc_chain;
2911 dst->var_part[k].loc_chain = dstnode;
2912 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2913 node2 = dstnode;
2914 for (node = src->var_part[i].loc_chain; node; node = node->next)
2915 if (!((REG_P (dstnode->loc)
2916 && REG_P (node->loc)
2917 && REGNO (dstnode->loc) == REGNO (node->loc))
2918 || rtx_equal_p (dstnode->loc, node->loc)))
2920 location_chain new_node;
2922 /* Copy the location from SRC. */
2923 new_node = (location_chain) pool_alloc (loc_chain_pool);
2924 new_node->loc = node->loc;
2925 new_node->init = node->init;
2926 if (!node->set_src || MEM_P (node->set_src))
2927 new_node->set_src = NULL;
2928 else
2929 new_node->set_src = node->set_src;
2930 node2->next = new_node;
2931 node2 = new_node;
2933 node2->next = NULL;
2935 else
2937 if (src_l + dst_l > vui_allocated)
2939 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2940 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2941 vui_allocated);
2943 vui = vui_vec;
2945 /* Fill in the locations from DST. */
2946 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2947 node = node->next, jj++)
2949 vui[jj].lc = node;
2950 vui[jj].pos_dst = jj;
2952 /* Pos plus value larger than a sum of 2 valid positions. */
2953 vui[jj].pos = jj + src_l + dst_l;
2956 /* Fill in the locations from SRC. */
2957 n = dst_l;
2958 for (node = src->var_part[i].loc_chain, ii = 0; node;
2959 node = node->next, ii++)
2961 /* Find location from NODE. */
2962 for (jj = 0; jj < dst_l; jj++)
2964 if ((REG_P (vui[jj].lc->loc)
2965 && REG_P (node->loc)
2966 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2967 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2969 vui[jj].pos = jj + ii;
2970 break;
2973 if (jj >= dst_l) /* The location has not been found. */
2975 location_chain new_node;
2977 /* Copy the location from SRC. */
2978 new_node = (location_chain) pool_alloc (loc_chain_pool);
2979 new_node->loc = node->loc;
2980 new_node->init = node->init;
2981 if (!node->set_src || MEM_P (node->set_src))
2982 new_node->set_src = NULL;
2983 else
2984 new_node->set_src = node->set_src;
2985 vui[n].lc = new_node;
2986 vui[n].pos_dst = src_l + dst_l;
2987 vui[n].pos = ii + src_l + dst_l;
2988 n++;
2992 if (dst_l == 2)
2994 /* Special case still very common case. For dst_l == 2
2995 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2996 vui[i].pos == i + src_l + dst_l. */
2997 if (vui[0].pos > vui[1].pos)
2999 /* Order should be 1, 0, 2... */
3000 dst->var_part[k].loc_chain = vui[1].lc;
3001 vui[1].lc->next = vui[0].lc;
3002 if (n >= 3)
3004 vui[0].lc->next = vui[2].lc;
3005 vui[n - 1].lc->next = NULL;
3007 else
3008 vui[0].lc->next = NULL;
3009 ii = 3;
3011 else
3013 dst->var_part[k].loc_chain = vui[0].lc;
3014 if (n >= 3 && vui[2].pos < vui[1].pos)
3016 /* Order should be 0, 2, 1, 3... */
3017 vui[0].lc->next = vui[2].lc;
3018 vui[2].lc->next = vui[1].lc;
3019 if (n >= 4)
3021 vui[1].lc->next = vui[3].lc;
3022 vui[n - 1].lc->next = NULL;
3024 else
3025 vui[1].lc->next = NULL;
3026 ii = 4;
3028 else
3030 /* Order should be 0, 1, 2... */
3031 ii = 1;
3032 vui[n - 1].lc->next = NULL;
3035 for (; ii < n; ii++)
3036 vui[ii - 1].lc->next = vui[ii].lc;
3038 else
3040 qsort (vui, n, sizeof (struct variable_union_info),
3041 variable_union_info_cmp_pos);
3043 /* Reconnect the nodes in sorted order. */
3044 for (ii = 1; ii < n; ii++)
3045 vui[ii - 1].lc->next = vui[ii].lc;
3046 vui[n - 1].lc->next = NULL;
3047 dst->var_part[k].loc_chain = vui[0].lc;
3050 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3052 i--;
3053 j--;
3055 else if ((i >= 0 && j >= 0
3056 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3057 || i < 0)
3059 dst->var_part[k] = dst->var_part[j];
3060 j--;
3062 else if ((i >= 0 && j >= 0
3063 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3064 || j < 0)
3066 location_chain *nextp;
3068 /* Copy the chain from SRC. */
3069 nextp = &dst->var_part[k].loc_chain;
3070 for (node = src->var_part[i].loc_chain; node; node = node->next)
3072 location_chain new_lc;
3074 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3075 new_lc->next = NULL;
3076 new_lc->init = node->init;
3077 if (!node->set_src || MEM_P (node->set_src))
3078 new_lc->set_src = NULL;
3079 else
3080 new_lc->set_src = node->set_src;
3081 new_lc->loc = node->loc;
3083 *nextp = new_lc;
3084 nextp = &new_lc->next;
3087 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3088 i--;
3090 dst->var_part[k].cur_loc = NULL;
3093 if (flag_var_tracking_uninit)
3094 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3096 location_chain node, node2;
3097 for (node = src->var_part[i].loc_chain; node; node = node->next)
3098 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3099 if (rtx_equal_p (node->loc, node2->loc))
3101 if (node->init > node2->init)
3102 node2->init = node->init;
3106 /* Continue traversing the hash table. */
3107 return 1;
3110 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3112 static void
3113 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3115 int i;
3117 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3118 attrs_list_union (&dst->regs[i], src->regs[i]);
3120 if (dst->vars == empty_shared_hash)
3122 shared_hash_destroy (dst->vars);
3123 dst->vars = shared_hash_copy (src->vars);
3125 else
3127 variable_iterator_type hi;
3128 variable var;
3130 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3131 var, variable, hi)
3132 variable_union (var, dst);
3136 /* Whether the value is currently being expanded. */
3137 #define VALUE_RECURSED_INTO(x) \
3138 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3140 /* Whether no expansion was found, saving useless lookups.
3141 It must only be set when VALUE_CHANGED is clear. */
3142 #define NO_LOC_P(x) \
3143 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3145 /* Whether cur_loc in the value needs to be (re)computed. */
3146 #define VALUE_CHANGED(x) \
3147 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3148 /* Whether cur_loc in the decl needs to be (re)computed. */
3149 #define DECL_CHANGED(x) TREE_VISITED (x)
3151 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3152 user DECLs, this means they're in changed_variables. Values and
3153 debug exprs may be left with this flag set if no user variable
3154 requires them to be evaluated. */
3156 static inline void
3157 set_dv_changed (decl_or_value dv, bool newv)
3159 switch (dv_onepart_p (dv))
3161 case ONEPART_VALUE:
3162 if (newv)
3163 NO_LOC_P (dv_as_value (dv)) = false;
3164 VALUE_CHANGED (dv_as_value (dv)) = newv;
3165 break;
3167 case ONEPART_DEXPR:
3168 if (newv)
3169 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3170 /* Fall through... */
3172 default:
3173 DECL_CHANGED (dv_as_decl (dv)) = newv;
3174 break;
3178 /* Return true if DV needs to have its cur_loc recomputed. */
3180 static inline bool
3181 dv_changed_p (decl_or_value dv)
3183 return (dv_is_value_p (dv)
3184 ? VALUE_CHANGED (dv_as_value (dv))
3185 : DECL_CHANGED (dv_as_decl (dv)));
3188 /* Return a location list node whose loc is rtx_equal to LOC, in the
3189 location list of a one-part variable or value VAR, or in that of
3190 any values recursively mentioned in the location lists. VARS must
3191 be in star-canonical form. */
3193 static location_chain
3194 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3196 location_chain node;
3197 enum rtx_code loc_code;
3199 if (!var)
3200 return NULL;
3202 gcc_checking_assert (var->onepart);
3204 if (!var->n_var_parts)
3205 return NULL;
3207 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3209 loc_code = GET_CODE (loc);
3210 for (node = var->var_part[0].loc_chain; node; node = node->next)
3212 decl_or_value dv;
3213 variable rvar;
3215 if (GET_CODE (node->loc) != loc_code)
3217 if (GET_CODE (node->loc) != VALUE)
3218 continue;
3220 else if (loc == node->loc)
3221 return node;
3222 else if (loc_code != VALUE)
3224 if (rtx_equal_p (loc, node->loc))
3225 return node;
3226 continue;
3229 /* Since we're in star-canonical form, we don't need to visit
3230 non-canonical nodes: one-part variables and non-canonical
3231 values would only point back to the canonical node. */
3232 if (dv_is_value_p (var->dv)
3233 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3235 /* Skip all subsequent VALUEs. */
3236 while (node->next && GET_CODE (node->next->loc) == VALUE)
3238 node = node->next;
3239 gcc_checking_assert (!canon_value_cmp (node->loc,
3240 dv_as_value (var->dv)));
3241 if (loc == node->loc)
3242 return node;
3244 continue;
3247 gcc_checking_assert (node == var->var_part[0].loc_chain);
3248 gcc_checking_assert (!node->next);
3250 dv = dv_from_value (node->loc);
3251 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3252 return find_loc_in_1pdv (loc, rvar, vars);
3255 /* ??? Gotta look in cselib_val locations too. */
3257 return NULL;
3260 /* Hash table iteration argument passed to variable_merge. */
3261 struct dfset_merge
3263 /* The set in which the merge is to be inserted. */
3264 dataflow_set *dst;
3265 /* The set that we're iterating in. */
3266 dataflow_set *cur;
3267 /* The set that may contain the other dv we are to merge with. */
3268 dataflow_set *src;
3269 /* Number of onepart dvs in src. */
3270 int src_onepart_cnt;
3273 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3274 loc_cmp order, and it is maintained as such. */
3276 static void
3277 insert_into_intersection (location_chain *nodep, rtx loc,
3278 enum var_init_status status)
3280 location_chain node;
3281 int r;
3283 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3284 if ((r = loc_cmp (node->loc, loc)) == 0)
3286 node->init = MIN (node->init, status);
3287 return;
3289 else if (r > 0)
3290 break;
3292 node = (location_chain) pool_alloc (loc_chain_pool);
3294 node->loc = loc;
3295 node->set_src = NULL;
3296 node->init = status;
3297 node->next = *nodep;
3298 *nodep = node;
3301 /* Insert in DEST the intersection of the locations present in both
3302 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3303 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3304 DSM->dst. */
3306 static void
3307 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3308 location_chain s1node, variable s2var)
3310 dataflow_set *s1set = dsm->cur;
3311 dataflow_set *s2set = dsm->src;
3312 location_chain found;
3314 if (s2var)
3316 location_chain s2node;
3318 gcc_checking_assert (s2var->onepart);
3320 if (s2var->n_var_parts)
3322 s2node = s2var->var_part[0].loc_chain;
3324 for (; s1node && s2node;
3325 s1node = s1node->next, s2node = s2node->next)
3326 if (s1node->loc != s2node->loc)
3327 break;
3328 else if (s1node->loc == val)
3329 continue;
3330 else
3331 insert_into_intersection (dest, s1node->loc,
3332 MIN (s1node->init, s2node->init));
3336 for (; s1node; s1node = s1node->next)
3338 if (s1node->loc == val)
3339 continue;
3341 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3342 shared_hash_htab (s2set->vars))))
3344 insert_into_intersection (dest, s1node->loc,
3345 MIN (s1node->init, found->init));
3346 continue;
3349 if (GET_CODE (s1node->loc) == VALUE
3350 && !VALUE_RECURSED_INTO (s1node->loc))
3352 decl_or_value dv = dv_from_value (s1node->loc);
3353 variable svar = shared_hash_find (s1set->vars, dv);
3354 if (svar)
3356 if (svar->n_var_parts == 1)
3358 VALUE_RECURSED_INTO (s1node->loc) = true;
3359 intersect_loc_chains (val, dest, dsm,
3360 svar->var_part[0].loc_chain,
3361 s2var);
3362 VALUE_RECURSED_INTO (s1node->loc) = false;
3367 /* ??? gotta look in cselib_val locations too. */
3369 /* ??? if the location is equivalent to any location in src,
3370 searched recursively
3372 add to dst the values needed to represent the equivalence
3374 telling whether locations S is equivalent to another dv's
3375 location list:
3377 for each location D in the list
3379 if S and D satisfy rtx_equal_p, then it is present
3381 else if D is a value, recurse without cycles
3383 else if S and D have the same CODE and MODE
3385 for each operand oS and the corresponding oD
3387 if oS and oD are not equivalent, then S an D are not equivalent
3389 else if they are RTX vectors
3391 if any vector oS element is not equivalent to its respective oD,
3392 then S and D are not equivalent
3400 /* Return -1 if X should be before Y in a location list for a 1-part
3401 variable, 1 if Y should be before X, and 0 if they're equivalent
3402 and should not appear in the list. */
3404 static int
3405 loc_cmp (rtx x, rtx y)
3407 int i, j, r;
3408 RTX_CODE code = GET_CODE (x);
3409 const char *fmt;
3411 if (x == y)
3412 return 0;
3414 if (REG_P (x))
3416 if (!REG_P (y))
3417 return -1;
3418 gcc_assert (GET_MODE (x) == GET_MODE (y));
3419 if (REGNO (x) == REGNO (y))
3420 return 0;
3421 else if (REGNO (x) < REGNO (y))
3422 return -1;
3423 else
3424 return 1;
3427 if (REG_P (y))
3428 return 1;
3430 if (MEM_P (x))
3432 if (!MEM_P (y))
3433 return -1;
3434 gcc_assert (GET_MODE (x) == GET_MODE (y));
3435 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3438 if (MEM_P (y))
3439 return 1;
3441 if (GET_CODE (x) == VALUE)
3443 if (GET_CODE (y) != VALUE)
3444 return -1;
3445 /* Don't assert the modes are the same, that is true only
3446 when not recursing. (subreg:QI (value:SI 1:1) 0)
3447 and (subreg:QI (value:DI 2:2) 0) can be compared,
3448 even when the modes are different. */
3449 if (canon_value_cmp (x, y))
3450 return -1;
3451 else
3452 return 1;
3455 if (GET_CODE (y) == VALUE)
3456 return 1;
3458 /* Entry value is the least preferable kind of expression. */
3459 if (GET_CODE (x) == ENTRY_VALUE)
3461 if (GET_CODE (y) != ENTRY_VALUE)
3462 return 1;
3463 gcc_assert (GET_MODE (x) == GET_MODE (y));
3464 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3467 if (GET_CODE (y) == ENTRY_VALUE)
3468 return -1;
3470 if (GET_CODE (x) == GET_CODE (y))
3471 /* Compare operands below. */;
3472 else if (GET_CODE (x) < GET_CODE (y))
3473 return -1;
3474 else
3475 return 1;
3477 gcc_assert (GET_MODE (x) == GET_MODE (y));
3479 if (GET_CODE (x) == DEBUG_EXPR)
3481 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3482 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3483 return -1;
3484 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3485 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3486 return 1;
3489 fmt = GET_RTX_FORMAT (code);
3490 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3491 switch (fmt[i])
3493 case 'w':
3494 if (XWINT (x, i) == XWINT (y, i))
3495 break;
3496 else if (XWINT (x, i) < XWINT (y, i))
3497 return -1;
3498 else
3499 return 1;
3501 case 'n':
3502 case 'i':
3503 if (XINT (x, i) == XINT (y, i))
3504 break;
3505 else if (XINT (x, i) < XINT (y, i))
3506 return -1;
3507 else
3508 return 1;
3510 case 'V':
3511 case 'E':
3512 /* Compare the vector length first. */
3513 if (XVECLEN (x, i) == XVECLEN (y, i))
3514 /* Compare the vectors elements. */;
3515 else if (XVECLEN (x, i) < XVECLEN (y, i))
3516 return -1;
3517 else
3518 return 1;
3520 for (j = 0; j < XVECLEN (x, i); j++)
3521 if ((r = loc_cmp (XVECEXP (x, i, j),
3522 XVECEXP (y, i, j))))
3523 return r;
3524 break;
3526 case 'e':
3527 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3528 return r;
3529 break;
3531 case 'S':
3532 case 's':
3533 if (XSTR (x, i) == XSTR (y, i))
3534 break;
3535 if (!XSTR (x, i))
3536 return -1;
3537 if (!XSTR (y, i))
3538 return 1;
3539 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3540 break;
3541 else if (r < 0)
3542 return -1;
3543 else
3544 return 1;
3546 case 'u':
3547 /* These are just backpointers, so they don't matter. */
3548 break;
3550 case '0':
3551 case 't':
3552 break;
3554 /* It is believed that rtx's at this level will never
3555 contain anything but integers and other rtx's,
3556 except for within LABEL_REFs and SYMBOL_REFs. */
3557 default:
3558 gcc_unreachable ();
3560 if (CONST_WIDE_INT_P (x))
3562 /* Compare the vector length first. */
3563 if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y))
3564 return 1;
3565 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3566 return -1;
3568 /* Compare the vectors elements. */;
3569 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3571 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3572 return -1;
3573 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3574 return 1;
3578 return 0;
3581 #if ENABLE_CHECKING
3582 /* Check the order of entries in one-part variables. */
3585 canonicalize_loc_order_check (variable_def **slot,
3586 dataflow_set *data ATTRIBUTE_UNUSED)
3588 variable var = *slot;
3589 location_chain node, next;
3591 #ifdef ENABLE_RTL_CHECKING
3592 int i;
3593 for (i = 0; i < var->n_var_parts; i++)
3594 gcc_assert (var->var_part[0].cur_loc == NULL);
3595 gcc_assert (!var->in_changed_variables);
3596 #endif
3598 if (!var->onepart)
3599 return 1;
3601 gcc_assert (var->n_var_parts == 1);
3602 node = var->var_part[0].loc_chain;
3603 gcc_assert (node);
3605 while ((next = node->next))
3607 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3608 node = next;
3611 return 1;
3613 #endif
3615 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3616 more likely to be chosen as canonical for an equivalence set.
3617 Ensure less likely values can reach more likely neighbors, making
3618 the connections bidirectional. */
3621 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3623 variable var = *slot;
3624 decl_or_value dv = var->dv;
3625 rtx val;
3626 location_chain node;
3628 if (!dv_is_value_p (dv))
3629 return 1;
3631 gcc_checking_assert (var->n_var_parts == 1);
3633 val = dv_as_value (dv);
3635 for (node = var->var_part[0].loc_chain; node; node = node->next)
3636 if (GET_CODE (node->loc) == VALUE)
3638 if (canon_value_cmp (node->loc, val))
3639 VALUE_RECURSED_INTO (val) = true;
3640 else
3642 decl_or_value odv = dv_from_value (node->loc);
3643 variable_def **oslot;
3644 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3646 set_slot_part (set, val, oslot, odv, 0,
3647 node->init, NULL_RTX);
3649 VALUE_RECURSED_INTO (node->loc) = true;
3653 return 1;
3656 /* Remove redundant entries from equivalence lists in onepart
3657 variables, canonicalizing equivalence sets into star shapes. */
3660 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3662 variable var = *slot;
3663 decl_or_value dv = var->dv;
3664 location_chain node;
3665 decl_or_value cdv;
3666 rtx val, cval;
3667 variable_def **cslot;
3668 bool has_value;
3669 bool has_marks;
3671 if (!var->onepart)
3672 return 1;
3674 gcc_checking_assert (var->n_var_parts == 1);
3676 if (dv_is_value_p (dv))
3678 cval = dv_as_value (dv);
3679 if (!VALUE_RECURSED_INTO (cval))
3680 return 1;
3681 VALUE_RECURSED_INTO (cval) = false;
3683 else
3684 cval = NULL_RTX;
3686 restart:
3687 val = cval;
3688 has_value = false;
3689 has_marks = false;
3691 gcc_assert (var->n_var_parts == 1);
3693 for (node = var->var_part[0].loc_chain; node; node = node->next)
3694 if (GET_CODE (node->loc) == VALUE)
3696 has_value = true;
3697 if (VALUE_RECURSED_INTO (node->loc))
3698 has_marks = true;
3699 if (canon_value_cmp (node->loc, cval))
3700 cval = node->loc;
3703 if (!has_value)
3704 return 1;
3706 if (cval == val)
3708 if (!has_marks || dv_is_decl_p (dv))
3709 return 1;
3711 /* Keep it marked so that we revisit it, either after visiting a
3712 child node, or after visiting a new parent that might be
3713 found out. */
3714 VALUE_RECURSED_INTO (val) = true;
3716 for (node = var->var_part[0].loc_chain; node; node = node->next)
3717 if (GET_CODE (node->loc) == VALUE
3718 && VALUE_RECURSED_INTO (node->loc))
3720 cval = node->loc;
3721 restart_with_cval:
3722 VALUE_RECURSED_INTO (cval) = false;
3723 dv = dv_from_value (cval);
3724 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3725 if (!slot)
3727 gcc_assert (dv_is_decl_p (var->dv));
3728 /* The canonical value was reset and dropped.
3729 Remove it. */
3730 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3731 return 1;
3733 var = *slot;
3734 gcc_assert (dv_is_value_p (var->dv));
3735 if (var->n_var_parts == 0)
3736 return 1;
3737 gcc_assert (var->n_var_parts == 1);
3738 goto restart;
3741 VALUE_RECURSED_INTO (val) = false;
3743 return 1;
3746 /* Push values to the canonical one. */
3747 cdv = dv_from_value (cval);
3748 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3750 for (node = var->var_part[0].loc_chain; node; node = node->next)
3751 if (node->loc != cval)
3753 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3754 node->init, NULL_RTX);
3755 if (GET_CODE (node->loc) == VALUE)
3757 decl_or_value ndv = dv_from_value (node->loc);
3759 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3760 NO_INSERT);
3762 if (canon_value_cmp (node->loc, val))
3764 /* If it could have been a local minimum, it's not any more,
3765 since it's now neighbor to cval, so it may have to push
3766 to it. Conversely, if it wouldn't have prevailed over
3767 val, then whatever mark it has is fine: if it was to
3768 push, it will now push to a more canonical node, but if
3769 it wasn't, then it has already pushed any values it might
3770 have to. */
3771 VALUE_RECURSED_INTO (node->loc) = true;
3772 /* Make sure we visit node->loc by ensuring we cval is
3773 visited too. */
3774 VALUE_RECURSED_INTO (cval) = true;
3776 else if (!VALUE_RECURSED_INTO (node->loc))
3777 /* If we have no need to "recurse" into this node, it's
3778 already "canonicalized", so drop the link to the old
3779 parent. */
3780 clobber_variable_part (set, cval, ndv, 0, NULL);
3782 else if (GET_CODE (node->loc) == REG)
3784 attrs list = set->regs[REGNO (node->loc)], *listp;
3786 /* Change an existing attribute referring to dv so that it
3787 refers to cdv, removing any duplicate this might
3788 introduce, and checking that no previous duplicates
3789 existed, all in a single pass. */
3791 while (list)
3793 if (list->offset == 0
3794 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3795 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3796 break;
3798 list = list->next;
3801 gcc_assert (list);
3802 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3804 list->dv = cdv;
3805 for (listp = &list->next; (list = *listp); listp = &list->next)
3807 if (list->offset)
3808 continue;
3810 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3812 *listp = list->next;
3813 pool_free (attrs_pool, list);
3814 list = *listp;
3815 break;
3818 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3821 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3823 for (listp = &list->next; (list = *listp); listp = &list->next)
3825 if (list->offset)
3826 continue;
3828 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3830 *listp = list->next;
3831 pool_free (attrs_pool, list);
3832 list = *listp;
3833 break;
3836 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3839 else
3840 gcc_unreachable ();
3842 #if ENABLE_CHECKING
3843 while (list)
3845 if (list->offset == 0
3846 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3847 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3848 gcc_unreachable ();
3850 list = list->next;
3852 #endif
3856 if (val)
3857 set_slot_part (set, val, cslot, cdv, 0,
3858 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3860 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3862 /* Variable may have been unshared. */
3863 var = *slot;
3864 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3865 && var->var_part[0].loc_chain->next == NULL);
3867 if (VALUE_RECURSED_INTO (cval))
3868 goto restart_with_cval;
3870 return 1;
3873 /* Bind one-part variables to the canonical value in an equivalence
3874 set. Not doing this causes dataflow convergence failure in rare
3875 circumstances, see PR42873. Unfortunately we can't do this
3876 efficiently as part of canonicalize_values_star, since we may not
3877 have determined or even seen the canonical value of a set when we
3878 get to a variable that references another member of the set. */
3881 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3883 variable var = *slot;
3884 decl_or_value dv = var->dv;
3885 location_chain node;
3886 rtx cval;
3887 decl_or_value cdv;
3888 variable_def **cslot;
3889 variable cvar;
3890 location_chain cnode;
3892 if (!var->onepart || var->onepart == ONEPART_VALUE)
3893 return 1;
3895 gcc_assert (var->n_var_parts == 1);
3897 node = var->var_part[0].loc_chain;
3899 if (GET_CODE (node->loc) != VALUE)
3900 return 1;
3902 gcc_assert (!node->next);
3903 cval = node->loc;
3905 /* Push values to the canonical one. */
3906 cdv = dv_from_value (cval);
3907 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3908 if (!cslot)
3909 return 1;
3910 cvar = *cslot;
3911 gcc_assert (cvar->n_var_parts == 1);
3913 cnode = cvar->var_part[0].loc_chain;
3915 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3916 that are not “more canonical” than it. */
3917 if (GET_CODE (cnode->loc) != VALUE
3918 || !canon_value_cmp (cnode->loc, cval))
3919 return 1;
3921 /* CVAL was found to be non-canonical. Change the variable to point
3922 to the canonical VALUE. */
3923 gcc_assert (!cnode->next);
3924 cval = cnode->loc;
3926 slot = set_slot_part (set, cval, slot, dv, 0,
3927 node->init, node->set_src);
3928 clobber_slot_part (set, cval, slot, 0, node->set_src);
3930 return 1;
3933 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3934 corresponding entry in DSM->src. Multi-part variables are combined
3935 with variable_union, whereas onepart dvs are combined with
3936 intersection. */
3938 static int
3939 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3941 dataflow_set *dst = dsm->dst;
3942 variable_def **dstslot;
3943 variable s2var, dvar = NULL;
3944 decl_or_value dv = s1var->dv;
3945 onepart_enum_t onepart = s1var->onepart;
3946 rtx val;
3947 hashval_t dvhash;
3948 location_chain node, *nodep;
3950 /* If the incoming onepart variable has an empty location list, then
3951 the intersection will be just as empty. For other variables,
3952 it's always union. */
3953 gcc_checking_assert (s1var->n_var_parts
3954 && s1var->var_part[0].loc_chain);
3956 if (!onepart)
3957 return variable_union (s1var, dst);
3959 gcc_checking_assert (s1var->n_var_parts == 1);
3961 dvhash = dv_htab_hash (dv);
3962 if (dv_is_value_p (dv))
3963 val = dv_as_value (dv);
3964 else
3965 val = NULL;
3967 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3968 if (!s2var)
3970 dst_can_be_shared = false;
3971 return 1;
3974 dsm->src_onepart_cnt--;
3975 gcc_assert (s2var->var_part[0].loc_chain
3976 && s2var->onepart == onepart
3977 && s2var->n_var_parts == 1);
3979 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3980 if (dstslot)
3982 dvar = *dstslot;
3983 gcc_assert (dvar->refcount == 1
3984 && dvar->onepart == onepart
3985 && dvar->n_var_parts == 1);
3986 nodep = &dvar->var_part[0].loc_chain;
3988 else
3990 nodep = &node;
3991 node = NULL;
3994 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3996 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3997 dvhash, INSERT);
3998 *dstslot = dvar = s2var;
3999 dvar->refcount++;
4001 else
4003 dst_can_be_shared = false;
4005 intersect_loc_chains (val, nodep, dsm,
4006 s1var->var_part[0].loc_chain, s2var);
4008 if (!dstslot)
4010 if (node)
4012 dvar = (variable) pool_alloc (onepart_pool (onepart));
4013 dvar->dv = dv;
4014 dvar->refcount = 1;
4015 dvar->n_var_parts = 1;
4016 dvar->onepart = onepart;
4017 dvar->in_changed_variables = false;
4018 dvar->var_part[0].loc_chain = node;
4019 dvar->var_part[0].cur_loc = NULL;
4020 if (onepart)
4021 VAR_LOC_1PAUX (dvar) = NULL;
4022 else
4023 VAR_PART_OFFSET (dvar, 0) = 0;
4025 dstslot
4026 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4027 INSERT);
4028 gcc_assert (!*dstslot);
4029 *dstslot = dvar;
4031 else
4032 return 1;
4036 nodep = &dvar->var_part[0].loc_chain;
4037 while ((node = *nodep))
4039 location_chain *nextp = &node->next;
4041 if (GET_CODE (node->loc) == REG)
4043 attrs list;
4045 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4046 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4047 && dv_is_value_p (list->dv))
4048 break;
4050 if (!list)
4051 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4052 dv, 0, node->loc);
4053 /* If this value became canonical for another value that had
4054 this register, we want to leave it alone. */
4055 else if (dv_as_value (list->dv) != val)
4057 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4058 dstslot, dv, 0,
4059 node->init, NULL_RTX);
4060 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4062 /* Since nextp points into the removed node, we can't
4063 use it. The pointer to the next node moved to nodep.
4064 However, if the variable we're walking is unshared
4065 during our walk, we'll keep walking the location list
4066 of the previously-shared variable, in which case the
4067 node won't have been removed, and we'll want to skip
4068 it. That's why we test *nodep here. */
4069 if (*nodep != node)
4070 nextp = nodep;
4073 else
4074 /* Canonicalization puts registers first, so we don't have to
4075 walk it all. */
4076 break;
4077 nodep = nextp;
4080 if (dvar != *dstslot)
4081 dvar = *dstslot;
4082 nodep = &dvar->var_part[0].loc_chain;
4084 if (val)
4086 /* Mark all referenced nodes for canonicalization, and make sure
4087 we have mutual equivalence links. */
4088 VALUE_RECURSED_INTO (val) = true;
4089 for (node = *nodep; node; node = node->next)
4090 if (GET_CODE (node->loc) == VALUE)
4092 VALUE_RECURSED_INTO (node->loc) = true;
4093 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4094 node->init, NULL, INSERT);
4097 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4098 gcc_assert (*dstslot == dvar);
4099 canonicalize_values_star (dstslot, dst);
4100 gcc_checking_assert (dstslot
4101 == shared_hash_find_slot_noinsert_1 (dst->vars,
4102 dv, dvhash));
4103 dvar = *dstslot;
4105 else
4107 bool has_value = false, has_other = false;
4109 /* If we have one value and anything else, we're going to
4110 canonicalize this, so make sure all values have an entry in
4111 the table and are marked for canonicalization. */
4112 for (node = *nodep; node; node = node->next)
4114 if (GET_CODE (node->loc) == VALUE)
4116 /* If this was marked during register canonicalization,
4117 we know we have to canonicalize values. */
4118 if (has_value)
4119 has_other = true;
4120 has_value = true;
4121 if (has_other)
4122 break;
4124 else
4126 has_other = true;
4127 if (has_value)
4128 break;
4132 if (has_value && has_other)
4134 for (node = *nodep; node; node = node->next)
4136 if (GET_CODE (node->loc) == VALUE)
4138 decl_or_value dv = dv_from_value (node->loc);
4139 variable_def **slot = NULL;
4141 if (shared_hash_shared (dst->vars))
4142 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4143 if (!slot)
4144 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4145 INSERT);
4146 if (!*slot)
4148 variable var = (variable) pool_alloc (onepart_pool
4149 (ONEPART_VALUE));
4150 var->dv = dv;
4151 var->refcount = 1;
4152 var->n_var_parts = 1;
4153 var->onepart = ONEPART_VALUE;
4154 var->in_changed_variables = false;
4155 var->var_part[0].loc_chain = NULL;
4156 var->var_part[0].cur_loc = NULL;
4157 VAR_LOC_1PAUX (var) = NULL;
4158 *slot = var;
4161 VALUE_RECURSED_INTO (node->loc) = true;
4165 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4166 gcc_assert (*dstslot == dvar);
4167 canonicalize_values_star (dstslot, dst);
4168 gcc_checking_assert (dstslot
4169 == shared_hash_find_slot_noinsert_1 (dst->vars,
4170 dv, dvhash));
4171 dvar = *dstslot;
4175 if (!onepart_variable_different_p (dvar, s2var))
4177 variable_htab_free (dvar);
4178 *dstslot = dvar = s2var;
4179 dvar->refcount++;
4181 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4183 variable_htab_free (dvar);
4184 *dstslot = dvar = s1var;
4185 dvar->refcount++;
4186 dst_can_be_shared = false;
4188 else
4189 dst_can_be_shared = false;
4191 return 1;
4194 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4195 multi-part variable. Unions of multi-part variables and
4196 intersections of one-part ones will be handled in
4197 variable_merge_over_cur(). */
4199 static int
4200 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4202 dataflow_set *dst = dsm->dst;
4203 decl_or_value dv = s2var->dv;
4205 if (!s2var->onepart)
4207 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4208 *dstp = s2var;
4209 s2var->refcount++;
4210 return 1;
4213 dsm->src_onepart_cnt++;
4214 return 1;
4217 /* Combine dataflow set information from SRC2 into DST, using PDST
4218 to carry over information across passes. */
4220 static void
4221 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4223 dataflow_set cur = *dst;
4224 dataflow_set *src1 = &cur;
4225 struct dfset_merge dsm;
4226 int i;
4227 size_t src1_elems, src2_elems;
4228 variable_iterator_type hi;
4229 variable var;
4231 src1_elems = shared_hash_htab (src1->vars)->elements ();
4232 src2_elems = shared_hash_htab (src2->vars)->elements ();
4233 dataflow_set_init (dst);
4234 dst->stack_adjust = cur.stack_adjust;
4235 shared_hash_destroy (dst->vars);
4236 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4237 dst->vars->refcount = 1;
4238 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4240 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4241 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4243 dsm.dst = dst;
4244 dsm.src = src2;
4245 dsm.cur = src1;
4246 dsm.src_onepart_cnt = 0;
4248 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4249 var, variable, hi)
4250 variable_merge_over_src (var, &dsm);
4251 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4252 var, variable, hi)
4253 variable_merge_over_cur (var, &dsm);
4255 if (dsm.src_onepart_cnt)
4256 dst_can_be_shared = false;
4258 dataflow_set_destroy (src1);
4261 /* Mark register equivalences. */
4263 static void
4264 dataflow_set_equiv_regs (dataflow_set *set)
4266 int i;
4267 attrs list, *listp;
4269 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4271 rtx canon[NUM_MACHINE_MODES];
4273 /* If the list is empty or one entry, no need to canonicalize
4274 anything. */
4275 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4276 continue;
4278 memset (canon, 0, sizeof (canon));
4280 for (list = set->regs[i]; list; list = list->next)
4281 if (list->offset == 0 && dv_is_value_p (list->dv))
4283 rtx val = dv_as_value (list->dv);
4284 rtx *cvalp = &canon[(int)GET_MODE (val)];
4285 rtx cval = *cvalp;
4287 if (canon_value_cmp (val, cval))
4288 *cvalp = val;
4291 for (list = set->regs[i]; list; list = list->next)
4292 if (list->offset == 0 && dv_onepart_p (list->dv))
4294 rtx cval = canon[(int)GET_MODE (list->loc)];
4296 if (!cval)
4297 continue;
4299 if (dv_is_value_p (list->dv))
4301 rtx val = dv_as_value (list->dv);
4303 if (val == cval)
4304 continue;
4306 VALUE_RECURSED_INTO (val) = true;
4307 set_variable_part (set, val, dv_from_value (cval), 0,
4308 VAR_INIT_STATUS_INITIALIZED,
4309 NULL, NO_INSERT);
4312 VALUE_RECURSED_INTO (cval) = true;
4313 set_variable_part (set, cval, list->dv, 0,
4314 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4317 for (listp = &set->regs[i]; (list = *listp);
4318 listp = list ? &list->next : listp)
4319 if (list->offset == 0 && dv_onepart_p (list->dv))
4321 rtx cval = canon[(int)GET_MODE (list->loc)];
4322 variable_def **slot;
4324 if (!cval)
4325 continue;
4327 if (dv_is_value_p (list->dv))
4329 rtx val = dv_as_value (list->dv);
4330 if (!VALUE_RECURSED_INTO (val))
4331 continue;
4334 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4335 canonicalize_values_star (slot, set);
4336 if (*listp != list)
4337 list = NULL;
4342 /* Remove any redundant values in the location list of VAR, which must
4343 be unshared and 1-part. */
4345 static void
4346 remove_duplicate_values (variable var)
4348 location_chain node, *nodep;
4350 gcc_assert (var->onepart);
4351 gcc_assert (var->n_var_parts == 1);
4352 gcc_assert (var->refcount == 1);
4354 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4356 if (GET_CODE (node->loc) == VALUE)
4358 if (VALUE_RECURSED_INTO (node->loc))
4360 /* Remove duplicate value node. */
4361 *nodep = node->next;
4362 pool_free (loc_chain_pool, node);
4363 continue;
4365 else
4366 VALUE_RECURSED_INTO (node->loc) = true;
4368 nodep = &node->next;
4371 for (node = var->var_part[0].loc_chain; node; node = node->next)
4372 if (GET_CODE (node->loc) == VALUE)
4374 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4375 VALUE_RECURSED_INTO (node->loc) = false;
4380 /* Hash table iteration argument passed to variable_post_merge. */
4381 struct dfset_post_merge
4383 /* The new input set for the current block. */
4384 dataflow_set *set;
4385 /* Pointer to the permanent input set for the current block, or
4386 NULL. */
4387 dataflow_set **permp;
4390 /* Create values for incoming expressions associated with one-part
4391 variables that don't have value numbers for them. */
4394 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4396 dataflow_set *set = dfpm->set;
4397 variable var = *slot;
4398 location_chain node;
4400 if (!var->onepart || !var->n_var_parts)
4401 return 1;
4403 gcc_assert (var->n_var_parts == 1);
4405 if (dv_is_decl_p (var->dv))
4407 bool check_dupes = false;
4409 restart:
4410 for (node = var->var_part[0].loc_chain; node; node = node->next)
4412 if (GET_CODE (node->loc) == VALUE)
4413 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4414 else if (GET_CODE (node->loc) == REG)
4416 attrs att, *attp, *curp = NULL;
4418 if (var->refcount != 1)
4420 slot = unshare_variable (set, slot, var,
4421 VAR_INIT_STATUS_INITIALIZED);
4422 var = *slot;
4423 goto restart;
4426 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4427 attp = &att->next)
4428 if (att->offset == 0
4429 && GET_MODE (att->loc) == GET_MODE (node->loc))
4431 if (dv_is_value_p (att->dv))
4433 rtx cval = dv_as_value (att->dv);
4434 node->loc = cval;
4435 check_dupes = true;
4436 break;
4438 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4439 curp = attp;
4442 if (!curp)
4444 curp = attp;
4445 while (*curp)
4446 if ((*curp)->offset == 0
4447 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4448 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4449 break;
4450 else
4451 curp = &(*curp)->next;
4452 gcc_assert (*curp);
4455 if (!att)
4457 decl_or_value cdv;
4458 rtx cval;
4460 if (!*dfpm->permp)
4462 *dfpm->permp = XNEW (dataflow_set);
4463 dataflow_set_init (*dfpm->permp);
4466 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4467 att; att = att->next)
4468 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4470 gcc_assert (att->offset == 0
4471 && dv_is_value_p (att->dv));
4472 val_reset (set, att->dv);
4473 break;
4476 if (att)
4478 cdv = att->dv;
4479 cval = dv_as_value (cdv);
4481 else
4483 /* Create a unique value to hold this register,
4484 that ought to be found and reused in
4485 subsequent rounds. */
4486 cselib_val *v;
4487 gcc_assert (!cselib_lookup (node->loc,
4488 GET_MODE (node->loc), 0,
4489 VOIDmode));
4490 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4491 VOIDmode);
4492 cselib_preserve_value (v);
4493 cselib_invalidate_rtx (node->loc);
4494 cval = v->val_rtx;
4495 cdv = dv_from_value (cval);
4496 if (dump_file)
4497 fprintf (dump_file,
4498 "Created new value %u:%u for reg %i\n",
4499 v->uid, v->hash, REGNO (node->loc));
4502 var_reg_decl_set (*dfpm->permp, node->loc,
4503 VAR_INIT_STATUS_INITIALIZED,
4504 cdv, 0, NULL, INSERT);
4506 node->loc = cval;
4507 check_dupes = true;
4510 /* Remove attribute referring to the decl, which now
4511 uses the value for the register, already existing or
4512 to be added when we bring perm in. */
4513 att = *curp;
4514 *curp = att->next;
4515 pool_free (attrs_pool, att);
4519 if (check_dupes)
4520 remove_duplicate_values (var);
4523 return 1;
4526 /* Reset values in the permanent set that are not associated with the
4527 chosen expression. */
4530 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4532 dataflow_set *set = dfpm->set;
4533 variable pvar = *pslot, var;
4534 location_chain pnode;
4535 decl_or_value dv;
4536 attrs att;
4538 gcc_assert (dv_is_value_p (pvar->dv)
4539 && pvar->n_var_parts == 1);
4540 pnode = pvar->var_part[0].loc_chain;
4541 gcc_assert (pnode
4542 && !pnode->next
4543 && REG_P (pnode->loc));
4545 dv = pvar->dv;
4547 var = shared_hash_find (set->vars, dv);
4548 if (var)
4550 /* Although variable_post_merge_new_vals may have made decls
4551 non-star-canonical, values that pre-existed in canonical form
4552 remain canonical, and newly-created values reference a single
4553 REG, so they are canonical as well. Since VAR has the
4554 location list for a VALUE, using find_loc_in_1pdv for it is
4555 fine, since VALUEs don't map back to DECLs. */
4556 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4557 return 1;
4558 val_reset (set, dv);
4561 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4562 if (att->offset == 0
4563 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4564 && dv_is_value_p (att->dv))
4565 break;
4567 /* If there is a value associated with this register already, create
4568 an equivalence. */
4569 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4571 rtx cval = dv_as_value (att->dv);
4572 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4573 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4574 NULL, INSERT);
4576 else if (!att)
4578 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4579 dv, 0, pnode->loc);
4580 variable_union (pvar, set);
4583 return 1;
4586 /* Just checking stuff and registering register attributes for
4587 now. */
4589 static void
4590 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4592 struct dfset_post_merge dfpm;
4594 dfpm.set = set;
4595 dfpm.permp = permp;
4597 shared_hash_htab (set->vars)
4598 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4599 if (*permp)
4600 shared_hash_htab ((*permp)->vars)
4601 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4602 shared_hash_htab (set->vars)
4603 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4604 shared_hash_htab (set->vars)
4605 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4608 /* Return a node whose loc is a MEM that refers to EXPR in the
4609 location list of a one-part variable or value VAR, or in that of
4610 any values recursively mentioned in the location lists. */
4612 static location_chain
4613 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4615 location_chain node;
4616 decl_or_value dv;
4617 variable var;
4618 location_chain where = NULL;
4620 if (!val)
4621 return NULL;
4623 gcc_assert (GET_CODE (val) == VALUE
4624 && !VALUE_RECURSED_INTO (val));
4626 dv = dv_from_value (val);
4627 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4629 if (!var)
4630 return NULL;
4632 gcc_assert (var->onepart);
4634 if (!var->n_var_parts)
4635 return NULL;
4637 VALUE_RECURSED_INTO (val) = true;
4639 for (node = var->var_part[0].loc_chain; node; node = node->next)
4640 if (MEM_P (node->loc)
4641 && MEM_EXPR (node->loc) == expr
4642 && INT_MEM_OFFSET (node->loc) == 0)
4644 where = node;
4645 break;
4647 else if (GET_CODE (node->loc) == VALUE
4648 && !VALUE_RECURSED_INTO (node->loc)
4649 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4650 break;
4652 VALUE_RECURSED_INTO (val) = false;
4654 return where;
4657 /* Return TRUE if the value of MEM may vary across a call. */
4659 static bool
4660 mem_dies_at_call (rtx mem)
4662 tree expr = MEM_EXPR (mem);
4663 tree decl;
4665 if (!expr)
4666 return true;
4668 decl = get_base_address (expr);
4670 if (!decl)
4671 return true;
4673 if (!DECL_P (decl))
4674 return true;
4676 return (may_be_aliased (decl)
4677 || (!TREE_READONLY (decl) && is_global_var (decl)));
4680 /* Remove all MEMs from the location list of a hash table entry for a
4681 one-part variable, except those whose MEM attributes map back to
4682 the variable itself, directly or within a VALUE. */
4685 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4687 variable var = *slot;
4689 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4691 tree decl = dv_as_decl (var->dv);
4692 location_chain loc, *locp;
4693 bool changed = false;
4695 if (!var->n_var_parts)
4696 return 1;
4698 gcc_assert (var->n_var_parts == 1);
4700 if (shared_var_p (var, set->vars))
4702 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4704 /* We want to remove dying MEMs that doesn't refer to DECL. */
4705 if (GET_CODE (loc->loc) == MEM
4706 && (MEM_EXPR (loc->loc) != decl
4707 || INT_MEM_OFFSET (loc->loc) != 0)
4708 && !mem_dies_at_call (loc->loc))
4709 break;
4710 /* We want to move here MEMs that do refer to DECL. */
4711 else if (GET_CODE (loc->loc) == VALUE
4712 && find_mem_expr_in_1pdv (decl, loc->loc,
4713 shared_hash_htab (set->vars)))
4714 break;
4717 if (!loc)
4718 return 1;
4720 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4721 var = *slot;
4722 gcc_assert (var->n_var_parts == 1);
4725 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4726 loc; loc = *locp)
4728 rtx old_loc = loc->loc;
4729 if (GET_CODE (old_loc) == VALUE)
4731 location_chain mem_node
4732 = find_mem_expr_in_1pdv (decl, loc->loc,
4733 shared_hash_htab (set->vars));
4735 /* ??? This picks up only one out of multiple MEMs that
4736 refer to the same variable. Do we ever need to be
4737 concerned about dealing with more than one, or, given
4738 that they should all map to the same variable
4739 location, their addresses will have been merged and
4740 they will be regarded as equivalent? */
4741 if (mem_node)
4743 loc->loc = mem_node->loc;
4744 loc->set_src = mem_node->set_src;
4745 loc->init = MIN (loc->init, mem_node->init);
4749 if (GET_CODE (loc->loc) != MEM
4750 || (MEM_EXPR (loc->loc) == decl
4751 && INT_MEM_OFFSET (loc->loc) == 0)
4752 || !mem_dies_at_call (loc->loc))
4754 if (old_loc != loc->loc && emit_notes)
4756 if (old_loc == var->var_part[0].cur_loc)
4758 changed = true;
4759 var->var_part[0].cur_loc = NULL;
4762 locp = &loc->next;
4763 continue;
4766 if (emit_notes)
4768 if (old_loc == var->var_part[0].cur_loc)
4770 changed = true;
4771 var->var_part[0].cur_loc = NULL;
4774 *locp = loc->next;
4775 pool_free (loc_chain_pool, loc);
4778 if (!var->var_part[0].loc_chain)
4780 var->n_var_parts--;
4781 changed = true;
4783 if (changed)
4784 variable_was_changed (var, set);
4787 return 1;
4790 /* Remove all MEMs from the location list of a hash table entry for a
4791 value. */
4794 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4796 variable var = *slot;
4798 if (var->onepart == ONEPART_VALUE)
4800 location_chain loc, *locp;
4801 bool changed = false;
4802 rtx cur_loc;
4804 gcc_assert (var->n_var_parts == 1);
4806 if (shared_var_p (var, set->vars))
4808 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4809 if (GET_CODE (loc->loc) == MEM
4810 && mem_dies_at_call (loc->loc))
4811 break;
4813 if (!loc)
4814 return 1;
4816 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4817 var = *slot;
4818 gcc_assert (var->n_var_parts == 1);
4821 if (VAR_LOC_1PAUX (var))
4822 cur_loc = VAR_LOC_FROM (var);
4823 else
4824 cur_loc = var->var_part[0].cur_loc;
4826 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4827 loc; loc = *locp)
4829 if (GET_CODE (loc->loc) != MEM
4830 || !mem_dies_at_call (loc->loc))
4832 locp = &loc->next;
4833 continue;
4836 *locp = loc->next;
4837 /* If we have deleted the location which was last emitted
4838 we have to emit new location so add the variable to set
4839 of changed variables. */
4840 if (cur_loc == loc->loc)
4842 changed = true;
4843 var->var_part[0].cur_loc = NULL;
4844 if (VAR_LOC_1PAUX (var))
4845 VAR_LOC_FROM (var) = NULL;
4847 pool_free (loc_chain_pool, loc);
4850 if (!var->var_part[0].loc_chain)
4852 var->n_var_parts--;
4853 changed = true;
4855 if (changed)
4856 variable_was_changed (var, set);
4859 return 1;
4862 /* Remove all variable-location information about call-clobbered
4863 registers, as well as associations between MEMs and VALUEs. */
4865 static void
4866 dataflow_set_clear_at_call (dataflow_set *set)
4868 unsigned int r;
4869 hard_reg_set_iterator hrsi;
4871 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4872 var_regno_delete (set, r);
4874 if (MAY_HAVE_DEBUG_INSNS)
4876 set->traversed_vars = set->vars;
4877 shared_hash_htab (set->vars)
4878 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4879 set->traversed_vars = set->vars;
4880 shared_hash_htab (set->vars)
4881 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4882 set->traversed_vars = NULL;
4886 static bool
4887 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4889 location_chain lc1, lc2;
4891 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4893 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4895 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4897 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4898 break;
4900 if (rtx_equal_p (lc1->loc, lc2->loc))
4901 break;
4903 if (!lc2)
4904 return true;
4906 return false;
4909 /* Return true if one-part variables VAR1 and VAR2 are different.
4910 They must be in canonical order. */
4912 static bool
4913 onepart_variable_different_p (variable var1, variable var2)
4915 location_chain lc1, lc2;
4917 if (var1 == var2)
4918 return false;
4920 gcc_assert (var1->n_var_parts == 1
4921 && var2->n_var_parts == 1);
4923 lc1 = var1->var_part[0].loc_chain;
4924 lc2 = var2->var_part[0].loc_chain;
4926 gcc_assert (lc1 && lc2);
4928 while (lc1 && lc2)
4930 if (loc_cmp (lc1->loc, lc2->loc))
4931 return true;
4932 lc1 = lc1->next;
4933 lc2 = lc2->next;
4936 return lc1 != lc2;
4939 /* Return true if variables VAR1 and VAR2 are different. */
4941 static bool
4942 variable_different_p (variable var1, variable var2)
4944 int i;
4946 if (var1 == var2)
4947 return false;
4949 if (var1->onepart != var2->onepart)
4950 return true;
4952 if (var1->n_var_parts != var2->n_var_parts)
4953 return true;
4955 if (var1->onepart && var1->n_var_parts)
4957 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4958 && var1->n_var_parts == 1);
4959 /* One-part values have locations in a canonical order. */
4960 return onepart_variable_different_p (var1, var2);
4963 for (i = 0; i < var1->n_var_parts; i++)
4965 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4966 return true;
4967 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4968 return true;
4969 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4970 return true;
4972 return false;
4975 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4977 static bool
4978 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4980 variable_iterator_type hi;
4981 variable var1;
4983 if (old_set->vars == new_set->vars)
4984 return false;
4986 if (shared_hash_htab (old_set->vars)->elements ()
4987 != shared_hash_htab (new_set->vars)->elements ())
4988 return true;
4990 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
4991 var1, variable, hi)
4993 variable_table_type *htab = shared_hash_htab (new_set->vars);
4994 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4995 if (!var2)
4997 if (dump_file && (dump_flags & TDF_DETAILS))
4999 fprintf (dump_file, "dataflow difference found: removal of:\n");
5000 dump_var (var1);
5002 return true;
5005 if (variable_different_p (var1, var2))
5007 if (dump_file && (dump_flags & TDF_DETAILS))
5009 fprintf (dump_file, "dataflow difference found: "
5010 "old and new follow:\n");
5011 dump_var (var1);
5012 dump_var (var2);
5014 return true;
5018 /* No need to traverse the second hashtab, if both have the same number
5019 of elements and the second one had all entries found in the first one,
5020 then it can't have any extra entries. */
5021 return false;
5024 /* Free the contents of dataflow set SET. */
5026 static void
5027 dataflow_set_destroy (dataflow_set *set)
5029 int i;
5031 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5032 attrs_list_clear (&set->regs[i]);
5034 shared_hash_destroy (set->vars);
5035 set->vars = NULL;
5038 /* Return true if RTL X contains a SYMBOL_REF. */
5040 static bool
5041 contains_symbol_ref (rtx x)
5043 const char *fmt;
5044 RTX_CODE code;
5045 int i;
5047 if (!x)
5048 return false;
5050 code = GET_CODE (x);
5051 if (code == SYMBOL_REF)
5052 return true;
5054 fmt = GET_RTX_FORMAT (code);
5055 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5057 if (fmt[i] == 'e')
5059 if (contains_symbol_ref (XEXP (x, i)))
5060 return true;
5062 else if (fmt[i] == 'E')
5064 int j;
5065 for (j = 0; j < XVECLEN (x, i); j++)
5066 if (contains_symbol_ref (XVECEXP (x, i, j)))
5067 return true;
5071 return false;
5074 /* Shall EXPR be tracked? */
5076 static bool
5077 track_expr_p (tree expr, bool need_rtl)
5079 rtx decl_rtl;
5080 tree realdecl;
5082 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5083 return DECL_RTL_SET_P (expr);
5085 /* If EXPR is not a parameter or a variable do not track it. */
5086 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5087 return 0;
5089 /* It also must have a name... */
5090 if (!DECL_NAME (expr) && need_rtl)
5091 return 0;
5093 /* ... and a RTL assigned to it. */
5094 decl_rtl = DECL_RTL_IF_SET (expr);
5095 if (!decl_rtl && need_rtl)
5096 return 0;
5098 /* If this expression is really a debug alias of some other declaration, we
5099 don't need to track this expression if the ultimate declaration is
5100 ignored. */
5101 realdecl = expr;
5102 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5104 realdecl = DECL_DEBUG_EXPR (realdecl);
5105 if (!DECL_P (realdecl))
5107 if (handled_component_p (realdecl)
5108 || (TREE_CODE (realdecl) == MEM_REF
5109 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5111 HOST_WIDE_INT bitsize, bitpos, maxsize;
5112 tree innerdecl
5113 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5114 &maxsize);
5115 if (!DECL_P (innerdecl)
5116 || DECL_IGNORED_P (innerdecl)
5117 /* Do not track declarations for parts of tracked parameters
5118 since we want to track them as a whole instead. */
5119 || (TREE_CODE (innerdecl) == PARM_DECL
5120 && DECL_MODE (innerdecl) != BLKmode
5121 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5122 || TREE_STATIC (innerdecl)
5123 || bitsize <= 0
5124 || bitpos + bitsize > 256
5125 || bitsize != maxsize)
5126 return 0;
5127 else
5128 realdecl = expr;
5130 else
5131 return 0;
5135 /* Do not track EXPR if REALDECL it should be ignored for debugging
5136 purposes. */
5137 if (DECL_IGNORED_P (realdecl))
5138 return 0;
5140 /* Do not track global variables until we are able to emit correct location
5141 list for them. */
5142 if (TREE_STATIC (realdecl))
5143 return 0;
5145 /* When the EXPR is a DECL for alias of some variable (see example)
5146 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5147 DECL_RTL contains SYMBOL_REF.
5149 Example:
5150 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5151 char **_dl_argv;
5153 if (decl_rtl && MEM_P (decl_rtl)
5154 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5155 return 0;
5157 /* If RTX is a memory it should not be very large (because it would be
5158 an array or struct). */
5159 if (decl_rtl && MEM_P (decl_rtl))
5161 /* Do not track structures and arrays. */
5162 if (GET_MODE (decl_rtl) == BLKmode
5163 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5164 return 0;
5165 if (MEM_SIZE_KNOWN_P (decl_rtl)
5166 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5167 return 0;
5170 DECL_CHANGED (expr) = 0;
5171 DECL_CHANGED (realdecl) = 0;
5172 return 1;
5175 /* Determine whether a given LOC refers to the same variable part as
5176 EXPR+OFFSET. */
5178 static bool
5179 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5181 tree expr2;
5182 HOST_WIDE_INT offset2;
5184 if (! DECL_P (expr))
5185 return false;
5187 if (REG_P (loc))
5189 expr2 = REG_EXPR (loc);
5190 offset2 = REG_OFFSET (loc);
5192 else if (MEM_P (loc))
5194 expr2 = MEM_EXPR (loc);
5195 offset2 = INT_MEM_OFFSET (loc);
5197 else
5198 return false;
5200 if (! expr2 || ! DECL_P (expr2))
5201 return false;
5203 expr = var_debug_decl (expr);
5204 expr2 = var_debug_decl (expr2);
5206 return (expr == expr2 && offset == offset2);
5209 /* LOC is a REG or MEM that we would like to track if possible.
5210 If EXPR is null, we don't know what expression LOC refers to,
5211 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5212 LOC is an lvalue register.
5214 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5215 is something we can track. When returning true, store the mode of
5216 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5217 from EXPR in *OFFSET_OUT (if nonnull). */
5219 static bool
5220 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5221 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5223 enum machine_mode mode;
5225 if (expr == NULL || !track_expr_p (expr, true))
5226 return false;
5228 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5229 whole subreg, but only the old inner part is really relevant. */
5230 mode = GET_MODE (loc);
5231 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5233 enum machine_mode pseudo_mode;
5235 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5236 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5238 offset += byte_lowpart_offset (pseudo_mode, mode);
5239 mode = pseudo_mode;
5243 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5244 Do the same if we are storing to a register and EXPR occupies
5245 the whole of register LOC; in that case, the whole of EXPR is
5246 being changed. We exclude complex modes from the second case
5247 because the real and imaginary parts are represented as separate
5248 pseudo registers, even if the whole complex value fits into one
5249 hard register. */
5250 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5251 || (store_reg_p
5252 && !COMPLEX_MODE_P (DECL_MODE (expr))
5253 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5254 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5256 mode = DECL_MODE (expr);
5257 offset = 0;
5260 if (offset < 0 || offset >= MAX_VAR_PARTS)
5261 return false;
5263 if (mode_out)
5264 *mode_out = mode;
5265 if (offset_out)
5266 *offset_out = offset;
5267 return true;
5270 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5271 want to track. When returning nonnull, make sure that the attributes
5272 on the returned value are updated. */
5274 static rtx
5275 var_lowpart (enum machine_mode mode, rtx loc)
5277 unsigned int offset, reg_offset, regno;
5279 if (GET_MODE (loc) == mode)
5280 return loc;
5282 if (!REG_P (loc) && !MEM_P (loc))
5283 return NULL;
5285 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5287 if (MEM_P (loc))
5288 return adjust_address_nv (loc, mode, offset);
5290 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5291 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5292 reg_offset, mode);
5293 return gen_rtx_REG_offset (loc, mode, regno, offset);
5296 /* Carry information about uses and stores while walking rtx. */
5298 struct count_use_info
5300 /* The insn where the RTX is. */
5301 rtx_insn *insn;
5303 /* The basic block where insn is. */
5304 basic_block bb;
5306 /* The array of n_sets sets in the insn, as determined by cselib. */
5307 struct cselib_set *sets;
5308 int n_sets;
5310 /* True if we're counting stores, false otherwise. */
5311 bool store_p;
5314 /* Find a VALUE corresponding to X. */
5316 static inline cselib_val *
5317 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5319 int i;
5321 if (cui->sets)
5323 /* This is called after uses are set up and before stores are
5324 processed by cselib, so it's safe to look up srcs, but not
5325 dsts. So we look up expressions that appear in srcs or in
5326 dest expressions, but we search the sets array for dests of
5327 stores. */
5328 if (cui->store_p)
5330 /* Some targets represent memset and memcpy patterns
5331 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5332 (set (mem:BLK ...) (const_int ...)) or
5333 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5334 in that case, otherwise we end up with mode mismatches. */
5335 if (mode == BLKmode && MEM_P (x))
5336 return NULL;
5337 for (i = 0; i < cui->n_sets; i++)
5338 if (cui->sets[i].dest == x)
5339 return cui->sets[i].src_elt;
5341 else
5342 return cselib_lookup (x, mode, 0, VOIDmode);
5345 return NULL;
5348 /* Replace all registers and addresses in an expression with VALUE
5349 expressions that map back to them, unless the expression is a
5350 register. If no mapping is or can be performed, returns NULL. */
5352 static rtx
5353 replace_expr_with_values (rtx loc)
5355 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5356 return NULL;
5357 else if (MEM_P (loc))
5359 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5360 get_address_mode (loc), 0,
5361 GET_MODE (loc));
5362 if (addr)
5363 return replace_equiv_address_nv (loc, addr->val_rtx);
5364 else
5365 return NULL;
5367 else
5368 return cselib_subst_to_values (loc, VOIDmode);
5371 /* Return true if X contains a DEBUG_EXPR. */
5373 static bool
5374 rtx_debug_expr_p (const_rtx x)
5376 subrtx_iterator::array_type array;
5377 FOR_EACH_SUBRTX (iter, array, x, ALL)
5378 if (GET_CODE (*iter) == DEBUG_EXPR)
5379 return true;
5380 return false;
5383 /* Determine what kind of micro operation to choose for a USE. Return
5384 MO_CLOBBER if no micro operation is to be generated. */
5386 static enum micro_operation_type
5387 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5389 tree expr;
5391 if (cui && cui->sets)
5393 if (GET_CODE (loc) == VAR_LOCATION)
5395 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5397 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5398 if (! VAR_LOC_UNKNOWN_P (ploc))
5400 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5401 VOIDmode);
5403 /* ??? flag_float_store and volatile mems are never
5404 given values, but we could in theory use them for
5405 locations. */
5406 gcc_assert (val || 1);
5408 return MO_VAL_LOC;
5410 else
5411 return MO_CLOBBER;
5414 if (REG_P (loc) || MEM_P (loc))
5416 if (modep)
5417 *modep = GET_MODE (loc);
5418 if (cui->store_p)
5420 if (REG_P (loc)
5421 || (find_use_val (loc, GET_MODE (loc), cui)
5422 && cselib_lookup (XEXP (loc, 0),
5423 get_address_mode (loc), 0,
5424 GET_MODE (loc))))
5425 return MO_VAL_SET;
5427 else
5429 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5431 if (val && !cselib_preserved_value_p (val))
5432 return MO_VAL_USE;
5437 if (REG_P (loc))
5439 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5441 if (loc == cfa_base_rtx)
5442 return MO_CLOBBER;
5443 expr = REG_EXPR (loc);
5445 if (!expr)
5446 return MO_USE_NO_VAR;
5447 else if (target_for_debug_bind (var_debug_decl (expr)))
5448 return MO_CLOBBER;
5449 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5450 false, modep, NULL))
5451 return MO_USE;
5452 else
5453 return MO_USE_NO_VAR;
5455 else if (MEM_P (loc))
5457 expr = MEM_EXPR (loc);
5459 if (!expr)
5460 return MO_CLOBBER;
5461 else if (target_for_debug_bind (var_debug_decl (expr)))
5462 return MO_CLOBBER;
5463 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5464 false, modep, NULL)
5465 /* Multi-part variables shouldn't refer to one-part
5466 variable names such as VALUEs (never happens) or
5467 DEBUG_EXPRs (only happens in the presence of debug
5468 insns). */
5469 && (!MAY_HAVE_DEBUG_INSNS
5470 || !rtx_debug_expr_p (XEXP (loc, 0))))
5471 return MO_USE;
5472 else
5473 return MO_CLOBBER;
5476 return MO_CLOBBER;
5479 /* Log to OUT information about micro-operation MOPT involving X in
5480 INSN of BB. */
5482 static inline void
5483 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5484 enum micro_operation_type mopt, FILE *out)
5486 fprintf (out, "bb %i op %i insn %i %s ",
5487 bb->index, VTI (bb)->mos.length (),
5488 INSN_UID (insn), micro_operation_type_name[mopt]);
5489 print_inline_rtx (out, x, 2);
5490 fputc ('\n', out);
5493 /* Tell whether the CONCAT used to holds a VALUE and its location
5494 needs value resolution, i.e., an attempt of mapping the location
5495 back to other incoming values. */
5496 #define VAL_NEEDS_RESOLUTION(x) \
5497 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5498 /* Whether the location in the CONCAT is a tracked expression, that
5499 should also be handled like a MO_USE. */
5500 #define VAL_HOLDS_TRACK_EXPR(x) \
5501 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5502 /* Whether the location in the CONCAT should be handled like a MO_COPY
5503 as well. */
5504 #define VAL_EXPR_IS_COPIED(x) \
5505 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5506 /* Whether the location in the CONCAT should be handled like a
5507 MO_CLOBBER as well. */
5508 #define VAL_EXPR_IS_CLOBBERED(x) \
5509 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5511 /* All preserved VALUEs. */
5512 static vec<rtx> preserved_values;
5514 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5516 static void
5517 preserve_value (cselib_val *val)
5519 cselib_preserve_value (val);
5520 preserved_values.safe_push (val->val_rtx);
5523 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5524 any rtxes not suitable for CONST use not replaced by VALUEs
5525 are discovered. */
5527 static bool
5528 non_suitable_const (const_rtx x)
5530 subrtx_iterator::array_type array;
5531 FOR_EACH_SUBRTX (iter, array, x, ALL)
5533 const_rtx x = *iter;
5534 switch (GET_CODE (x))
5536 case REG:
5537 case DEBUG_EXPR:
5538 case PC:
5539 case SCRATCH:
5540 case CC0:
5541 case ASM_INPUT:
5542 case ASM_OPERANDS:
5543 return true;
5544 case MEM:
5545 if (!MEM_READONLY_P (x))
5546 return true;
5547 break;
5548 default:
5549 break;
5552 return false;
5555 /* Add uses (register and memory references) LOC which will be tracked
5556 to VTI (bb)->mos. */
5558 static void
5559 add_uses (rtx loc, struct count_use_info *cui)
5561 enum machine_mode mode = VOIDmode;
5562 enum micro_operation_type type = use_type (loc, cui, &mode);
5564 if (type != MO_CLOBBER)
5566 basic_block bb = cui->bb;
5567 micro_operation mo;
5569 mo.type = type;
5570 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5571 mo.insn = cui->insn;
5573 if (type == MO_VAL_LOC)
5575 rtx oloc = loc;
5576 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5577 cselib_val *val;
5579 gcc_assert (cui->sets);
5581 if (MEM_P (vloc)
5582 && !REG_P (XEXP (vloc, 0))
5583 && !MEM_P (XEXP (vloc, 0)))
5585 rtx mloc = vloc;
5586 enum machine_mode address_mode = get_address_mode (mloc);
5587 cselib_val *val
5588 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5589 GET_MODE (mloc));
5591 if (val && !cselib_preserved_value_p (val))
5592 preserve_value (val);
5595 if (CONSTANT_P (vloc)
5596 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5597 /* For constants don't look up any value. */;
5598 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5599 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5601 enum machine_mode mode2;
5602 enum micro_operation_type type2;
5603 rtx nloc = NULL;
5604 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5606 if (resolvable)
5607 nloc = replace_expr_with_values (vloc);
5609 if (nloc)
5611 oloc = shallow_copy_rtx (oloc);
5612 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5615 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5617 type2 = use_type (vloc, 0, &mode2);
5619 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5620 || type2 == MO_CLOBBER);
5622 if (type2 == MO_CLOBBER
5623 && !cselib_preserved_value_p (val))
5625 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5626 preserve_value (val);
5629 else if (!VAR_LOC_UNKNOWN_P (vloc))
5631 oloc = shallow_copy_rtx (oloc);
5632 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5635 mo.u.loc = oloc;
5637 else if (type == MO_VAL_USE)
5639 enum machine_mode mode2 = VOIDmode;
5640 enum micro_operation_type type2;
5641 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5642 rtx vloc, oloc = loc, nloc;
5644 gcc_assert (cui->sets);
5646 if (MEM_P (oloc)
5647 && !REG_P (XEXP (oloc, 0))
5648 && !MEM_P (XEXP (oloc, 0)))
5650 rtx mloc = oloc;
5651 enum machine_mode address_mode = get_address_mode (mloc);
5652 cselib_val *val
5653 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5654 GET_MODE (mloc));
5656 if (val && !cselib_preserved_value_p (val))
5657 preserve_value (val);
5660 type2 = use_type (loc, 0, &mode2);
5662 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5663 || type2 == MO_CLOBBER);
5665 if (type2 == MO_USE)
5666 vloc = var_lowpart (mode2, loc);
5667 else
5668 vloc = oloc;
5670 /* The loc of a MO_VAL_USE may have two forms:
5672 (concat val src): val is at src, a value-based
5673 representation.
5675 (concat (concat val use) src): same as above, with use as
5676 the MO_USE tracked value, if it differs from src.
5680 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5681 nloc = replace_expr_with_values (loc);
5682 if (!nloc)
5683 nloc = oloc;
5685 if (vloc != nloc)
5686 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5687 else
5688 oloc = val->val_rtx;
5690 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5692 if (type2 == MO_USE)
5693 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5694 if (!cselib_preserved_value_p (val))
5696 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5697 preserve_value (val);
5700 else
5701 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5703 if (dump_file && (dump_flags & TDF_DETAILS))
5704 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5705 VTI (bb)->mos.safe_push (mo);
5709 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5711 static void
5712 add_uses_1 (rtx *x, void *cui)
5714 subrtx_var_iterator::array_type array;
5715 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5716 add_uses (*iter, (struct count_use_info *) cui);
5719 /* This is the value used during expansion of locations. We want it
5720 to be unbounded, so that variables expanded deep in a recursion
5721 nest are fully evaluated, so that their values are cached
5722 correctly. We avoid recursion cycles through other means, and we
5723 don't unshare RTL, so excess complexity is not a problem. */
5724 #define EXPR_DEPTH (INT_MAX)
5725 /* We use this to keep too-complex expressions from being emitted as
5726 location notes, and then to debug information. Users can trade
5727 compile time for ridiculously complex expressions, although they're
5728 seldom useful, and they may often have to be discarded as not
5729 representable anyway. */
5730 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5732 /* Attempt to reverse the EXPR operation in the debug info and record
5733 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5734 no longer live we can express its value as VAL - 6. */
5736 static void
5737 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5739 rtx src, arg, ret;
5740 cselib_val *v;
5741 struct elt_loc_list *l;
5742 enum rtx_code code;
5743 int count;
5745 if (GET_CODE (expr) != SET)
5746 return;
5748 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5749 return;
5751 src = SET_SRC (expr);
5752 switch (GET_CODE (src))
5754 case PLUS:
5755 case MINUS:
5756 case XOR:
5757 case NOT:
5758 case NEG:
5759 if (!REG_P (XEXP (src, 0)))
5760 return;
5761 break;
5762 case SIGN_EXTEND:
5763 case ZERO_EXTEND:
5764 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5765 return;
5766 break;
5767 default:
5768 return;
5771 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5772 return;
5774 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5775 if (!v || !cselib_preserved_value_p (v))
5776 return;
5778 /* Use canonical V to avoid creating multiple redundant expressions
5779 for different VALUES equivalent to V. */
5780 v = canonical_cselib_val (v);
5782 /* Adding a reverse op isn't useful if V already has an always valid
5783 location. Ignore ENTRY_VALUE, while it is always constant, we should
5784 prefer non-ENTRY_VALUE locations whenever possible. */
5785 for (l = v->locs, count = 0; l; l = l->next, count++)
5786 if (CONSTANT_P (l->loc)
5787 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5788 return;
5789 /* Avoid creating too large locs lists. */
5790 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5791 return;
5793 switch (GET_CODE (src))
5795 case NOT:
5796 case NEG:
5797 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5798 return;
5799 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5800 break;
5801 case SIGN_EXTEND:
5802 case ZERO_EXTEND:
5803 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5804 break;
5805 case XOR:
5806 code = XOR;
5807 goto binary;
5808 case PLUS:
5809 code = MINUS;
5810 goto binary;
5811 case MINUS:
5812 code = PLUS;
5813 goto binary;
5814 binary:
5815 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5816 return;
5817 arg = XEXP (src, 1);
5818 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5820 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5821 if (arg == NULL_RTX)
5822 return;
5823 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5824 return;
5826 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5827 if (ret == val)
5828 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5829 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5830 breaks a lot of routines during var-tracking. */
5831 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5832 break;
5833 default:
5834 gcc_unreachable ();
5837 cselib_add_permanent_equiv (v, ret, insn);
5840 /* Add stores (register and memory references) LOC which will be tracked
5841 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5842 CUIP->insn is instruction which the LOC is part of. */
5844 static void
5845 add_stores (rtx loc, const_rtx expr, void *cuip)
5847 enum machine_mode mode = VOIDmode, mode2;
5848 struct count_use_info *cui = (struct count_use_info *)cuip;
5849 basic_block bb = cui->bb;
5850 micro_operation mo;
5851 rtx oloc = loc, nloc, src = NULL;
5852 enum micro_operation_type type = use_type (loc, cui, &mode);
5853 bool track_p = false;
5854 cselib_val *v;
5855 bool resolve, preserve;
5857 if (type == MO_CLOBBER)
5858 return;
5860 mode2 = mode;
5862 if (REG_P (loc))
5864 gcc_assert (loc != cfa_base_rtx);
5865 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5866 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5867 || GET_CODE (expr) == CLOBBER)
5869 mo.type = MO_CLOBBER;
5870 mo.u.loc = loc;
5871 if (GET_CODE (expr) == SET
5872 && SET_DEST (expr) == loc
5873 && !unsuitable_loc (SET_SRC (expr))
5874 && find_use_val (loc, mode, cui))
5876 gcc_checking_assert (type == MO_VAL_SET);
5877 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5880 else
5882 if (GET_CODE (expr) == SET
5883 && SET_DEST (expr) == loc
5884 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5885 src = var_lowpart (mode2, SET_SRC (expr));
5886 loc = var_lowpart (mode2, loc);
5888 if (src == NULL)
5890 mo.type = MO_SET;
5891 mo.u.loc = loc;
5893 else
5895 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5896 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5898 /* If this is an instruction copying (part of) a parameter
5899 passed by invisible reference to its register location,
5900 pretend it's a SET so that the initial memory location
5901 is discarded, as the parameter register can be reused
5902 for other purposes and we do not track locations based
5903 on generic registers. */
5904 if (MEM_P (src)
5905 && REG_EXPR (loc)
5906 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5907 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5908 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5909 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5910 != arg_pointer_rtx)
5911 mo.type = MO_SET;
5912 else
5913 mo.type = MO_COPY;
5915 else
5916 mo.type = MO_SET;
5917 mo.u.loc = xexpr;
5920 mo.insn = cui->insn;
5922 else if (MEM_P (loc)
5923 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5924 || cui->sets))
5926 if (MEM_P (loc) && type == MO_VAL_SET
5927 && !REG_P (XEXP (loc, 0))
5928 && !MEM_P (XEXP (loc, 0)))
5930 rtx mloc = loc;
5931 enum machine_mode address_mode = get_address_mode (mloc);
5932 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5933 address_mode, 0,
5934 GET_MODE (mloc));
5936 if (val && !cselib_preserved_value_p (val))
5937 preserve_value (val);
5940 if (GET_CODE (expr) == CLOBBER || !track_p)
5942 mo.type = MO_CLOBBER;
5943 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5945 else
5947 if (GET_CODE (expr) == SET
5948 && SET_DEST (expr) == loc
5949 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5950 src = var_lowpart (mode2, SET_SRC (expr));
5951 loc = var_lowpart (mode2, loc);
5953 if (src == NULL)
5955 mo.type = MO_SET;
5956 mo.u.loc = loc;
5958 else
5960 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5961 if (same_variable_part_p (SET_SRC (xexpr),
5962 MEM_EXPR (loc),
5963 INT_MEM_OFFSET (loc)))
5964 mo.type = MO_COPY;
5965 else
5966 mo.type = MO_SET;
5967 mo.u.loc = xexpr;
5970 mo.insn = cui->insn;
5972 else
5973 return;
5975 if (type != MO_VAL_SET)
5976 goto log_and_return;
5978 v = find_use_val (oloc, mode, cui);
5980 if (!v)
5981 goto log_and_return;
5983 resolve = preserve = !cselib_preserved_value_p (v);
5985 /* We cannot track values for multiple-part variables, so we track only
5986 locations for tracked parameters passed either by invisible reference
5987 or directly in multiple locations. */
5988 if (track_p
5989 && REG_P (loc)
5990 && REG_EXPR (loc)
5991 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5992 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5993 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5994 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5995 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5996 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5997 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5999 /* Although we don't use the value here, it could be used later by the
6000 mere virtue of its existence as the operand of the reverse operation
6001 that gave rise to it (typically extension/truncation). Make sure it
6002 is preserved as required by vt_expand_var_loc_chain. */
6003 if (preserve)
6004 preserve_value (v);
6005 goto log_and_return;
6008 if (loc == stack_pointer_rtx
6009 && hard_frame_pointer_adjustment != -1
6010 && preserve)
6011 cselib_set_value_sp_based (v);
6013 nloc = replace_expr_with_values (oloc);
6014 if (nloc)
6015 oloc = nloc;
6017 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6019 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6021 if (oval == v)
6022 return;
6023 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6025 if (oval && !cselib_preserved_value_p (oval))
6027 micro_operation moa;
6029 preserve_value (oval);
6031 moa.type = MO_VAL_USE;
6032 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6033 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6034 moa.insn = cui->insn;
6036 if (dump_file && (dump_flags & TDF_DETAILS))
6037 log_op_type (moa.u.loc, cui->bb, cui->insn,
6038 moa.type, dump_file);
6039 VTI (bb)->mos.safe_push (moa);
6042 resolve = false;
6044 else if (resolve && GET_CODE (mo.u.loc) == SET)
6046 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6047 nloc = replace_expr_with_values (SET_SRC (expr));
6048 else
6049 nloc = NULL_RTX;
6051 /* Avoid the mode mismatch between oexpr and expr. */
6052 if (!nloc && mode != mode2)
6054 nloc = SET_SRC (expr);
6055 gcc_assert (oloc == SET_DEST (expr));
6058 if (nloc && nloc != SET_SRC (mo.u.loc))
6059 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6060 else
6062 if (oloc == SET_DEST (mo.u.loc))
6063 /* No point in duplicating. */
6064 oloc = mo.u.loc;
6065 if (!REG_P (SET_SRC (mo.u.loc)))
6066 resolve = false;
6069 else if (!resolve)
6071 if (GET_CODE (mo.u.loc) == SET
6072 && oloc == SET_DEST (mo.u.loc))
6073 /* No point in duplicating. */
6074 oloc = mo.u.loc;
6076 else
6077 resolve = false;
6079 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6081 if (mo.u.loc != oloc)
6082 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6084 /* The loc of a MO_VAL_SET may have various forms:
6086 (concat val dst): dst now holds val
6088 (concat val (set dst src)): dst now holds val, copied from src
6090 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6091 after replacing mems and non-top-level regs with values.
6093 (concat (concat val dstv) (set dst src)): dst now holds val,
6094 copied from src. dstv is a value-based representation of dst, if
6095 it differs from dst. If resolution is needed, src is a REG, and
6096 its mode is the same as that of val.
6098 (concat (concat val (set dstv srcv)) (set dst src)): src
6099 copied to dst, holding val. dstv and srcv are value-based
6100 representations of dst and src, respectively.
6104 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6105 reverse_op (v->val_rtx, expr, cui->insn);
6107 mo.u.loc = loc;
6109 if (track_p)
6110 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6111 if (preserve)
6113 VAL_NEEDS_RESOLUTION (loc) = resolve;
6114 preserve_value (v);
6116 if (mo.type == MO_CLOBBER)
6117 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6118 if (mo.type == MO_COPY)
6119 VAL_EXPR_IS_COPIED (loc) = 1;
6121 mo.type = MO_VAL_SET;
6123 log_and_return:
6124 if (dump_file && (dump_flags & TDF_DETAILS))
6125 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6126 VTI (bb)->mos.safe_push (mo);
6129 /* Arguments to the call. */
6130 static rtx call_arguments;
6132 /* Compute call_arguments. */
6134 static void
6135 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6137 rtx link, x, call;
6138 rtx prev, cur, next;
6139 rtx this_arg = NULL_RTX;
6140 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6141 tree obj_type_ref = NULL_TREE;
6142 CUMULATIVE_ARGS args_so_far_v;
6143 cumulative_args_t args_so_far;
6145 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6146 args_so_far = pack_cumulative_args (&args_so_far_v);
6147 call = get_call_rtx_from (insn);
6148 if (call)
6150 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6152 rtx symbol = XEXP (XEXP (call, 0), 0);
6153 if (SYMBOL_REF_DECL (symbol))
6154 fndecl = SYMBOL_REF_DECL (symbol);
6156 if (fndecl == NULL_TREE)
6157 fndecl = MEM_EXPR (XEXP (call, 0));
6158 if (fndecl
6159 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6160 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6161 fndecl = NULL_TREE;
6162 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6163 type = TREE_TYPE (fndecl);
6164 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6166 if (TREE_CODE (fndecl) == INDIRECT_REF
6167 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6168 obj_type_ref = TREE_OPERAND (fndecl, 0);
6169 fndecl = NULL_TREE;
6171 if (type)
6173 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6174 t = TREE_CHAIN (t))
6175 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6176 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6177 break;
6178 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6179 type = NULL;
6180 else
6182 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6183 link = CALL_INSN_FUNCTION_USAGE (insn);
6184 #ifndef PCC_STATIC_STRUCT_RETURN
6185 if (aggregate_value_p (TREE_TYPE (type), type)
6186 && targetm.calls.struct_value_rtx (type, 0) == 0)
6188 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6189 enum machine_mode mode = TYPE_MODE (struct_addr);
6190 rtx reg;
6191 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6192 nargs + 1);
6193 reg = targetm.calls.function_arg (args_so_far, mode,
6194 struct_addr, true);
6195 targetm.calls.function_arg_advance (args_so_far, mode,
6196 struct_addr, true);
6197 if (reg == NULL_RTX)
6199 for (; link; link = XEXP (link, 1))
6200 if (GET_CODE (XEXP (link, 0)) == USE
6201 && MEM_P (XEXP (XEXP (link, 0), 0)))
6203 link = XEXP (link, 1);
6204 break;
6208 else
6209 #endif
6210 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6211 nargs);
6212 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6214 enum machine_mode mode;
6215 t = TYPE_ARG_TYPES (type);
6216 mode = TYPE_MODE (TREE_VALUE (t));
6217 this_arg = targetm.calls.function_arg (args_so_far, mode,
6218 TREE_VALUE (t), true);
6219 if (this_arg && !REG_P (this_arg))
6220 this_arg = NULL_RTX;
6221 else if (this_arg == NULL_RTX)
6223 for (; link; link = XEXP (link, 1))
6224 if (GET_CODE (XEXP (link, 0)) == USE
6225 && MEM_P (XEXP (XEXP (link, 0), 0)))
6227 this_arg = XEXP (XEXP (link, 0), 0);
6228 break;
6235 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6237 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6238 if (GET_CODE (XEXP (link, 0)) == USE)
6240 rtx item = NULL_RTX;
6241 x = XEXP (XEXP (link, 0), 0);
6242 if (GET_MODE (link) == VOIDmode
6243 || GET_MODE (link) == BLKmode
6244 || (GET_MODE (link) != GET_MODE (x)
6245 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6246 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6247 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6248 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6249 /* Can't do anything for these, if the original type mode
6250 isn't known or can't be converted. */;
6251 else if (REG_P (x))
6253 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6254 if (val && cselib_preserved_value_p (val))
6255 item = val->val_rtx;
6256 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6257 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6259 enum machine_mode mode = GET_MODE (x);
6261 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6262 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6264 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6266 if (reg == NULL_RTX || !REG_P (reg))
6267 continue;
6268 val = cselib_lookup (reg, mode, 0, VOIDmode);
6269 if (val && cselib_preserved_value_p (val))
6271 item = val->val_rtx;
6272 break;
6277 else if (MEM_P (x))
6279 rtx mem = x;
6280 cselib_val *val;
6282 if (!frame_pointer_needed)
6284 struct adjust_mem_data amd;
6285 amd.mem_mode = VOIDmode;
6286 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6287 amd.side_effects = NULL;
6288 amd.store = true;
6289 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6290 &amd);
6291 gcc_assert (amd.side_effects == NULL_RTX);
6293 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6294 if (val && cselib_preserved_value_p (val))
6295 item = val->val_rtx;
6296 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6297 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6299 /* For non-integer stack argument see also if they weren't
6300 initialized by integers. */
6301 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6302 if (imode != GET_MODE (mem) && imode != BLKmode)
6304 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6305 imode, 0, VOIDmode);
6306 if (val && cselib_preserved_value_p (val))
6307 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6308 imode);
6312 if (item)
6314 rtx x2 = x;
6315 if (GET_MODE (item) != GET_MODE (link))
6316 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6317 if (GET_MODE (x2) != GET_MODE (link))
6318 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6319 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6320 call_arguments
6321 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6323 if (t && t != void_list_node)
6325 tree argtype = TREE_VALUE (t);
6326 enum machine_mode mode = TYPE_MODE (argtype);
6327 rtx reg;
6328 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6330 argtype = build_pointer_type (argtype);
6331 mode = TYPE_MODE (argtype);
6333 reg = targetm.calls.function_arg (args_so_far, mode,
6334 argtype, true);
6335 if (TREE_CODE (argtype) == REFERENCE_TYPE
6336 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6337 && reg
6338 && REG_P (reg)
6339 && GET_MODE (reg) == mode
6340 && (GET_MODE_CLASS (mode) == MODE_INT
6341 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6342 && REG_P (x)
6343 && REGNO (x) == REGNO (reg)
6344 && GET_MODE (x) == mode
6345 && item)
6347 enum machine_mode indmode
6348 = TYPE_MODE (TREE_TYPE (argtype));
6349 rtx mem = gen_rtx_MEM (indmode, x);
6350 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6351 if (val && cselib_preserved_value_p (val))
6353 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6354 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6355 call_arguments);
6357 else
6359 struct elt_loc_list *l;
6360 tree initial;
6362 /* Try harder, when passing address of a constant
6363 pool integer it can be easily read back. */
6364 item = XEXP (item, 1);
6365 if (GET_CODE (item) == SUBREG)
6366 item = SUBREG_REG (item);
6367 gcc_assert (GET_CODE (item) == VALUE);
6368 val = CSELIB_VAL_PTR (item);
6369 for (l = val->locs; l; l = l->next)
6370 if (GET_CODE (l->loc) == SYMBOL_REF
6371 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6372 && SYMBOL_REF_DECL (l->loc)
6373 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6375 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6376 if (tree_fits_shwi_p (initial))
6378 item = GEN_INT (tree_to_shwi (initial));
6379 item = gen_rtx_CONCAT (indmode, mem, item);
6380 call_arguments
6381 = gen_rtx_EXPR_LIST (VOIDmode, item,
6382 call_arguments);
6384 break;
6388 targetm.calls.function_arg_advance (args_so_far, mode,
6389 argtype, true);
6390 t = TREE_CHAIN (t);
6394 /* Add debug arguments. */
6395 if (fndecl
6396 && TREE_CODE (fndecl) == FUNCTION_DECL
6397 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6399 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6400 if (debug_args)
6402 unsigned int ix;
6403 tree param;
6404 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6406 rtx item;
6407 tree dtemp = (**debug_args)[ix + 1];
6408 enum machine_mode mode = DECL_MODE (dtemp);
6409 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6410 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6411 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6412 call_arguments);
6417 /* Reverse call_arguments chain. */
6418 prev = NULL_RTX;
6419 for (cur = call_arguments; cur; cur = next)
6421 next = XEXP (cur, 1);
6422 XEXP (cur, 1) = prev;
6423 prev = cur;
6425 call_arguments = prev;
6427 x = get_call_rtx_from (insn);
6428 if (x)
6430 x = XEXP (XEXP (x, 0), 0);
6431 if (GET_CODE (x) == SYMBOL_REF)
6432 /* Don't record anything. */;
6433 else if (CONSTANT_P (x))
6435 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6436 pc_rtx, x);
6437 call_arguments
6438 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6440 else
6442 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6443 if (val && cselib_preserved_value_p (val))
6445 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6446 call_arguments
6447 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6451 if (this_arg)
6453 enum machine_mode mode
6454 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6455 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6456 HOST_WIDE_INT token
6457 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6458 if (token)
6459 clobbered = plus_constant (mode, clobbered,
6460 token * GET_MODE_SIZE (mode));
6461 clobbered = gen_rtx_MEM (mode, clobbered);
6462 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6463 call_arguments
6464 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6468 /* Callback for cselib_record_sets_hook, that records as micro
6469 operations uses and stores in an insn after cselib_record_sets has
6470 analyzed the sets in an insn, but before it modifies the stored
6471 values in the internal tables, unless cselib_record_sets doesn't
6472 call it directly (perhaps because we're not doing cselib in the
6473 first place, in which case sets and n_sets will be 0). */
6475 static void
6476 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6478 basic_block bb = BLOCK_FOR_INSN (insn);
6479 int n1, n2;
6480 struct count_use_info cui;
6481 micro_operation *mos;
6483 cselib_hook_called = true;
6485 cui.insn = insn;
6486 cui.bb = bb;
6487 cui.sets = sets;
6488 cui.n_sets = n_sets;
6490 n1 = VTI (bb)->mos.length ();
6491 cui.store_p = false;
6492 note_uses (&PATTERN (insn), add_uses_1, &cui);
6493 n2 = VTI (bb)->mos.length () - 1;
6494 mos = VTI (bb)->mos.address ();
6496 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6497 MO_VAL_LOC last. */
6498 while (n1 < n2)
6500 while (n1 < n2 && mos[n1].type == MO_USE)
6501 n1++;
6502 while (n1 < n2 && mos[n2].type != MO_USE)
6503 n2--;
6504 if (n1 < n2)
6506 micro_operation sw;
6508 sw = mos[n1];
6509 mos[n1] = mos[n2];
6510 mos[n2] = sw;
6514 n2 = VTI (bb)->mos.length () - 1;
6515 while (n1 < n2)
6517 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6518 n1++;
6519 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6520 n2--;
6521 if (n1 < n2)
6523 micro_operation sw;
6525 sw = mos[n1];
6526 mos[n1] = mos[n2];
6527 mos[n2] = sw;
6531 if (CALL_P (insn))
6533 micro_operation mo;
6535 mo.type = MO_CALL;
6536 mo.insn = insn;
6537 mo.u.loc = call_arguments;
6538 call_arguments = NULL_RTX;
6540 if (dump_file && (dump_flags & TDF_DETAILS))
6541 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6542 VTI (bb)->mos.safe_push (mo);
6545 n1 = VTI (bb)->mos.length ();
6546 /* This will record NEXT_INSN (insn), such that we can
6547 insert notes before it without worrying about any
6548 notes that MO_USEs might emit after the insn. */
6549 cui.store_p = true;
6550 note_stores (PATTERN (insn), add_stores, &cui);
6551 n2 = VTI (bb)->mos.length () - 1;
6552 mos = VTI (bb)->mos.address ();
6554 /* Order the MO_VAL_USEs first (note_stores does nothing
6555 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6556 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6557 while (n1 < n2)
6559 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6560 n1++;
6561 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6562 n2--;
6563 if (n1 < n2)
6565 micro_operation sw;
6567 sw = mos[n1];
6568 mos[n1] = mos[n2];
6569 mos[n2] = sw;
6573 n2 = VTI (bb)->mos.length () - 1;
6574 while (n1 < n2)
6576 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6577 n1++;
6578 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6579 n2--;
6580 if (n1 < n2)
6582 micro_operation sw;
6584 sw = mos[n1];
6585 mos[n1] = mos[n2];
6586 mos[n2] = sw;
6591 static enum var_init_status
6592 find_src_status (dataflow_set *in, rtx src)
6594 tree decl = NULL_TREE;
6595 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6597 if (! flag_var_tracking_uninit)
6598 status = VAR_INIT_STATUS_INITIALIZED;
6600 if (src && REG_P (src))
6601 decl = var_debug_decl (REG_EXPR (src));
6602 else if (src && MEM_P (src))
6603 decl = var_debug_decl (MEM_EXPR (src));
6605 if (src && decl)
6606 status = get_init_value (in, src, dv_from_decl (decl));
6608 return status;
6611 /* SRC is the source of an assignment. Use SET to try to find what
6612 was ultimately assigned to SRC. Return that value if known,
6613 otherwise return SRC itself. */
6615 static rtx
6616 find_src_set_src (dataflow_set *set, rtx src)
6618 tree decl = NULL_TREE; /* The variable being copied around. */
6619 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6620 variable var;
6621 location_chain nextp;
6622 int i;
6623 bool found;
6625 if (src && REG_P (src))
6626 decl = var_debug_decl (REG_EXPR (src));
6627 else if (src && MEM_P (src))
6628 decl = var_debug_decl (MEM_EXPR (src));
6630 if (src && decl)
6632 decl_or_value dv = dv_from_decl (decl);
6634 var = shared_hash_find (set->vars, dv);
6635 if (var)
6637 found = false;
6638 for (i = 0; i < var->n_var_parts && !found; i++)
6639 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6640 nextp = nextp->next)
6641 if (rtx_equal_p (nextp->loc, src))
6643 set_src = nextp->set_src;
6644 found = true;
6650 return set_src;
6653 /* Compute the changes of variable locations in the basic block BB. */
6655 static bool
6656 compute_bb_dataflow (basic_block bb)
6658 unsigned int i;
6659 micro_operation *mo;
6660 bool changed;
6661 dataflow_set old_out;
6662 dataflow_set *in = &VTI (bb)->in;
6663 dataflow_set *out = &VTI (bb)->out;
6665 dataflow_set_init (&old_out);
6666 dataflow_set_copy (&old_out, out);
6667 dataflow_set_copy (out, in);
6669 if (MAY_HAVE_DEBUG_INSNS)
6670 local_get_addr_cache = new hash_map<rtx, rtx>;
6672 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6674 rtx_insn *insn = mo->insn;
6676 switch (mo->type)
6678 case MO_CALL:
6679 dataflow_set_clear_at_call (out);
6680 break;
6682 case MO_USE:
6684 rtx loc = mo->u.loc;
6686 if (REG_P (loc))
6687 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6688 else if (MEM_P (loc))
6689 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6691 break;
6693 case MO_VAL_LOC:
6695 rtx loc = mo->u.loc;
6696 rtx val, vloc;
6697 tree var;
6699 if (GET_CODE (loc) == CONCAT)
6701 val = XEXP (loc, 0);
6702 vloc = XEXP (loc, 1);
6704 else
6706 val = NULL_RTX;
6707 vloc = loc;
6710 var = PAT_VAR_LOCATION_DECL (vloc);
6712 clobber_variable_part (out, NULL_RTX,
6713 dv_from_decl (var), 0, NULL_RTX);
6714 if (val)
6716 if (VAL_NEEDS_RESOLUTION (loc))
6717 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6718 set_variable_part (out, val, dv_from_decl (var), 0,
6719 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6720 INSERT);
6722 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6723 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6724 dv_from_decl (var), 0,
6725 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6726 INSERT);
6728 break;
6730 case MO_VAL_USE:
6732 rtx loc = mo->u.loc;
6733 rtx val, vloc, uloc;
6735 vloc = uloc = XEXP (loc, 1);
6736 val = XEXP (loc, 0);
6738 if (GET_CODE (val) == CONCAT)
6740 uloc = XEXP (val, 1);
6741 val = XEXP (val, 0);
6744 if (VAL_NEEDS_RESOLUTION (loc))
6745 val_resolve (out, val, vloc, insn);
6746 else
6747 val_store (out, val, uloc, insn, false);
6749 if (VAL_HOLDS_TRACK_EXPR (loc))
6751 if (GET_CODE (uloc) == REG)
6752 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6753 NULL);
6754 else if (GET_CODE (uloc) == MEM)
6755 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6756 NULL);
6759 break;
6761 case MO_VAL_SET:
6763 rtx loc = mo->u.loc;
6764 rtx val, vloc, uloc;
6765 rtx dstv, srcv;
6767 vloc = loc;
6768 uloc = XEXP (vloc, 1);
6769 val = XEXP (vloc, 0);
6770 vloc = uloc;
6772 if (GET_CODE (uloc) == SET)
6774 dstv = SET_DEST (uloc);
6775 srcv = SET_SRC (uloc);
6777 else
6779 dstv = uloc;
6780 srcv = NULL;
6783 if (GET_CODE (val) == CONCAT)
6785 dstv = vloc = XEXP (val, 1);
6786 val = XEXP (val, 0);
6789 if (GET_CODE (vloc) == SET)
6791 srcv = SET_SRC (vloc);
6793 gcc_assert (val != srcv);
6794 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6796 dstv = vloc = SET_DEST (vloc);
6798 if (VAL_NEEDS_RESOLUTION (loc))
6799 val_resolve (out, val, srcv, insn);
6801 else if (VAL_NEEDS_RESOLUTION (loc))
6803 gcc_assert (GET_CODE (uloc) == SET
6804 && GET_CODE (SET_SRC (uloc)) == REG);
6805 val_resolve (out, val, SET_SRC (uloc), insn);
6808 if (VAL_HOLDS_TRACK_EXPR (loc))
6810 if (VAL_EXPR_IS_CLOBBERED (loc))
6812 if (REG_P (uloc))
6813 var_reg_delete (out, uloc, true);
6814 else if (MEM_P (uloc))
6816 gcc_assert (MEM_P (dstv));
6817 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6818 var_mem_delete (out, dstv, true);
6821 else
6823 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6824 rtx src = NULL, dst = uloc;
6825 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6827 if (GET_CODE (uloc) == SET)
6829 src = SET_SRC (uloc);
6830 dst = SET_DEST (uloc);
6833 if (copied_p)
6835 if (flag_var_tracking_uninit)
6837 status = find_src_status (in, src);
6839 if (status == VAR_INIT_STATUS_UNKNOWN)
6840 status = find_src_status (out, src);
6843 src = find_src_set_src (in, src);
6846 if (REG_P (dst))
6847 var_reg_delete_and_set (out, dst, !copied_p,
6848 status, srcv);
6849 else if (MEM_P (dst))
6851 gcc_assert (MEM_P (dstv));
6852 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6853 var_mem_delete_and_set (out, dstv, !copied_p,
6854 status, srcv);
6858 else if (REG_P (uloc))
6859 var_regno_delete (out, REGNO (uloc));
6860 else if (MEM_P (uloc))
6862 gcc_checking_assert (GET_CODE (vloc) == MEM);
6863 gcc_checking_assert (dstv == vloc);
6864 if (dstv != vloc)
6865 clobber_overlapping_mems (out, vloc);
6868 val_store (out, val, dstv, insn, true);
6870 break;
6872 case MO_SET:
6874 rtx loc = mo->u.loc;
6875 rtx set_src = NULL;
6877 if (GET_CODE (loc) == SET)
6879 set_src = SET_SRC (loc);
6880 loc = SET_DEST (loc);
6883 if (REG_P (loc))
6884 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6885 set_src);
6886 else if (MEM_P (loc))
6887 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6888 set_src);
6890 break;
6892 case MO_COPY:
6894 rtx loc = mo->u.loc;
6895 enum var_init_status src_status;
6896 rtx set_src = NULL;
6898 if (GET_CODE (loc) == SET)
6900 set_src = SET_SRC (loc);
6901 loc = SET_DEST (loc);
6904 if (! flag_var_tracking_uninit)
6905 src_status = VAR_INIT_STATUS_INITIALIZED;
6906 else
6908 src_status = find_src_status (in, set_src);
6910 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6911 src_status = find_src_status (out, set_src);
6914 set_src = find_src_set_src (in, set_src);
6916 if (REG_P (loc))
6917 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6918 else if (MEM_P (loc))
6919 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6921 break;
6923 case MO_USE_NO_VAR:
6925 rtx loc = mo->u.loc;
6927 if (REG_P (loc))
6928 var_reg_delete (out, loc, false);
6929 else if (MEM_P (loc))
6930 var_mem_delete (out, loc, false);
6932 break;
6934 case MO_CLOBBER:
6936 rtx loc = mo->u.loc;
6938 if (REG_P (loc))
6939 var_reg_delete (out, loc, true);
6940 else if (MEM_P (loc))
6941 var_mem_delete (out, loc, true);
6943 break;
6945 case MO_ADJUST:
6946 out->stack_adjust += mo->u.adjust;
6947 break;
6951 if (MAY_HAVE_DEBUG_INSNS)
6953 delete local_get_addr_cache;
6954 local_get_addr_cache = NULL;
6956 dataflow_set_equiv_regs (out);
6957 shared_hash_htab (out->vars)
6958 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6959 shared_hash_htab (out->vars)
6960 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6961 #if ENABLE_CHECKING
6962 shared_hash_htab (out->vars)
6963 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6964 #endif
6966 changed = dataflow_set_different (&old_out, out);
6967 dataflow_set_destroy (&old_out);
6968 return changed;
6971 /* Find the locations of variables in the whole function. */
6973 static bool
6974 vt_find_locations (void)
6976 fibheap_t worklist, pending, fibheap_swap;
6977 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6978 basic_block bb;
6979 edge e;
6980 int *bb_order;
6981 int *rc_order;
6982 int i;
6983 int htabsz = 0;
6984 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6985 bool success = true;
6987 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6988 /* Compute reverse completion order of depth first search of the CFG
6989 so that the data-flow runs faster. */
6990 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6991 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6992 pre_and_rev_post_order_compute (NULL, rc_order, false);
6993 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6994 bb_order[rc_order[i]] = i;
6995 free (rc_order);
6997 worklist = fibheap_new ();
6998 pending = fibheap_new ();
6999 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7000 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7001 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7002 bitmap_clear (in_worklist);
7004 FOR_EACH_BB_FN (bb, cfun)
7005 fibheap_insert (pending, bb_order[bb->index], bb);
7006 bitmap_ones (in_pending);
7008 while (success && !fibheap_empty (pending))
7010 fibheap_swap = pending;
7011 pending = worklist;
7012 worklist = fibheap_swap;
7013 sbitmap_swap = in_pending;
7014 in_pending = in_worklist;
7015 in_worklist = sbitmap_swap;
7017 bitmap_clear (visited);
7019 while (!fibheap_empty (worklist))
7021 bb = (basic_block) fibheap_extract_min (worklist);
7022 bitmap_clear_bit (in_worklist, bb->index);
7023 gcc_assert (!bitmap_bit_p (visited, bb->index));
7024 if (!bitmap_bit_p (visited, bb->index))
7026 bool changed;
7027 edge_iterator ei;
7028 int oldinsz, oldoutsz;
7030 bitmap_set_bit (visited, bb->index);
7032 if (VTI (bb)->in.vars)
7034 htabsz
7035 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7036 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7037 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7038 oldoutsz
7039 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7041 else
7042 oldinsz = oldoutsz = 0;
7044 if (MAY_HAVE_DEBUG_INSNS)
7046 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7047 bool first = true, adjust = false;
7049 /* Calculate the IN set as the intersection of
7050 predecessor OUT sets. */
7052 dataflow_set_clear (in);
7053 dst_can_be_shared = true;
7055 FOR_EACH_EDGE (e, ei, bb->preds)
7056 if (!VTI (e->src)->flooded)
7057 gcc_assert (bb_order[bb->index]
7058 <= bb_order[e->src->index]);
7059 else if (first)
7061 dataflow_set_copy (in, &VTI (e->src)->out);
7062 first_out = &VTI (e->src)->out;
7063 first = false;
7065 else
7067 dataflow_set_merge (in, &VTI (e->src)->out);
7068 adjust = true;
7071 if (adjust)
7073 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7074 #if ENABLE_CHECKING
7075 /* Merge and merge_adjust should keep entries in
7076 canonical order. */
7077 shared_hash_htab (in->vars)
7078 ->traverse <dataflow_set *,
7079 canonicalize_loc_order_check> (in);
7080 #endif
7081 if (dst_can_be_shared)
7083 shared_hash_destroy (in->vars);
7084 in->vars = shared_hash_copy (first_out->vars);
7088 VTI (bb)->flooded = true;
7090 else
7092 /* Calculate the IN set as union of predecessor OUT sets. */
7093 dataflow_set_clear (&VTI (bb)->in);
7094 FOR_EACH_EDGE (e, ei, bb->preds)
7095 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7098 changed = compute_bb_dataflow (bb);
7099 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7100 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7102 if (htabmax && htabsz > htabmax)
7104 if (MAY_HAVE_DEBUG_INSNS)
7105 inform (DECL_SOURCE_LOCATION (cfun->decl),
7106 "variable tracking size limit exceeded with "
7107 "-fvar-tracking-assignments, retrying without");
7108 else
7109 inform (DECL_SOURCE_LOCATION (cfun->decl),
7110 "variable tracking size limit exceeded");
7111 success = false;
7112 break;
7115 if (changed)
7117 FOR_EACH_EDGE (e, ei, bb->succs)
7119 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7120 continue;
7122 if (bitmap_bit_p (visited, e->dest->index))
7124 if (!bitmap_bit_p (in_pending, e->dest->index))
7126 /* Send E->DEST to next round. */
7127 bitmap_set_bit (in_pending, e->dest->index);
7128 fibheap_insert (pending,
7129 bb_order[e->dest->index],
7130 e->dest);
7133 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7135 /* Add E->DEST to current round. */
7136 bitmap_set_bit (in_worklist, e->dest->index);
7137 fibheap_insert (worklist, bb_order[e->dest->index],
7138 e->dest);
7143 if (dump_file)
7144 fprintf (dump_file,
7145 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7146 bb->index,
7147 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7148 oldinsz,
7149 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7150 oldoutsz,
7151 (int)worklist->nodes, (int)pending->nodes, htabsz);
7153 if (dump_file && (dump_flags & TDF_DETAILS))
7155 fprintf (dump_file, "BB %i IN:\n", bb->index);
7156 dump_dataflow_set (&VTI (bb)->in);
7157 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7158 dump_dataflow_set (&VTI (bb)->out);
7164 if (success && MAY_HAVE_DEBUG_INSNS)
7165 FOR_EACH_BB_FN (bb, cfun)
7166 gcc_assert (VTI (bb)->flooded);
7168 free (bb_order);
7169 fibheap_delete (worklist);
7170 fibheap_delete (pending);
7171 sbitmap_free (visited);
7172 sbitmap_free (in_worklist);
7173 sbitmap_free (in_pending);
7175 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7176 return success;
7179 /* Print the content of the LIST to dump file. */
7181 static void
7182 dump_attrs_list (attrs list)
7184 for (; list; list = list->next)
7186 if (dv_is_decl_p (list->dv))
7187 print_mem_expr (dump_file, dv_as_decl (list->dv));
7188 else
7189 print_rtl_single (dump_file, dv_as_value (list->dv));
7190 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7192 fprintf (dump_file, "\n");
7195 /* Print the information about variable *SLOT to dump file. */
7198 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7200 variable var = *slot;
7202 dump_var (var);
7204 /* Continue traversing the hash table. */
7205 return 1;
7208 /* Print the information about variable VAR to dump file. */
7210 static void
7211 dump_var (variable var)
7213 int i;
7214 location_chain node;
7216 if (dv_is_decl_p (var->dv))
7218 const_tree decl = dv_as_decl (var->dv);
7220 if (DECL_NAME (decl))
7222 fprintf (dump_file, " name: %s",
7223 IDENTIFIER_POINTER (DECL_NAME (decl)));
7224 if (dump_flags & TDF_UID)
7225 fprintf (dump_file, "D.%u", DECL_UID (decl));
7227 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7228 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7229 else
7230 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7231 fprintf (dump_file, "\n");
7233 else
7235 fputc (' ', dump_file);
7236 print_rtl_single (dump_file, dv_as_value (var->dv));
7239 for (i = 0; i < var->n_var_parts; i++)
7241 fprintf (dump_file, " offset %ld\n",
7242 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7243 for (node = var->var_part[i].loc_chain; node; node = node->next)
7245 fprintf (dump_file, " ");
7246 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7247 fprintf (dump_file, "[uninit]");
7248 print_rtl_single (dump_file, node->loc);
7253 /* Print the information about variables from hash table VARS to dump file. */
7255 static void
7256 dump_vars (variable_table_type *vars)
7258 if (vars->elements () > 0)
7260 fprintf (dump_file, "Variables:\n");
7261 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7265 /* Print the dataflow set SET to dump file. */
7267 static void
7268 dump_dataflow_set (dataflow_set *set)
7270 int i;
7272 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7273 set->stack_adjust);
7274 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7276 if (set->regs[i])
7278 fprintf (dump_file, "Reg %d:", i);
7279 dump_attrs_list (set->regs[i]);
7282 dump_vars (shared_hash_htab (set->vars));
7283 fprintf (dump_file, "\n");
7286 /* Print the IN and OUT sets for each basic block to dump file. */
7288 static void
7289 dump_dataflow_sets (void)
7291 basic_block bb;
7293 FOR_EACH_BB_FN (bb, cfun)
7295 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7296 fprintf (dump_file, "IN:\n");
7297 dump_dataflow_set (&VTI (bb)->in);
7298 fprintf (dump_file, "OUT:\n");
7299 dump_dataflow_set (&VTI (bb)->out);
7303 /* Return the variable for DV in dropped_values, inserting one if
7304 requested with INSERT. */
7306 static inline variable
7307 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7309 variable_def **slot;
7310 variable empty_var;
7311 onepart_enum_t onepart;
7313 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7315 if (!slot)
7316 return NULL;
7318 if (*slot)
7319 return *slot;
7321 gcc_checking_assert (insert == INSERT);
7323 onepart = dv_onepart_p (dv);
7325 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7327 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7328 empty_var->dv = dv;
7329 empty_var->refcount = 1;
7330 empty_var->n_var_parts = 0;
7331 empty_var->onepart = onepart;
7332 empty_var->in_changed_variables = false;
7333 empty_var->var_part[0].loc_chain = NULL;
7334 empty_var->var_part[0].cur_loc = NULL;
7335 VAR_LOC_1PAUX (empty_var) = NULL;
7336 set_dv_changed (dv, true);
7338 *slot = empty_var;
7340 return empty_var;
7343 /* Recover the one-part aux from dropped_values. */
7345 static struct onepart_aux *
7346 recover_dropped_1paux (variable var)
7348 variable dvar;
7350 gcc_checking_assert (var->onepart);
7352 if (VAR_LOC_1PAUX (var))
7353 return VAR_LOC_1PAUX (var);
7355 if (var->onepart == ONEPART_VDECL)
7356 return NULL;
7358 dvar = variable_from_dropped (var->dv, NO_INSERT);
7360 if (!dvar)
7361 return NULL;
7363 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7364 VAR_LOC_1PAUX (dvar) = NULL;
7366 return VAR_LOC_1PAUX (var);
7369 /* Add variable VAR to the hash table of changed variables and
7370 if it has no locations delete it from SET's hash table. */
7372 static void
7373 variable_was_changed (variable var, dataflow_set *set)
7375 hashval_t hash = dv_htab_hash (var->dv);
7377 if (emit_notes)
7379 variable_def **slot;
7381 /* Remember this decl or VALUE has been added to changed_variables. */
7382 set_dv_changed (var->dv, true);
7384 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7386 if (*slot)
7388 variable old_var = *slot;
7389 gcc_assert (old_var->in_changed_variables);
7390 old_var->in_changed_variables = false;
7391 if (var != old_var && var->onepart)
7393 /* Restore the auxiliary info from an empty variable
7394 previously created for changed_variables, so it is
7395 not lost. */
7396 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7397 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7398 VAR_LOC_1PAUX (old_var) = NULL;
7400 variable_htab_free (*slot);
7403 if (set && var->n_var_parts == 0)
7405 onepart_enum_t onepart = var->onepart;
7406 variable empty_var = NULL;
7407 variable_def **dslot = NULL;
7409 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7411 dslot = dropped_values->find_slot_with_hash (var->dv,
7412 dv_htab_hash (var->dv),
7413 INSERT);
7414 empty_var = *dslot;
7416 if (empty_var)
7418 gcc_checking_assert (!empty_var->in_changed_variables);
7419 if (!VAR_LOC_1PAUX (var))
7421 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7422 VAR_LOC_1PAUX (empty_var) = NULL;
7424 else
7425 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7429 if (!empty_var)
7431 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7432 empty_var->dv = var->dv;
7433 empty_var->refcount = 1;
7434 empty_var->n_var_parts = 0;
7435 empty_var->onepart = onepart;
7436 if (dslot)
7438 empty_var->refcount++;
7439 *dslot = empty_var;
7442 else
7443 empty_var->refcount++;
7444 empty_var->in_changed_variables = true;
7445 *slot = empty_var;
7446 if (onepart)
7448 empty_var->var_part[0].loc_chain = NULL;
7449 empty_var->var_part[0].cur_loc = NULL;
7450 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7451 VAR_LOC_1PAUX (var) = NULL;
7453 goto drop_var;
7455 else
7457 if (var->onepart && !VAR_LOC_1PAUX (var))
7458 recover_dropped_1paux (var);
7459 var->refcount++;
7460 var->in_changed_variables = true;
7461 *slot = var;
7464 else
7466 gcc_assert (set);
7467 if (var->n_var_parts == 0)
7469 variable_def **slot;
7471 drop_var:
7472 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7473 if (slot)
7475 if (shared_hash_shared (set->vars))
7476 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7477 NO_INSERT);
7478 shared_hash_htab (set->vars)->clear_slot (slot);
7484 /* Look for the index in VAR->var_part corresponding to OFFSET.
7485 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7486 referenced int will be set to the index that the part has or should
7487 have, if it should be inserted. */
7489 static inline int
7490 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7491 int *insertion_point)
7493 int pos, low, high;
7495 if (var->onepart)
7497 if (offset != 0)
7498 return -1;
7500 if (insertion_point)
7501 *insertion_point = 0;
7503 return var->n_var_parts - 1;
7506 /* Find the location part. */
7507 low = 0;
7508 high = var->n_var_parts;
7509 while (low != high)
7511 pos = (low + high) / 2;
7512 if (VAR_PART_OFFSET (var, pos) < offset)
7513 low = pos + 1;
7514 else
7515 high = pos;
7517 pos = low;
7519 if (insertion_point)
7520 *insertion_point = pos;
7522 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7523 return pos;
7525 return -1;
7528 static variable_def **
7529 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7530 decl_or_value dv, HOST_WIDE_INT offset,
7531 enum var_init_status initialized, rtx set_src)
7533 int pos;
7534 location_chain node, next;
7535 location_chain *nextp;
7536 variable var;
7537 onepart_enum_t onepart;
7539 var = *slot;
7541 if (var)
7542 onepart = var->onepart;
7543 else
7544 onepart = dv_onepart_p (dv);
7546 gcc_checking_assert (offset == 0 || !onepart);
7547 gcc_checking_assert (loc != dv_as_opaque (dv));
7549 if (! flag_var_tracking_uninit)
7550 initialized = VAR_INIT_STATUS_INITIALIZED;
7552 if (!var)
7554 /* Create new variable information. */
7555 var = (variable) pool_alloc (onepart_pool (onepart));
7556 var->dv = dv;
7557 var->refcount = 1;
7558 var->n_var_parts = 1;
7559 var->onepart = onepart;
7560 var->in_changed_variables = false;
7561 if (var->onepart)
7562 VAR_LOC_1PAUX (var) = NULL;
7563 else
7564 VAR_PART_OFFSET (var, 0) = offset;
7565 var->var_part[0].loc_chain = NULL;
7566 var->var_part[0].cur_loc = NULL;
7567 *slot = var;
7568 pos = 0;
7569 nextp = &var->var_part[0].loc_chain;
7571 else if (onepart)
7573 int r = -1, c = 0;
7575 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7577 pos = 0;
7579 if (GET_CODE (loc) == VALUE)
7581 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7582 nextp = &node->next)
7583 if (GET_CODE (node->loc) == VALUE)
7585 if (node->loc == loc)
7587 r = 0;
7588 break;
7590 if (canon_value_cmp (node->loc, loc))
7591 c++;
7592 else
7594 r = 1;
7595 break;
7598 else if (REG_P (node->loc) || MEM_P (node->loc))
7599 c++;
7600 else
7602 r = 1;
7603 break;
7606 else if (REG_P (loc))
7608 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7609 nextp = &node->next)
7610 if (REG_P (node->loc))
7612 if (REGNO (node->loc) < REGNO (loc))
7613 c++;
7614 else
7616 if (REGNO (node->loc) == REGNO (loc))
7617 r = 0;
7618 else
7619 r = 1;
7620 break;
7623 else
7625 r = 1;
7626 break;
7629 else if (MEM_P (loc))
7631 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7632 nextp = &node->next)
7633 if (REG_P (node->loc))
7634 c++;
7635 else if (MEM_P (node->loc))
7637 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7638 break;
7639 else
7640 c++;
7642 else
7644 r = 1;
7645 break;
7648 else
7649 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7650 nextp = &node->next)
7651 if ((r = loc_cmp (node->loc, loc)) >= 0)
7652 break;
7653 else
7654 c++;
7656 if (r == 0)
7657 return slot;
7659 if (shared_var_p (var, set->vars))
7661 slot = unshare_variable (set, slot, var, initialized);
7662 var = *slot;
7663 for (nextp = &var->var_part[0].loc_chain; c;
7664 nextp = &(*nextp)->next)
7665 c--;
7666 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7669 else
7671 int inspos = 0;
7673 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7675 pos = find_variable_location_part (var, offset, &inspos);
7677 if (pos >= 0)
7679 node = var->var_part[pos].loc_chain;
7681 if (node
7682 && ((REG_P (node->loc) && REG_P (loc)
7683 && REGNO (node->loc) == REGNO (loc))
7684 || rtx_equal_p (node->loc, loc)))
7686 /* LOC is in the beginning of the chain so we have nothing
7687 to do. */
7688 if (node->init < initialized)
7689 node->init = initialized;
7690 if (set_src != NULL)
7691 node->set_src = set_src;
7693 return slot;
7695 else
7697 /* We have to make a copy of a shared variable. */
7698 if (shared_var_p (var, set->vars))
7700 slot = unshare_variable (set, slot, var, initialized);
7701 var = *slot;
7705 else
7707 /* We have not found the location part, new one will be created. */
7709 /* We have to make a copy of the shared variable. */
7710 if (shared_var_p (var, set->vars))
7712 slot = unshare_variable (set, slot, var, initialized);
7713 var = *slot;
7716 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7717 thus there are at most MAX_VAR_PARTS different offsets. */
7718 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7719 && (!var->n_var_parts || !onepart));
7721 /* We have to move the elements of array starting at index
7722 inspos to the next position. */
7723 for (pos = var->n_var_parts; pos > inspos; pos--)
7724 var->var_part[pos] = var->var_part[pos - 1];
7726 var->n_var_parts++;
7727 gcc_checking_assert (!onepart);
7728 VAR_PART_OFFSET (var, pos) = offset;
7729 var->var_part[pos].loc_chain = NULL;
7730 var->var_part[pos].cur_loc = NULL;
7733 /* Delete the location from the list. */
7734 nextp = &var->var_part[pos].loc_chain;
7735 for (node = var->var_part[pos].loc_chain; node; node = next)
7737 next = node->next;
7738 if ((REG_P (node->loc) && REG_P (loc)
7739 && REGNO (node->loc) == REGNO (loc))
7740 || rtx_equal_p (node->loc, loc))
7742 /* Save these values, to assign to the new node, before
7743 deleting this one. */
7744 if (node->init > initialized)
7745 initialized = node->init;
7746 if (node->set_src != NULL && set_src == NULL)
7747 set_src = node->set_src;
7748 if (var->var_part[pos].cur_loc == node->loc)
7749 var->var_part[pos].cur_loc = NULL;
7750 pool_free (loc_chain_pool, node);
7751 *nextp = next;
7752 break;
7754 else
7755 nextp = &node->next;
7758 nextp = &var->var_part[pos].loc_chain;
7761 /* Add the location to the beginning. */
7762 node = (location_chain) pool_alloc (loc_chain_pool);
7763 node->loc = loc;
7764 node->init = initialized;
7765 node->set_src = set_src;
7766 node->next = *nextp;
7767 *nextp = node;
7769 /* If no location was emitted do so. */
7770 if (var->var_part[pos].cur_loc == NULL)
7771 variable_was_changed (var, set);
7773 return slot;
7776 /* Set the part of variable's location in the dataflow set SET. The
7777 variable part is specified by variable's declaration in DV and
7778 offset OFFSET and the part's location by LOC. IOPT should be
7779 NO_INSERT if the variable is known to be in SET already and the
7780 variable hash table must not be resized, and INSERT otherwise. */
7782 static void
7783 set_variable_part (dataflow_set *set, rtx loc,
7784 decl_or_value dv, HOST_WIDE_INT offset,
7785 enum var_init_status initialized, rtx set_src,
7786 enum insert_option iopt)
7788 variable_def **slot;
7790 if (iopt == NO_INSERT)
7791 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7792 else
7794 slot = shared_hash_find_slot (set->vars, dv);
7795 if (!slot)
7796 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7798 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7801 /* Remove all recorded register locations for the given variable part
7802 from dataflow set SET, except for those that are identical to loc.
7803 The variable part is specified by variable's declaration or value
7804 DV and offset OFFSET. */
7806 static variable_def **
7807 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7808 HOST_WIDE_INT offset, rtx set_src)
7810 variable var = *slot;
7811 int pos = find_variable_location_part (var, offset, NULL);
7813 if (pos >= 0)
7815 location_chain node, next;
7817 /* Remove the register locations from the dataflow set. */
7818 next = var->var_part[pos].loc_chain;
7819 for (node = next; node; node = next)
7821 next = node->next;
7822 if (node->loc != loc
7823 && (!flag_var_tracking_uninit
7824 || !set_src
7825 || MEM_P (set_src)
7826 || !rtx_equal_p (set_src, node->set_src)))
7828 if (REG_P (node->loc))
7830 attrs anode, anext;
7831 attrs *anextp;
7833 /* Remove the variable part from the register's
7834 list, but preserve any other variable parts
7835 that might be regarded as live in that same
7836 register. */
7837 anextp = &set->regs[REGNO (node->loc)];
7838 for (anode = *anextp; anode; anode = anext)
7840 anext = anode->next;
7841 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7842 && anode->offset == offset)
7844 pool_free (attrs_pool, anode);
7845 *anextp = anext;
7847 else
7848 anextp = &anode->next;
7852 slot = delete_slot_part (set, node->loc, slot, offset);
7857 return slot;
7860 /* Remove all recorded register locations for the given variable part
7861 from dataflow set SET, except for those that are identical to loc.
7862 The variable part is specified by variable's declaration or value
7863 DV and offset OFFSET. */
7865 static void
7866 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7867 HOST_WIDE_INT offset, rtx set_src)
7869 variable_def **slot;
7871 if (!dv_as_opaque (dv)
7872 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7873 return;
7875 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7876 if (!slot)
7877 return;
7879 clobber_slot_part (set, loc, slot, offset, set_src);
7882 /* Delete the part of variable's location from dataflow set SET. The
7883 variable part is specified by its SET->vars slot SLOT and offset
7884 OFFSET and the part's location by LOC. */
7886 static variable_def **
7887 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7888 HOST_WIDE_INT offset)
7890 variable var = *slot;
7891 int pos = find_variable_location_part (var, offset, NULL);
7893 if (pos >= 0)
7895 location_chain node, next;
7896 location_chain *nextp;
7897 bool changed;
7898 rtx cur_loc;
7900 if (shared_var_p (var, set->vars))
7902 /* If the variable contains the location part we have to
7903 make a copy of the variable. */
7904 for (node = var->var_part[pos].loc_chain; node;
7905 node = node->next)
7907 if ((REG_P (node->loc) && REG_P (loc)
7908 && REGNO (node->loc) == REGNO (loc))
7909 || rtx_equal_p (node->loc, loc))
7911 slot = unshare_variable (set, slot, var,
7912 VAR_INIT_STATUS_UNKNOWN);
7913 var = *slot;
7914 break;
7919 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7920 cur_loc = VAR_LOC_FROM (var);
7921 else
7922 cur_loc = var->var_part[pos].cur_loc;
7924 /* Delete the location part. */
7925 changed = false;
7926 nextp = &var->var_part[pos].loc_chain;
7927 for (node = *nextp; node; node = next)
7929 next = node->next;
7930 if ((REG_P (node->loc) && REG_P (loc)
7931 && REGNO (node->loc) == REGNO (loc))
7932 || rtx_equal_p (node->loc, loc))
7934 /* If we have deleted the location which was last emitted
7935 we have to emit new location so add the variable to set
7936 of changed variables. */
7937 if (cur_loc == node->loc)
7939 changed = true;
7940 var->var_part[pos].cur_loc = NULL;
7941 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7942 VAR_LOC_FROM (var) = NULL;
7944 pool_free (loc_chain_pool, node);
7945 *nextp = next;
7946 break;
7948 else
7949 nextp = &node->next;
7952 if (var->var_part[pos].loc_chain == NULL)
7954 changed = true;
7955 var->n_var_parts--;
7956 while (pos < var->n_var_parts)
7958 var->var_part[pos] = var->var_part[pos + 1];
7959 pos++;
7962 if (changed)
7963 variable_was_changed (var, set);
7966 return slot;
7969 /* Delete the part of variable's location from dataflow set SET. The
7970 variable part is specified by variable's declaration or value DV
7971 and offset OFFSET and the part's location by LOC. */
7973 static void
7974 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7975 HOST_WIDE_INT offset)
7977 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7978 if (!slot)
7979 return;
7981 delete_slot_part (set, loc, slot, offset);
7985 /* Structure for passing some other parameters to function
7986 vt_expand_loc_callback. */
7987 struct expand_loc_callback_data
7989 /* The variables and values active at this point. */
7990 variable_table_type *vars;
7992 /* Stack of values and debug_exprs under expansion, and their
7993 children. */
7994 auto_vec<rtx, 4> expanding;
7996 /* Stack of values and debug_exprs whose expansion hit recursion
7997 cycles. They will have VALUE_RECURSED_INTO marked when added to
7998 this list. This flag will be cleared if any of its dependencies
7999 resolves to a valid location. So, if the flag remains set at the
8000 end of the search, we know no valid location for this one can
8001 possibly exist. */
8002 auto_vec<rtx, 4> pending;
8004 /* The maximum depth among the sub-expressions under expansion.
8005 Zero indicates no expansion so far. */
8006 expand_depth depth;
8009 /* Allocate the one-part auxiliary data structure for VAR, with enough
8010 room for COUNT dependencies. */
8012 static void
8013 loc_exp_dep_alloc (variable var, int count)
8015 size_t allocsize;
8017 gcc_checking_assert (var->onepart);
8019 /* We can be called with COUNT == 0 to allocate the data structure
8020 without any dependencies, e.g. for the backlinks only. However,
8021 if we are specifying a COUNT, then the dependency list must have
8022 been emptied before. It would be possible to adjust pointers or
8023 force it empty here, but this is better done at an earlier point
8024 in the algorithm, so we instead leave an assertion to catch
8025 errors. */
8026 gcc_checking_assert (!count
8027 || VAR_LOC_DEP_VEC (var) == NULL
8028 || VAR_LOC_DEP_VEC (var)->is_empty ());
8030 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8031 return;
8033 allocsize = offsetof (struct onepart_aux, deps)
8034 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8036 if (VAR_LOC_1PAUX (var))
8038 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8039 VAR_LOC_1PAUX (var), allocsize);
8040 /* If the reallocation moves the onepaux structure, the
8041 back-pointer to BACKLINKS in the first list member will still
8042 point to its old location. Adjust it. */
8043 if (VAR_LOC_DEP_LST (var))
8044 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8046 else
8048 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8049 *VAR_LOC_DEP_LSTP (var) = NULL;
8050 VAR_LOC_FROM (var) = NULL;
8051 VAR_LOC_DEPTH (var).complexity = 0;
8052 VAR_LOC_DEPTH (var).entryvals = 0;
8054 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8057 /* Remove all entries from the vector of active dependencies of VAR,
8058 removing them from the back-links lists too. */
8060 static void
8061 loc_exp_dep_clear (variable var)
8063 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8065 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8066 if (led->next)
8067 led->next->pprev = led->pprev;
8068 if (led->pprev)
8069 *led->pprev = led->next;
8070 VAR_LOC_DEP_VEC (var)->pop ();
8074 /* Insert an active dependency from VAR on X to the vector of
8075 dependencies, and add the corresponding back-link to X's list of
8076 back-links in VARS. */
8078 static void
8079 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8081 decl_or_value dv;
8082 variable xvar;
8083 loc_exp_dep *led;
8085 dv = dv_from_rtx (x);
8087 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8088 an additional look up? */
8089 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8091 if (!xvar)
8093 xvar = variable_from_dropped (dv, NO_INSERT);
8094 gcc_checking_assert (xvar);
8097 /* No point in adding the same backlink more than once. This may
8098 arise if say the same value appears in two complex expressions in
8099 the same loc_list, or even more than once in a single
8100 expression. */
8101 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8102 return;
8104 if (var->onepart == NOT_ONEPART)
8105 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8106 else
8108 loc_exp_dep empty;
8109 memset (&empty, 0, sizeof (empty));
8110 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8111 led = &VAR_LOC_DEP_VEC (var)->last ();
8113 led->dv = var->dv;
8114 led->value = x;
8116 loc_exp_dep_alloc (xvar, 0);
8117 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8118 led->next = *led->pprev;
8119 if (led->next)
8120 led->next->pprev = &led->next;
8121 *led->pprev = led;
8124 /* Create active dependencies of VAR on COUNT values starting at
8125 VALUE, and corresponding back-links to the entries in VARS. Return
8126 true if we found any pending-recursion results. */
8128 static bool
8129 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8130 variable_table_type *vars)
8132 bool pending_recursion = false;
8134 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8135 || VAR_LOC_DEP_VEC (var)->is_empty ());
8137 /* Set up all dependencies from last_child (as set up at the end of
8138 the loop above) to the end. */
8139 loc_exp_dep_alloc (var, count);
8141 while (count--)
8143 rtx x = *value++;
8145 if (!pending_recursion)
8146 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8148 loc_exp_insert_dep (var, x, vars);
8151 return pending_recursion;
8154 /* Notify the back-links of IVAR that are pending recursion that we
8155 have found a non-NIL value for it, so they are cleared for another
8156 attempt to compute a current location. */
8158 static void
8159 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8161 loc_exp_dep *led, *next;
8163 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8165 decl_or_value dv = led->dv;
8166 variable var;
8168 next = led->next;
8170 if (dv_is_value_p (dv))
8172 rtx value = dv_as_value (dv);
8174 /* If we have already resolved it, leave it alone. */
8175 if (!VALUE_RECURSED_INTO (value))
8176 continue;
8178 /* Check that VALUE_RECURSED_INTO, true from the test above,
8179 implies NO_LOC_P. */
8180 gcc_checking_assert (NO_LOC_P (value));
8182 /* We won't notify variables that are being expanded,
8183 because their dependency list is cleared before
8184 recursing. */
8185 NO_LOC_P (value) = false;
8186 VALUE_RECURSED_INTO (value) = false;
8188 gcc_checking_assert (dv_changed_p (dv));
8190 else
8192 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8193 if (!dv_changed_p (dv))
8194 continue;
8197 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8199 if (!var)
8200 var = variable_from_dropped (dv, NO_INSERT);
8202 if (var)
8203 notify_dependents_of_resolved_value (var, vars);
8205 if (next)
8206 next->pprev = led->pprev;
8207 if (led->pprev)
8208 *led->pprev = next;
8209 led->next = NULL;
8210 led->pprev = NULL;
8214 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8215 int max_depth, void *data);
8217 /* Return the combined depth, when one sub-expression evaluated to
8218 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8220 static inline expand_depth
8221 update_depth (expand_depth saved_depth, expand_depth best_depth)
8223 /* If we didn't find anything, stick with what we had. */
8224 if (!best_depth.complexity)
8225 return saved_depth;
8227 /* If we found hadn't found anything, use the depth of the current
8228 expression. Do NOT add one extra level, we want to compute the
8229 maximum depth among sub-expressions. We'll increment it later,
8230 if appropriate. */
8231 if (!saved_depth.complexity)
8232 return best_depth;
8234 /* Combine the entryval count so that regardless of which one we
8235 return, the entryval count is accurate. */
8236 best_depth.entryvals = saved_depth.entryvals
8237 = best_depth.entryvals + saved_depth.entryvals;
8239 if (saved_depth.complexity < best_depth.complexity)
8240 return best_depth;
8241 else
8242 return saved_depth;
8245 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8246 DATA for cselib expand callback. If PENDRECP is given, indicate in
8247 it whether any sub-expression couldn't be fully evaluated because
8248 it is pending recursion resolution. */
8250 static inline rtx
8251 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8253 struct expand_loc_callback_data *elcd
8254 = (struct expand_loc_callback_data *) data;
8255 location_chain loc, next;
8256 rtx result = NULL;
8257 int first_child, result_first_child, last_child;
8258 bool pending_recursion;
8259 rtx loc_from = NULL;
8260 struct elt_loc_list *cloc = NULL;
8261 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8262 int wanted_entryvals, found_entryvals = 0;
8264 /* Clear all backlinks pointing at this, so that we're not notified
8265 while we're active. */
8266 loc_exp_dep_clear (var);
8268 retry:
8269 if (var->onepart == ONEPART_VALUE)
8271 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8273 gcc_checking_assert (cselib_preserved_value_p (val));
8275 cloc = val->locs;
8278 first_child = result_first_child = last_child
8279 = elcd->expanding.length ();
8281 wanted_entryvals = found_entryvals;
8283 /* Attempt to expand each available location in turn. */
8284 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8285 loc || cloc; loc = next)
8287 result_first_child = last_child;
8289 if (!loc)
8291 loc_from = cloc->loc;
8292 next = loc;
8293 cloc = cloc->next;
8294 if (unsuitable_loc (loc_from))
8295 continue;
8297 else
8299 loc_from = loc->loc;
8300 next = loc->next;
8303 gcc_checking_assert (!unsuitable_loc (loc_from));
8305 elcd->depth.complexity = elcd->depth.entryvals = 0;
8306 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8307 vt_expand_loc_callback, data);
8308 last_child = elcd->expanding.length ();
8310 if (result)
8312 depth = elcd->depth;
8314 gcc_checking_assert (depth.complexity
8315 || result_first_child == last_child);
8317 if (last_child - result_first_child != 1)
8319 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8320 depth.entryvals++;
8321 depth.complexity++;
8324 if (depth.complexity <= EXPR_USE_DEPTH)
8326 if (depth.entryvals <= wanted_entryvals)
8327 break;
8328 else if (!found_entryvals || depth.entryvals < found_entryvals)
8329 found_entryvals = depth.entryvals;
8332 result = NULL;
8335 /* Set it up in case we leave the loop. */
8336 depth.complexity = depth.entryvals = 0;
8337 loc_from = NULL;
8338 result_first_child = first_child;
8341 if (!loc_from && wanted_entryvals < found_entryvals)
8343 /* We found entries with ENTRY_VALUEs and skipped them. Since
8344 we could not find any expansions without ENTRY_VALUEs, but we
8345 found at least one with them, go back and get an entry with
8346 the minimum number ENTRY_VALUE count that we found. We could
8347 avoid looping, but since each sub-loc is already resolved,
8348 the re-expansion should be trivial. ??? Should we record all
8349 attempted locs as dependencies, so that we retry the
8350 expansion should any of them change, in the hope it can give
8351 us a new entry without an ENTRY_VALUE? */
8352 elcd->expanding.truncate (first_child);
8353 goto retry;
8356 /* Register all encountered dependencies as active. */
8357 pending_recursion = loc_exp_dep_set
8358 (var, result, elcd->expanding.address () + result_first_child,
8359 last_child - result_first_child, elcd->vars);
8361 elcd->expanding.truncate (first_child);
8363 /* Record where the expansion came from. */
8364 gcc_checking_assert (!result || !pending_recursion);
8365 VAR_LOC_FROM (var) = loc_from;
8366 VAR_LOC_DEPTH (var) = depth;
8368 gcc_checking_assert (!depth.complexity == !result);
8370 elcd->depth = update_depth (saved_depth, depth);
8372 /* Indicate whether any of the dependencies are pending recursion
8373 resolution. */
8374 if (pendrecp)
8375 *pendrecp = pending_recursion;
8377 if (!pendrecp || !pending_recursion)
8378 var->var_part[0].cur_loc = result;
8380 return result;
8383 /* Callback for cselib_expand_value, that looks for expressions
8384 holding the value in the var-tracking hash tables. Return X for
8385 standard processing, anything else is to be used as-is. */
8387 static rtx
8388 vt_expand_loc_callback (rtx x, bitmap regs,
8389 int max_depth ATTRIBUTE_UNUSED,
8390 void *data)
8392 struct expand_loc_callback_data *elcd
8393 = (struct expand_loc_callback_data *) data;
8394 decl_or_value dv;
8395 variable var;
8396 rtx result, subreg;
8397 bool pending_recursion = false;
8398 bool from_empty = false;
8400 switch (GET_CODE (x))
8402 case SUBREG:
8403 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8404 EXPR_DEPTH,
8405 vt_expand_loc_callback, data);
8407 if (!subreg)
8408 return NULL;
8410 result = simplify_gen_subreg (GET_MODE (x), subreg,
8411 GET_MODE (SUBREG_REG (x)),
8412 SUBREG_BYTE (x));
8414 /* Invalid SUBREGs are ok in debug info. ??? We could try
8415 alternate expansions for the VALUE as well. */
8416 if (!result)
8417 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8419 return result;
8421 case DEBUG_EXPR:
8422 case VALUE:
8423 dv = dv_from_rtx (x);
8424 break;
8426 default:
8427 return x;
8430 elcd->expanding.safe_push (x);
8432 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8433 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8435 if (NO_LOC_P (x))
8437 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8438 return NULL;
8441 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8443 if (!var)
8445 from_empty = true;
8446 var = variable_from_dropped (dv, INSERT);
8449 gcc_checking_assert (var);
8451 if (!dv_changed_p (dv))
8453 gcc_checking_assert (!NO_LOC_P (x));
8454 gcc_checking_assert (var->var_part[0].cur_loc);
8455 gcc_checking_assert (VAR_LOC_1PAUX (var));
8456 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8458 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8460 return var->var_part[0].cur_loc;
8463 VALUE_RECURSED_INTO (x) = true;
8464 /* This is tentative, but it makes some tests simpler. */
8465 NO_LOC_P (x) = true;
8467 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8469 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8471 if (pending_recursion)
8473 gcc_checking_assert (!result);
8474 elcd->pending.safe_push (x);
8476 else
8478 NO_LOC_P (x) = !result;
8479 VALUE_RECURSED_INTO (x) = false;
8480 set_dv_changed (dv, false);
8482 if (result)
8483 notify_dependents_of_resolved_value (var, elcd->vars);
8486 return result;
8489 /* While expanding variables, we may encounter recursion cycles
8490 because of mutual (possibly indirect) dependencies between two
8491 particular variables (or values), say A and B. If we're trying to
8492 expand A when we get to B, which in turn attempts to expand A, if
8493 we can't find any other expansion for B, we'll add B to this
8494 pending-recursion stack, and tentatively return NULL for its
8495 location. This tentative value will be used for any other
8496 occurrences of B, unless A gets some other location, in which case
8497 it will notify B that it is worth another try at computing a
8498 location for it, and it will use the location computed for A then.
8499 At the end of the expansion, the tentative NULL locations become
8500 final for all members of PENDING that didn't get a notification.
8501 This function performs this finalization of NULL locations. */
8503 static void
8504 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8506 while (!pending->is_empty ())
8508 rtx x = pending->pop ();
8509 decl_or_value dv;
8511 if (!VALUE_RECURSED_INTO (x))
8512 continue;
8514 gcc_checking_assert (NO_LOC_P (x));
8515 VALUE_RECURSED_INTO (x) = false;
8516 dv = dv_from_rtx (x);
8517 gcc_checking_assert (dv_changed_p (dv));
8518 set_dv_changed (dv, false);
8522 /* Initialize expand_loc_callback_data D with variable hash table V.
8523 It must be a macro because of alloca (vec stack). */
8524 #define INIT_ELCD(d, v) \
8525 do \
8527 (d).vars = (v); \
8528 (d).depth.complexity = (d).depth.entryvals = 0; \
8530 while (0)
8531 /* Finalize expand_loc_callback_data D, resolved to location L. */
8532 #define FINI_ELCD(d, l) \
8533 do \
8535 resolve_expansions_pending_recursion (&(d).pending); \
8536 (d).pending.release (); \
8537 (d).expanding.release (); \
8539 if ((l) && MEM_P (l)) \
8540 (l) = targetm.delegitimize_address (l); \
8542 while (0)
8544 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8545 equivalences in VARS, updating their CUR_LOCs in the process. */
8547 static rtx
8548 vt_expand_loc (rtx loc, variable_table_type *vars)
8550 struct expand_loc_callback_data data;
8551 rtx result;
8553 if (!MAY_HAVE_DEBUG_INSNS)
8554 return loc;
8556 INIT_ELCD (data, vars);
8558 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8559 vt_expand_loc_callback, &data);
8561 FINI_ELCD (data, result);
8563 return result;
8566 /* Expand the one-part VARiable to a location, using the equivalences
8567 in VARS, updating their CUR_LOCs in the process. */
8569 static rtx
8570 vt_expand_1pvar (variable var, variable_table_type *vars)
8572 struct expand_loc_callback_data data;
8573 rtx loc;
8575 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8577 if (!dv_changed_p (var->dv))
8578 return var->var_part[0].cur_loc;
8580 INIT_ELCD (data, vars);
8582 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8584 gcc_checking_assert (data.expanding.is_empty ());
8586 FINI_ELCD (data, loc);
8588 return loc;
8591 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8592 additional parameters: WHERE specifies whether the note shall be emitted
8593 before or after instruction INSN. */
8596 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8598 variable var = *varp;
8599 rtx_insn *insn = data->insn;
8600 enum emit_note_where where = data->where;
8601 variable_table_type *vars = data->vars;
8602 rtx_note *note;
8603 rtx note_vl;
8604 int i, j, n_var_parts;
8605 bool complete;
8606 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8607 HOST_WIDE_INT last_limit;
8608 tree type_size_unit;
8609 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8610 rtx loc[MAX_VAR_PARTS];
8611 tree decl;
8612 location_chain lc;
8614 gcc_checking_assert (var->onepart == NOT_ONEPART
8615 || var->onepart == ONEPART_VDECL);
8617 decl = dv_as_decl (var->dv);
8619 complete = true;
8620 last_limit = 0;
8621 n_var_parts = 0;
8622 if (!var->onepart)
8623 for (i = 0; i < var->n_var_parts; i++)
8624 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8625 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8626 for (i = 0; i < var->n_var_parts; i++)
8628 enum machine_mode mode, wider_mode;
8629 rtx loc2;
8630 HOST_WIDE_INT offset;
8632 if (i == 0 && var->onepart)
8634 gcc_checking_assert (var->n_var_parts == 1);
8635 offset = 0;
8636 initialized = VAR_INIT_STATUS_INITIALIZED;
8637 loc2 = vt_expand_1pvar (var, vars);
8639 else
8641 if (last_limit < VAR_PART_OFFSET (var, i))
8643 complete = false;
8644 break;
8646 else if (last_limit > VAR_PART_OFFSET (var, i))
8647 continue;
8648 offset = VAR_PART_OFFSET (var, i);
8649 loc2 = var->var_part[i].cur_loc;
8650 if (loc2 && GET_CODE (loc2) == MEM
8651 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8653 rtx depval = XEXP (loc2, 0);
8655 loc2 = vt_expand_loc (loc2, vars);
8657 if (loc2)
8658 loc_exp_insert_dep (var, depval, vars);
8660 if (!loc2)
8662 complete = false;
8663 continue;
8665 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8666 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8667 if (var->var_part[i].cur_loc == lc->loc)
8669 initialized = lc->init;
8670 break;
8672 gcc_assert (lc);
8675 offsets[n_var_parts] = offset;
8676 if (!loc2)
8678 complete = false;
8679 continue;
8681 loc[n_var_parts] = loc2;
8682 mode = GET_MODE (var->var_part[i].cur_loc);
8683 if (mode == VOIDmode && var->onepart)
8684 mode = DECL_MODE (decl);
8685 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8687 /* Attempt to merge adjacent registers or memory. */
8688 wider_mode = GET_MODE_WIDER_MODE (mode);
8689 for (j = i + 1; j < var->n_var_parts; j++)
8690 if (last_limit <= VAR_PART_OFFSET (var, j))
8691 break;
8692 if (j < var->n_var_parts
8693 && wider_mode != VOIDmode
8694 && var->var_part[j].cur_loc
8695 && mode == GET_MODE (var->var_part[j].cur_loc)
8696 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8697 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8698 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8699 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8701 rtx new_loc = NULL;
8703 if (REG_P (loc[n_var_parts])
8704 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8705 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8706 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8707 == REGNO (loc2))
8709 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8710 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8711 mode, 0);
8712 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8713 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8714 if (new_loc)
8716 if (!REG_P (new_loc)
8717 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8718 new_loc = NULL;
8719 else
8720 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8723 else if (MEM_P (loc[n_var_parts])
8724 && GET_CODE (XEXP (loc2, 0)) == PLUS
8725 && REG_P (XEXP (XEXP (loc2, 0), 0))
8726 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8728 if ((REG_P (XEXP (loc[n_var_parts], 0))
8729 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8730 XEXP (XEXP (loc2, 0), 0))
8731 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8732 == GET_MODE_SIZE (mode))
8733 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8734 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8735 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8736 XEXP (XEXP (loc2, 0), 0))
8737 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8738 + GET_MODE_SIZE (mode)
8739 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8740 new_loc = adjust_address_nv (loc[n_var_parts],
8741 wider_mode, 0);
8744 if (new_loc)
8746 loc[n_var_parts] = new_loc;
8747 mode = wider_mode;
8748 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8749 i = j;
8752 ++n_var_parts;
8754 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8755 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8756 complete = false;
8758 if (! flag_var_tracking_uninit)
8759 initialized = VAR_INIT_STATUS_INITIALIZED;
8761 note_vl = NULL_RTX;
8762 if (!complete)
8763 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8764 else if (n_var_parts == 1)
8766 rtx expr_list;
8768 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8769 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8770 else
8771 expr_list = loc[0];
8773 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8775 else if (n_var_parts)
8777 rtx parallel;
8779 for (i = 0; i < n_var_parts; i++)
8780 loc[i]
8781 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8783 parallel = gen_rtx_PARALLEL (VOIDmode,
8784 gen_rtvec_v (n_var_parts, loc));
8785 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8786 parallel, initialized);
8789 if (where != EMIT_NOTE_BEFORE_INSN)
8791 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8792 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8793 NOTE_DURING_CALL_P (note) = true;
8795 else
8797 /* Make sure that the call related notes come first. */
8798 while (NEXT_INSN (insn)
8799 && NOTE_P (insn)
8800 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8801 && NOTE_DURING_CALL_P (insn))
8802 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8803 insn = NEXT_INSN (insn);
8804 if (NOTE_P (insn)
8805 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8806 && NOTE_DURING_CALL_P (insn))
8807 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8808 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8809 else
8810 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8812 NOTE_VAR_LOCATION (note) = note_vl;
8814 set_dv_changed (var->dv, false);
8815 gcc_assert (var->in_changed_variables);
8816 var->in_changed_variables = false;
8817 changed_variables->clear_slot (varp);
8819 /* Continue traversing the hash table. */
8820 return 1;
8823 /* While traversing changed_variables, push onto DATA (a stack of RTX
8824 values) entries that aren't user variables. */
8827 var_track_values_to_stack (variable_def **slot,
8828 vec<rtx, va_heap> *changed_values_stack)
8830 variable var = *slot;
8832 if (var->onepart == ONEPART_VALUE)
8833 changed_values_stack->safe_push (dv_as_value (var->dv));
8834 else if (var->onepart == ONEPART_DEXPR)
8835 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8837 return 1;
8840 /* Remove from changed_variables the entry whose DV corresponds to
8841 value or debug_expr VAL. */
8842 static void
8843 remove_value_from_changed_variables (rtx val)
8845 decl_or_value dv = dv_from_rtx (val);
8846 variable_def **slot;
8847 variable var;
8849 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8850 NO_INSERT);
8851 var = *slot;
8852 var->in_changed_variables = false;
8853 changed_variables->clear_slot (slot);
8856 /* If VAL (a value or debug_expr) has backlinks to variables actively
8857 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8858 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8859 have dependencies of their own to notify. */
8861 static void
8862 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8863 vec<rtx, va_heap> *changed_values_stack)
8865 variable_def **slot;
8866 variable var;
8867 loc_exp_dep *led;
8868 decl_or_value dv = dv_from_rtx (val);
8870 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8871 NO_INSERT);
8872 if (!slot)
8873 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8874 if (!slot)
8875 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8876 NO_INSERT);
8877 var = *slot;
8879 while ((led = VAR_LOC_DEP_LST (var)))
8881 decl_or_value ldv = led->dv;
8882 variable ivar;
8884 /* Deactivate and remove the backlink, as it was “used up”. It
8885 makes no sense to attempt to notify the same entity again:
8886 either it will be recomputed and re-register an active
8887 dependency, or it will still have the changed mark. */
8888 if (led->next)
8889 led->next->pprev = led->pprev;
8890 if (led->pprev)
8891 *led->pprev = led->next;
8892 led->next = NULL;
8893 led->pprev = NULL;
8895 if (dv_changed_p (ldv))
8896 continue;
8898 switch (dv_onepart_p (ldv))
8900 case ONEPART_VALUE:
8901 case ONEPART_DEXPR:
8902 set_dv_changed (ldv, true);
8903 changed_values_stack->safe_push (dv_as_rtx (ldv));
8904 break;
8906 case ONEPART_VDECL:
8907 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8908 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8909 variable_was_changed (ivar, NULL);
8910 break;
8912 case NOT_ONEPART:
8913 pool_free (loc_exp_dep_pool, led);
8914 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8915 if (ivar)
8917 int i = ivar->n_var_parts;
8918 while (i--)
8920 rtx loc = ivar->var_part[i].cur_loc;
8922 if (loc && GET_CODE (loc) == MEM
8923 && XEXP (loc, 0) == val)
8925 variable_was_changed (ivar, NULL);
8926 break;
8930 break;
8932 default:
8933 gcc_unreachable ();
8938 /* Take out of changed_variables any entries that don't refer to use
8939 variables. Back-propagate change notifications from values and
8940 debug_exprs to their active dependencies in HTAB or in
8941 CHANGED_VARIABLES. */
8943 static void
8944 process_changed_values (variable_table_type *htab)
8946 int i, n;
8947 rtx val;
8948 auto_vec<rtx, 20> changed_values_stack;
8950 /* Move values from changed_variables to changed_values_stack. */
8951 changed_variables
8952 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8953 (&changed_values_stack);
8955 /* Back-propagate change notifications in values while popping
8956 them from the stack. */
8957 for (n = i = changed_values_stack.length ();
8958 i > 0; i = changed_values_stack.length ())
8960 val = changed_values_stack.pop ();
8961 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8963 /* This condition will hold when visiting each of the entries
8964 originally in changed_variables. We can't remove them
8965 earlier because this could drop the backlinks before we got a
8966 chance to use them. */
8967 if (i == n)
8969 remove_value_from_changed_variables (val);
8970 n--;
8975 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8976 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8977 the notes shall be emitted before of after instruction INSN. */
8979 static void
8980 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8981 shared_hash vars)
8983 emit_note_data data;
8984 variable_table_type *htab = shared_hash_htab (vars);
8986 if (!changed_variables->elements ())
8987 return;
8989 if (MAY_HAVE_DEBUG_INSNS)
8990 process_changed_values (htab);
8992 data.insn = insn;
8993 data.where = where;
8994 data.vars = htab;
8996 changed_variables
8997 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9000 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9001 same variable in hash table DATA or is not there at all. */
9004 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9006 variable old_var, new_var;
9008 old_var = *slot;
9009 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9011 if (!new_var)
9013 /* Variable has disappeared. */
9014 variable empty_var = NULL;
9016 if (old_var->onepart == ONEPART_VALUE
9017 || old_var->onepart == ONEPART_DEXPR)
9019 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9020 if (empty_var)
9022 gcc_checking_assert (!empty_var->in_changed_variables);
9023 if (!VAR_LOC_1PAUX (old_var))
9025 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9026 VAR_LOC_1PAUX (empty_var) = NULL;
9028 else
9029 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9033 if (!empty_var)
9035 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9036 empty_var->dv = old_var->dv;
9037 empty_var->refcount = 0;
9038 empty_var->n_var_parts = 0;
9039 empty_var->onepart = old_var->onepart;
9040 empty_var->in_changed_variables = false;
9043 if (empty_var->onepart)
9045 /* Propagate the auxiliary data to (ultimately)
9046 changed_variables. */
9047 empty_var->var_part[0].loc_chain = NULL;
9048 empty_var->var_part[0].cur_loc = NULL;
9049 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9050 VAR_LOC_1PAUX (old_var) = NULL;
9052 variable_was_changed (empty_var, NULL);
9053 /* Continue traversing the hash table. */
9054 return 1;
9056 /* Update cur_loc and one-part auxiliary data, before new_var goes
9057 through variable_was_changed. */
9058 if (old_var != new_var && new_var->onepart)
9060 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9061 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9062 VAR_LOC_1PAUX (old_var) = NULL;
9063 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9065 if (variable_different_p (old_var, new_var))
9066 variable_was_changed (new_var, NULL);
9068 /* Continue traversing the hash table. */
9069 return 1;
9072 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9073 table DATA. */
9076 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9078 variable old_var, new_var;
9080 new_var = *slot;
9081 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9082 if (!old_var)
9084 int i;
9085 for (i = 0; i < new_var->n_var_parts; i++)
9086 new_var->var_part[i].cur_loc = NULL;
9087 variable_was_changed (new_var, NULL);
9090 /* Continue traversing the hash table. */
9091 return 1;
9094 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9095 NEW_SET. */
9097 static void
9098 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9099 dataflow_set *new_set)
9101 shared_hash_htab (old_set->vars)
9102 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9103 (shared_hash_htab (new_set->vars));
9104 shared_hash_htab (new_set->vars)
9105 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9106 (shared_hash_htab (old_set->vars));
9107 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9110 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9112 static rtx_insn *
9113 next_non_note_insn_var_location (rtx_insn *insn)
9115 while (insn)
9117 insn = NEXT_INSN (insn);
9118 if (insn == 0
9119 || !NOTE_P (insn)
9120 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9121 break;
9124 return insn;
9127 /* Emit the notes for changes of location parts in the basic block BB. */
9129 static void
9130 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9132 unsigned int i;
9133 micro_operation *mo;
9135 dataflow_set_clear (set);
9136 dataflow_set_copy (set, &VTI (bb)->in);
9138 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9140 rtx_insn *insn = mo->insn;
9141 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9143 switch (mo->type)
9145 case MO_CALL:
9146 dataflow_set_clear_at_call (set);
9147 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9149 rtx arguments = mo->u.loc, *p = &arguments;
9150 rtx_note *note;
9151 while (*p)
9153 XEXP (XEXP (*p, 0), 1)
9154 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9155 shared_hash_htab (set->vars));
9156 /* If expansion is successful, keep it in the list. */
9157 if (XEXP (XEXP (*p, 0), 1))
9158 p = &XEXP (*p, 1);
9159 /* Otherwise, if the following item is data_value for it,
9160 drop it too too. */
9161 else if (XEXP (*p, 1)
9162 && REG_P (XEXP (XEXP (*p, 0), 0))
9163 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9164 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9166 && REGNO (XEXP (XEXP (*p, 0), 0))
9167 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9168 0), 0)))
9169 *p = XEXP (XEXP (*p, 1), 1);
9170 /* Just drop this item. */
9171 else
9172 *p = XEXP (*p, 1);
9174 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9175 NOTE_VAR_LOCATION (note) = arguments;
9177 break;
9179 case MO_USE:
9181 rtx loc = mo->u.loc;
9183 if (REG_P (loc))
9184 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9185 else
9186 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9188 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9190 break;
9192 case MO_VAL_LOC:
9194 rtx loc = mo->u.loc;
9195 rtx val, vloc;
9196 tree var;
9198 if (GET_CODE (loc) == CONCAT)
9200 val = XEXP (loc, 0);
9201 vloc = XEXP (loc, 1);
9203 else
9205 val = NULL_RTX;
9206 vloc = loc;
9209 var = PAT_VAR_LOCATION_DECL (vloc);
9211 clobber_variable_part (set, NULL_RTX,
9212 dv_from_decl (var), 0, NULL_RTX);
9213 if (val)
9215 if (VAL_NEEDS_RESOLUTION (loc))
9216 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9217 set_variable_part (set, val, dv_from_decl (var), 0,
9218 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9219 INSERT);
9221 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9222 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9223 dv_from_decl (var), 0,
9224 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9225 INSERT);
9227 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9229 break;
9231 case MO_VAL_USE:
9233 rtx loc = mo->u.loc;
9234 rtx val, vloc, uloc;
9236 vloc = uloc = XEXP (loc, 1);
9237 val = XEXP (loc, 0);
9239 if (GET_CODE (val) == CONCAT)
9241 uloc = XEXP (val, 1);
9242 val = XEXP (val, 0);
9245 if (VAL_NEEDS_RESOLUTION (loc))
9246 val_resolve (set, val, vloc, insn);
9247 else
9248 val_store (set, val, uloc, insn, false);
9250 if (VAL_HOLDS_TRACK_EXPR (loc))
9252 if (GET_CODE (uloc) == REG)
9253 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9254 NULL);
9255 else if (GET_CODE (uloc) == MEM)
9256 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9257 NULL);
9260 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9262 break;
9264 case MO_VAL_SET:
9266 rtx loc = mo->u.loc;
9267 rtx val, vloc, uloc;
9268 rtx dstv, srcv;
9270 vloc = loc;
9271 uloc = XEXP (vloc, 1);
9272 val = XEXP (vloc, 0);
9273 vloc = uloc;
9275 if (GET_CODE (uloc) == SET)
9277 dstv = SET_DEST (uloc);
9278 srcv = SET_SRC (uloc);
9280 else
9282 dstv = uloc;
9283 srcv = NULL;
9286 if (GET_CODE (val) == CONCAT)
9288 dstv = vloc = XEXP (val, 1);
9289 val = XEXP (val, 0);
9292 if (GET_CODE (vloc) == SET)
9294 srcv = SET_SRC (vloc);
9296 gcc_assert (val != srcv);
9297 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9299 dstv = vloc = SET_DEST (vloc);
9301 if (VAL_NEEDS_RESOLUTION (loc))
9302 val_resolve (set, val, srcv, insn);
9304 else if (VAL_NEEDS_RESOLUTION (loc))
9306 gcc_assert (GET_CODE (uloc) == SET
9307 && GET_CODE (SET_SRC (uloc)) == REG);
9308 val_resolve (set, val, SET_SRC (uloc), insn);
9311 if (VAL_HOLDS_TRACK_EXPR (loc))
9313 if (VAL_EXPR_IS_CLOBBERED (loc))
9315 if (REG_P (uloc))
9316 var_reg_delete (set, uloc, true);
9317 else if (MEM_P (uloc))
9319 gcc_assert (MEM_P (dstv));
9320 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9321 var_mem_delete (set, dstv, true);
9324 else
9326 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9327 rtx src = NULL, dst = uloc;
9328 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9330 if (GET_CODE (uloc) == SET)
9332 src = SET_SRC (uloc);
9333 dst = SET_DEST (uloc);
9336 if (copied_p)
9338 status = find_src_status (set, src);
9340 src = find_src_set_src (set, src);
9343 if (REG_P (dst))
9344 var_reg_delete_and_set (set, dst, !copied_p,
9345 status, srcv);
9346 else if (MEM_P (dst))
9348 gcc_assert (MEM_P (dstv));
9349 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9350 var_mem_delete_and_set (set, dstv, !copied_p,
9351 status, srcv);
9355 else if (REG_P (uloc))
9356 var_regno_delete (set, REGNO (uloc));
9357 else if (MEM_P (uloc))
9359 gcc_checking_assert (GET_CODE (vloc) == MEM);
9360 gcc_checking_assert (vloc == dstv);
9361 if (vloc != dstv)
9362 clobber_overlapping_mems (set, vloc);
9365 val_store (set, val, dstv, insn, true);
9367 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9368 set->vars);
9370 break;
9372 case MO_SET:
9374 rtx loc = mo->u.loc;
9375 rtx set_src = NULL;
9377 if (GET_CODE (loc) == SET)
9379 set_src = SET_SRC (loc);
9380 loc = SET_DEST (loc);
9383 if (REG_P (loc))
9384 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9385 set_src);
9386 else
9387 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9388 set_src);
9390 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9391 set->vars);
9393 break;
9395 case MO_COPY:
9397 rtx loc = mo->u.loc;
9398 enum var_init_status src_status;
9399 rtx set_src = NULL;
9401 if (GET_CODE (loc) == SET)
9403 set_src = SET_SRC (loc);
9404 loc = SET_DEST (loc);
9407 src_status = find_src_status (set, set_src);
9408 set_src = find_src_set_src (set, set_src);
9410 if (REG_P (loc))
9411 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9412 else
9413 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9415 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9416 set->vars);
9418 break;
9420 case MO_USE_NO_VAR:
9422 rtx loc = mo->u.loc;
9424 if (REG_P (loc))
9425 var_reg_delete (set, loc, false);
9426 else
9427 var_mem_delete (set, loc, false);
9429 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9431 break;
9433 case MO_CLOBBER:
9435 rtx loc = mo->u.loc;
9437 if (REG_P (loc))
9438 var_reg_delete (set, loc, true);
9439 else
9440 var_mem_delete (set, loc, true);
9442 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9443 set->vars);
9445 break;
9447 case MO_ADJUST:
9448 set->stack_adjust += mo->u.adjust;
9449 break;
9454 /* Emit notes for the whole function. */
9456 static void
9457 vt_emit_notes (void)
9459 basic_block bb;
9460 dataflow_set cur;
9462 gcc_assert (!changed_variables->elements ());
9464 /* Free memory occupied by the out hash tables, as they aren't used
9465 anymore. */
9466 FOR_EACH_BB_FN (bb, cfun)
9467 dataflow_set_clear (&VTI (bb)->out);
9469 /* Enable emitting notes by functions (mainly by set_variable_part and
9470 delete_variable_part). */
9471 emit_notes = true;
9473 if (MAY_HAVE_DEBUG_INSNS)
9475 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9476 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9477 sizeof (loc_exp_dep), 64);
9480 dataflow_set_init (&cur);
9482 FOR_EACH_BB_FN (bb, cfun)
9484 /* Emit the notes for changes of variable locations between two
9485 subsequent basic blocks. */
9486 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9488 if (MAY_HAVE_DEBUG_INSNS)
9489 local_get_addr_cache = new hash_map<rtx, rtx>;
9491 /* Emit the notes for the changes in the basic block itself. */
9492 emit_notes_in_bb (bb, &cur);
9494 if (MAY_HAVE_DEBUG_INSNS)
9495 delete local_get_addr_cache;
9496 local_get_addr_cache = NULL;
9498 /* Free memory occupied by the in hash table, we won't need it
9499 again. */
9500 dataflow_set_clear (&VTI (bb)->in);
9502 #ifdef ENABLE_CHECKING
9503 shared_hash_htab (cur.vars)
9504 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9505 (shared_hash_htab (empty_shared_hash));
9506 #endif
9507 dataflow_set_destroy (&cur);
9509 if (MAY_HAVE_DEBUG_INSNS)
9510 delete dropped_values;
9511 dropped_values = NULL;
9513 emit_notes = false;
9516 /* If there is a declaration and offset associated with register/memory RTL
9517 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9519 static bool
9520 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9522 if (REG_P (rtl))
9524 if (REG_ATTRS (rtl))
9526 *declp = REG_EXPR (rtl);
9527 *offsetp = REG_OFFSET (rtl);
9528 return true;
9531 else if (GET_CODE (rtl) == PARALLEL)
9533 tree decl = NULL_TREE;
9534 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9535 int len = XVECLEN (rtl, 0), i;
9537 for (i = 0; i < len; i++)
9539 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9540 if (!REG_P (reg) || !REG_ATTRS (reg))
9541 break;
9542 if (!decl)
9543 decl = REG_EXPR (reg);
9544 if (REG_EXPR (reg) != decl)
9545 break;
9546 if (REG_OFFSET (reg) < offset)
9547 offset = REG_OFFSET (reg);
9550 if (i == len)
9552 *declp = decl;
9553 *offsetp = offset;
9554 return true;
9557 else if (MEM_P (rtl))
9559 if (MEM_ATTRS (rtl))
9561 *declp = MEM_EXPR (rtl);
9562 *offsetp = INT_MEM_OFFSET (rtl);
9563 return true;
9566 return false;
9569 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9570 of VAL. */
9572 static void
9573 record_entry_value (cselib_val *val, rtx rtl)
9575 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9577 ENTRY_VALUE_EXP (ev) = rtl;
9579 cselib_add_permanent_equiv (val, ev, get_insns ());
9582 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9584 static void
9585 vt_add_function_parameter (tree parm)
9587 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9588 rtx incoming = DECL_INCOMING_RTL (parm);
9589 tree decl;
9590 enum machine_mode mode;
9591 HOST_WIDE_INT offset;
9592 dataflow_set *out;
9593 decl_or_value dv;
9595 if (TREE_CODE (parm) != PARM_DECL)
9596 return;
9598 if (!decl_rtl || !incoming)
9599 return;
9601 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9602 return;
9604 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9605 rewrite the incoming location of parameters passed on the stack
9606 into MEMs based on the argument pointer, so that incoming doesn't
9607 depend on a pseudo. */
9608 if (MEM_P (incoming)
9609 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9610 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9611 && XEXP (XEXP (incoming, 0), 0)
9612 == crtl->args.internal_arg_pointer
9613 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9615 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9616 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9617 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9618 incoming
9619 = replace_equiv_address_nv (incoming,
9620 plus_constant (Pmode,
9621 arg_pointer_rtx, off));
9624 #ifdef HAVE_window_save
9625 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9626 If the target machine has an explicit window save instruction, the
9627 actual entry value is the corresponding OUTGOING_REGNO instead. */
9628 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9630 if (REG_P (incoming)
9631 && HARD_REGISTER_P (incoming)
9632 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9634 parm_reg_t p;
9635 p.incoming = incoming;
9636 incoming
9637 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9638 OUTGOING_REGNO (REGNO (incoming)), 0);
9639 p.outgoing = incoming;
9640 vec_safe_push (windowed_parm_regs, p);
9642 else if (GET_CODE (incoming) == PARALLEL)
9644 rtx outgoing
9645 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9646 int i;
9648 for (i = 0; i < XVECLEN (incoming, 0); i++)
9650 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9651 parm_reg_t p;
9652 p.incoming = reg;
9653 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9654 OUTGOING_REGNO (REGNO (reg)), 0);
9655 p.outgoing = reg;
9656 XVECEXP (outgoing, 0, i)
9657 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9658 XEXP (XVECEXP (incoming, 0, i), 1));
9659 vec_safe_push (windowed_parm_regs, p);
9662 incoming = outgoing;
9664 else if (MEM_P (incoming)
9665 && REG_P (XEXP (incoming, 0))
9666 && HARD_REGISTER_P (XEXP (incoming, 0)))
9668 rtx reg = XEXP (incoming, 0);
9669 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9671 parm_reg_t p;
9672 p.incoming = reg;
9673 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9674 p.outgoing = reg;
9675 vec_safe_push (windowed_parm_regs, p);
9676 incoming = replace_equiv_address_nv (incoming, reg);
9680 #endif
9682 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9684 if (MEM_P (incoming))
9686 /* This means argument is passed by invisible reference. */
9687 offset = 0;
9688 decl = parm;
9690 else
9692 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9693 return;
9694 offset += byte_lowpart_offset (GET_MODE (incoming),
9695 GET_MODE (decl_rtl));
9699 if (!decl)
9700 return;
9702 if (parm != decl)
9704 /* If that DECL_RTL wasn't a pseudo that got spilled to
9705 memory, bail out. Otherwise, the spill slot sharing code
9706 will force the memory to reference spill_slot_decl (%sfp),
9707 so we don't match above. That's ok, the pseudo must have
9708 referenced the entire parameter, so just reset OFFSET. */
9709 if (decl != get_spill_slot_decl (false))
9710 return;
9711 offset = 0;
9714 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9715 return;
9717 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9719 dv = dv_from_decl (parm);
9721 if (target_for_debug_bind (parm)
9722 /* We can't deal with these right now, because this kind of
9723 variable is single-part. ??? We could handle parallels
9724 that describe multiple locations for the same single
9725 value, but ATM we don't. */
9726 && GET_CODE (incoming) != PARALLEL)
9728 cselib_val *val;
9729 rtx lowpart;
9731 /* ??? We shouldn't ever hit this, but it may happen because
9732 arguments passed by invisible reference aren't dealt with
9733 above: incoming-rtl will have Pmode rather than the
9734 expected mode for the type. */
9735 if (offset)
9736 return;
9738 lowpart = var_lowpart (mode, incoming);
9739 if (!lowpart)
9740 return;
9742 val = cselib_lookup_from_insn (lowpart, mode, true,
9743 VOIDmode, get_insns ());
9745 /* ??? Float-typed values in memory are not handled by
9746 cselib. */
9747 if (val)
9749 preserve_value (val);
9750 set_variable_part (out, val->val_rtx, dv, offset,
9751 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9752 dv = dv_from_value (val->val_rtx);
9755 if (MEM_P (incoming))
9757 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9758 VOIDmode, get_insns ());
9759 if (val)
9761 preserve_value (val);
9762 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9767 if (REG_P (incoming))
9769 incoming = var_lowpart (mode, incoming);
9770 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9771 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9772 incoming);
9773 set_variable_part (out, incoming, dv, offset,
9774 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9775 if (dv_is_value_p (dv))
9777 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9778 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9779 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9781 enum machine_mode indmode
9782 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9783 rtx mem = gen_rtx_MEM (indmode, incoming);
9784 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9785 VOIDmode,
9786 get_insns ());
9787 if (val)
9789 preserve_value (val);
9790 record_entry_value (val, mem);
9791 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9792 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9797 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9799 int i;
9801 for (i = 0; i < XVECLEN (incoming, 0); i++)
9803 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9804 offset = REG_OFFSET (reg);
9805 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9806 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9807 set_variable_part (out, reg, dv, offset,
9808 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9811 else if (MEM_P (incoming))
9813 incoming = var_lowpart (mode, incoming);
9814 set_variable_part (out, incoming, dv, offset,
9815 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9819 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9821 static void
9822 vt_add_function_parameters (void)
9824 tree parm;
9826 for (parm = DECL_ARGUMENTS (current_function_decl);
9827 parm; parm = DECL_CHAIN (parm))
9828 vt_add_function_parameter (parm);
9830 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9832 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9834 if (TREE_CODE (vexpr) == INDIRECT_REF)
9835 vexpr = TREE_OPERAND (vexpr, 0);
9837 if (TREE_CODE (vexpr) == PARM_DECL
9838 && DECL_ARTIFICIAL (vexpr)
9839 && !DECL_IGNORED_P (vexpr)
9840 && DECL_NAMELESS (vexpr))
9841 vt_add_function_parameter (vexpr);
9845 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9846 ensure it isn't flushed during cselib_reset_table.
9847 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9848 has been eliminated. */
9850 static void
9851 vt_init_cfa_base (void)
9853 cselib_val *val;
9855 #ifdef FRAME_POINTER_CFA_OFFSET
9856 cfa_base_rtx = frame_pointer_rtx;
9857 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9858 #else
9859 cfa_base_rtx = arg_pointer_rtx;
9860 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9861 #endif
9862 if (cfa_base_rtx == hard_frame_pointer_rtx
9863 || !fixed_regs[REGNO (cfa_base_rtx)])
9865 cfa_base_rtx = NULL_RTX;
9866 return;
9868 if (!MAY_HAVE_DEBUG_INSNS)
9869 return;
9871 /* Tell alias analysis that cfa_base_rtx should share
9872 find_base_term value with stack pointer or hard frame pointer. */
9873 if (!frame_pointer_needed)
9874 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9875 else if (!crtl->stack_realign_tried)
9876 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9878 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9879 VOIDmode, get_insns ());
9880 preserve_value (val);
9881 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9884 /* Allocate and initialize the data structures for variable tracking
9885 and parse the RTL to get the micro operations. */
9887 static bool
9888 vt_initialize (void)
9890 basic_block bb;
9891 HOST_WIDE_INT fp_cfa_offset = -1;
9893 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9895 attrs_pool = create_alloc_pool ("attrs_def pool",
9896 sizeof (struct attrs_def), 1024);
9897 var_pool = create_alloc_pool ("variable_def pool",
9898 sizeof (struct variable_def)
9899 + (MAX_VAR_PARTS - 1)
9900 * sizeof (((variable)NULL)->var_part[0]), 64);
9901 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9902 sizeof (struct location_chain_def),
9903 1024);
9904 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9905 sizeof (struct shared_hash_def), 256);
9906 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9907 empty_shared_hash->refcount = 1;
9908 empty_shared_hash->htab = new variable_table_type (1);
9909 changed_variables = new variable_table_type (10);
9911 /* Init the IN and OUT sets. */
9912 FOR_ALL_BB_FN (bb, cfun)
9914 VTI (bb)->visited = false;
9915 VTI (bb)->flooded = false;
9916 dataflow_set_init (&VTI (bb)->in);
9917 dataflow_set_init (&VTI (bb)->out);
9918 VTI (bb)->permp = NULL;
9921 if (MAY_HAVE_DEBUG_INSNS)
9923 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9924 scratch_regs = BITMAP_ALLOC (NULL);
9925 valvar_pool = create_alloc_pool ("small variable_def pool",
9926 sizeof (struct variable_def), 256);
9927 preserved_values.create (256);
9928 global_get_addr_cache = new hash_map<rtx, rtx>;
9930 else
9932 scratch_regs = NULL;
9933 valvar_pool = NULL;
9934 global_get_addr_cache = NULL;
9937 if (MAY_HAVE_DEBUG_INSNS)
9939 rtx reg, expr;
9940 int ofst;
9941 cselib_val *val;
9943 #ifdef FRAME_POINTER_CFA_OFFSET
9944 reg = frame_pointer_rtx;
9945 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9946 #else
9947 reg = arg_pointer_rtx;
9948 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9949 #endif
9951 ofst -= INCOMING_FRAME_SP_OFFSET;
9953 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9954 VOIDmode, get_insns ());
9955 preserve_value (val);
9956 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9957 cselib_preserve_cfa_base_value (val, REGNO (reg));
9958 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9959 stack_pointer_rtx, -ofst);
9960 cselib_add_permanent_equiv (val, expr, get_insns ());
9962 if (ofst)
9964 val = cselib_lookup_from_insn (stack_pointer_rtx,
9965 GET_MODE (stack_pointer_rtx), 1,
9966 VOIDmode, get_insns ());
9967 preserve_value (val);
9968 expr = plus_constant (GET_MODE (reg), reg, ofst);
9969 cselib_add_permanent_equiv (val, expr, get_insns ());
9973 /* In order to factor out the adjustments made to the stack pointer or to
9974 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9975 instead of individual location lists, we're going to rewrite MEMs based
9976 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9977 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9978 resp. arg_pointer_rtx. We can do this either when there is no frame
9979 pointer in the function and stack adjustments are consistent for all
9980 basic blocks or when there is a frame pointer and no stack realignment.
9981 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9982 has been eliminated. */
9983 if (!frame_pointer_needed)
9985 rtx reg, elim;
9987 if (!vt_stack_adjustments ())
9988 return false;
9990 #ifdef FRAME_POINTER_CFA_OFFSET
9991 reg = frame_pointer_rtx;
9992 #else
9993 reg = arg_pointer_rtx;
9994 #endif
9995 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9996 if (elim != reg)
9998 if (GET_CODE (elim) == PLUS)
9999 elim = XEXP (elim, 0);
10000 if (elim == stack_pointer_rtx)
10001 vt_init_cfa_base ();
10004 else if (!crtl->stack_realign_tried)
10006 rtx reg, elim;
10008 #ifdef FRAME_POINTER_CFA_OFFSET
10009 reg = frame_pointer_rtx;
10010 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10011 #else
10012 reg = arg_pointer_rtx;
10013 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10014 #endif
10015 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10016 if (elim != reg)
10018 if (GET_CODE (elim) == PLUS)
10020 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10021 elim = XEXP (elim, 0);
10023 if (elim != hard_frame_pointer_rtx)
10024 fp_cfa_offset = -1;
10026 else
10027 fp_cfa_offset = -1;
10030 /* If the stack is realigned and a DRAP register is used, we're going to
10031 rewrite MEMs based on it representing incoming locations of parameters
10032 passed on the stack into MEMs based on the argument pointer. Although
10033 we aren't going to rewrite other MEMs, we still need to initialize the
10034 virtual CFA pointer in order to ensure that the argument pointer will
10035 be seen as a constant throughout the function.
10037 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10038 else if (stack_realign_drap)
10040 rtx reg, elim;
10042 #ifdef FRAME_POINTER_CFA_OFFSET
10043 reg = frame_pointer_rtx;
10044 #else
10045 reg = arg_pointer_rtx;
10046 #endif
10047 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10048 if (elim != reg)
10050 if (GET_CODE (elim) == PLUS)
10051 elim = XEXP (elim, 0);
10052 if (elim == hard_frame_pointer_rtx)
10053 vt_init_cfa_base ();
10057 hard_frame_pointer_adjustment = -1;
10059 vt_add_function_parameters ();
10061 FOR_EACH_BB_FN (bb, cfun)
10063 rtx_insn *insn;
10064 HOST_WIDE_INT pre, post = 0;
10065 basic_block first_bb, last_bb;
10067 if (MAY_HAVE_DEBUG_INSNS)
10069 cselib_record_sets_hook = add_with_sets;
10070 if (dump_file && (dump_flags & TDF_DETAILS))
10071 fprintf (dump_file, "first value: %i\n",
10072 cselib_get_next_uid ());
10075 first_bb = bb;
10076 for (;;)
10078 edge e;
10079 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10080 || ! single_pred_p (bb->next_bb))
10081 break;
10082 e = find_edge (bb, bb->next_bb);
10083 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10084 break;
10085 bb = bb->next_bb;
10087 last_bb = bb;
10089 /* Add the micro-operations to the vector. */
10090 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10092 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10093 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10094 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10095 insn = NEXT_INSN (insn))
10097 if (INSN_P (insn))
10099 if (!frame_pointer_needed)
10101 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10102 if (pre)
10104 micro_operation mo;
10105 mo.type = MO_ADJUST;
10106 mo.u.adjust = pre;
10107 mo.insn = insn;
10108 if (dump_file && (dump_flags & TDF_DETAILS))
10109 log_op_type (PATTERN (insn), bb, insn,
10110 MO_ADJUST, dump_file);
10111 VTI (bb)->mos.safe_push (mo);
10112 VTI (bb)->out.stack_adjust += pre;
10116 cselib_hook_called = false;
10117 adjust_insn (bb, insn);
10118 if (MAY_HAVE_DEBUG_INSNS)
10120 if (CALL_P (insn))
10121 prepare_call_arguments (bb, insn);
10122 cselib_process_insn (insn);
10123 if (dump_file && (dump_flags & TDF_DETAILS))
10125 print_rtl_single (dump_file, insn);
10126 dump_cselib_table (dump_file);
10129 if (!cselib_hook_called)
10130 add_with_sets (insn, 0, 0);
10131 cancel_changes (0);
10133 if (!frame_pointer_needed && post)
10135 micro_operation mo;
10136 mo.type = MO_ADJUST;
10137 mo.u.adjust = post;
10138 mo.insn = insn;
10139 if (dump_file && (dump_flags & TDF_DETAILS))
10140 log_op_type (PATTERN (insn), bb, insn,
10141 MO_ADJUST, dump_file);
10142 VTI (bb)->mos.safe_push (mo);
10143 VTI (bb)->out.stack_adjust += post;
10146 if (fp_cfa_offset != -1
10147 && hard_frame_pointer_adjustment == -1
10148 && fp_setter_insn (insn))
10150 vt_init_cfa_base ();
10151 hard_frame_pointer_adjustment = fp_cfa_offset;
10152 /* Disassociate sp from fp now. */
10153 if (MAY_HAVE_DEBUG_INSNS)
10155 cselib_val *v;
10156 cselib_invalidate_rtx (stack_pointer_rtx);
10157 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10158 VOIDmode);
10159 if (v && !cselib_preserved_value_p (v))
10161 cselib_set_value_sp_based (v);
10162 preserve_value (v);
10168 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10171 bb = last_bb;
10173 if (MAY_HAVE_DEBUG_INSNS)
10175 cselib_preserve_only_values ();
10176 cselib_reset_table (cselib_get_next_uid ());
10177 cselib_record_sets_hook = NULL;
10181 hard_frame_pointer_adjustment = -1;
10182 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10183 cfa_base_rtx = NULL_RTX;
10184 return true;
10187 /* This is *not* reset after each function. It gives each
10188 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10189 a unique label number. */
10191 static int debug_label_num = 1;
10193 /* Get rid of all debug insns from the insn stream. */
10195 static void
10196 delete_debug_insns (void)
10198 basic_block bb;
10199 rtx_insn *insn, *next;
10201 if (!MAY_HAVE_DEBUG_INSNS)
10202 return;
10204 FOR_EACH_BB_FN (bb, cfun)
10206 FOR_BB_INSNS_SAFE (bb, insn, next)
10207 if (DEBUG_INSN_P (insn))
10209 tree decl = INSN_VAR_LOCATION_DECL (insn);
10210 if (TREE_CODE (decl) == LABEL_DECL
10211 && DECL_NAME (decl)
10212 && !DECL_RTL_SET_P (decl))
10214 PUT_CODE (insn, NOTE);
10215 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10216 NOTE_DELETED_LABEL_NAME (insn)
10217 = IDENTIFIER_POINTER (DECL_NAME (decl));
10218 SET_DECL_RTL (decl, insn);
10219 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10221 else
10222 delete_insn (insn);
10227 /* Run a fast, BB-local only version of var tracking, to take care of
10228 information that we don't do global analysis on, such that not all
10229 information is lost. If SKIPPED holds, we're skipping the global
10230 pass entirely, so we should try to use information it would have
10231 handled as well.. */
10233 static void
10234 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10236 /* ??? Just skip it all for now. */
10237 delete_debug_insns ();
10240 /* Free the data structures needed for variable tracking. */
10242 static void
10243 vt_finalize (void)
10245 basic_block bb;
10247 FOR_EACH_BB_FN (bb, cfun)
10249 VTI (bb)->mos.release ();
10252 FOR_ALL_BB_FN (bb, cfun)
10254 dataflow_set_destroy (&VTI (bb)->in);
10255 dataflow_set_destroy (&VTI (bb)->out);
10256 if (VTI (bb)->permp)
10258 dataflow_set_destroy (VTI (bb)->permp);
10259 XDELETE (VTI (bb)->permp);
10262 free_aux_for_blocks ();
10263 delete empty_shared_hash->htab;
10264 empty_shared_hash->htab = NULL;
10265 delete changed_variables;
10266 changed_variables = NULL;
10267 free_alloc_pool (attrs_pool);
10268 free_alloc_pool (var_pool);
10269 free_alloc_pool (loc_chain_pool);
10270 free_alloc_pool (shared_hash_pool);
10272 if (MAY_HAVE_DEBUG_INSNS)
10274 if (global_get_addr_cache)
10275 delete global_get_addr_cache;
10276 global_get_addr_cache = NULL;
10277 if (loc_exp_dep_pool)
10278 free_alloc_pool (loc_exp_dep_pool);
10279 loc_exp_dep_pool = NULL;
10280 free_alloc_pool (valvar_pool);
10281 preserved_values.release ();
10282 cselib_finish ();
10283 BITMAP_FREE (scratch_regs);
10284 scratch_regs = NULL;
10287 #ifdef HAVE_window_save
10288 vec_free (windowed_parm_regs);
10289 #endif
10291 if (vui_vec)
10292 XDELETEVEC (vui_vec);
10293 vui_vec = NULL;
10294 vui_allocated = 0;
10297 /* The entry point to variable tracking pass. */
10299 static inline unsigned int
10300 variable_tracking_main_1 (void)
10302 bool success;
10304 if (flag_var_tracking_assignments < 0)
10306 delete_debug_insns ();
10307 return 0;
10310 if (n_basic_blocks_for_fn (cfun) > 500 &&
10311 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10313 vt_debug_insns_local (true);
10314 return 0;
10317 mark_dfs_back_edges ();
10318 if (!vt_initialize ())
10320 vt_finalize ();
10321 vt_debug_insns_local (true);
10322 return 0;
10325 success = vt_find_locations ();
10327 if (!success && flag_var_tracking_assignments > 0)
10329 vt_finalize ();
10331 delete_debug_insns ();
10333 /* This is later restored by our caller. */
10334 flag_var_tracking_assignments = 0;
10336 success = vt_initialize ();
10337 gcc_assert (success);
10339 success = vt_find_locations ();
10342 if (!success)
10344 vt_finalize ();
10345 vt_debug_insns_local (false);
10346 return 0;
10349 if (dump_file && (dump_flags & TDF_DETAILS))
10351 dump_dataflow_sets ();
10352 dump_reg_info (dump_file);
10353 dump_flow_info (dump_file, dump_flags);
10356 timevar_push (TV_VAR_TRACKING_EMIT);
10357 vt_emit_notes ();
10358 timevar_pop (TV_VAR_TRACKING_EMIT);
10360 vt_finalize ();
10361 vt_debug_insns_local (false);
10362 return 0;
10365 unsigned int
10366 variable_tracking_main (void)
10368 unsigned int ret;
10369 int save = flag_var_tracking_assignments;
10371 ret = variable_tracking_main_1 ();
10373 flag_var_tracking_assignments = save;
10375 return ret;
10378 namespace {
10380 const pass_data pass_data_variable_tracking =
10382 RTL_PASS, /* type */
10383 "vartrack", /* name */
10384 OPTGROUP_NONE, /* optinfo_flags */
10385 TV_VAR_TRACKING, /* tv_id */
10386 0, /* properties_required */
10387 0, /* properties_provided */
10388 0, /* properties_destroyed */
10389 0, /* todo_flags_start */
10390 0, /* todo_flags_finish */
10393 class pass_variable_tracking : public rtl_opt_pass
10395 public:
10396 pass_variable_tracking (gcc::context *ctxt)
10397 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10400 /* opt_pass methods: */
10401 virtual bool gate (function *)
10403 return (flag_var_tracking && !targetm.delay_vartrack);
10406 virtual unsigned int execute (function *)
10408 return variable_tracking_main ();
10411 }; // class pass_variable_tracking
10413 } // anon namespace
10415 rtl_opt_pass *
10416 make_pass_variable_tracking (gcc::context *ctxt)
10418 return new pass_variable_tracking (ctxt);