/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.
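
   For instance (merely restating the equations above for a block BB
   with two predecessors P1 and P2):

     IN (BB)  = OUT (P1) union OUT (P2)
     OUT (BB) = IN (BB) transformed by BB's micro operations, in order

   and the analysis iterates until no OUT set changes.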

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so it is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each
   variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).

*/
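
/* As a purely illustrative sketch of the two note forms described
   above: an int variable X held in register 3 would get a note whose
   location is a simple (reg:SI 3), while a long long spread over two
   such registers would get a parallel of EXPR_LISTs, one per part,
   each pairing a location with its offset.  The exact RTL depends on
   the target and on the note emission code.  */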

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "hash-map.h"
#include "hash-table.h"
#include "basic-block.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "fibheap.h"
#include "regs.h"
#include "expr.h"
#include "tree-pass.h"
#include "bitmap.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "tm_p.h"
#include "alias.h"
#include "rtl-iter.h"

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
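
/* (The trick here: if the two codes ever diverge, the array above is
   declared with the invalid size -1 and compilation fails; since no
   object of this type is ever defined, the check is free at run
   time.)  */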

/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;

/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
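
/* A minimal usage sketch (D and V are hypothetical objects): given a
   VAR_DECL D and a cselib VALUE rtx V,

     decl_or_value dvd = dv_from_decl (D);
     decl_or_value dvv = dv_from_value (V);

   satisfy dv_is_decl_p (dvd) and dv_is_value_p (dvv) respectively.
   The discrimination relies on the code field being the first member
   of both tree and rtx objects, together with the check_value_val
   assertion above that no tree code shares the numeric value of the
   VALUE rtx code.  (dv_from_decl and dv_from_value are defined further
   below.)  */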

/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;

/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;

/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};

/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		\
			      ? VAR_LOC_1PAUX (var)->backlinks	\
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		\
			      ? &VAR_LOC_1PAUX (var)->deps	\
			      : NULL)

typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher
{
  typedef variable_def value_type;
  typedef void compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const value_type *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const value_type *v, const compare_type *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (value_type *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be unshared before it is modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;

/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
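
/* For example (a sketch assuming a 4-byte word and a downward-growing
   stack): a push through (mem:SI (pre_dec (reg sp))) reaches the
   callback with OP = (pre_dec ...) and SRCOFF = (const_int -4), so the
   PRE slot of ARG accumulates -(-4) = +4, i.e. the pre-modifying stack
   adjustment grows by the size of the pushed word.  */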

/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}

/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* arg_pointer_rtx or frame_pointer_rtx, whichever stack_pointer_rtx or
   hard_frame_pointer_rtx, respectively, is being mapped to, plus the
   offset used in the mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  enum machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
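
/* For instance (an illustrative little-endian case): narrowing from
   WMODE = DImode to MODE = SImode rewrites

     (plus:DI (reg:DI r) (const_int 1))

   into

     (plus:SI (subreg:SI (reg:DI r) 0) (const_int 1))

   recursing through PLUS/MINUS/MULT operands; for ASHIFT only the
   shifted operand is narrowed, the shift count is kept as-is.  */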

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  enum machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be done before that
	 (otherwise it would fail), and afterwards this code fixes
	 the vectors up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
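
/* As an illustrative example of the rewrite performed above (the exact
   RTL is target-dependent): a 4-byte push

     (set (mem:SI (pre_dec (reg sp))) (reg r))

   becomes a PARALLEL of the de-auto-inc'd store plus the recorded side
   effect:

     (parallel [(set (mem:SI (plus (reg sp) (const_int -4))) (reg r))
		(set (reg sp) (plus (reg sp) (const_int -4)))])  */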

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}

/* Return the node of LIST whose DV and OFFSET match, or NULL if there
   is no such node.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, possibly because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      pool_free (shared_hash_pool, vars);
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}

static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}

/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && TREE_CODE (decl) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}

/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}

static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
	{
	  location_chain nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}

/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);
  attrs node, next;
  attrs *nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
1923 /* Delete the association of register LOC in dataflow set SET with any
1924 variables that aren't onepart. If CLOBBER is true, also delete any
1925 other live copies of the same variable part, and delete the
1926 association with onepart dvs too. */
1928 static void
1929 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1931 attrs *nextp = &set->regs[REGNO (loc)];
1932 attrs node, next;
1934 if (clobber)
1936 tree decl = REG_EXPR (loc);
1937 HOST_WIDE_INT offset = REG_OFFSET (loc);
1939 decl = var_debug_decl (decl);
1941 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1944 for (node = *nextp; node; node = next)
1946 next = node->next;
1947 if (clobber || !dv_onepart_p (node->dv))
1949 delete_variable_part (set, node->loc, node->dv, node->offset);
1950 pool_free (attrs_pool, node);
1951 *nextp = next;
1953 else
1954 nextp = &node->next;
1958 /* Delete content of register with number REGNO in dataflow set SET. */
1960 static void
1961 var_regno_delete (dataflow_set *set, int regno)
1963 attrs *reg = &set->regs[regno];
1964 attrs node, next;
1966 for (node = *reg; node; node = next)
1968 next = node->next;
1969 delete_variable_part (set, node->loc, node->dv, node->offset);
1970 pool_free (attrs_pool, node);
1972 *reg = NULL;
1975 /* Return true if I is the negated value of a power of two. */
1976 static bool
1977 negative_power_of_two_p (HOST_WIDE_INT i)
1979 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1980 return x == (x & -x);
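/* For instance, i == -8 gives x == 8 and (8 & -8) == 8, so the
   predicate holds; i == -6 gives x == 6 but (6 & -6) == 2, so it
   does not.  Alignment masks such as (const_int -16) therefore
   satisfy it.  */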
1983 /* Strip constant offsets and alignments off of LOC. Return the base
1984 expression. */
1986 static rtx
1987 vt_get_canonicalize_base (rtx loc)
1989 while ((GET_CODE (loc) == PLUS
1990 || GET_CODE (loc) == AND)
1991 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1992 && (GET_CODE (loc) != AND
1993 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
1994 loc = XEXP (loc, 0);
1996 return loc;
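/* As an illustration, an aligned stack address such as
     (and (plus (value V) (const_int 4)) (const_int -16))
   strips down to (value V): each iteration peels off a PLUS of a
   CONST_INT, or an AND with an alignment mask (a negated power of
   two).  */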
1999 /* This caches canonicalized addresses for VALUEs, computed using
2000 information in the global cselib table. */
2001 static hash_map<rtx, rtx> *global_get_addr_cache;
2003 /* This caches canonicalized addresses for VALUEs, computed using
2004 information from the global cache and information pertaining to a
2005 basic block being analyzed. */
2006 static hash_map<rtx, rtx> *local_get_addr_cache;
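/* Note that only the local cache ever needs to be flushed: the
   global equivalences hold throughout the function, whereas a local
   entry may involve a VALUE whose binding changes within the basic
   block, and so is dropped by val_reset via
   local_get_addr_clear_given_value below.  */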
2008 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2010 /* Return the canonical address for LOC, that must be a VALUE, using a
2011 cached global equivalence or computing it and storing it in the
2012 global cache. */
2014 static rtx
2015 get_addr_from_global_cache (rtx const loc)
2017 rtx x;
2019 gcc_checking_assert (GET_CODE (loc) == VALUE);
2021 bool existed;
2022 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2023 if (existed)
2024 return *slot;
2026 x = canon_rtx (get_addr (loc));
2028 /* Tentative, avoiding infinite recursion. */
2029 *slot = x;
2031 if (x != loc)
2033 rtx nx = vt_canonicalize_addr (NULL, x);
2034 if (nx != x)
2036 /* The table may have moved during recursion; recompute
2037 SLOT. */
2038 *global_get_addr_cache->get (loc) = x = nx;
2042 return x;
2045 /* Return the canonical address for LOC, that must be a VALUE, using a
2046 cached local equivalence or computing it and storing it in the
2047 local cache. */
2049 static rtx
2050 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2052 rtx x;
2053 decl_or_value dv;
2054 variable var;
2055 location_chain l;
2057 gcc_checking_assert (GET_CODE (loc) == VALUE);
2059 bool existed;
2060 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2061 if (existed)
2062 return *slot;
2064 x = get_addr_from_global_cache (loc);
2066 /* Tentative, avoiding infinite recursion. */
2067 *slot = x;
2069 /* Recurse to cache the local expansion of X, or to search
2070 for a VALUE in the expansion. */
2071 if (x != loc)
2073 rtx nx = vt_canonicalize_addr (set, x);
2074 if (nx != x)
2076 slot = local_get_addr_cache->get (loc);
2077 *slot = x = nx;
2079 return x;
2082 dv = dv_from_rtx (x);
2083 var = shared_hash_find (set->vars, dv);
2084 if (!var)
2085 return x;
2087 /* Look for an improved equivalent expression. */
2088 for (l = var->var_part[0].loc_chain; l; l = l->next)
2090 rtx base = vt_get_canonicalize_base (l->loc);
2091 if (GET_CODE (base) == VALUE
2092 && canon_value_cmp (base, loc))
2094 rtx nx = vt_canonicalize_addr (set, l->loc);
2095 if (x != nx)
2097 slot = local_get_addr_cache->get (loc);
2098 *slot = x = nx;
2100 break;
2104 return x;
2107 /* Canonicalize LOC using equivalences from SET in addition to those
2108 in the cselib static table. It expects a VALUE-based expression,
2109 and it will only substitute VALUEs with other VALUEs or
2110 function-global equivalences, so that, if two addresses have base
2111 VALUEs that are locally or globally related in ways that
2112 memrefs_conflict_p cares about, they will both canonicalize to
2113 expressions that have the same base VALUE.
2115 The use of VALUEs as canonical base addresses enables the canonical
2116 RTXs to remain unchanged globally, if they resolve to a constant,
2117 or throughout a basic block otherwise, so that they can be cached
2118 and the cache need not be invalidated when REGs, MEMs or such
2119 change. */
2121 static rtx
2122 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2124 HOST_WIDE_INT ofst = 0;
2125 enum machine_mode mode = GET_MODE (oloc);
2126 rtx loc = oloc;
2127 rtx x;
2128 bool retry = true;
2130 while (retry)
2132 while (GET_CODE (loc) == PLUS
2133 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2135 ofst += INTVAL (XEXP (loc, 1));
2136 loc = XEXP (loc, 0);
2139 /* Alignment operations can't normally be combined, so just
2140 canonicalize the base and we're done. We'll normally have
2141 only one stack alignment anyway. */
2142 if (GET_CODE (loc) == AND
2143 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2144 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2146 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2147 if (x != XEXP (loc, 0))
2148 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2149 retry = false;
2152 if (GET_CODE (loc) == VALUE)
2154 if (set)
2155 loc = get_addr_from_local_cache (set, loc);
2156 else
2157 loc = get_addr_from_global_cache (loc);
2159 /* Consolidate plus_constants. */
2160 while (ofst && GET_CODE (loc) == PLUS
2161 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2163 ofst += INTVAL (XEXP (loc, 1));
2164 loc = XEXP (loc, 0);
2167 retry = false;
2169 else
2171 x = canon_rtx (loc);
2172 if (retry)
2173 retry = (x != loc);
2174 loc = x;
2178 /* Add OFST back in. */
2179 if (ofst)
2181 /* Don't build new RTL if we can help it. */
2182 if (GET_CODE (oloc) == PLUS
2183 && XEXP (oloc, 0) == loc
2184 && INTVAL (XEXP (oloc, 1)) == ofst)
2185 return oloc;
2187 loc = plus_constant (mode, loc, ofst);
2190 return loc;
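/* For example, if (value V1)'s cached canonical address is
   (plus (value V2) (const_int 4)), then canonicalizing
   (plus (value V1) (const_int 8)) accumulates the offset 8, resolves
   V1 through the caches, folds the additional constant 4 into the
   running offset, and returns (plus (value V2) (const_int 12)).  */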
2193 /* Return true iff there's a true dependence between MLOC and LOC.
2194 MADDR must be a canonicalized version of MLOC's address. */
2196 static inline bool
2197 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2199 if (GET_CODE (loc) != MEM)
2200 return false;
2202 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2203 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2204 return false;
2206 return true;
2209 /* Hold parameters for the hashtab traversal function
2210 drop_overlapping_mem_locs, see below. */
2212 struct overlapping_mems
2214 dataflow_set *set;
2215 rtx loc, addr;
2218 /* Remove all MEMs that overlap with COMS->LOC from the location list
2219 of a hash table entry for a value. COMS->ADDR must be a
2220 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2221 canonicalized itself. */
2223 static int
2224 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2226 dataflow_set *set = coms->set;
2227 rtx mloc = coms->loc, addr = coms->addr;
2228 variable var = *slot;
2230 if (var->onepart == ONEPART_VALUE)
2232 location_chain loc, *locp;
2233 bool changed = false;
2234 rtx cur_loc;
2236 gcc_assert (var->n_var_parts == 1);
2238 if (shared_var_p (var, set->vars))
2240 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2241 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2242 break;
2244 if (!loc)
2245 return 1;
2247 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2248 var = *slot;
2249 gcc_assert (var->n_var_parts == 1);
2252 if (VAR_LOC_1PAUX (var))
2253 cur_loc = VAR_LOC_FROM (var);
2254 else
2255 cur_loc = var->var_part[0].cur_loc;
2257 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2258 loc; loc = *locp)
2260 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2262 locp = &loc->next;
2263 continue;
2266 *locp = loc->next;
2267 /* If we have deleted the location which was last emitted,
2268 we have to emit a new location, so add the variable to the
2269 set of changed variables. */
2270 if (cur_loc == loc->loc)
2272 changed = true;
2273 var->var_part[0].cur_loc = NULL;
2274 if (VAR_LOC_1PAUX (var))
2275 VAR_LOC_FROM (var) = NULL;
2277 pool_free (loc_chain_pool, loc);
2280 if (!var->var_part[0].loc_chain)
2282 var->n_var_parts--;
2283 changed = true;
2285 if (changed)
2286 variable_was_changed (var, set);
2289 return 1;
2292 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2294 static void
2295 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2297 struct overlapping_mems coms;
2299 gcc_checking_assert (GET_CODE (loc) == MEM);
2301 coms.set = set;
2302 coms.loc = canon_rtx (loc);
2303 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2305 set->traversed_vars = set->vars;
2306 shared_hash_htab (set->vars)
2307 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2308 set->traversed_vars = NULL;
2311 /* Set the location of DV, OFFSET as the MEM LOC. */
2313 static void
2314 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2315 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2316 enum insert_option iopt)
2318 if (dv_is_decl_p (dv))
2319 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2321 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2324 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2325 SET to LOC.
2326 Adjust the address first if it is stack pointer based. */
2328 static void
2329 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2330 rtx set_src)
2332 tree decl = MEM_EXPR (loc);
2333 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2335 var_mem_decl_set (set, loc, initialized,
2336 dv_from_decl (decl), offset, set_src, INSERT);
2339 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2340 dataflow set SET to LOC. If MODIFY is true, any other live copies
2341 of the same variable part are also deleted from the dataflow set,
2342 otherwise the variable part is assumed to be copied from another
2343 location holding the same part.
2344 Adjust the address first if it is stack pointer based. */
2346 static void
2347 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2348 enum var_init_status initialized, rtx set_src)
2350 tree decl = MEM_EXPR (loc);
2351 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2353 clobber_overlapping_mems (set, loc);
2354 decl = var_debug_decl (decl);
2356 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2357 initialized = get_init_value (set, loc, dv_from_decl (decl));
2359 if (modify)
2360 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2361 var_mem_set (set, loc, initialized, set_src);
2364 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2365 true, also delete any other live copies of the same variable part.
2366 Adjust the address first if it is stack pointer based. */
2368 static void
2369 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2371 tree decl = MEM_EXPR (loc);
2372 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2374 clobber_overlapping_mems (set, loc);
2375 decl = var_debug_decl (decl);
2376 if (clobber)
2377 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2378 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2381 /* Return true if LOC should not be expanded for location expressions,
2382 or used in them. */
2384 static inline bool
2385 unsuitable_loc (rtx loc)
2387 switch (GET_CODE (loc))
2389 case PC:
2390 case SCRATCH:
2391 case CC0:
2392 case ASM_INPUT:
2393 case ASM_OPERANDS:
2394 return true;
2396 default:
2397 return false;
2401 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2402 bound to it. */
2404 static inline void
2405 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2407 if (REG_P (loc))
2409 if (modified)
2410 var_regno_delete (set, REGNO (loc));
2411 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2412 dv_from_value (val), 0, NULL_RTX, INSERT);
2414 else if (MEM_P (loc))
2416 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2418 if (modified)
2419 clobber_overlapping_mems (set, loc);
2421 if (l && GET_CODE (l->loc) == VALUE)
2422 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2424 /* If this MEM is a global constant, we don't need it in the
2425 dynamic tables. ??? We should test this before emitting the
2426 micro-op in the first place. */
2427 while (l)
2428 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2429 break;
2430 else
2431 l = l->next;
2433 if (!l)
2434 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2435 dv_from_value (val), 0, NULL_RTX, INSERT);
2437 else
2439 /* Other kinds of equivalences are necessarily static, at least
2440 so long as we do not perform substitutions while merging
2441 expressions. */
2442 gcc_unreachable ();
2443 set_variable_part (set, loc, dv_from_value (val), 0,
2444 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2448 /* Bind a value to a location it was just stored in. If MODIFIED
2449 holds, assume the location was modified, detaching it from any
2450 values bound to it. */
2452 static void
2453 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2454 bool modified)
2456 cselib_val *v = CSELIB_VAL_PTR (val);
2458 gcc_assert (cselib_preserved_value_p (v));
2460 if (dump_file)
2462 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2463 print_inline_rtx (dump_file, loc, 0);
2464 fprintf (dump_file, " evaluates to ");
2465 print_inline_rtx (dump_file, val, 0);
2466 if (v->locs)
2468 struct elt_loc_list *l;
2469 for (l = v->locs; l; l = l->next)
2471 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2472 print_inline_rtx (dump_file, l->loc, 0);
2475 fprintf (dump_file, "\n");
2478 gcc_checking_assert (!unsuitable_loc (loc));
2480 val_bind (set, val, loc, modified);
2483 /* Clear (canonical address) slots that reference X. */
2485 bool
2486 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2488 if (vt_get_canonicalize_base (*slot) == x)
2489 *slot = NULL;
2490 return true;
2493 /* Reset this node, detaching all its equivalences. */
2496 static void
2497 val_reset (dataflow_set *set, decl_or_value dv)
2499 variable var = shared_hash_find (set->vars, dv);
2500 location_chain node;
2501 rtx cval;
2503 if (!var || !var->n_var_parts)
2504 return;
2506 gcc_assert (var->n_var_parts == 1);
2508 if (var->onepart == ONEPART_VALUE)
2510 rtx x = dv_as_value (dv);
2512 /* Relationships in the global cache don't change, so reset the
2513 local cache entry only. */
2514 rtx *slot = local_get_addr_cache->get (x);
2515 if (slot)
2517 /* If the value resolved back to itself, odds are that other
2518 values may have cached it too. These entries now refer
2519 to the old X, so detach them too. Entries that used the
2520 old X but resolved to something else remain ok as long as
2521 that something else isn't also reset. */
2522 if (*slot == x)
2523 local_get_addr_cache
2524 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2525 *slot = NULL;
2529 cval = NULL;
2530 for (node = var->var_part[0].loc_chain; node; node = node->next)
2531 if (GET_CODE (node->loc) == VALUE
2532 && canon_value_cmp (node->loc, cval))
2533 cval = node->loc;
2535 for (node = var->var_part[0].loc_chain; node; node = node->next)
2536 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2538 /* Redirect the equivalence link to the new canonical
2539 value, or simply remove it if it would point at
2540 itself. */
2541 if (cval)
2542 set_variable_part (set, cval, dv_from_value (node->loc),
2543 0, node->init, node->set_src, NO_INSERT);
2544 delete_variable_part (set, dv_as_value (dv),
2545 dv_from_value (node->loc), 0);
2548 if (cval)
2550 decl_or_value cdv = dv_from_value (cval);
2552 /* Keep the remaining values connected, accumulating links
2553 in the canonical value. */
2554 for (node = var->var_part[0].loc_chain; node; node = node->next)
2556 if (node->loc == cval)
2557 continue;
2558 else if (GET_CODE (node->loc) == REG)
2559 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2560 node->set_src, NO_INSERT);
2561 else if (GET_CODE (node->loc) == MEM)
2562 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2563 node->set_src, NO_INSERT);
2564 else
2565 set_variable_part (set, node->loc, cdv, 0,
2566 node->init, node->set_src, NO_INSERT);
2570 /* We remove this last, to make sure that the canonical value is not
2571 removed to the point of requiring reinsertion. */
2572 if (cval)
2573 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2575 clobber_variable_part (set, NULL, dv, 0, NULL);
2578 /* Find the values in a given location, and map VAL to another
2579 value if that mapping is unique; otherwise record the location
2580 as one holding VAL. */
2582 static void
2583 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2585 decl_or_value dv = dv_from_value (val);
2587 if (dump_file && (dump_flags & TDF_DETAILS))
2589 if (insn)
2590 fprintf (dump_file, "%i: ", INSN_UID (insn));
2591 else
2592 fprintf (dump_file, "head: ");
2593 print_inline_rtx (dump_file, val, 0);
2594 fputs (" is at ", dump_file);
2595 print_inline_rtx (dump_file, loc, 0);
2596 fputc ('\n', dump_file);
2599 val_reset (set, dv);
2601 gcc_checking_assert (!unsuitable_loc (loc));
2603 if (REG_P (loc))
2605 attrs node, found = NULL;
2607 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2608 if (dv_is_value_p (node->dv)
2609 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2611 found = node;
2613 /* Map incoming equivalences. ??? Wouldn't it be nice if
2614 we just started sharing the location lists? Maybe a
2615 circular list ending at the value itself or some
2616 such. */
2617 set_variable_part (set, dv_as_value (node->dv),
2618 dv_from_value (val), node->offset,
2619 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2620 set_variable_part (set, val, node->dv, node->offset,
2621 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2624 /* If we didn't find any equivalence, we need to remember that
2625 this value is held in the named register. */
2626 if (found)
2627 return;
2629 /* ??? Attempt to find and merge equivalent MEMs or other
2630 expressions too. */
2632 val_bind (set, val, loc, false);
2635 /* Initialize dataflow set SET to be empty. */
2638 static void
2639 dataflow_set_init (dataflow_set *set)
2641 init_attrs_list_set (set->regs);
2642 set->vars = shared_hash_copy (empty_shared_hash);
2643 set->stack_adjust = 0;
2644 set->traversed_vars = NULL;
2647 /* Delete the contents of dataflow set SET. */
2649 static void
2650 dataflow_set_clear (dataflow_set *set)
2652 int i;
2654 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2655 attrs_list_clear (&set->regs[i]);
2657 shared_hash_destroy (set->vars);
2658 set->vars = shared_hash_copy (empty_shared_hash);
2661 /* Copy the contents of dataflow set SRC to DST. */
2663 static void
2664 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2666 int i;
2668 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2669 attrs_list_copy (&dst->regs[i], src->regs[i]);
2671 shared_hash_destroy (dst->vars);
2672 dst->vars = shared_hash_copy (src->vars);
2673 dst->stack_adjust = src->stack_adjust;
2676 /* Information for merging lists of locations for a given offset of variable. */
2678 struct variable_union_info
2680 /* Node of the location chain. */
2681 location_chain lc;
2683 /* The sum of positions in the input chains. */
2684 int pos;
2686 /* The position in the chain of DST dataflow set. */
2687 int pos_dst;
2690 /* Buffer for location list sorting and its allocated size. */
2691 static struct variable_union_info *vui_vec;
2692 static int vui_allocated;
2694 /* Compare function for qsort; order the structures by the POS element. */
2696 static int
2697 variable_union_info_cmp_pos (const void *n1, const void *n2)
2699 const struct variable_union_info *const i1 =
2700 (const struct variable_union_info *) n1;
2701 const struct variable_union_info *const i2 =
2702 (const struct variable_union_info *) n2;
2704 if (i1->pos != i2->pos)
2705 return i1->pos - i2->pos;
2707 return (i1->pos_dst - i2->pos_dst);
2710 /* Compute union of location parts of variable *SLOT and the same variable
2711 from hash table DATA. Compute "sorted" union of the location chains
2712 for common offsets, i.e. the locations of a variable part are sorted by
2713 a priority where the priority is the sum of the positions in the 2 chains
2714 (if a location is only in one list the position in the second list is
2715 defined to be larger than the length of the chains).
2716 When we are updating the location parts the newest location is in the
2717 beginning of the chain, so when we do the described "sorted" union
2718 we keep the newest locations in the beginning. */
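/* A small worked example: for DST = (A, B) and SRC = (B, C) we have
   src_l = dst_l = 2.  B appears at position 1 in DST and 0 in SRC,
   so its priority is 1 + 0 = 1, whereas A gets 0 + src_l + dst_l = 4
   and C gets 1 + src_l + dst_l = 5, each being absent from the other
   chain.  Sorting by priority yields (B, A, C), i.e. the location
   present in both chains comes first.  */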
2720 static int
2721 variable_union (variable src, dataflow_set *set)
2723 variable dst;
2724 variable_def **dstp;
2725 int i, j, k;
2727 dstp = shared_hash_find_slot (set->vars, src->dv);
2728 if (!dstp || !*dstp)
2730 src->refcount++;
2732 dst_can_be_shared = false;
2733 if (!dstp)
2734 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2736 *dstp = src;
2738 /* Continue traversing the hash table. */
2739 return 1;
2741 else
2742 dst = *dstp;
2744 gcc_assert (src->n_var_parts);
2745 gcc_checking_assert (src->onepart == dst->onepart);
2747 /* We can combine one-part variables very efficiently, because their
2748 entries are in canonical order. */
2749 if (src->onepart)
2751 location_chain *nodep, dnode, snode;
2753 gcc_assert (src->n_var_parts == 1
2754 && dst->n_var_parts == 1);
2756 snode = src->var_part[0].loc_chain;
2757 gcc_assert (snode);
2759 restart_onepart_unshared:
2760 nodep = &dst->var_part[0].loc_chain;
2761 dnode = *nodep;
2762 gcc_assert (dnode);
2764 while (snode)
2766 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2768 if (r > 0)
2770 location_chain nnode;
2772 if (shared_var_p (dst, set->vars))
2774 dstp = unshare_variable (set, dstp, dst,
2775 VAR_INIT_STATUS_INITIALIZED);
2776 dst = *dstp;
2777 goto restart_onepart_unshared;
2780 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2781 nnode->loc = snode->loc;
2782 nnode->init = snode->init;
2783 if (!snode->set_src || MEM_P (snode->set_src))
2784 nnode->set_src = NULL;
2785 else
2786 nnode->set_src = snode->set_src;
2787 nnode->next = dnode;
2788 dnode = nnode;
2790 else if (r == 0)
2791 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2793 if (r >= 0)
2794 snode = snode->next;
2796 nodep = &dnode->next;
2797 dnode = *nodep;
2800 return 1;
2803 gcc_checking_assert (!src->onepart);
2805 /* Count the number of location parts, result is K. */
2806 for (i = 0, j = 0, k = 0;
2807 i < src->n_var_parts && j < dst->n_var_parts; k++)
2809 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2811 i++;
2812 j++;
2814 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2815 i++;
2816 else
2817 j++;
2819 k += src->n_var_parts - i;
2820 k += dst->n_var_parts - j;
2822 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2823 thus there are at most MAX_VAR_PARTS different offsets. */
2824 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2826 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2828 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2829 dst = *dstp;
2832 i = src->n_var_parts - 1;
2833 j = dst->n_var_parts - 1;
2834 dst->n_var_parts = k;
2836 for (k--; k >= 0; k--)
2838 location_chain node, node2;
2840 if (i >= 0 && j >= 0
2841 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2843 /* Compute the "sorted" union of the chains, i.e. the locations which
2844 are in both chains go first; they are sorted by the sum of their
2845 positions in the chains. */
2846 int dst_l, src_l;
2847 int ii, jj, n;
2848 struct variable_union_info *vui;
2850 /* If DST is shared, compare the location chains. If they
2851 differ, we will most likely need to modify the chain in DST,
2852 so unshare (copy) DST first. */
2853 if (shared_var_p (dst, set->vars))
2855 for (node = src->var_part[i].loc_chain,
2856 node2 = dst->var_part[j].loc_chain; node && node2;
2857 node = node->next, node2 = node2->next)
2859 if (!((REG_P (node2->loc)
2860 && REG_P (node->loc)
2861 && REGNO (node2->loc) == REGNO (node->loc))
2862 || rtx_equal_p (node2->loc, node->loc)))
2864 if (node2->init < node->init)
2865 node2->init = node->init;
2866 break;
2869 if (node || node2)
2871 dstp = unshare_variable (set, dstp, dst,
2872 VAR_INIT_STATUS_UNKNOWN);
2873 dst = (variable)*dstp;
2877 src_l = 0;
2878 for (node = src->var_part[i].loc_chain; node; node = node->next)
2879 src_l++;
2880 dst_l = 0;
2881 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2882 dst_l++;
2884 if (dst_l == 1)
2886 /* The most common case is much simpler; no qsort is needed. */
2887 location_chain dstnode = dst->var_part[j].loc_chain;
2888 dst->var_part[k].loc_chain = dstnode;
2889 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2890 node2 = dstnode;
2891 for (node = src->var_part[i].loc_chain; node; node = node->next)
2892 if (!((REG_P (dstnode->loc)
2893 && REG_P (node->loc)
2894 && REGNO (dstnode->loc) == REGNO (node->loc))
2895 || rtx_equal_p (dstnode->loc, node->loc)))
2897 location_chain new_node;
2899 /* Copy the location from SRC. */
2900 new_node = (location_chain) pool_alloc (loc_chain_pool);
2901 new_node->loc = node->loc;
2902 new_node->init = node->init;
2903 if (!node->set_src || MEM_P (node->set_src))
2904 new_node->set_src = NULL;
2905 else
2906 new_node->set_src = node->set_src;
2907 node2->next = new_node;
2908 node2 = new_node;
2910 node2->next = NULL;
2912 else
2914 if (src_l + dst_l > vui_allocated)
2916 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2917 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2918 vui_allocated);
2920 vui = vui_vec;
2922 /* Fill in the locations from DST. */
2923 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2924 node = node->next, jj++)
2926 vui[jj].lc = node;
2927 vui[jj].pos_dst = jj;
2929 /* Pos plus a value larger than any sum of 2 valid positions. */
2930 vui[jj].pos = jj + src_l + dst_l;
2933 /* Fill in the locations from SRC. */
2934 n = dst_l;
2935 for (node = src->var_part[i].loc_chain, ii = 0; node;
2936 node = node->next, ii++)
2938 /* Find location from NODE. */
2939 for (jj = 0; jj < dst_l; jj++)
2941 if ((REG_P (vui[jj].lc->loc)
2942 && REG_P (node->loc)
2943 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2944 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2946 vui[jj].pos = jj + ii;
2947 break;
2950 if (jj >= dst_l) /* The location has not been found. */
2952 location_chain new_node;
2954 /* Copy the location from SRC. */
2955 new_node = (location_chain) pool_alloc (loc_chain_pool);
2956 new_node->loc = node->loc;
2957 new_node->init = node->init;
2958 if (!node->set_src || MEM_P (node->set_src))
2959 new_node->set_src = NULL;
2960 else
2961 new_node->set_src = node->set_src;
2962 vui[n].lc = new_node;
2963 vui[n].pos_dst = src_l + dst_l;
2964 vui[n].pos = ii + src_l + dst_l;
2965 n++;
2969 if (dst_l == 2)
2971 /* Special case for a still very common case. For dst_l == 2
2972 all entries dst_l ... n-1 are sorted, with vui[i].pos
2973 == i + src_l + dst_l for i >= dst_l. */
2974 if (vui[0].pos > vui[1].pos)
2976 /* Order should be 1, 0, 2... */
2977 dst->var_part[k].loc_chain = vui[1].lc;
2978 vui[1].lc->next = vui[0].lc;
2979 if (n >= 3)
2981 vui[0].lc->next = vui[2].lc;
2982 vui[n - 1].lc->next = NULL;
2984 else
2985 vui[0].lc->next = NULL;
2986 ii = 3;
2988 else
2990 dst->var_part[k].loc_chain = vui[0].lc;
2991 if (n >= 3 && vui[2].pos < vui[1].pos)
2993 /* Order should be 0, 2, 1, 3... */
2994 vui[0].lc->next = vui[2].lc;
2995 vui[2].lc->next = vui[1].lc;
2996 if (n >= 4)
2998 vui[1].lc->next = vui[3].lc;
2999 vui[n - 1].lc->next = NULL;
3001 else
3002 vui[1].lc->next = NULL;
3003 ii = 4;
3005 else
3007 /* Order should be 0, 1, 2... */
3008 ii = 1;
3009 vui[n - 1].lc->next = NULL;
3012 for (; ii < n; ii++)
3013 vui[ii - 1].lc->next = vui[ii].lc;
3015 else
3017 qsort (vui, n, sizeof (struct variable_union_info),
3018 variable_union_info_cmp_pos);
3020 /* Reconnect the nodes in sorted order. */
3021 for (ii = 1; ii < n; ii++)
3022 vui[ii - 1].lc->next = vui[ii].lc;
3023 vui[n - 1].lc->next = NULL;
3024 dst->var_part[k].loc_chain = vui[0].lc;
3027 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3029 i--;
3030 j--;
3032 else if ((i >= 0 && j >= 0
3033 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3034 || i < 0)
3036 dst->var_part[k] = dst->var_part[j];
3037 j--;
3039 else if ((i >= 0 && j >= 0
3040 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3041 || j < 0)
3043 location_chain *nextp;
3045 /* Copy the chain from SRC. */
3046 nextp = &dst->var_part[k].loc_chain;
3047 for (node = src->var_part[i].loc_chain; node; node = node->next)
3049 location_chain new_lc;
3051 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3052 new_lc->next = NULL;
3053 new_lc->init = node->init;
3054 if (!node->set_src || MEM_P (node->set_src))
3055 new_lc->set_src = NULL;
3056 else
3057 new_lc->set_src = node->set_src;
3058 new_lc->loc = node->loc;
3060 *nextp = new_lc;
3061 nextp = &new_lc->next;
3064 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3065 i--;
3067 dst->var_part[k].cur_loc = NULL;
3070 if (flag_var_tracking_uninit)
3071 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3073 location_chain node, node2;
3074 for (node = src->var_part[i].loc_chain; node; node = node->next)
3075 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3076 if (rtx_equal_p (node->loc, node2->loc))
3078 if (node->init > node2->init)
3079 node2->init = node->init;
3083 /* Continue traversing the hash table. */
3084 return 1;
3087 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3089 static void
3090 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3092 int i;
3094 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3095 attrs_list_union (&dst->regs[i], src->regs[i]);
3097 if (dst->vars == empty_shared_hash)
3099 shared_hash_destroy (dst->vars);
3100 dst->vars = shared_hash_copy (src->vars);
3102 else
3104 variable_iterator_type hi;
3105 variable var;
3107 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3108 var, variable, hi)
3109 variable_union (var, dst);
3113 /* Whether the value is currently being expanded. */
3114 #define VALUE_RECURSED_INTO(x) \
3115 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3117 /* Whether no expansion was found, saving useless lookups.
3118 It must only be set when VALUE_CHANGED is clear. */
3119 #define NO_LOC_P(x) \
3120 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3122 /* Whether cur_loc in the value needs to be (re)computed. */
3123 #define VALUE_CHANGED(x) \
3124 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3125 /* Whether cur_loc in the decl needs to be (re)computed. */
3126 #define DECL_CHANGED(x) TREE_VISITED (x)
3128 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3129 user DECLs, this means they're in changed_variables. Values and
3130 debug exprs may be left with this flag set if no user variable
3131 requires them to be evaluated. */
3133 static inline void
3134 set_dv_changed (decl_or_value dv, bool newv)
3136 switch (dv_onepart_p (dv))
3138 case ONEPART_VALUE:
3139 if (newv)
3140 NO_LOC_P (dv_as_value (dv)) = false;
3141 VALUE_CHANGED (dv_as_value (dv)) = newv;
3142 break;
3144 case ONEPART_DEXPR:
3145 if (newv)
3146 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3147 /* Fall through... */
3149 default:
3150 DECL_CHANGED (dv_as_decl (dv)) = newv;
3151 break;
3155 /* Return true if DV needs to have its cur_loc recomputed. */
3157 static inline bool
3158 dv_changed_p (decl_or_value dv)
3160 return (dv_is_value_p (dv)
3161 ? VALUE_CHANGED (dv_as_value (dv))
3162 : DECL_CHANGED (dv_as_decl (dv)));
3165 /* Return a location list node whose loc is rtx_equal to LOC, in the
3166 location list of a one-part variable or value VAR, or in that of
3167 any values recursively mentioned in the location lists. VARS must
3168 be in star-canonical form. */
3170 static location_chain
3171 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3173 location_chain node;
3174 enum rtx_code loc_code;
3176 if (!var)
3177 return NULL;
3179 gcc_checking_assert (var->onepart);
3181 if (!var->n_var_parts)
3182 return NULL;
3184 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3186 loc_code = GET_CODE (loc);
3187 for (node = var->var_part[0].loc_chain; node; node = node->next)
3189 decl_or_value dv;
3190 variable rvar;
3192 if (GET_CODE (node->loc) != loc_code)
3194 if (GET_CODE (node->loc) != VALUE)
3195 continue;
3197 else if (loc == node->loc)
3198 return node;
3199 else if (loc_code != VALUE)
3201 if (rtx_equal_p (loc, node->loc))
3202 return node;
3203 continue;
3206 /* Since we're in star-canonical form, we don't need to visit
3207 non-canonical nodes: one-part variables and non-canonical
3208 values would only point back to the canonical node. */
3209 if (dv_is_value_p (var->dv)
3210 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3212 /* Skip all subsequent VALUEs. */
3213 while (node->next && GET_CODE (node->next->loc) == VALUE)
3215 node = node->next;
3216 gcc_checking_assert (!canon_value_cmp (node->loc,
3217 dv_as_value (var->dv)));
3218 if (loc == node->loc)
3219 return node;
3221 continue;
3224 gcc_checking_assert (node == var->var_part[0].loc_chain);
3225 gcc_checking_assert (!node->next);
3227 dv = dv_from_value (node->loc);
3228 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3229 return find_loc_in_1pdv (loc, rvar, vars);
3232 /* ??? Gotta look in cselib_val locations too. */
3234 return NULL;
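/* In star-canonical form a non-canonical value's chain holds only
   its canonical VALUE, so the recursion above bottoms out after a
   single step: e.g. looking up (reg 1) in V2, whose chain is just
   (V1), proceeds directly into V1's chain.  */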
3237 /* Hash table iteration argument passed to variable_merge. */
3238 struct dfset_merge
3240 /* The set in which the merge is to be inserted. */
3241 dataflow_set *dst;
3242 /* The set that we're iterating in. */
3243 dataflow_set *cur;
3244 /* The set that may contain the other dv we are to merge with. */
3245 dataflow_set *src;
3246 /* Number of onepart dvs in src. */
3247 int src_onepart_cnt;
3250 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3251 loc_cmp order, and it is maintained as such. */
3253 static void
3254 insert_into_intersection (location_chain *nodep, rtx loc,
3255 enum var_init_status status)
3257 location_chain node;
3258 int r;
3260 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3261 if ((r = loc_cmp (node->loc, loc)) == 0)
3263 node->init = MIN (node->init, status);
3264 return;
3266 else if (r > 0)
3267 break;
3269 node = (location_chain) pool_alloc (loc_chain_pool);
3271 node->loc = loc;
3272 node->set_src = NULL;
3273 node->init = status;
3274 node->next = *nodep;
3275 *nodep = node;
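/* Note that when LOC is already present, only its initialization
   status changes: taking the MIN keeps the weaker of the two
   statuses, since the intersection is no better initialized than
   its least initialized source.  */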
3278 /* Insert in DEST the intersection of the locations present in both
3279 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3280 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3281 DSM->dst. */
3283 static void
3284 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3285 location_chain s1node, variable s2var)
3287 dataflow_set *s1set = dsm->cur;
3288 dataflow_set *s2set = dsm->src;
3289 location_chain found;
3291 if (s2var)
3293 location_chain s2node;
3295 gcc_checking_assert (s2var->onepart);
3297 if (s2var->n_var_parts)
3299 s2node = s2var->var_part[0].loc_chain;
3301 for (; s1node && s2node;
3302 s1node = s1node->next, s2node = s2node->next)
3303 if (s1node->loc != s2node->loc)
3304 break;
3305 else if (s1node->loc == val)
3306 continue;
3307 else
3308 insert_into_intersection (dest, s1node->loc,
3309 MIN (s1node->init, s2node->init));
3313 for (; s1node; s1node = s1node->next)
3315 if (s1node->loc == val)
3316 continue;
3318 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3319 shared_hash_htab (s2set->vars))))
3321 insert_into_intersection (dest, s1node->loc,
3322 MIN (s1node->init, found->init));
3323 continue;
3326 if (GET_CODE (s1node->loc) == VALUE
3327 && !VALUE_RECURSED_INTO (s1node->loc))
3329 decl_or_value dv = dv_from_value (s1node->loc);
3330 variable svar = shared_hash_find (s1set->vars, dv);
3331 if (svar)
3333 if (svar->n_var_parts == 1)
3335 VALUE_RECURSED_INTO (s1node->loc) = true;
3336 intersect_loc_chains (val, dest, dsm,
3337 svar->var_part[0].loc_chain,
3338 s2var);
3339 VALUE_RECURSED_INTO (s1node->loc) = false;
3344 /* ??? gotta look in cselib_val locations too. */
3346 /* ??? if the location is equivalent to any location in src,
3347 searched recursively
3349 add to dst the values needed to represent the equivalence
3351 telling whether location S is equivalent to another dv's
3352 location list:
3354 for each location D in the list
3356 if S and D satisfy rtx_equal_p, then it is present
3358 else if D is a value, recurse without cycles
3360 else if S and D have the same CODE and MODE
3362 for each operand oS and the corresponding oD
3364 if oS and oD are not equivalent, then S and D are not equivalent
3366 else if they are RTX vectors
3368 if any vector oS element is not equivalent to its respective oD,
3369 then S and D are not equivalent. */
3377 /* Return -1 if X should be before Y in a location list for a 1-part
3378 variable, 1 if Y should be before X, and 0 if they're equivalent
3379 and should not appear in the list. */
3381 static int
3382 loc_cmp (rtx x, rtx y)
3384 int i, j, r;
3385 RTX_CODE code = GET_CODE (x);
3386 const char *fmt;
3388 if (x == y)
3389 return 0;
3391 if (REG_P (x))
3393 if (!REG_P (y))
3394 return -1;
3395 gcc_assert (GET_MODE (x) == GET_MODE (y));
3396 if (REGNO (x) == REGNO (y))
3397 return 0;
3398 else if (REGNO (x) < REGNO (y))
3399 return -1;
3400 else
3401 return 1;
3404 if (REG_P (y))
3405 return 1;
3407 if (MEM_P (x))
3409 if (!MEM_P (y))
3410 return -1;
3411 gcc_assert (GET_MODE (x) == GET_MODE (y));
3412 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3415 if (MEM_P (y))
3416 return 1;
3418 if (GET_CODE (x) == VALUE)
3420 if (GET_CODE (y) != VALUE)
3421 return -1;
3422 /* Don't assert the modes are the same, that is true only
3423 when not recursing. (subreg:QI (value:SI 1:1) 0)
3424 and (subreg:QI (value:DI 2:2) 0) can be compared,
3425 even when the modes are different. */
3426 if (canon_value_cmp (x, y))
3427 return -1;
3428 else
3429 return 1;
3432 if (GET_CODE (y) == VALUE)
3433 return 1;
3435 /* Entry value is the least preferable kind of expression. */
3436 if (GET_CODE (x) == ENTRY_VALUE)
3438 if (GET_CODE (y) != ENTRY_VALUE)
3439 return 1;
3440 gcc_assert (GET_MODE (x) == GET_MODE (y));
3441 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3444 if (GET_CODE (y) == ENTRY_VALUE)
3445 return -1;
3447 if (GET_CODE (x) == GET_CODE (y))
3448 /* Compare operands below. */;
3449 else if (GET_CODE (x) < GET_CODE (y))
3450 return -1;
3451 else
3452 return 1;
3454 gcc_assert (GET_MODE (x) == GET_MODE (y));
3456 if (GET_CODE (x) == DEBUG_EXPR)
3458 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3459 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3460 return -1;
3461 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3462 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3463 return 1;
3466 fmt = GET_RTX_FORMAT (code);
3467 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3468 switch (fmt[i])
3470 case 'w':
3471 if (XWINT (x, i) == XWINT (y, i))
3472 break;
3473 else if (XWINT (x, i) < XWINT (y, i))
3474 return -1;
3475 else
3476 return 1;
3478 case 'n':
3479 case 'i':
3480 if (XINT (x, i) == XINT (y, i))
3481 break;
3482 else if (XINT (x, i) < XINT (y, i))
3483 return -1;
3484 else
3485 return 1;
3487 case 'V':
3488 case 'E':
3489 /* Compare the vector length first. */
3490 if (XVECLEN (x, i) == XVECLEN (y, i))
3491 /* Compare the vectors' elements. */;
3492 else if (XVECLEN (x, i) < XVECLEN (y, i))
3493 return -1;
3494 else
3495 return 1;
3497 for (j = 0; j < XVECLEN (x, i); j++)
3498 if ((r = loc_cmp (XVECEXP (x, i, j),
3499 XVECEXP (y, i, j))))
3500 return r;
3501 break;
3503 case 'e':
3504 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3505 return r;
3506 break;
3508 case 'S':
3509 case 's':
3510 if (XSTR (x, i) == XSTR (y, i))
3511 break;
3512 if (!XSTR (x, i))
3513 return -1;
3514 if (!XSTR (y, i))
3515 return 1;
3516 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3517 break;
3518 else if (r < 0)
3519 return -1;
3520 else
3521 return 1;
3523 case 'u':
3524 /* These are just backpointers, so they don't matter. */
3525 break;
3527 case '0':
3528 case 't':
3529 break;
3531 /* It is believed that rtx's at this level will never
3532 contain anything but integers and other rtx's,
3533 except for within LABEL_REFs and SYMBOL_REFs. */
3534 default:
3535 gcc_unreachable ();
3537 if (CONST_WIDE_INT_P (x))
3539 /* Compare the vector length first. */
3540 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3541 return 1;
3542 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3543 return -1;
3545 /* Compare the vectors' elements. */;
3546 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3548 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3549 return -1;
3550 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3551 return 1;
3555 return 0;
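/* The resulting total order places REGs first (by REGNO), then MEMs
   (recursing on their addresses), then VALUEs (more canonical ones
   first), then the remaining codes ordered by GET_CODE and their
   operands, with ENTRY_VALUEs after everything else.  For example,
   loc_cmp of any REG against any MEM returns -1, while loc_cmp of an
   ENTRY_VALUE against a CONST_INT returns 1.  */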
3558 #if ENABLE_CHECKING
3559 /* Check the order of entries in one-part variables. */
3561 static int
3562 canonicalize_loc_order_check (variable_def **slot,
3563 dataflow_set *data ATTRIBUTE_UNUSED)
3565 variable var = *slot;
3566 location_chain node, next;
3568 #ifdef ENABLE_RTL_CHECKING
3569 int i;
3570 for (i = 0; i < var->n_var_parts; i++)
3571 gcc_assert (var->var_part[i].cur_loc == NULL);
3572 gcc_assert (!var->in_changed_variables);
3573 #endif
3575 if (!var->onepart)
3576 return 1;
3578 gcc_assert (var->n_var_parts == 1);
3579 node = var->var_part[0].loc_chain;
3580 gcc_assert (node);
3582 while ((next = node->next))
3584 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3585 node = next;
3588 return 1;
3590 #endif
3592 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3593 more likely to be chosen as canonical for an equivalence set.
3594 Ensure less likely values can reach more likely neighbors, making
3595 the connections bidirectional. */
3597 static int
3598 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3600 variable var = *slot;
3601 decl_or_value dv = var->dv;
3602 rtx val;
3603 location_chain node;
3605 if (!dv_is_value_p (dv))
3606 return 1;
3608 gcc_checking_assert (var->n_var_parts == 1);
3610 val = dv_as_value (dv);
3612 for (node = var->var_part[0].loc_chain; node; node = node->next)
3613 if (GET_CODE (node->loc) == VALUE)
3615 if (canon_value_cmp (node->loc, val))
3616 VALUE_RECURSED_INTO (val) = true;
3617 else
3619 decl_or_value odv = dv_from_value (node->loc);
3620 variable_def **oslot;
3621 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3623 set_slot_part (set, val, oslot, odv, 0,
3624 node->init, NULL_RTX);
3626 VALUE_RECURSED_INTO (node->loc) = true;
3630 return 1;
3633 /* Remove redundant entries from equivalence lists in onepart
3634 variables, canonicalizing equivalence sets into star shapes. */
3636 static int
3637 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3639 variable var = *slot;
3640 decl_or_value dv = var->dv;
3641 location_chain node;
3642 decl_or_value cdv;
3643 rtx val, cval;
3644 variable_def **cslot;
3645 bool has_value;
3646 bool has_marks;
3648 if (!var->onepart)
3649 return 1;
3651 gcc_checking_assert (var->n_var_parts == 1);
3653 if (dv_is_value_p (dv))
3655 cval = dv_as_value (dv);
3656 if (!VALUE_RECURSED_INTO (cval))
3657 return 1;
3658 VALUE_RECURSED_INTO (cval) = false;
3660 else
3661 cval = NULL_RTX;
3663 restart:
3664 val = cval;
3665 has_value = false;
3666 has_marks = false;
3668 gcc_assert (var->n_var_parts == 1);
3670 for (node = var->var_part[0].loc_chain; node; node = node->next)
3671 if (GET_CODE (node->loc) == VALUE)
3673 has_value = true;
3674 if (VALUE_RECURSED_INTO (node->loc))
3675 has_marks = true;
3676 if (canon_value_cmp (node->loc, cval))
3677 cval = node->loc;
3680 if (!has_value)
3681 return 1;
3683 if (cval == val)
3685 if (!has_marks || dv_is_decl_p (dv))
3686 return 1;
3688 /* Keep it marked so that we revisit it, either after visiting a
3689 child node, or after visiting a new parent that might be
3690 found out. */
3691 VALUE_RECURSED_INTO (val) = true;
3693 for (node = var->var_part[0].loc_chain; node; node = node->next)
3694 if (GET_CODE (node->loc) == VALUE
3695 && VALUE_RECURSED_INTO (node->loc))
3697 cval = node->loc;
3698 restart_with_cval:
3699 VALUE_RECURSED_INTO (cval) = false;
3700 dv = dv_from_value (cval);
3701 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3702 if (!slot)
3704 gcc_assert (dv_is_decl_p (var->dv));
3705 /* The canonical value was reset and dropped.
3706 Remove it. */
3707 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3708 return 1;
3710 var = *slot;
3711 gcc_assert (dv_is_value_p (var->dv));
3712 if (var->n_var_parts == 0)
3713 return 1;
3714 gcc_assert (var->n_var_parts == 1);
3715 goto restart;
3718 VALUE_RECURSED_INTO (val) = false;
3720 return 1;
3723 /* Push values to the canonical one. */
3724 cdv = dv_from_value (cval);
3725 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3727 for (node = var->var_part[0].loc_chain; node; node = node->next)
3728 if (node->loc != cval)
3730 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3731 node->init, NULL_RTX);
3732 if (GET_CODE (node->loc) == VALUE)
3734 decl_or_value ndv = dv_from_value (node->loc);
3736 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3737 NO_INSERT);
3739 if (canon_value_cmp (node->loc, val))
3741 /* If it could have been a local minimum, it's not any more,
3742 since it's now neighbor to cval, so it may have to push
3743 to it. Conversely, if it wouldn't have prevailed over
3744 val, then whatever mark it has is fine: if it was to
3745 push, it will now push to a more canonical node, but if
3746 it wasn't, then it has already pushed any values it might
3747 have to. */
3748 VALUE_RECURSED_INTO (node->loc) = true;
3749 /* Make sure we visit node->loc by ensuring cval is
3750 visited too. */
3751 VALUE_RECURSED_INTO (cval) = true;
3753 else if (!VALUE_RECURSED_INTO (node->loc))
3754 /* If we have no need to "recurse" into this node, it's
3755 already "canonicalized", so drop the link to the old
3756 parent. */
3757 clobber_variable_part (set, cval, ndv, 0, NULL);
3759 else if (GET_CODE (node->loc) == REG)
3761 attrs list = set->regs[REGNO (node->loc)], *listp;
3763 /* Change an existing attribute referring to dv so that it
3764 refers to cdv, removing any duplicate this might
3765 introduce, and checking that no previous duplicates
3766 existed, all in a single pass. */
3768 while (list)
3770 if (list->offset == 0
3771 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3772 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3773 break;
3775 list = list->next;
3778 gcc_assert (list);
3779 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3781 list->dv = cdv;
3782 for (listp = &list->next; (list = *listp); listp = &list->next)
3784 if (list->offset)
3785 continue;
3787 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3789 *listp = list->next;
3790 pool_free (attrs_pool, list);
3791 list = *listp;
3792 break;
3795 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3798 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3800 for (listp = &list->next; (list = *listp); listp = &list->next)
3802 if (list->offset)
3803 continue;
3805 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3807 *listp = list->next;
3808 pool_free (attrs_pool, list);
3809 list = *listp;
3810 break;
3813 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3816 else
3817 gcc_unreachable ();
3819 #if ENABLE_CHECKING
3820 while (list)
3822 if (list->offset == 0
3823 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3824 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3825 gcc_unreachable ();
3827 list = list->next;
3829 #endif
3833 if (val)
3834 set_slot_part (set, val, cslot, cdv, 0,
3835 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3837 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3839 /* Variable may have been unshared. */
3840 var = *slot;
3841 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3842 && var->var_part[0].loc_chain->next == NULL);
3844 if (VALUE_RECURSED_INTO (cval))
3845 goto restart_with_cval;
3847 return 1;
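/* To illustrate the star shape: if V3's chain linked it to V2, and
   V2's to V1, with V1 the most canonical of the three, then after
   marking and canonicalization V1 holds all the locations plus
   equivalence links to V2 and V3, while V2 and V3 each keep only a
   link back to V1 -- a star centered on the canonical value.  */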
3850 /* Bind one-part variables to the canonical value in an equivalence
3851 set. Not doing this causes dataflow convergence failure in rare
3852 circumstances, see PR42873. Unfortunately we can't do this
3853 efficiently as part of canonicalize_values_star, since we may not
3854 have determined or even seen the canonical value of a set when we
3855 get to a variable that references another member of the set. */
3857 static int
3858 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3860 variable var = *slot;
3861 decl_or_value dv = var->dv;
3862 location_chain node;
3863 rtx cval;
3864 decl_or_value cdv;
3865 variable_def **cslot;
3866 variable cvar;
3867 location_chain cnode;
3869 if (!var->onepart || var->onepart == ONEPART_VALUE)
3870 return 1;
3872 gcc_assert (var->n_var_parts == 1);
3874 node = var->var_part[0].loc_chain;
3876 if (GET_CODE (node->loc) != VALUE)
3877 return 1;
3879 gcc_assert (!node->next);
3880 cval = node->loc;
3882 /* Push values to the canonical one. */
3883 cdv = dv_from_value (cval);
3884 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3885 if (!cslot)
3886 return 1;
3887 cvar = *cslot;
3888 gcc_assert (cvar->n_var_parts == 1);
3890 cnode = cvar->var_part[0].loc_chain;
3892 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3893 that are not "more canonical" than it. */
3894 if (GET_CODE (cnode->loc) != VALUE
3895 || !canon_value_cmp (cnode->loc, cval))
3896 return 1;
3898 /* CVAL was found to be non-canonical. Change the variable to point
3899 to the canonical VALUE. */
3900 gcc_assert (!cnode->next);
3901 cval = cnode->loc;
3903 slot = set_slot_part (set, cval, slot, dv, 0,
3904 node->init, node->set_src);
3905 clobber_slot_part (set, cval, slot, 0, node->set_src);
3907 return 1;
3910 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3911 corresponding entry in DSM->src. Multi-part variables are combined
3912 with variable_union, whereas onepart dvs are combined with
3913 intersection. */
3915 static int
3916 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3918 dataflow_set *dst = dsm->dst;
3919 variable_def **dstslot;
3920 variable s2var, dvar = NULL;
3921 decl_or_value dv = s1var->dv;
3922 onepart_enum_t onepart = s1var->onepart;
3923 rtx val;
3924 hashval_t dvhash;
3925 location_chain node, *nodep;
3927 /* If the incoming onepart variable has an empty location list, then
3928 the intersection will be just as empty. For other variables,
3929 it's always union. */
3930 gcc_checking_assert (s1var->n_var_parts
3931 && s1var->var_part[0].loc_chain);
3933 if (!onepart)
3934 return variable_union (s1var, dst);
3936 gcc_checking_assert (s1var->n_var_parts == 1);
3938 dvhash = dv_htab_hash (dv);
3939 if (dv_is_value_p (dv))
3940 val = dv_as_value (dv);
3941 else
3942 val = NULL;
3944 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3945 if (!s2var)
3947 dst_can_be_shared = false;
3948 return 1;
3951 dsm->src_onepart_cnt--;
3952 gcc_assert (s2var->var_part[0].loc_chain
3953 && s2var->onepart == onepart
3954 && s2var->n_var_parts == 1);
3956 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3957 if (dstslot)
3959 dvar = *dstslot;
3960 gcc_assert (dvar->refcount == 1
3961 && dvar->onepart == onepart
3962 && dvar->n_var_parts == 1);
3963 nodep = &dvar->var_part[0].loc_chain;
3965 else
3967 nodep = &node;
3968 node = NULL;
3971 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3973 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3974 dvhash, INSERT);
3975 *dstslot = dvar = s2var;
3976 dvar->refcount++;
3978 else
3980 dst_can_be_shared = false;
3982 intersect_loc_chains (val, nodep, dsm,
3983 s1var->var_part[0].loc_chain, s2var);
3985 if (!dstslot)
3987 if (node)
3989 dvar = (variable) pool_alloc (onepart_pool (onepart));
3990 dvar->dv = dv;
3991 dvar->refcount = 1;
3992 dvar->n_var_parts = 1;
3993 dvar->onepart = onepart;
3994 dvar->in_changed_variables = false;
3995 dvar->var_part[0].loc_chain = node;
3996 dvar->var_part[0].cur_loc = NULL;
3997 if (onepart)
3998 VAR_LOC_1PAUX (dvar) = NULL;
3999 else
4000 VAR_PART_OFFSET (dvar, 0) = 0;
4002 dstslot
4003 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4004 INSERT);
4005 gcc_assert (!*dstslot);
4006 *dstslot = dvar;
4008 else
4009 return 1;
4013 nodep = &dvar->var_part[0].loc_chain;
4014 while ((node = *nodep))
4016 location_chain *nextp = &node->next;
4018 if (GET_CODE (node->loc) == REG)
4020 attrs list;
4022 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4023 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4024 && dv_is_value_p (list->dv))
4025 break;
4027 if (!list)
4028 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4029 dv, 0, node->loc);
4030 /* If this value became canonical for another value that had
4031 this register, we want to leave it alone. */
4032 else if (dv_as_value (list->dv) != val)
4034 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4035 dstslot, dv, 0,
4036 node->init, NULL_RTX);
4037 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4039 /* Since nextp points into the removed node, we can't
4040 use it. The pointer to the next node moved to nodep.
4041 However, if the variable we're walking is unshared
4042 during our walk, we'll keep walking the location list
4043 of the previously-shared variable, in which case the
4044 node won't have been removed, and we'll want to skip
4045 it. That's why we test *nodep here. */
4046 if (*nodep != node)
4047 nextp = nodep;
4050 else
4051 /* Canonicalization puts registers first, so we don't have to
4052 walk it all. */
4053 break;
4054 nodep = nextp;
4057 if (dvar != *dstslot)
4058 dvar = *dstslot;
4059 nodep = &dvar->var_part[0].loc_chain;
4061 if (val)
4063 /* Mark all referenced nodes for canonicalization, and make sure
4064 we have mutual equivalence links. */
4065 VALUE_RECURSED_INTO (val) = true;
4066 for (node = *nodep; node; node = node->next)
4067 if (GET_CODE (node->loc) == VALUE)
4069 VALUE_RECURSED_INTO (node->loc) = true;
4070 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4071 node->init, NULL, INSERT);
4074 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4075 gcc_assert (*dstslot == dvar);
4076 canonicalize_values_star (dstslot, dst);
4077 gcc_checking_assert (dstslot
4078 == shared_hash_find_slot_noinsert_1 (dst->vars,
4079 dv, dvhash));
4080 dvar = *dstslot;
4082 else
4084 bool has_value = false, has_other = false;
4086 /* If we have one value and anything else, we're going to
4087 canonicalize this, so make sure all values have an entry in
4088 the table and are marked for canonicalization. */
4089 for (node = *nodep; node; node = node->next)
4091 if (GET_CODE (node->loc) == VALUE)
4093 /* If this was marked during register canonicalization,
4094 we know we have to canonicalize values. */
4095 if (has_value)
4096 has_other = true;
4097 has_value = true;
4098 if (has_other)
4099 break;
4101 else
4103 has_other = true;
4104 if (has_value)
4105 break;
4109 if (has_value && has_other)
4111 for (node = *nodep; node; node = node->next)
4113 if (GET_CODE (node->loc) == VALUE)
4115 decl_or_value dv = dv_from_value (node->loc);
4116 variable_def **slot = NULL;
4118 if (shared_hash_shared (dst->vars))
4119 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4120 if (!slot)
4121 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4122 INSERT);
4123 if (!*slot)
4125 variable var = (variable) pool_alloc (onepart_pool
4126 (ONEPART_VALUE));
4127 var->dv = dv;
4128 var->refcount = 1;
4129 var->n_var_parts = 1;
4130 var->onepart = ONEPART_VALUE;
4131 var->in_changed_variables = false;
4132 var->var_part[0].loc_chain = NULL;
4133 var->var_part[0].cur_loc = NULL;
4134 VAR_LOC_1PAUX (var) = NULL;
4135 *slot = var;
4138 VALUE_RECURSED_INTO (node->loc) = true;
4142 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4143 gcc_assert (*dstslot == dvar);
4144 canonicalize_values_star (dstslot, dst);
4145 gcc_checking_assert (dstslot
4146 == shared_hash_find_slot_noinsert_1 (dst->vars,
4147 dv, dvhash));
4148 dvar = *dstslot;
4152 if (!onepart_variable_different_p (dvar, s2var))
4154 variable_htab_free (dvar);
4155 *dstslot = dvar = s2var;
4156 dvar->refcount++;
4158 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4160 variable_htab_free (dvar);
4161 *dstslot = dvar = s1var;
4162 dvar->refcount++;
4163 dst_can_be_shared = false;
4165 else
4166 dst_can_be_shared = false;
4168 return 1;
4171 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4172 multi-part variable. Unions of multi-part variables and
4173 intersections of one-part ones will be handled in
4174 variable_merge_over_cur(). */
4176 static int
4177 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4179 dataflow_set *dst = dsm->dst;
4180 decl_or_value dv = s2var->dv;
4182 if (!s2var->onepart)
4184 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4185 *dstp = s2var;
4186 s2var->refcount++;
4187 return 1;
4190 dsm->src_onepart_cnt++;
4191 return 1;
4194 /* Combine dataflow set information from SRC2 into DST; the old
4195 contents of DST take part in the merge as SRC1/CUR. */
4197 static void
4198 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4200 dataflow_set cur = *dst;
4201 dataflow_set *src1 = &cur;
4202 struct dfset_merge dsm;
4203 int i;
4204 size_t src1_elems, src2_elems;
4205 variable_iterator_type hi;
4206 variable var;
4208 src1_elems = shared_hash_htab (src1->vars)->elements ();
4209 src2_elems = shared_hash_htab (src2->vars)->elements ();
4210 dataflow_set_init (dst);
4211 dst->stack_adjust = cur.stack_adjust;
4212 shared_hash_destroy (dst->vars);
4213 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4214 dst->vars->refcount = 1;
4215 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4217 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4218 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4220 dsm.dst = dst;
4221 dsm.src = src2;
4222 dsm.cur = src1;
4223 dsm.src_onepart_cnt = 0;
4225 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4226 var, variable, hi)
4227 variable_merge_over_src (var, &dsm);
4228 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4229 var, variable, hi)
4230 variable_merge_over_cur (var, &dsm);
4232 if (dsm.src_onepart_cnt)
4233 dst_can_be_shared = false;
4235 dataflow_set_destroy (src1);
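/* In short: dataflow_set_merge rebuilds DST from its old contents
   (saved as CUR/SRC1) and SRC2.  Multi-part variables are carried
   over by variable_merge_over_src as a union; one-part variables are
   intersected by variable_merge_over_cur; one-part variables present
   in SRC2 but never looked up from CUR leave src_onepart_cnt nonzero,
   which marks DST as not shareable.  */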
4238 /* Mark register equivalences. */
4240 static void
4241 dataflow_set_equiv_regs (dataflow_set *set)
4243 int i;
4244 attrs list, *listp;
4246 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4248 rtx canon[NUM_MACHINE_MODES];
4250 /* If the list is empty or one entry, no need to canonicalize
4251 anything. */
4252 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4253 continue;
4255 memset (canon, 0, sizeof (canon));
4257 for (list = set->regs[i]; list; list = list->next)
4258 if (list->offset == 0 && dv_is_value_p (list->dv))
4260 rtx val = dv_as_value (list->dv);
4261 rtx *cvalp = &canon[(int)GET_MODE (val)];
4262 rtx cval = *cvalp;
4264 if (canon_value_cmp (val, cval))
4265 *cvalp = val;
4268 for (list = set->regs[i]; list; list = list->next)
4269 if (list->offset == 0 && dv_onepart_p (list->dv))
4271 rtx cval = canon[(int)GET_MODE (list->loc)];
4273 if (!cval)
4274 continue;
4276 if (dv_is_value_p (list->dv))
4278 rtx val = dv_as_value (list->dv);
4280 if (val == cval)
4281 continue;
4283 VALUE_RECURSED_INTO (val) = true;
4284 set_variable_part (set, val, dv_from_value (cval), 0,
4285 VAR_INIT_STATUS_INITIALIZED,
4286 NULL, NO_INSERT);
4289 VALUE_RECURSED_INTO (cval) = true;
4290 set_variable_part (set, cval, list->dv, 0,
4291 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4294 for (listp = &set->regs[i]; (list = *listp);
4295 listp = list ? &list->next : listp)
4296 if (list->offset == 0 && dv_onepart_p (list->dv))
4298 rtx cval = canon[(int)GET_MODE (list->loc)];
4299 variable_def **slot;
4301 if (!cval)
4302 continue;
4304 if (dv_is_value_p (list->dv))
4306 rtx val = dv_as_value (list->dv);
4307 if (!VALUE_RECURSED_INTO (val))
4308 continue;
4311 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4312 canonicalize_values_star (slot, set);
4313 if (*listp != list)
4314 list = NULL;
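/* A minimal sketch, outside GCC, of the per-mode canonicalization
   above: scan (mode, value) pairs and keep one representative per
   mode, with "lower uid wins" standing in for canon_value_cmp (names
   here are illustrative).  */
#if 0
#include <stddef.h>

struct val { int mode; int uid; };

static void
pick_canonical (const struct val *vals, size_t n,
		const struct val **canon, size_t num_modes)
{
  size_t i;

  for (i = 0; i < num_modes; i++)
    canon[i] = NULL;		/* Plays the role of the memset above.  */
  for (i = 0; i < n; i++)
    if (canon[vals[i].mode] == NULL
	|| vals[i].uid < canon[vals[i].mode]->uid)
      canon[vals[i].mode] = &vals[i];
}
#endif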
4319 /* Remove any redundant values in the location list of VAR, which must
4320 be unshared and 1-part. */
4322 static void
4323 remove_duplicate_values (variable var)
4325 location_chain node, *nodep;
4327 gcc_assert (var->onepart);
4328 gcc_assert (var->n_var_parts == 1);
4329 gcc_assert (var->refcount == 1);
4331 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4333 if (GET_CODE (node->loc) == VALUE)
4335 if (VALUE_RECURSED_INTO (node->loc))
4337 /* Remove duplicate value node. */
4338 *nodep = node->next;
4339 pool_free (loc_chain_pool, node);
4340 continue;
4342 else
4343 VALUE_RECURSED_INTO (node->loc) = true;
4345 nodep = &node->next;
4348 for (node = var->var_part[0].loc_chain; node; node = node->next)
4349 if (GET_CODE (node->loc) == VALUE)
4351 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4352 VALUE_RECURSED_INTO (node->loc) = false;
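/* A minimal sketch, outside GCC, of the mark-and-unmark pattern used
   above (names are illustrative).  The flag lives on the shared
   payload, as VALUE_RECURSED_INTO lives on the VALUE rtx, so later
   nodes naming the same payload are recognized as duplicates; a
   second pass clears the marks so the flag stays reusable.  */
#if 0
#include <stdbool.h>
#include <stdlib.h>

struct payload { int uid; bool recursed; };
struct chain { struct payload *loc; struct chain *next; };

static void
remove_duplicate_payloads (struct chain **listp)
{
  struct chain *node, **nodep;

  for (nodep = listp; (node = *nodep); )
    if (node->loc->recursed)
      {
	*nodep = node->next;	/* Unlink and free the duplicate.  */
	free (node);
      }
    else
      {
	node->loc->recursed = true;
	nodep = &node->next;
      }

  for (node = *listp; node; node = node->next)
    node->loc->recursed = false;	/* Leave the flags clean.  */
}
#endif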
4357 /* Hash table iteration argument passed to the variable_post_merge_* callbacks. */
4358 struct dfset_post_merge
4360 /* The new input set for the current block. */
4361 dataflow_set *set;
4362 /* Pointer to the permanent input set for the current block, or
4363 NULL. */
4364 dataflow_set **permp;
4367 /* Create values for incoming expressions associated with one-part
4368 variables that don't have value numbers for them. */
4370 static int
4371 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4373 dataflow_set *set = dfpm->set;
4374 variable var = *slot;
4375 location_chain node;
4377 if (!var->onepart || !var->n_var_parts)
4378 return 1;
4380 gcc_assert (var->n_var_parts == 1);
4382 if (dv_is_decl_p (var->dv))
4384 bool check_dupes = false;
4386 restart:
4387 for (node = var->var_part[0].loc_chain; node; node = node->next)
4389 if (GET_CODE (node->loc) == VALUE)
4390 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4391 else if (GET_CODE (node->loc) == REG)
4393 attrs att, *attp, *curp = NULL;
4395 if (var->refcount != 1)
4397 slot = unshare_variable (set, slot, var,
4398 VAR_INIT_STATUS_INITIALIZED);
4399 var = *slot;
4400 goto restart;
4403 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4404 attp = &att->next)
4405 if (att->offset == 0
4406 && GET_MODE (att->loc) == GET_MODE (node->loc))
4408 if (dv_is_value_p (att->dv))
4410 rtx cval = dv_as_value (att->dv);
4411 node->loc = cval;
4412 check_dupes = true;
4413 break;
4415 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4416 curp = attp;
4419 if (!curp)
4421 curp = attp;
4422 while (*curp)
4423 if ((*curp)->offset == 0
4424 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4425 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4426 break;
4427 else
4428 curp = &(*curp)->next;
4429 gcc_assert (*curp);
4432 if (!att)
4434 decl_or_value cdv;
4435 rtx cval;
4437 if (!*dfpm->permp)
4439 *dfpm->permp = XNEW (dataflow_set);
4440 dataflow_set_init (*dfpm->permp);
4443 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4444 att; att = att->next)
4445 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4447 gcc_assert (att->offset == 0
4448 && dv_is_value_p (att->dv));
4449 val_reset (set, att->dv);
4450 break;
4453 if (att)
4455 cdv = att->dv;
4456 cval = dv_as_value (cdv);
4458 else
4460 /* Create a unique value to hold this register,
4461 that ought to be found and reused in
4462 subsequent rounds. */
4463 cselib_val *v;
4464 gcc_assert (!cselib_lookup (node->loc,
4465 GET_MODE (node->loc), 0,
4466 VOIDmode));
4467 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4468 VOIDmode);
4469 cselib_preserve_value (v);
4470 cselib_invalidate_rtx (node->loc);
4471 cval = v->val_rtx;
4472 cdv = dv_from_value (cval);
4473 if (dump_file)
4474 fprintf (dump_file,
4475 "Created new value %u:%u for reg %i\n",
4476 v->uid, v->hash, REGNO (node->loc));
4479 var_reg_decl_set (*dfpm->permp, node->loc,
4480 VAR_INIT_STATUS_INITIALIZED,
4481 cdv, 0, NULL, INSERT);
4483 node->loc = cval;
4484 check_dupes = true;
4487 /* Remove the attribute referring to the decl, which now
4488 uses the value for the register; that value either already
4489 exists or will be added when we bring the permanent set in. */
4490 att = *curp;
4491 *curp = att->next;
4492 pool_free (attrs_pool, att);
4496 if (check_dupes)
4497 remove_duplicate_values (var);
4500 return 1;
4503 /* Reset values in the permanent set that are not associated with the
4504 chosen expression. */
4506 static int
4507 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4509 dataflow_set *set = dfpm->set;
4510 variable pvar = *pslot, var;
4511 location_chain pnode;
4512 decl_or_value dv;
4513 attrs att;
4515 gcc_assert (dv_is_value_p (pvar->dv)
4516 && pvar->n_var_parts == 1);
4517 pnode = pvar->var_part[0].loc_chain;
4518 gcc_assert (pnode
4519 && !pnode->next
4520 && REG_P (pnode->loc));
4522 dv = pvar->dv;
4524 var = shared_hash_find (set->vars, dv);
4525 if (var)
4527 /* Although variable_post_merge_new_vals may have made decls
4528 non-star-canonical, values that pre-existed in canonical form
4529 remain canonical, and newly-created values reference a single
4530 REG, so they are canonical as well. Since VAR has the
4531 location list for a VALUE, using find_loc_in_1pdv for it is
4532 fine, since VALUEs don't map back to DECLs. */
4533 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4534 return 1;
4535 val_reset (set, dv);
4538 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4539 if (att->offset == 0
4540 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4541 && dv_is_value_p (att->dv))
4542 break;
4544 /* If there is a value associated with this register already, create
4545 an equivalence. */
4546 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4548 rtx cval = dv_as_value (att->dv);
4549 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4550 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4551 NULL, INSERT);
4553 else if (!att)
4555 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4556 dv, 0, pnode->loc);
4557 variable_union (pvar, set);
4560 return 1;
4563 /* Fix up SET after a merge: create values for new one-part expressions,
4564 reconcile the permanent set *PERMP, and re-canonicalize. */
4566 static void
4567 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4569 struct dfset_post_merge dfpm;
4571 dfpm.set = set;
4572 dfpm.permp = permp;
4574 shared_hash_htab (set->vars)
4575 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4576 if (*permp)
4577 shared_hash_htab ((*permp)->vars)
4578 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4579 shared_hash_htab (set->vars)
4580 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4581 shared_hash_htab (set->vars)
4582 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4585 /* Return a node whose loc is a MEM that refers to EXPR in the
4586 location list of the one-part variable or value VAL, or in that of
4587 any values recursively mentioned in the location lists. */
4589 static location_chain
4590 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4592 location_chain node;
4593 decl_or_value dv;
4594 variable var;
4595 location_chain where = NULL;
4597 if (!val)
4598 return NULL;
4600 gcc_assert (GET_CODE (val) == VALUE
4601 && !VALUE_RECURSED_INTO (val));
4603 dv = dv_from_value (val);
4604 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4606 if (!var)
4607 return NULL;
4609 gcc_assert (var->onepart);
4611 if (!var->n_var_parts)
4612 return NULL;
4614 VALUE_RECURSED_INTO (val) = true;
4616 for (node = var->var_part[0].loc_chain; node; node = node->next)
4617 if (MEM_P (node->loc)
4618 && MEM_EXPR (node->loc) == expr
4619 && INT_MEM_OFFSET (node->loc) == 0)
4621 where = node;
4622 break;
4624 else if (GET_CODE (node->loc) == VALUE
4625 && !VALUE_RECURSED_INTO (node->loc)
4626 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4627 break;
4629 VALUE_RECURSED_INTO (val) = false;
4631 return where;
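/* Note: VALUE_RECURSED_INTO is set on VAL only for the duration of
   the walk above, so that cycles among mutually-referencing VALUEs
   terminate; it is cleared again before returning, whether or not a
   matching MEM was found.  */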
4634 /* Return TRUE if the value of MEM may vary across a call. */
4636 static bool
4637 mem_dies_at_call (rtx mem)
4639 tree expr = MEM_EXPR (mem);
4640 tree decl;
4642 if (!expr)
4643 return true;
4645 decl = get_base_address (expr);
4647 if (!decl)
4648 return true;
4650 if (!DECL_P (decl))
4651 return true;
4653 return (may_be_aliased (decl)
4654 || (!TREE_READONLY (decl) && is_global_var (decl)));
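/* A few illustrative cases, assuming the usual semantics of
   may_be_aliased and is_global_var (declarations below are
   examples only, not part of the pass):  */
#if 0
int global_counter;		/* dies at calls: global and writable */
static const int table[2] = { 1, 2 };	/* survives: TREE_READONLY */

void
example (void (*fn) (int *))
{
  int local = 0;		/* survives: not aliased, not global */
  int escaped = 0;

  fn (&escaped);		/* ESCAPED's address is taken, so it is
				   may_be_aliased and its MEM dies at
				   later calls.  */
}
#endif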
4657 /* Remove all MEMs from the location list of a hash table entry for a
4658 one-part variable, except those whose MEM attributes map back to
4659 the variable itself, directly or within a VALUE. */
4661 static int
4662 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4664 variable var = *slot;
4666 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4668 tree decl = dv_as_decl (var->dv);
4669 location_chain loc, *locp;
4670 bool changed = false;
4672 if (!var->n_var_parts)
4673 return 1;
4675 gcc_assert (var->n_var_parts == 1);
4677 if (shared_var_p (var, set->vars))
4679 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4681 /* We want to remove dying MEMs that don't refer to DECL. */
4682 if (GET_CODE (loc->loc) == MEM
4683 && (MEM_EXPR (loc->loc) != decl
4684 || INT_MEM_OFFSET (loc->loc) != 0)
4685 && !mem_dies_at_call (loc->loc))
4686 break;
4687 /* We want to move MEMs that do refer to DECL here. */
4688 else if (GET_CODE (loc->loc) == VALUE
4689 && find_mem_expr_in_1pdv (decl, loc->loc,
4690 shared_hash_htab (set->vars)))
4691 break;
4694 if (!loc)
4695 return 1;
4697 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4698 var = *slot;
4699 gcc_assert (var->n_var_parts == 1);
4702 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4703 loc; loc = *locp)
4705 rtx old_loc = loc->loc;
4706 if (GET_CODE (old_loc) == VALUE)
4708 location_chain mem_node
4709 = find_mem_expr_in_1pdv (decl, loc->loc,
4710 shared_hash_htab (set->vars));
4712 /* ??? This picks up only one out of multiple MEMs that
4713 refer to the same variable. Do we ever need to be
4714 concerned about dealing with more than one, or, given
4715 that they should all map to the same variable
4716 location, their addresses will have been merged and
4717 they will be regarded as equivalent? */
4718 if (mem_node)
4720 loc->loc = mem_node->loc;
4721 loc->set_src = mem_node->set_src;
4722 loc->init = MIN (loc->init, mem_node->init);
4726 if (GET_CODE (loc->loc) != MEM
4727 || (MEM_EXPR (loc->loc) == decl
4728 && INT_MEM_OFFSET (loc->loc) == 0)
4729 || !mem_dies_at_call (loc->loc))
4731 if (old_loc != loc->loc && emit_notes)
4733 if (old_loc == var->var_part[0].cur_loc)
4735 changed = true;
4736 var->var_part[0].cur_loc = NULL;
4739 locp = &loc->next;
4740 continue;
4743 if (emit_notes)
4745 if (old_loc == var->var_part[0].cur_loc)
4747 changed = true;
4748 var->var_part[0].cur_loc = NULL;
4751 *locp = loc->next;
4752 pool_free (loc_chain_pool, loc);
4755 if (!var->var_part[0].loc_chain)
4757 var->n_var_parts--;
4758 changed = true;
4760 if (changed)
4761 variable_was_changed (var, set);
4764 return 1;
4767 /* Remove all MEMs from the location list of a hash table entry for a
4768 value. */
4770 static int
4771 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4773 variable var = *slot;
4775 if (var->onepart == ONEPART_VALUE)
4777 location_chain loc, *locp;
4778 bool changed = false;
4779 rtx cur_loc;
4781 gcc_assert (var->n_var_parts == 1);
4783 if (shared_var_p (var, set->vars))
4785 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4786 if (GET_CODE (loc->loc) == MEM
4787 && mem_dies_at_call (loc->loc))
4788 break;
4790 if (!loc)
4791 return 1;
4793 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4794 var = *slot;
4795 gcc_assert (var->n_var_parts == 1);
4798 if (VAR_LOC_1PAUX (var))
4799 cur_loc = VAR_LOC_FROM (var);
4800 else
4801 cur_loc = var->var_part[0].cur_loc;
4803 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4804 loc; loc = *locp)
4806 if (GET_CODE (loc->loc) != MEM
4807 || !mem_dies_at_call (loc->loc))
4809 locp = &loc->next;
4810 continue;
4813 *locp = loc->next;
4814 /* If we have deleted the location which was last emitted,
4815 we have to emit a new location, so add the variable to the
4816 set of changed variables. */
4817 if (cur_loc == loc->loc)
4819 changed = true;
4820 var->var_part[0].cur_loc = NULL;
4821 if (VAR_LOC_1PAUX (var))
4822 VAR_LOC_FROM (var) = NULL;
4824 pool_free (loc_chain_pool, loc);
4827 if (!var->var_part[0].loc_chain)
4829 var->n_var_parts--;
4830 changed = true;
4832 if (changed)
4833 variable_was_changed (var, set);
4836 return 1;
4839 /* Remove all variable-location information about call-clobbered
4840 registers, as well as associations between MEMs and VALUEs. */
4842 static void
4843 dataflow_set_clear_at_call (dataflow_set *set)
4845 unsigned int r;
4846 hard_reg_set_iterator hrsi;
4848 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4849 var_regno_delete (set, r);
4851 if (MAY_HAVE_DEBUG_INSNS)
4853 set->traversed_vars = set->vars;
4854 shared_hash_htab (set->vars)
4855 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4856 set->traversed_vars = set->vars;
4857 shared_hash_htab (set->vars)
4858 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4859 set->traversed_vars = NULL;
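/* Return true if some location in VP1's loc_chain has no counterpart
   in VP2's loc_chain: registers are matched by register number, other
   locations by rtx_equal_p.  The containment test is one-directional;
   variable_different_p checks both directions.  */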
4863 static bool
4864 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4866 location_chain lc1, lc2;
4868 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4870 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4872 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4874 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4875 break;
4877 if (rtx_equal_p (lc1->loc, lc2->loc))
4878 break;
4880 if (!lc2)
4881 return true;
4883 return false;
4886 /* Return true if one-part variables VAR1 and VAR2 are different.
4887 They must be in canonical order. */
4889 static bool
4890 onepart_variable_different_p (variable var1, variable var2)
4892 location_chain lc1, lc2;
4894 if (var1 == var2)
4895 return false;
4897 gcc_assert (var1->n_var_parts == 1
4898 && var2->n_var_parts == 1);
4900 lc1 = var1->var_part[0].loc_chain;
4901 lc2 = var2->var_part[0].loc_chain;
4903 gcc_assert (lc1 && lc2);
4905 while (lc1 && lc2)
4907 if (loc_cmp (lc1->loc, lc2->loc))
4908 return true;
4909 lc1 = lc1->next;
4910 lc2 = lc2->next;
4913 return lc1 != lc2;
4916 /* Return true if variables VAR1 and VAR2 are different. */
4918 static bool
4919 variable_different_p (variable var1, variable var2)
4921 int i;
4923 if (var1 == var2)
4924 return false;
4926 if (var1->onepart != var2->onepart)
4927 return true;
4929 if (var1->n_var_parts != var2->n_var_parts)
4930 return true;
4932 if (var1->onepart && var1->n_var_parts)
4934 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4935 && var1->n_var_parts == 1);
4936 /* One-part values have locations in a canonical order. */
4937 return onepart_variable_different_p (var1, var2);
4940 for (i = 0; i < var1->n_var_parts; i++)
4942 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4943 return true;
4944 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4945 return true;
4946 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4947 return true;
4949 return false;
4952 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4954 static bool
4955 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4957 variable_iterator_type hi;
4958 variable var1;
4960 if (old_set->vars == new_set->vars)
4961 return false;
4963 if (shared_hash_htab (old_set->vars)->elements ()
4964 != shared_hash_htab (new_set->vars)->elements ())
4965 return true;
4967 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
4968 var1, variable, hi)
4970 variable_table_type *htab = shared_hash_htab (new_set->vars);
4971 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4972 if (!var2)
4974 if (dump_file && (dump_flags & TDF_DETAILS))
4976 fprintf (dump_file, "dataflow difference found: removal of:\n");
4977 dump_var (var1);
4979 return true;
4982 if (variable_different_p (var1, var2))
4984 if (dump_file && (dump_flags & TDF_DETAILS))
4986 fprintf (dump_file, "dataflow difference found: "
4987 "old and new follow:\n");
4988 dump_var (var1);
4989 dump_var (var2);
4991 return true;
4995 /* No need to traverse the second hashtab: if both have the same number
4996 of elements and all entries of the second one were found in the first
4997 one, then the second can't have any extra entries. */
4998 return false;
5001 /* Free the contents of dataflow set SET. */
5003 static void
5004 dataflow_set_destroy (dataflow_set *set)
5006 int i;
5008 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5009 attrs_list_clear (&set->regs[i]);
5011 shared_hash_destroy (set->vars);
5012 set->vars = NULL;
5015 /* Return true if RTL X contains a SYMBOL_REF. */
5017 static bool
5018 contains_symbol_ref (rtx x)
5020 const char *fmt;
5021 RTX_CODE code;
5022 int i;
5024 if (!x)
5025 return false;
5027 code = GET_CODE (x);
5028 if (code == SYMBOL_REF)
5029 return true;
5031 fmt = GET_RTX_FORMAT (code);
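/* In the RTX format string, 'e' marks a subexpression operand and 'E'
   a vector of subexpressions; the remaining format letters (integers,
   strings, ...) cannot contain a SYMBOL_REF and are skipped.  */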
5032 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5034 if (fmt[i] == 'e')
5036 if (contains_symbol_ref (XEXP (x, i)))
5037 return true;
5039 else if (fmt[i] == 'E')
5041 int j;
5042 for (j = 0; j < XVECLEN (x, i); j++)
5043 if (contains_symbol_ref (XVECEXP (x, i, j)))
5044 return true;
5048 return false;
5051 /* Shall EXPR be tracked? */
5053 static bool
5054 track_expr_p (tree expr, bool need_rtl)
5056 rtx decl_rtl;
5057 tree realdecl;
5059 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5060 return DECL_RTL_SET_P (expr);
5062 /* If EXPR is not a parameter or a variable do not track it. */
5063 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5064 return 0;
5066 /* It also must have a name... */
5067 if (!DECL_NAME (expr) && need_rtl)
5068 return 0;
5070 /* ... and a RTL assigned to it. */
5071 decl_rtl = DECL_RTL_IF_SET (expr);
5072 if (!decl_rtl && need_rtl)
5073 return 0;
5075 /* If this expression is really a debug alias of some other declaration, we
5076 don't need to track this expression if the ultimate declaration is
5077 ignored. */
5078 realdecl = expr;
5079 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5081 realdecl = DECL_DEBUG_EXPR (realdecl);
5082 if (!DECL_P (realdecl))
5084 if (handled_component_p (realdecl)
5085 || (TREE_CODE (realdecl) == MEM_REF
5086 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5088 HOST_WIDE_INT bitsize, bitpos, maxsize;
5089 tree innerdecl
5090 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5091 &maxsize);
5092 if (!DECL_P (innerdecl)
5093 || DECL_IGNORED_P (innerdecl)
5094 /* Do not track declarations for parts of tracked parameters
5095 since we want to track them as a whole instead. */
5096 || (TREE_CODE (innerdecl) == PARM_DECL
5097 && DECL_MODE (innerdecl) != BLKmode
5098 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5099 || TREE_STATIC (innerdecl)
5100 || bitsize <= 0
5101 || bitpos + bitsize > 256
5102 || bitsize != maxsize)
5103 return 0;
5104 else
5105 realdecl = expr;
5107 else
5108 return 0;
5112 /* Do not track EXPR if REALDECL should be ignored for debugging
5113 purposes. */
5114 if (DECL_IGNORED_P (realdecl))
5115 return 0;
5117 /* Do not track global variables until we are able to emit correct location
5118 list for them. */
5119 if (TREE_STATIC (realdecl))
5120 return 0;
5122 /* When EXPR is a DECL that is an alias of some variable (see the
5123 example below), the TREE_STATIC flag is not used. Disable tracking
5124 of all DECLs whose DECL_RTL contains a SYMBOL_REF.
5126 Example:
5127 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5128 char **_dl_argv; */
5130 if (decl_rtl && MEM_P (decl_rtl)
5131 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5132 return 0;
5134 /* If the RTL is a memory, it should not be very large (because that
5135 would be an array or a struct). */
5136 if (decl_rtl && MEM_P (decl_rtl))
5138 /* Do not track structures and arrays. */
5139 if (GET_MODE (decl_rtl) == BLKmode
5140 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5141 return 0;
5142 if (MEM_SIZE_KNOWN_P (decl_rtl)
5143 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5144 return 0;
5147 DECL_CHANGED (expr) = 0;
5148 DECL_CHANGED (realdecl) = 0;
5149 return 1;
5152 /* Determine whether a given LOC refers to the same variable part as
5153 EXPR+OFFSET. */
5155 static bool
5156 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5158 tree expr2;
5159 HOST_WIDE_INT offset2;
5161 if (! DECL_P (expr))
5162 return false;
5164 if (REG_P (loc))
5166 expr2 = REG_EXPR (loc);
5167 offset2 = REG_OFFSET (loc);
5169 else if (MEM_P (loc))
5171 expr2 = MEM_EXPR (loc);
5172 offset2 = INT_MEM_OFFSET (loc);
5174 else
5175 return false;
5177 if (! expr2 || ! DECL_P (expr2))
5178 return false;
5180 expr = var_debug_decl (expr);
5181 expr2 = var_debug_decl (expr2);
5183 return (expr == expr2 && offset == offset2);
5186 /* LOC is a REG or MEM that we would like to track if possible.
5187 If EXPR is null, we don't know what expression LOC refers to,
5188 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5189 LOC is an lvalue register.
5191 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5192 is something we can track. When returning true, store the mode of
5193 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5194 from EXPR in *OFFSET_OUT (if nonnull). */
5196 static bool
5197 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5198 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5200 enum machine_mode mode;
5202 if (expr == NULL || !track_expr_p (expr, true))
5203 return false;
5205 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5206 whole subreg, but only the old inner part is really relevant. */
5207 mode = GET_MODE (loc);
5208 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5210 enum machine_mode pseudo_mode;
5212 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5213 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5215 offset += byte_lowpart_offset (pseudo_mode, mode);
5216 mode = pseudo_mode;
5220 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5221 Do the same if we are storing to a register and EXPR occupies
5222 the whole of register LOC; in that case, the whole of EXPR is
5223 being changed. We exclude complex modes from the second case
5224 because the real and imaginary parts are represented as separate
5225 pseudo registers, even if the whole complex value fits into one
5226 hard register. */
5227 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5228 || (store_reg_p
5229 && !COMPLEX_MODE_P (DECL_MODE (expr))
5230 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5231 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5233 mode = DECL_MODE (expr);
5234 offset = 0;
5237 if (offset < 0 || offset >= MAX_VAR_PARTS)
5238 return false;
5240 if (mode_out)
5241 *mode_out = mode;
5242 if (offset_out)
5243 *offset_out = offset;
5244 return true;
5247 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5248 want to track. When returning nonnull, make sure that the attributes
5249 on the returned value are updated. */
5251 static rtx
5252 var_lowpart (enum machine_mode mode, rtx loc)
5254 unsigned int offset, reg_offset, regno;
5256 if (GET_MODE (loc) == mode)
5257 return loc;
5259 if (!REG_P (loc) && !MEM_P (loc))
5260 return NULL;
5262 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5264 if (MEM_P (loc))
5265 return adjust_address_nv (loc, mode, offset);
5267 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5268 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5269 reg_offset, mode);
5270 return gen_rtx_REG_offset (loc, mode, regno, offset);
5273 /* Carry information about uses and stores while walking rtx. */
5275 struct count_use_info
5277 /* The insn where the RTX is. */
5278 rtx_insn *insn;
5280 /* The basic block where insn is. */
5281 basic_block bb;
5283 /* The array of n_sets sets in the insn, as determined by cselib. */
5284 struct cselib_set *sets;
5285 int n_sets;
5287 /* True if we're counting stores, false otherwise. */
5288 bool store_p;
5291 /* Find a VALUE corresponding to X. */
5293 static inline cselib_val *
5294 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5296 int i;
5298 if (cui->sets)
5300 /* This is called after uses are set up and before stores are
5301 processed by cselib, so it's safe to look up srcs, but not
5302 dsts. So we look up expressions that appear in srcs or in
5303 dest expressions, but we search the sets array for dests of
5304 stores. */
5305 if (cui->store_p)
5307 /* Some targets represent memset and memcpy patterns
5308 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5309 (set (mem:BLK ...) (const_int ...)) or
5310 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5311 in that case, otherwise we end up with mode mismatches. */
5312 if (mode == BLKmode && MEM_P (x))
5313 return NULL;
5314 for (i = 0; i < cui->n_sets; i++)
5315 if (cui->sets[i].dest == x)
5316 return cui->sets[i].src_elt;
5318 else
5319 return cselib_lookup (x, mode, 0, VOIDmode);
5322 return NULL;
5325 /* Replace all registers and addresses in an expression with VALUE
5326 expressions that map back to them, unless the expression is a
5327 register. If no mapping is or can be performed, returns NULL. */
5329 static rtx
5330 replace_expr_with_values (rtx loc)
5332 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5333 return NULL;
5334 else if (MEM_P (loc))
5336 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5337 get_address_mode (loc), 0,
5338 GET_MODE (loc));
5339 if (addr)
5340 return replace_equiv_address_nv (loc, addr->val_rtx);
5341 else
5342 return NULL;
5344 else
5345 return cselib_subst_to_values (loc, VOIDmode);
5348 /* Return true if X contains a DEBUG_EXPR. */
5350 static bool
5351 rtx_debug_expr_p (const_rtx x)
5353 subrtx_iterator::array_type array;
5354 FOR_EACH_SUBRTX (iter, array, x, ALL)
5355 if (GET_CODE (*iter) == DEBUG_EXPR)
5356 return true;
5357 return false;
5360 /* Determine what kind of micro operation to choose for a USE. Return
5361 MO_CLOBBER if no micro operation is to be generated. */
5363 static enum micro_operation_type
5364 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5366 tree expr;
5368 if (cui && cui->sets)
5370 if (GET_CODE (loc) == VAR_LOCATION)
5372 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5374 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5375 if (! VAR_LOC_UNKNOWN_P (ploc))
5377 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5378 VOIDmode);
5380 /* ??? flag_float_store and volatile mems are never
5381 given values, but we could in theory use them for
5382 locations. */
5383 gcc_assert (val || 1);
5385 return MO_VAL_LOC;
5387 else
5388 return MO_CLOBBER;
5391 if (REG_P (loc) || MEM_P (loc))
5393 if (modep)
5394 *modep = GET_MODE (loc);
5395 if (cui->store_p)
5397 if (REG_P (loc)
5398 || (find_use_val (loc, GET_MODE (loc), cui)
5399 && cselib_lookup (XEXP (loc, 0),
5400 get_address_mode (loc), 0,
5401 GET_MODE (loc))))
5402 return MO_VAL_SET;
5404 else
5406 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5408 if (val && !cselib_preserved_value_p (val))
5409 return MO_VAL_USE;
5414 if (REG_P (loc))
5416 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5418 if (loc == cfa_base_rtx)
5419 return MO_CLOBBER;
5420 expr = REG_EXPR (loc);
5422 if (!expr)
5423 return MO_USE_NO_VAR;
5424 else if (target_for_debug_bind (var_debug_decl (expr)))
5425 return MO_CLOBBER;
5426 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5427 false, modep, NULL))
5428 return MO_USE;
5429 else
5430 return MO_USE_NO_VAR;
5432 else if (MEM_P (loc))
5434 expr = MEM_EXPR (loc);
5436 if (!expr)
5437 return MO_CLOBBER;
5438 else if (target_for_debug_bind (var_debug_decl (expr)))
5439 return MO_CLOBBER;
5440 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5441 false, modep, NULL)
5442 /* Multi-part variables shouldn't refer to one-part
5443 variable names such as VALUEs (never happens) or
5444 DEBUG_EXPRs (only happens in the presence of debug
5445 insns). */
5446 && (!MAY_HAVE_DEBUG_INSNS
5447 || !rtx_debug_expr_p (XEXP (loc, 0))))
5448 return MO_USE;
5449 else
5450 return MO_CLOBBER;
5453 return MO_CLOBBER;
5456 /* Log to OUT information about micro-operation MOPT involving X in
5457 INSN of BB. */
5459 static inline void
5460 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5461 enum micro_operation_type mopt, FILE *out)
5463 fprintf (out, "bb %i op %i insn %i %s ",
5464 bb->index, VTI (bb)->mos.length (),
5465 INSN_UID (insn), micro_operation_type_name[mopt]);
5466 print_inline_rtx (out, x, 2);
5467 fputc ('\n', out);
5470 /* Tell whether the CONCAT used to hold a VALUE and its location
5471 needs value resolution, i.e., an attempt at mapping the location
5472 back to other incoming values. */
5473 #define VAL_NEEDS_RESOLUTION(x) \
5474 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5475 /* Whether the location in the CONCAT is a tracked expression, that
5476 should also be handled like a MO_USE. */
5477 #define VAL_HOLDS_TRACK_EXPR(x) \
5478 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5479 /* Whether the location in the CONCAT should be handled like a MO_COPY
5480 as well. */
5481 #define VAL_EXPR_IS_COPIED(x) \
5482 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5483 /* Whether the location in the CONCAT should be handled like a
5484 MO_CLOBBER as well. */
5485 #define VAL_EXPR_IS_CLOBBERED(x) \
5486 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
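/* The four accessors above reuse otherwise-unused RTL flag bits
   (volatil, used, jump and unchanging) on the CONCAT that pairs a
   VALUE with its location; RTL_FLAG_CHECK1 verifies, when RTL flag
   checking is enabled, that the rtx really is a CONCAT before the bit
   is accessed.  */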
5488 /* All preserved VALUEs. */
5489 static vec<rtx> preserved_values;
5491 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5493 static void
5494 preserve_value (cselib_val *val)
5496 cselib_preserve_value (val);
5497 preserved_values.safe_push (val->val_rtx);
5500 /* Helper function for MO_VAL_LOC handling. Return nonzero if X
5501 contains any rtxes that are unsuitable for CONST use and have
5502 not been replaced by VALUEs. */
5504 static bool
5505 non_suitable_const (const_rtx x)
5507 subrtx_iterator::array_type array;
5508 FOR_EACH_SUBRTX (iter, array, x, ALL)
5510 const_rtx x = *iter;
5511 switch (GET_CODE (x))
5513 case REG:
5514 case DEBUG_EXPR:
5515 case PC:
5516 case SCRATCH:
5517 case CC0:
5518 case ASM_INPUT:
5519 case ASM_OPERANDS:
5520 return true;
5521 case MEM:
5522 if (!MEM_READONLY_P (x))
5523 return true;
5524 break;
5525 default:
5526 break;
5529 return false;
5532 /* Add the use (register or memory reference) LOC, which will be
5533 tracked, to VTI (bb)->mos. */
5535 static void
5536 add_uses (rtx loc, struct count_use_info *cui)
5538 enum machine_mode mode = VOIDmode;
5539 enum micro_operation_type type = use_type (loc, cui, &mode);
5541 if (type != MO_CLOBBER)
5543 basic_block bb = cui->bb;
5544 micro_operation mo;
5546 mo.type = type;
5547 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5548 mo.insn = cui->insn;
5550 if (type == MO_VAL_LOC)
5552 rtx oloc = loc;
5553 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5554 cselib_val *val;
5556 gcc_assert (cui->sets);
5558 if (MEM_P (vloc)
5559 && !REG_P (XEXP (vloc, 0))
5560 && !MEM_P (XEXP (vloc, 0)))
5562 rtx mloc = vloc;
5563 enum machine_mode address_mode = get_address_mode (mloc);
5564 cselib_val *val
5565 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5566 GET_MODE (mloc));
5568 if (val && !cselib_preserved_value_p (val))
5569 preserve_value (val);
5572 if (CONSTANT_P (vloc)
5573 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5574 /* For constants don't look up any value. */;
5575 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5576 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5578 enum machine_mode mode2;
5579 enum micro_operation_type type2;
5580 rtx nloc = NULL;
5581 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5583 if (resolvable)
5584 nloc = replace_expr_with_values (vloc);
5586 if (nloc)
5588 oloc = shallow_copy_rtx (oloc);
5589 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5592 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5594 type2 = use_type (vloc, 0, &mode2);
5596 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5597 || type2 == MO_CLOBBER);
5599 if (type2 == MO_CLOBBER
5600 && !cselib_preserved_value_p (val))
5602 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5603 preserve_value (val);
5606 else if (!VAR_LOC_UNKNOWN_P (vloc))
5608 oloc = shallow_copy_rtx (oloc);
5609 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5612 mo.u.loc = oloc;
5614 else if (type == MO_VAL_USE)
5616 enum machine_mode mode2 = VOIDmode;
5617 enum micro_operation_type type2;
5618 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5619 rtx vloc, oloc = loc, nloc;
5621 gcc_assert (cui->sets);
5623 if (MEM_P (oloc)
5624 && !REG_P (XEXP (oloc, 0))
5625 && !MEM_P (XEXP (oloc, 0)))
5627 rtx mloc = oloc;
5628 enum machine_mode address_mode = get_address_mode (mloc);
5629 cselib_val *val
5630 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5631 GET_MODE (mloc));
5633 if (val && !cselib_preserved_value_p (val))
5634 preserve_value (val);
5637 type2 = use_type (loc, 0, &mode2);
5639 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5640 || type2 == MO_CLOBBER);
5642 if (type2 == MO_USE)
5643 vloc = var_lowpart (mode2, loc);
5644 else
5645 vloc = oloc;
5647 /* The loc of a MO_VAL_USE may have two forms:
5649 (concat val src): val is at src, a value-based
5650 representation.
5652 (concat (concat val use) src): same as above, with use as
5653 the MO_USE tracked value, if it differs from src. */
5657 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5658 nloc = replace_expr_with_values (loc);
5659 if (!nloc)
5660 nloc = oloc;
5662 if (vloc != nloc)
5663 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5664 else
5665 oloc = val->val_rtx;
5667 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5669 if (type2 == MO_USE)
5670 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5671 if (!cselib_preserved_value_p (val))
5673 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5674 preserve_value (val);
5677 else
5678 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5680 if (dump_file && (dump_flags & TDF_DETAILS))
5681 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5682 VTI (bb)->mos.safe_push (mo);
5686 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5688 static void
5689 add_uses_1 (rtx *x, void *cui)
5691 subrtx_var_iterator::array_type array;
5692 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5693 add_uses (*iter, (struct count_use_info *) cui);
5696 /* This is the value used during expansion of locations. We want it
5697 to be unbounded, so that variables expanded deep in a recursion
5698 nest are fully evaluated, so that their values are cached
5699 correctly. We avoid recursion cycles through other means, and we
5700 don't unshare RTL, so excess complexity is not a problem. */
5701 #define EXPR_DEPTH (INT_MAX)
5702 /* We use this to keep too-complex expressions from being emitted as
5703 location notes and then propagated to debug information. Users can
5704 trade compile time for ridiculously complex expressions, although
5705 they're seldom useful, and they may often have to be discarded as
5706 not representable anyway. */
5707 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
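/* EXPR_USE_DEPTH reads the value of the max-vartrack-expr-depth
   --param, so the limit can be adjusted from the command line, e.g.
   "--param max-vartrack-expr-depth=50" (illustrative value).  */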
5709 /* Attempt to reverse the EXPR operation in the debug info and record
5710 it in the cselib table. Say, for reg1 = reg2 + 6, even when reg2 is
5711 no longer live we can express its value as VAL - 6. */
5713 static void
5714 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5716 rtx src, arg, ret;
5717 cselib_val *v;
5718 struct elt_loc_list *l;
5719 enum rtx_code code;
5720 int count;
5722 if (GET_CODE (expr) != SET)
5723 return;
5725 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5726 return;
5728 src = SET_SRC (expr);
5729 switch (GET_CODE (src))
5731 case PLUS:
5732 case MINUS:
5733 case XOR:
5734 case NOT:
5735 case NEG:
5736 if (!REG_P (XEXP (src, 0)))
5737 return;
5738 break;
5739 case SIGN_EXTEND:
5740 case ZERO_EXTEND:
5741 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5742 return;
5743 break;
5744 default:
5745 return;
5748 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5749 return;
5751 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5752 if (!v || !cselib_preserved_value_p (v))
5753 return;
5755 /* Use canonical V to avoid creating multiple redundant expressions
5756 for different VALUES equivalent to V. */
5757 v = canonical_cselib_val (v);
5759 /* Adding a reverse op isn't useful if V already has an always valid
5760 location. Ignore ENTRY_VALUE, while it is always constant, we should
5761 prefer non-ENTRY_VALUE locations whenever possible. */
5762 for (l = v->locs, count = 0; l; l = l->next, count++)
5763 if (CONSTANT_P (l->loc)
5764 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5765 return;
5766 /* Avoid creating too large locs lists. */
5767 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5768 return;
5770 switch (GET_CODE (src))
5772 case NOT:
5773 case NEG:
5774 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5775 return;
5776 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5777 break;
5778 case SIGN_EXTEND:
5779 case ZERO_EXTEND:
5780 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5781 break;
5782 case XOR:
5783 code = XOR;
5784 goto binary;
5785 case PLUS:
5786 code = MINUS;
5787 goto binary;
5788 case MINUS:
5789 code = PLUS;
5790 goto binary;
5791 binary:
5792 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5793 return;
5794 arg = XEXP (src, 1);
5795 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5797 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5798 if (arg == NULL_RTX)
5799 return;
5800 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5801 return;
5803 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5804 if (ret == val)
5805 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5806 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5807 breaks a lot of routines during var-tracking. */
5808 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5809 break;
5810 default:
5811 gcc_unreachable ();
5814 cselib_add_permanent_equiv (v, ret, insn);
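/* A minimal sketch, outside GCC, of the arithmetic behind the
   reversal above (the function name is illustrative): for
   dst = src OP arg with an invertible OP, the dead src can be
   described in terms of the live dst.  */
#if 0
static long
reverse_binary_op (int code /* '+', '-' or '^' */, long dst, long arg)
{
  switch (code)
    {
    case '+': return dst - arg;	/* src = dst - arg  (PLUS -> MINUS) */
    case '-': return dst + arg;	/* src = dst + arg  (MINUS -> PLUS) */
    case '^': return dst ^ arg;	/* XOR is its own inverse */
    default:  return dst;	/* NOT/NEG/extensions handled separately */
    }
}
#endif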
5817 /* Add the store (register or memory reference) LOC, which will be
5818 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing
5819 the store. CUIP->insn is the instruction which LOC is part of. */
5821 static void
5822 add_stores (rtx loc, const_rtx expr, void *cuip)
5824 enum machine_mode mode = VOIDmode, mode2;
5825 struct count_use_info *cui = (struct count_use_info *)cuip;
5826 basic_block bb = cui->bb;
5827 micro_operation mo;
5828 rtx oloc = loc, nloc, src = NULL;
5829 enum micro_operation_type type = use_type (loc, cui, &mode);
5830 bool track_p = false;
5831 cselib_val *v;
5832 bool resolve, preserve;
5834 if (type == MO_CLOBBER)
5835 return;
5837 mode2 = mode;
5839 if (REG_P (loc))
5841 gcc_assert (loc != cfa_base_rtx);
5842 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5843 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5844 || GET_CODE (expr) == CLOBBER)
5846 mo.type = MO_CLOBBER;
5847 mo.u.loc = loc;
5848 if (GET_CODE (expr) == SET
5849 && SET_DEST (expr) == loc
5850 && !unsuitable_loc (SET_SRC (expr))
5851 && find_use_val (loc, mode, cui))
5853 gcc_checking_assert (type == MO_VAL_SET);
5854 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5857 else
5859 if (GET_CODE (expr) == SET
5860 && SET_DEST (expr) == loc
5861 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5862 src = var_lowpart (mode2, SET_SRC (expr));
5863 loc = var_lowpart (mode2, loc);
5865 if (src == NULL)
5867 mo.type = MO_SET;
5868 mo.u.loc = loc;
5870 else
5872 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5873 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5875 /* If this is an instruction copying (part of) a parameter
5876 passed by invisible reference to its register location,
5877 pretend it's a SET so that the initial memory location
5878 is discarded, as the parameter register can be reused
5879 for other purposes and we do not track locations based
5880 on generic registers. */
5881 if (MEM_P (src)
5882 && REG_EXPR (loc)
5883 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5884 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5885 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5886 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5887 != arg_pointer_rtx)
5888 mo.type = MO_SET;
5889 else
5890 mo.type = MO_COPY;
5892 else
5893 mo.type = MO_SET;
5894 mo.u.loc = xexpr;
5897 mo.insn = cui->insn;
5899 else if (MEM_P (loc)
5900 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5901 || cui->sets))
5903 if (MEM_P (loc) && type == MO_VAL_SET
5904 && !REG_P (XEXP (loc, 0))
5905 && !MEM_P (XEXP (loc, 0)))
5907 rtx mloc = loc;
5908 enum machine_mode address_mode = get_address_mode (mloc);
5909 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5910 address_mode, 0,
5911 GET_MODE (mloc));
5913 if (val && !cselib_preserved_value_p (val))
5914 preserve_value (val);
5917 if (GET_CODE (expr) == CLOBBER || !track_p)
5919 mo.type = MO_CLOBBER;
5920 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5922 else
5924 if (GET_CODE (expr) == SET
5925 && SET_DEST (expr) == loc
5926 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5927 src = var_lowpart (mode2, SET_SRC (expr));
5928 loc = var_lowpart (mode2, loc);
5930 if (src == NULL)
5932 mo.type = MO_SET;
5933 mo.u.loc = loc;
5935 else
5937 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5938 if (same_variable_part_p (SET_SRC (xexpr),
5939 MEM_EXPR (loc),
5940 INT_MEM_OFFSET (loc)))
5941 mo.type = MO_COPY;
5942 else
5943 mo.type = MO_SET;
5944 mo.u.loc = xexpr;
5947 mo.insn = cui->insn;
5949 else
5950 return;
5952 if (type != MO_VAL_SET)
5953 goto log_and_return;
5955 v = find_use_val (oloc, mode, cui);
5957 if (!v)
5958 goto log_and_return;
5960 resolve = preserve = !cselib_preserved_value_p (v);
5962 /* We cannot track values for multiple-part variables, so we track only
5963 locations for tracked parameters passed either by invisible reference
5964 or directly in multiple locations. */
5965 if (track_p
5966 && REG_P (loc)
5967 && REG_EXPR (loc)
5968 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5969 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5970 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
5971 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5972 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
5973 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
5974 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
5976 /* Although we don't use the value here, it could be used later by the
5977 mere virtue of its existence as the operand of the reverse operation
5978 that gave rise to it (typically extension/truncation). Make sure it
5979 is preserved as required by vt_expand_var_loc_chain. */
5980 if (preserve)
5981 preserve_value (v);
5982 goto log_and_return;
5985 if (loc == stack_pointer_rtx
5986 && hard_frame_pointer_adjustment != -1
5987 && preserve)
5988 cselib_set_value_sp_based (v);
5990 nloc = replace_expr_with_values (oloc);
5991 if (nloc)
5992 oloc = nloc;
5994 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5996 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5998 if (oval == v)
5999 return;
6000 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6002 if (oval && !cselib_preserved_value_p (oval))
6004 micro_operation moa;
6006 preserve_value (oval);
6008 moa.type = MO_VAL_USE;
6009 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6010 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6011 moa.insn = cui->insn;
6013 if (dump_file && (dump_flags & TDF_DETAILS))
6014 log_op_type (moa.u.loc, cui->bb, cui->insn,
6015 moa.type, dump_file);
6016 VTI (bb)->mos.safe_push (moa);
6019 resolve = false;
6021 else if (resolve && GET_CODE (mo.u.loc) == SET)
6023 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6024 nloc = replace_expr_with_values (SET_SRC (expr));
6025 else
6026 nloc = NULL_RTX;
6028 /* Avoid the mode mismatch between oexpr and expr. */
6029 if (!nloc && mode != mode2)
6031 nloc = SET_SRC (expr);
6032 gcc_assert (oloc == SET_DEST (expr));
6035 if (nloc && nloc != SET_SRC (mo.u.loc))
6036 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
6037 else
6039 if (oloc == SET_DEST (mo.u.loc))
6040 /* No point in duplicating. */
6041 oloc = mo.u.loc;
6042 if (!REG_P (SET_SRC (mo.u.loc)))
6043 resolve = false;
6046 else if (!resolve)
6048 if (GET_CODE (mo.u.loc) == SET
6049 && oloc == SET_DEST (mo.u.loc))
6050 /* No point in duplicating. */
6051 oloc = mo.u.loc;
6053 else
6054 resolve = false;
6056 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6058 if (mo.u.loc != oloc)
6059 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6061 /* The loc of a MO_VAL_SET may have various forms:
6063 (concat val dst): dst now holds val
6065 (concat val (set dst src)): dst now holds val, copied from src
6067 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6068 after replacing mems and non-top-level regs with values.
6070 (concat (concat val dstv) (set dst src)): dst now holds val,
6071 copied from src. dstv is a value-based representation of dst, if
6072 it differs from dst. If resolution is needed, src is a REG, and
6073 its mode is the same as that of val.
6075 (concat (concat val (set dstv srcv)) (set dst src)): src
6076 copied to dst, holding val. dstv and srcv are value-based
6077 representations of dst and src, respectively. */
6081 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6082 reverse_op (v->val_rtx, expr, cui->insn);
6084 mo.u.loc = loc;
6086 if (track_p)
6087 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6088 if (preserve)
6090 VAL_NEEDS_RESOLUTION (loc) = resolve;
6091 preserve_value (v);
6093 if (mo.type == MO_CLOBBER)
6094 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6095 if (mo.type == MO_COPY)
6096 VAL_EXPR_IS_COPIED (loc) = 1;
6098 mo.type = MO_VAL_SET;
6100 log_and_return:
6101 if (dump_file && (dump_flags & TDF_DETAILS))
6102 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6103 VTI (bb)->mos.safe_push (mo);
6106 /* Arguments to the call. */
6107 static rtx call_arguments;
6109 /* Compute call_arguments. */
6111 static void
6112 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6114 rtx link, x, call;
6115 rtx prev, cur, next;
6116 rtx this_arg = NULL_RTX;
6117 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6118 tree obj_type_ref = NULL_TREE;
6119 CUMULATIVE_ARGS args_so_far_v;
6120 cumulative_args_t args_so_far;
6122 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6123 args_so_far = pack_cumulative_args (&args_so_far_v);
6124 call = get_call_rtx_from (insn);
6125 if (call)
6127 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6129 rtx symbol = XEXP (XEXP (call, 0), 0);
6130 if (SYMBOL_REF_DECL (symbol))
6131 fndecl = SYMBOL_REF_DECL (symbol);
6133 if (fndecl == NULL_TREE)
6134 fndecl = MEM_EXPR (XEXP (call, 0));
6135 if (fndecl
6136 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6137 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6138 fndecl = NULL_TREE;
6139 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6140 type = TREE_TYPE (fndecl);
6141 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6143 if (TREE_CODE (fndecl) == INDIRECT_REF
6144 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6145 obj_type_ref = TREE_OPERAND (fndecl, 0);
6146 fndecl = NULL_TREE;
6148 if (type)
6150 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6151 t = TREE_CHAIN (t))
6152 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6153 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6154 break;
6155 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6156 type = NULL;
6157 else
6159 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6160 link = CALL_INSN_FUNCTION_USAGE (insn);
6161 #ifndef PCC_STATIC_STRUCT_RETURN
6162 if (aggregate_value_p (TREE_TYPE (type), type)
6163 && targetm.calls.struct_value_rtx (type, 0) == 0)
6165 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6166 enum machine_mode mode = TYPE_MODE (struct_addr);
6167 rtx reg;
6168 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6169 nargs + 1);
6170 reg = targetm.calls.function_arg (args_so_far, mode,
6171 struct_addr, true);
6172 targetm.calls.function_arg_advance (args_so_far, mode,
6173 struct_addr, true);
6174 if (reg == NULL_RTX)
6176 for (; link; link = XEXP (link, 1))
6177 if (GET_CODE (XEXP (link, 0)) == USE
6178 && MEM_P (XEXP (XEXP (link, 0), 0)))
6180 link = XEXP (link, 1);
6181 break;
6185 else
6186 #endif
6187 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6188 nargs);
6189 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6191 enum machine_mode mode;
6192 t = TYPE_ARG_TYPES (type);
6193 mode = TYPE_MODE (TREE_VALUE (t));
6194 this_arg = targetm.calls.function_arg (args_so_far, mode,
6195 TREE_VALUE (t), true);
6196 if (this_arg && !REG_P (this_arg))
6197 this_arg = NULL_RTX;
6198 else if (this_arg == NULL_RTX)
6200 for (; link; link = XEXP (link, 1))
6201 if (GET_CODE (XEXP (link, 0)) == USE
6202 && MEM_P (XEXP (XEXP (link, 0), 0)))
6204 this_arg = XEXP (XEXP (link, 0), 0);
6205 break;
6212 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6214 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6215 if (GET_CODE (XEXP (link, 0)) == USE)
6217 rtx item = NULL_RTX;
6218 x = XEXP (XEXP (link, 0), 0);
6219 if (GET_MODE (link) == VOIDmode
6220 || GET_MODE (link) == BLKmode
6221 || (GET_MODE (link) != GET_MODE (x)
6222 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6223 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6224 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6225 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6226 /* Can't do anything for these, if the original type mode
6227 isn't known or can't be converted. */;
6228 else if (REG_P (x))
6230 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6231 if (val && cselib_preserved_value_p (val))
6232 item = val->val_rtx;
6233 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6234 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6236 enum machine_mode mode = GET_MODE (x);
6238 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6239 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6241 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6243 if (reg == NULL_RTX || !REG_P (reg))
6244 continue;
6245 val = cselib_lookup (reg, mode, 0, VOIDmode);
6246 if (val && cselib_preserved_value_p (val))
6248 item = val->val_rtx;
6249 break;
6254 else if (MEM_P (x))
6256 rtx mem = x;
6257 cselib_val *val;
6259 if (!frame_pointer_needed)
6261 struct adjust_mem_data amd;
6262 amd.mem_mode = VOIDmode;
6263 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6264 amd.side_effects = NULL;
6265 amd.store = true;
6266 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6267 &amd);
6268 gcc_assert (amd.side_effects == NULL_RTX);
6270 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6271 if (val && cselib_preserved_value_p (val))
6272 item = val->val_rtx;
6273 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6274 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6276 /* For a non-integer stack argument, also check whether it was
6277 initialized by an integer store. */
6278 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6279 if (imode != GET_MODE (mem) && imode != BLKmode)
6281 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6282 imode, 0, VOIDmode);
6283 if (val && cselib_preserved_value_p (val))
6284 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6285 imode);
6289 if (item)
6291 rtx x2 = x;
6292 if (GET_MODE (item) != GET_MODE (link))
6293 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6294 if (GET_MODE (x2) != GET_MODE (link))
6295 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6296 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6297 call_arguments
6298 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6300 if (t && t != void_list_node)
6302 tree argtype = TREE_VALUE (t);
6303 enum machine_mode mode = TYPE_MODE (argtype);
6304 rtx reg;
6305 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6307 argtype = build_pointer_type (argtype);
6308 mode = TYPE_MODE (argtype);
6310 reg = targetm.calls.function_arg (args_so_far, mode,
6311 argtype, true);
6312 if (TREE_CODE (argtype) == REFERENCE_TYPE
6313 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6314 && reg
6315 && REG_P (reg)
6316 && GET_MODE (reg) == mode
6317 && (GET_MODE_CLASS (mode) == MODE_INT
6318 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6319 && REG_P (x)
6320 && REGNO (x) == REGNO (reg)
6321 && GET_MODE (x) == mode
6322 && item)
6324 enum machine_mode indmode
6325 = TYPE_MODE (TREE_TYPE (argtype));
6326 rtx mem = gen_rtx_MEM (indmode, x);
6327 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6328 if (val && cselib_preserved_value_p (val))
6330 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6331 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6332 call_arguments);
6334 else
6336 struct elt_loc_list *l;
6337 tree initial;
6339 /* Try harder: when passing the address of a constant
6340 pool integer, its value can easily be read back. */
6341 item = XEXP (item, 1);
6342 if (GET_CODE (item) == SUBREG)
6343 item = SUBREG_REG (item);
6344 gcc_assert (GET_CODE (item) == VALUE);
6345 val = CSELIB_VAL_PTR (item);
6346 for (l = val->locs; l; l = l->next)
6347 if (GET_CODE (l->loc) == SYMBOL_REF
6348 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6349 && SYMBOL_REF_DECL (l->loc)
6350 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6352 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6353 if (tree_fits_shwi_p (initial))
6355 item = GEN_INT (tree_to_shwi (initial));
6356 item = gen_rtx_CONCAT (indmode, mem, item);
6357 call_arguments
6358 = gen_rtx_EXPR_LIST (VOIDmode, item,
6359 call_arguments);
6361 break;
6365 targetm.calls.function_arg_advance (args_so_far, mode,
6366 argtype, true);
6367 t = TREE_CHAIN (t);
6371 /* Add debug arguments. */
6372 if (fndecl
6373 && TREE_CODE (fndecl) == FUNCTION_DECL
6374 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6376 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6377 if (debug_args)
6379 unsigned int ix;
6380 tree param;
6381 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6383 rtx item;
6384 tree dtemp = (**debug_args)[ix + 1];
6385 enum machine_mode mode = DECL_MODE (dtemp);
6386 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6387 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6388 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6389 call_arguments);
6394 /* Reverse call_arguments chain. */
6395 prev = NULL_RTX;
6396 for (cur = call_arguments; cur; cur = next)
6398 next = XEXP (cur, 1);
6399 XEXP (cur, 1) = prev;
6400 prev = cur;
6402 call_arguments = prev;
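/* The loop above is the standard three-pointer in-place reversal of
   a singly linked list: the EXPR_LIST chain was built by prepending,
   so e.g. a chain C -> B -> A comes out as A -> B -> C, restoring
   the order in which the locations were recorded.  */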
6404 x = get_call_rtx_from (insn);
6405 if (x)
6407 x = XEXP (XEXP (x, 0), 0);
6408 if (GET_CODE (x) == SYMBOL_REF)
6409 /* Don't record anything. */;
6410 else if (CONSTANT_P (x))
6412 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6413 pc_rtx, x);
6414 call_arguments
6415 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6417 else
6419 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6420 if (val && cselib_preserved_value_p (val))
6422 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6423 call_arguments
6424 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6428 if (this_arg)
6430 enum machine_mode mode
6431 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6432 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6433 HOST_WIDE_INT token
6434 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6435 if (token)
6436 clobbered = plus_constant (mode, clobbered,
6437 token * GET_MODE_SIZE (mode));
6438 clobbered = gen_rtx_MEM (mode, clobbered);
6439 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6440 call_arguments
6441 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
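/* For a virtual call through OBJ_TYPE_REF, the rtx built above is
   (concat (clobber pc) (mem (plus (mem this_arg) token*size))),
   i.e. a load of vtable slot TOKEN through the object's vtable
   pointer (assuming, as the code above does, that the vtable
   pointer lives at offset 0), which lets the consumer of the call
   site debug info compute the actual call target.  */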
6445 /* Callback for cselib_record_sets_hook; it records the uses and
6446 stores in an insn as micro operations, after cselib_record_sets
6447 has analyzed the sets in the insn but before it modifies the
6448 stored values in its internal tables.  When we're not doing
6449 cselib in the first place it is instead called directly, in
6450 which case SETS and N_SETS will be 0. */
6452 static void
6453 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6455 basic_block bb = BLOCK_FOR_INSN (insn);
6456 int n1, n2;
6457 struct count_use_info cui;
6458 micro_operation *mos;
6460 cselib_hook_called = true;
6462 cui.insn = insn;
6463 cui.bb = bb;
6464 cui.sets = sets;
6465 cui.n_sets = n_sets;
6467 n1 = VTI (bb)->mos.length ();
6468 cui.store_p = false;
6469 note_uses (&PATTERN (insn), add_uses_1, &cui);
6470 n2 = VTI (bb)->mos.length () - 1;
6471 mos = VTI (bb)->mos.address ();
6473 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6474 MO_VAL_LOC last. */
6475 while (n1 < n2)
6477 while (n1 < n2 && mos[n1].type == MO_USE)
6478 n1++;
6479 while (n1 < n2 && mos[n2].type != MO_USE)
6480 n2--;
6481 if (n1 < n2)
6483 micro_operation sw;
6485 sw = mos[n1];
6486 mos[n1] = mos[n2];
6487 mos[n2] = sw;
6491 n2 = VTI (bb)->mos.length () - 1;
6492 while (n1 < n2)
6494 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6495 n1++;
6496 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6497 n2--;
6498 if (n1 < n2)
6500 micro_operation sw;
6502 sw = mos[n1];
6503 mos[n1] = mos[n2];
6504 mos[n2] = sw;
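/* Each loop above is one pass of a quicksort-style two-pointer
   partition: N1 scans forward past elements already in the desired
   class, N2 scans backward past elements that belong in the tail,
   and out-of-place pairs are swapped, restoring the required order
   in O(n).  The same idiom recurs below for the store
   micro-operations.  */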
6508 if (CALL_P (insn))
6510 micro_operation mo;
6512 mo.type = MO_CALL;
6513 mo.insn = insn;
6514 mo.u.loc = call_arguments;
6515 call_arguments = NULL_RTX;
6517 if (dump_file && (dump_flags & TDF_DETAILS))
6518 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6519 VTI (bb)->mos.safe_push (mo);
6522 n1 = VTI (bb)->mos.length ();
6523 /* This will record NEXT_INSN (insn), such that we can
6524 insert notes before it without worrying about any
6525 notes that MO_USEs might emit after the insn. */
6526 cui.store_p = true;
6527 note_stores (PATTERN (insn), add_stores, &cui);
6528 n2 = VTI (bb)->mos.length () - 1;
6529 mos = VTI (bb)->mos.address ();
6531 /* Order the MO_VAL_USEs first (note_stores does nothing
6532 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6533 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6534 while (n1 < n2)
6536 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6537 n1++;
6538 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6539 n2--;
6540 if (n1 < n2)
6542 micro_operation sw;
6544 sw = mos[n1];
6545 mos[n1] = mos[n2];
6546 mos[n2] = sw;
6550 n2 = VTI (bb)->mos.length () - 1;
6551 while (n1 < n2)
6553 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6554 n1++;
6555 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6556 n2--;
6557 if (n1 < n2)
6559 micro_operation sw;
6561 sw = mos[n1];
6562 mos[n1] = mos[n2];
6563 mos[n2] = sw;
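/* Return the initialization status of SRC (a REG or MEM location)
   in the dataflow set IN, or VAR_INIT_STATUS_INITIALIZED when
   tracking of uninitialized variables is disabled.  */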
6568 static enum var_init_status
6569 find_src_status (dataflow_set *in, rtx src)
6571 tree decl = NULL_TREE;
6572 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6574 if (! flag_var_tracking_uninit)
6575 status = VAR_INIT_STATUS_INITIALIZED;
6577 if (src && REG_P (src))
6578 decl = var_debug_decl (REG_EXPR (src));
6579 else if (src && MEM_P (src))
6580 decl = var_debug_decl (MEM_EXPR (src));
6582 if (src && decl)
6583 status = get_init_value (in, src, dv_from_decl (decl));
6585 return status;
6588 /* SRC is the source of an assignment. Use SET to try to find what
6589 was ultimately assigned to SRC. Return that value if known,
6590 otherwise return SRC itself. */
6592 static rtx
6593 find_src_set_src (dataflow_set *set, rtx src)
6595 tree decl = NULL_TREE; /* The variable being copied around. */
6596 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6597 variable var;
6598 location_chain nextp;
6599 int i;
6600 bool found;
6602 if (src && REG_P (src))
6603 decl = var_debug_decl (REG_EXPR (src));
6604 else if (src && MEM_P (src))
6605 decl = var_debug_decl (MEM_EXPR (src));
6607 if (src && decl)
6609 decl_or_value dv = dv_from_decl (decl);
6611 var = shared_hash_find (set->vars, dv);
6612 if (var)
6614 found = false;
6615 for (i = 0; i < var->n_var_parts && !found; i++)
6616 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6617 nextp = nextp->next)
6618 if (rtx_equal_p (nextp->loc, src))
6620 set_src = nextp->set_src;
6621 found = true;
6627 return set_src;
6630 /* Compute the changes of variable locations in the basic block BB. */
6632 static bool
6633 compute_bb_dataflow (basic_block bb)
6635 unsigned int i;
6636 micro_operation *mo;
6637 bool changed;
6638 dataflow_set old_out;
6639 dataflow_set *in = &VTI (bb)->in;
6640 dataflow_set *out = &VTI (bb)->out;
6642 dataflow_set_init (&old_out);
6643 dataflow_set_copy (&old_out, out);
6644 dataflow_set_copy (out, in);
6646 if (MAY_HAVE_DEBUG_INSNS)
6647 local_get_addr_cache = new hash_map<rtx, rtx>;
6649 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6651 rtx_insn *insn = mo->insn;
6653 switch (mo->type)
6655 case MO_CALL:
6656 dataflow_set_clear_at_call (out);
6657 break;
6659 case MO_USE:
6661 rtx loc = mo->u.loc;
6663 if (REG_P (loc))
6664 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6665 else if (MEM_P (loc))
6666 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6668 break;
6670 case MO_VAL_LOC:
6672 rtx loc = mo->u.loc;
6673 rtx val, vloc;
6674 tree var;
6676 if (GET_CODE (loc) == CONCAT)
6678 val = XEXP (loc, 0);
6679 vloc = XEXP (loc, 1);
6681 else
6683 val = NULL_RTX;
6684 vloc = loc;
6687 var = PAT_VAR_LOCATION_DECL (vloc);
6689 clobber_variable_part (out, NULL_RTX,
6690 dv_from_decl (var), 0, NULL_RTX);
6691 if (val)
6693 if (VAL_NEEDS_RESOLUTION (loc))
6694 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6695 set_variable_part (out, val, dv_from_decl (var), 0,
6696 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6697 INSERT);
6699 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6700 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6701 dv_from_decl (var), 0,
6702 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6703 INSERT);
6705 break;
6707 case MO_VAL_USE:
6709 rtx loc = mo->u.loc;
6710 rtx val, vloc, uloc;
6712 vloc = uloc = XEXP (loc, 1);
6713 val = XEXP (loc, 0);
6715 if (GET_CODE (val) == CONCAT)
6717 uloc = XEXP (val, 1);
6718 val = XEXP (val, 0);
6721 if (VAL_NEEDS_RESOLUTION (loc))
6722 val_resolve (out, val, vloc, insn);
6723 else
6724 val_store (out, val, uloc, insn, false);
6726 if (VAL_HOLDS_TRACK_EXPR (loc))
6728 if (GET_CODE (uloc) == REG)
6729 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6730 NULL);
6731 else if (GET_CODE (uloc) == MEM)
6732 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6733 NULL);
6736 break;
6738 case MO_VAL_SET:
6740 rtx loc = mo->u.loc;
6741 rtx val, vloc, uloc;
6742 rtx dstv, srcv;
6744 vloc = loc;
6745 uloc = XEXP (vloc, 1);
6746 val = XEXP (vloc, 0);
6747 vloc = uloc;
6749 if (GET_CODE (uloc) == SET)
6751 dstv = SET_DEST (uloc);
6752 srcv = SET_SRC (uloc);
6754 else
6756 dstv = uloc;
6757 srcv = NULL;
6760 if (GET_CODE (val) == CONCAT)
6762 dstv = vloc = XEXP (val, 1);
6763 val = XEXP (val, 0);
6766 if (GET_CODE (vloc) == SET)
6768 srcv = SET_SRC (vloc);
6770 gcc_assert (val != srcv);
6771 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6773 dstv = vloc = SET_DEST (vloc);
6775 if (VAL_NEEDS_RESOLUTION (loc))
6776 val_resolve (out, val, srcv, insn);
6778 else if (VAL_NEEDS_RESOLUTION (loc))
6780 gcc_assert (GET_CODE (uloc) == SET
6781 && GET_CODE (SET_SRC (uloc)) == REG);
6782 val_resolve (out, val, SET_SRC (uloc), insn);
6785 if (VAL_HOLDS_TRACK_EXPR (loc))
6787 if (VAL_EXPR_IS_CLOBBERED (loc))
6789 if (REG_P (uloc))
6790 var_reg_delete (out, uloc, true);
6791 else if (MEM_P (uloc))
6793 gcc_assert (MEM_P (dstv));
6794 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6795 var_mem_delete (out, dstv, true);
6798 else
6800 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6801 rtx src = NULL, dst = uloc;
6802 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6804 if (GET_CODE (uloc) == SET)
6806 src = SET_SRC (uloc);
6807 dst = SET_DEST (uloc);
6810 if (copied_p)
6812 if (flag_var_tracking_uninit)
6814 status = find_src_status (in, src);
6816 if (status == VAR_INIT_STATUS_UNKNOWN)
6817 status = find_src_status (out, src);
6820 src = find_src_set_src (in, src);
6823 if (REG_P (dst))
6824 var_reg_delete_and_set (out, dst, !copied_p,
6825 status, srcv);
6826 else if (MEM_P (dst))
6828 gcc_assert (MEM_P (dstv));
6829 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6830 var_mem_delete_and_set (out, dstv, !copied_p,
6831 status, srcv);
6835 else if (REG_P (uloc))
6836 var_regno_delete (out, REGNO (uloc));
6837 else if (MEM_P (uloc))
6839 gcc_checking_assert (GET_CODE (vloc) == MEM);
6840 gcc_checking_assert (dstv == vloc);
6841 if (dstv != vloc)
6842 clobber_overlapping_mems (out, vloc);
6845 val_store (out, val, dstv, insn, true);
6847 break;
6849 case MO_SET:
6851 rtx loc = mo->u.loc;
6852 rtx set_src = NULL;
6854 if (GET_CODE (loc) == SET)
6856 set_src = SET_SRC (loc);
6857 loc = SET_DEST (loc);
6860 if (REG_P (loc))
6861 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6862 set_src);
6863 else if (MEM_P (loc))
6864 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6865 set_src);
6867 break;
6869 case MO_COPY:
6871 rtx loc = mo->u.loc;
6872 enum var_init_status src_status;
6873 rtx set_src = NULL;
6875 if (GET_CODE (loc) == SET)
6877 set_src = SET_SRC (loc);
6878 loc = SET_DEST (loc);
6881 if (! flag_var_tracking_uninit)
6882 src_status = VAR_INIT_STATUS_INITIALIZED;
6883 else
6885 src_status = find_src_status (in, set_src);
6887 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6888 src_status = find_src_status (out, set_src);
6891 set_src = find_src_set_src (in, set_src);
6893 if (REG_P (loc))
6894 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6895 else if (MEM_P (loc))
6896 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6898 break;
6900 case MO_USE_NO_VAR:
6902 rtx loc = mo->u.loc;
6904 if (REG_P (loc))
6905 var_reg_delete (out, loc, false);
6906 else if (MEM_P (loc))
6907 var_mem_delete (out, loc, false);
6909 break;
6911 case MO_CLOBBER:
6913 rtx loc = mo->u.loc;
6915 if (REG_P (loc))
6916 var_reg_delete (out, loc, true);
6917 else if (MEM_P (loc))
6918 var_mem_delete (out, loc, true);
6920 break;
6922 case MO_ADJUST:
6923 out->stack_adjust += mo->u.adjust;
6924 break;
6928 if (MAY_HAVE_DEBUG_INSNS)
6930 delete local_get_addr_cache;
6931 local_get_addr_cache = NULL;
6933 dataflow_set_equiv_regs (out);
6934 shared_hash_htab (out->vars)
6935 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6936 shared_hash_htab (out->vars)
6937 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6938 #if ENABLE_CHECKING
6939 shared_hash_htab (out->vars)
6940 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6941 #endif
6943 changed = dataflow_set_different (&old_out, out);
6944 dataflow_set_destroy (&old_out);
6945 return changed;
6948 /* Find the locations of variables in the whole function. */
6950 static bool
6951 vt_find_locations (void)
6953 fibheap_t worklist, pending, fibheap_swap;
6954 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6955 basic_block bb;
6956 edge e;
6957 int *bb_order;
6958 int *rc_order;
6959 int i;
6960 int htabsz = 0;
6961 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6962 bool success = true;
6964 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6965 /* Compute the reverse completion order of a depth-first search of the
6966 CFG so that the dataflow analysis converges faster. */
6967 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6968 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6969 pre_and_rev_post_order_compute (NULL, rc_order, false);
6970 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6971 bb_order[rc_order[i]] = i;
6972 free (rc_order);
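/* BB_ORDER now maps a basic block index to its position in the
   reverse completion order; it is used below as the key when
   inserting blocks into the worklist heaps, so that blocks are
   processed in an order close to topological.  */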
6974 worklist = fibheap_new ();
6975 pending = fibheap_new ();
6976 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
6977 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
6978 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
6979 bitmap_clear (in_worklist);
6981 FOR_EACH_BB_FN (bb, cfun)
6982 fibheap_insert (pending, bb_order[bb->index], bb);
6983 bitmap_ones (in_pending);
6985 while (success && !fibheap_empty (pending))
6987 fibheap_swap = pending;
6988 pending = worklist;
6989 worklist = fibheap_swap;
6990 sbitmap_swap = in_pending;
6991 in_pending = in_worklist;
6992 in_worklist = sbitmap_swap;
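/* The solver runs in rounds: WORKLIST holds the blocks of the
   current round, while PENDING collects blocks whose IN sets must
   be recomputed in the next one.  Swapping the heaps (and the
   bitmaps tracking their membership) starts a new round; VISITED
   ensures each block is processed at most once per round.  */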
6994 bitmap_clear (visited);
6996 while (!fibheap_empty (worklist))
6998 bb = (basic_block) fibheap_extract_min (worklist);
6999 bitmap_clear_bit (in_worklist, bb->index);
7000 gcc_assert (!bitmap_bit_p (visited, bb->index));
7001 if (!bitmap_bit_p (visited, bb->index))
7003 bool changed;
7004 edge_iterator ei;
7005 int oldinsz, oldoutsz;
7007 bitmap_set_bit (visited, bb->index);
7009 if (VTI (bb)->in.vars)
7011 htabsz
7012 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7013 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7014 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7015 oldoutsz
7016 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7018 else
7019 oldinsz = oldoutsz = 0;
7021 if (MAY_HAVE_DEBUG_INSNS)
7023 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7024 bool first = true, adjust = false;
7026 /* Calculate the IN set as the intersection of
7027 predecessor OUT sets. */
7029 dataflow_set_clear (in);
7030 dst_can_be_shared = true;
7032 FOR_EACH_EDGE (e, ei, bb->preds)
7033 if (!VTI (e->src)->flooded)
7034 gcc_assert (bb_order[bb->index]
7035 <= bb_order[e->src->index]);
7036 else if (first)
7038 dataflow_set_copy (in, &VTI (e->src)->out);
7039 first_out = &VTI (e->src)->out;
7040 first = false;
7042 else
7044 dataflow_set_merge (in, &VTI (e->src)->out);
7045 adjust = true;
7048 if (adjust)
7050 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7051 #if ENABLE_CHECKING
7052 /* Merge and merge_adjust should keep entries in
7053 canonical order. */
7054 shared_hash_htab (in->vars)
7055 ->traverse <dataflow_set *,
7056 canonicalize_loc_order_check> (in);
7057 #endif
7058 if (dst_can_be_shared)
7060 shared_hash_destroy (in->vars);
7061 in->vars = shared_hash_copy (first_out->vars);
7065 VTI (bb)->flooded = true;
7067 else
7069 /* Calculate the IN set as the union of predecessor OUT sets. */
7070 dataflow_set_clear (&VTI (bb)->in);
7071 FOR_EACH_EDGE (e, ei, bb->preds)
7072 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7075 changed = compute_bb_dataflow (bb);
7076 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7077 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7079 if (htabmax && htabsz > htabmax)
7081 if (MAY_HAVE_DEBUG_INSNS)
7082 inform (DECL_SOURCE_LOCATION (cfun->decl),
7083 "variable tracking size limit exceeded with "
7084 "-fvar-tracking-assignments, retrying without");
7085 else
7086 inform (DECL_SOURCE_LOCATION (cfun->decl),
7087 "variable tracking size limit exceeded");
7088 success = false;
7089 break;
7092 if (changed)
7094 FOR_EACH_EDGE (e, ei, bb->succs)
7096 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7097 continue;
7099 if (bitmap_bit_p (visited, e->dest->index))
7101 if (!bitmap_bit_p (in_pending, e->dest->index))
7103 /* Send E->DEST to next round. */
7104 bitmap_set_bit (in_pending, e->dest->index);
7105 fibheap_insert (pending,
7106 bb_order[e->dest->index],
7107 e->dest);
7110 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7112 /* Add E->DEST to current round. */
7113 bitmap_set_bit (in_worklist, e->dest->index);
7114 fibheap_insert (worklist, bb_order[e->dest->index],
7115 e->dest);
7120 if (dump_file)
7121 fprintf (dump_file,
7122 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7123 bb->index,
7124 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7125 oldinsz,
7126 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7127 oldoutsz,
7128 (int)worklist->nodes, (int)pending->nodes, htabsz);
7130 if (dump_file && (dump_flags & TDF_DETAILS))
7132 fprintf (dump_file, "BB %i IN:\n", bb->index);
7133 dump_dataflow_set (&VTI (bb)->in);
7134 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7135 dump_dataflow_set (&VTI (bb)->out);
7141 if (success && MAY_HAVE_DEBUG_INSNS)
7142 FOR_EACH_BB_FN (bb, cfun)
7143 gcc_assert (VTI (bb)->flooded);
7145 free (bb_order);
7146 fibheap_delete (worklist);
7147 fibheap_delete (pending);
7148 sbitmap_free (visited);
7149 sbitmap_free (in_worklist);
7150 sbitmap_free (in_pending);
7152 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7153 return success;
7156 /* Print the contents of LIST to the dump file. */
7158 static void
7159 dump_attrs_list (attrs list)
7161 for (; list; list = list->next)
7163 if (dv_is_decl_p (list->dv))
7164 print_mem_expr (dump_file, dv_as_decl (list->dv));
7165 else
7166 print_rtl_single (dump_file, dv_as_value (list->dv));
7167 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7169 fprintf (dump_file, "\n");
7172 /* Print the information about variable *SLOT to dump file. */
7174 static int
7175 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7177 variable var = *slot;
7179 dump_var (var);
7181 /* Continue traversing the hash table. */
7182 return 1;
7185 /* Print the information about variable VAR to dump file. */
7187 static void
7188 dump_var (variable var)
7190 int i;
7191 location_chain node;
7193 if (dv_is_decl_p (var->dv))
7195 const_tree decl = dv_as_decl (var->dv);
7197 if (DECL_NAME (decl))
7199 fprintf (dump_file, " name: %s",
7200 IDENTIFIER_POINTER (DECL_NAME (decl)));
7201 if (dump_flags & TDF_UID)
7202 fprintf (dump_file, "D.%u", DECL_UID (decl));
7204 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7205 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7206 else
7207 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7208 fprintf (dump_file, "\n");
7210 else
7212 fputc (' ', dump_file);
7213 print_rtl_single (dump_file, dv_as_value (var->dv));
7216 for (i = 0; i < var->n_var_parts; i++)
7218 fprintf (dump_file, " offset %ld\n",
7219 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7220 for (node = var->var_part[i].loc_chain; node; node = node->next)
7222 fprintf (dump_file, " ");
7223 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7224 fprintf (dump_file, "[uninit]");
7225 print_rtl_single (dump_file, node->loc);
7230 /* Print the information about variables from hash table VARS to dump file. */
7232 static void
7233 dump_vars (variable_table_type *vars)
7235 if (vars->elements () > 0)
7237 fprintf (dump_file, "Variables:\n");
7238 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7242 /* Print the dataflow set SET to dump file. */
7244 static void
7245 dump_dataflow_set (dataflow_set *set)
7247 int i;
7249 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7250 set->stack_adjust);
7251 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7253 if (set->regs[i])
7255 fprintf (dump_file, "Reg %d:", i);
7256 dump_attrs_list (set->regs[i]);
7259 dump_vars (shared_hash_htab (set->vars));
7260 fprintf (dump_file, "\n");
7263 /* Print the IN and OUT sets for each basic block to dump file. */
7265 static void
7266 dump_dataflow_sets (void)
7268 basic_block bb;
7270 FOR_EACH_BB_FN (bb, cfun)
7272 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7273 fprintf (dump_file, "IN:\n");
7274 dump_dataflow_set (&VTI (bb)->in);
7275 fprintf (dump_file, "OUT:\n");
7276 dump_dataflow_set (&VTI (bb)->out);
7280 /* Return the variable for DV in dropped_values, inserting one if
7281 requested with INSERT. */
7283 static inline variable
7284 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7286 variable_def **slot;
7287 variable empty_var;
7288 onepart_enum_t onepart;
7290 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7292 if (!slot)
7293 return NULL;
7295 if (*slot)
7296 return *slot;
7298 gcc_checking_assert (insert == INSERT);
7300 onepart = dv_onepart_p (dv);
7302 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7304 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7305 empty_var->dv = dv;
7306 empty_var->refcount = 1;
7307 empty_var->n_var_parts = 0;
7308 empty_var->onepart = onepart;
7309 empty_var->in_changed_variables = false;
7310 empty_var->var_part[0].loc_chain = NULL;
7311 empty_var->var_part[0].cur_loc = NULL;
7312 VAR_LOC_1PAUX (empty_var) = NULL;
7313 set_dv_changed (dv, true);
7315 *slot = empty_var;
7317 return empty_var;
7320 /* Recover the one-part aux from dropped_values. */
7322 static struct onepart_aux *
7323 recover_dropped_1paux (variable var)
7325 variable dvar;
7327 gcc_checking_assert (var->onepart);
7329 if (VAR_LOC_1PAUX (var))
7330 return VAR_LOC_1PAUX (var);
7332 if (var->onepart == ONEPART_VDECL)
7333 return NULL;
7335 dvar = variable_from_dropped (var->dv, NO_INSERT);
7337 if (!dvar)
7338 return NULL;
7340 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7341 VAR_LOC_1PAUX (dvar) = NULL;
7343 return VAR_LOC_1PAUX (var);
7346 /* Add variable VAR to the hash table of changed variables and
7347 if it has no locations delete it from SET's hash table. */
7349 static void
7350 variable_was_changed (variable var, dataflow_set *set)
7352 hashval_t hash = dv_htab_hash (var->dv);
7354 if (emit_notes)
7356 variable_def **slot;
7358 /* Remember this decl or VALUE has been added to changed_variables. */
7359 set_dv_changed (var->dv, true);
7361 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7363 if (*slot)
7365 variable old_var = *slot;
7366 gcc_assert (old_var->in_changed_variables);
7367 old_var->in_changed_variables = false;
7368 if (var != old_var && var->onepart)
7370 /* Restore the auxiliary info from an empty variable
7371 previously created for changed_variables, so it is
7372 not lost. */
7373 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7374 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7375 VAR_LOC_1PAUX (old_var) = NULL;
7377 variable_htab_free (*slot);
7380 if (set && var->n_var_parts == 0)
7382 onepart_enum_t onepart = var->onepart;
7383 variable empty_var = NULL;
7384 variable_def **dslot = NULL;
7386 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7388 dslot = dropped_values->find_slot_with_hash (var->dv,
7389 dv_htab_hash (var->dv),
7390 INSERT);
7391 empty_var = *dslot;
7393 if (empty_var)
7395 gcc_checking_assert (!empty_var->in_changed_variables);
7396 if (!VAR_LOC_1PAUX (var))
7398 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7399 VAR_LOC_1PAUX (empty_var) = NULL;
7401 else
7402 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7406 if (!empty_var)
7408 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7409 empty_var->dv = var->dv;
7410 empty_var->refcount = 1;
7411 empty_var->n_var_parts = 0;
7412 empty_var->onepart = onepart;
7413 if (dslot)
7415 empty_var->refcount++;
7416 *dslot = empty_var;
7419 else
7420 empty_var->refcount++;
7421 empty_var->in_changed_variables = true;
7422 *slot = empty_var;
7423 if (onepart)
7425 empty_var->var_part[0].loc_chain = NULL;
7426 empty_var->var_part[0].cur_loc = NULL;
7427 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7428 VAR_LOC_1PAUX (var) = NULL;
7430 goto drop_var;
7432 else
7434 if (var->onepart && !VAR_LOC_1PAUX (var))
7435 recover_dropped_1paux (var);
7436 var->refcount++;
7437 var->in_changed_variables = true;
7438 *slot = var;
7441 else
7443 gcc_assert (set);
7444 if (var->n_var_parts == 0)
7446 variable_def **slot;
7448 drop_var:
7449 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7450 if (slot)
7452 if (shared_hash_shared (set->vars))
7453 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7454 NO_INSERT);
7455 shared_hash_htab (set->vars)->clear_slot (slot);
7461 /* Look for the index in VAR->var_part corresponding to OFFSET.
7462 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7463 referenced int will be set to the index that the part has or should
7464 have, if it should be inserted. */
7466 static inline int
7467 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7468 int *insertion_point)
7470 int pos, low, high;
7472 if (var->onepart)
7474 if (offset != 0)
7475 return -1;
7477 if (insertion_point)
7478 *insertion_point = 0;
7480 return var->n_var_parts - 1;
7483 /* Find the location part. */
7484 low = 0;
7485 high = var->n_var_parts;
7486 while (low != high)
7488 pos = (low + high) / 2;
7489 if (VAR_PART_OFFSET (var, pos) < offset)
7490 low = pos + 1;
7491 else
7492 high = pos;
7494 pos = low;
7496 if (insertion_point)
7497 *insertion_point = pos;
7499 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7500 return pos;
7502 return -1;
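/* Worker for set_variable_part below: add location LOC for the
   variable part at OFFSET to the variable at SLOT in SET, unsharing
   a shared variable first if necessary and keeping one-part location
   chains in canonical order.  Returns the (possibly unshared)
   slot.  */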
7505 static variable_def **
7506 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7507 decl_or_value dv, HOST_WIDE_INT offset,
7508 enum var_init_status initialized, rtx set_src)
7510 int pos;
7511 location_chain node, next;
7512 location_chain *nextp;
7513 variable var;
7514 onepart_enum_t onepart;
7516 var = *slot;
7518 if (var)
7519 onepart = var->onepart;
7520 else
7521 onepart = dv_onepart_p (dv);
7523 gcc_checking_assert (offset == 0 || !onepart);
7524 gcc_checking_assert (loc != dv_as_opaque (dv));
7526 if (! flag_var_tracking_uninit)
7527 initialized = VAR_INIT_STATUS_INITIALIZED;
7529 if (!var)
7531 /* Create new variable information. */
7532 var = (variable) pool_alloc (onepart_pool (onepart));
7533 var->dv = dv;
7534 var->refcount = 1;
7535 var->n_var_parts = 1;
7536 var->onepart = onepart;
7537 var->in_changed_variables = false;
7538 if (var->onepart)
7539 VAR_LOC_1PAUX (var) = NULL;
7540 else
7541 VAR_PART_OFFSET (var, 0) = offset;
7542 var->var_part[0].loc_chain = NULL;
7543 var->var_part[0].cur_loc = NULL;
7544 *slot = var;
7545 pos = 0;
7546 nextp = &var->var_part[0].loc_chain;
7548 else if (onepart)
7550 int r = -1, c = 0;
7552 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7554 pos = 0;
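/* One-part location chains are kept in a canonical order: REGs
   first (sorted by REGNO), then MEMs (compared by their addresses
   via loc_cmp), then VALUEs in canon_value_cmp order.  In the scans
   below, R becomes 0 if LOC is already present and 1 once the
   insertion point has been found, while C counts the nodes that
   precede that point.  */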
7556 if (GET_CODE (loc) == VALUE)
7558 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7559 nextp = &node->next)
7560 if (GET_CODE (node->loc) == VALUE)
7562 if (node->loc == loc)
7564 r = 0;
7565 break;
7567 if (canon_value_cmp (node->loc, loc))
7568 c++;
7569 else
7571 r = 1;
7572 break;
7575 else if (REG_P (node->loc) || MEM_P (node->loc))
7576 c++;
7577 else
7579 r = 1;
7580 break;
7583 else if (REG_P (loc))
7585 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7586 nextp = &node->next)
7587 if (REG_P (node->loc))
7589 if (REGNO (node->loc) < REGNO (loc))
7590 c++;
7591 else
7593 if (REGNO (node->loc) == REGNO (loc))
7594 r = 0;
7595 else
7596 r = 1;
7597 break;
7600 else
7602 r = 1;
7603 break;
7606 else if (MEM_P (loc))
7608 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7609 nextp = &node->next)
7610 if (REG_P (node->loc))
7611 c++;
7612 else if (MEM_P (node->loc))
7614 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7615 break;
7616 else
7617 c++;
7619 else
7621 r = 1;
7622 break;
7625 else
7626 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7627 nextp = &node->next)
7628 if ((r = loc_cmp (node->loc, loc)) >= 0)
7629 break;
7630 else
7631 c++;
7633 if (r == 0)
7634 return slot;
7636 if (shared_var_p (var, set->vars))
7638 slot = unshare_variable (set, slot, var, initialized);
7639 var = *slot;
7640 for (nextp = &var->var_part[0].loc_chain; c;
7641 nextp = &(*nextp)->next)
7642 c--;
7643 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7646 else
7648 int inspos = 0;
7650 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7652 pos = find_variable_location_part (var, offset, &inspos);
7654 if (pos >= 0)
7656 node = var->var_part[pos].loc_chain;
7658 if (node
7659 && ((REG_P (node->loc) && REG_P (loc)
7660 && REGNO (node->loc) == REGNO (loc))
7661 || rtx_equal_p (node->loc, loc)))
7663 /* LOC is at the beginning of the chain, so we have nothing
7664 to do. */
7665 if (node->init < initialized)
7666 node->init = initialized;
7667 if (set_src != NULL)
7668 node->set_src = set_src;
7670 return slot;
7672 else
7674 /* We have to make a copy of a shared variable. */
7675 if (shared_var_p (var, set->vars))
7677 slot = unshare_variable (set, slot, var, initialized);
7678 var = *slot;
7682 else
7684 /* We have not found the location part, so a new one will be created. */
7686 /* We have to make a copy of the shared variable. */
7687 if (shared_var_p (var, set->vars))
7689 slot = unshare_variable (set, slot, var, initialized);
7690 var = *slot;
7693 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7694 thus there are at most MAX_VAR_PARTS different offsets. */
7695 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7696 && (!var->n_var_parts || !onepart));
7698 /* We have to move the elements of the array starting at index
7699 inspos to the next position. */
7700 for (pos = var->n_var_parts; pos > inspos; pos--)
7701 var->var_part[pos] = var->var_part[pos - 1];
7703 var->n_var_parts++;
7704 gcc_checking_assert (!onepart);
7705 VAR_PART_OFFSET (var, pos) = offset;
7706 var->var_part[pos].loc_chain = NULL;
7707 var->var_part[pos].cur_loc = NULL;
7710 /* Delete the location from the list. */
7711 nextp = &var->var_part[pos].loc_chain;
7712 for (node = var->var_part[pos].loc_chain; node; node = next)
7714 next = node->next;
7715 if ((REG_P (node->loc) && REG_P (loc)
7716 && REGNO (node->loc) == REGNO (loc))
7717 || rtx_equal_p (node->loc, loc))
7719 /* Save these values, to assign to the new node, before
7720 deleting this one. */
7721 if (node->init > initialized)
7722 initialized = node->init;
7723 if (node->set_src != NULL && set_src == NULL)
7724 set_src = node->set_src;
7725 if (var->var_part[pos].cur_loc == node->loc)
7726 var->var_part[pos].cur_loc = NULL;
7727 pool_free (loc_chain_pool, node);
7728 *nextp = next;
7729 break;
7731 else
7732 nextp = &node->next;
7735 nextp = &var->var_part[pos].loc_chain;
7738 /* Add the location to the beginning. */
7739 node = (location_chain) pool_alloc (loc_chain_pool);
7740 node->loc = loc;
7741 node->init = initialized;
7742 node->set_src = set_src;
7743 node->next = *nextp;
7744 *nextp = node;
7746 /* If no location has been emitted for this part yet, do so now. */
7747 if (var->var_part[pos].cur_loc == NULL)
7748 variable_was_changed (var, set);
7750 return slot;
7753 /* Set the part of variable's location in the dataflow set SET. The
7754 variable part is specified by variable's declaration in DV and
7755 offset OFFSET and the part's location by LOC. IOPT should be
7756 NO_INSERT if the variable is known to be in SET already and the
7757 variable hash table must not be resized, and INSERT otherwise. */
7759 static void
7760 set_variable_part (dataflow_set *set, rtx loc,
7761 decl_or_value dv, HOST_WIDE_INT offset,
7762 enum var_init_status initialized, rtx set_src,
7763 enum insert_option iopt)
7765 variable_def **slot;
7767 if (iopt == NO_INSERT)
7768 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7769 else
7771 slot = shared_hash_find_slot (set->vars, dv);
7772 if (!slot)
7773 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7775 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7778 /* Remove all recorded register locations for the given variable part
7779 from dataflow set SET, except for those that are identical to LOC.
7780 The variable part is specified by its SET->vars slot SLOT and
7781 offset OFFSET. */
7783 static variable_def **
7784 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7785 HOST_WIDE_INT offset, rtx set_src)
7787 variable var = *slot;
7788 int pos = find_variable_location_part (var, offset, NULL);
7790 if (pos >= 0)
7792 location_chain node, next;
7794 /* Remove the register locations from the dataflow set. */
7795 next = var->var_part[pos].loc_chain;
7796 for (node = next; node; node = next)
7798 next = node->next;
7799 if (node->loc != loc
7800 && (!flag_var_tracking_uninit
7801 || !set_src
7802 || MEM_P (set_src)
7803 || !rtx_equal_p (set_src, node->set_src)))
7805 if (REG_P (node->loc))
7807 attrs anode, anext;
7808 attrs *anextp;
7810 /* Remove the variable part from the register's
7811 list, but preserve any other variable parts
7812 that might be regarded as live in that same
7813 register. */
7814 anextp = &set->regs[REGNO (node->loc)];
7815 for (anode = *anextp; anode; anode = anext)
7817 anext = anode->next;
7818 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7819 && anode->offset == offset)
7821 pool_free (attrs_pool, anode);
7822 *anextp = anext;
7824 else
7825 anextp = &anode->next;
7829 slot = delete_slot_part (set, node->loc, slot, offset);
7834 return slot;
7837 /* Remove all recorded register locations for the given variable part
7838 from dataflow set SET, except for those that are identical to LOC.
7839 The variable part is specified by variable's declaration or value
7840 DV and offset OFFSET. */
7842 static void
7843 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7844 HOST_WIDE_INT offset, rtx set_src)
7846 variable_def **slot;
7848 if (!dv_as_opaque (dv)
7849 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7850 return;
7852 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7853 if (!slot)
7854 return;
7856 clobber_slot_part (set, loc, slot, offset, set_src);
7859 /* Delete the part of variable's location from dataflow set SET. The
7860 variable part is specified by its SET->vars slot SLOT and offset
7861 OFFSET and the part's location by LOC. */
7863 static variable_def **
7864 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7865 HOST_WIDE_INT offset)
7867 variable var = *slot;
7868 int pos = find_variable_location_part (var, offset, NULL);
7870 if (pos >= 0)
7872 location_chain node, next;
7873 location_chain *nextp;
7874 bool changed;
7875 rtx cur_loc;
7877 if (shared_var_p (var, set->vars))
7879 /* If the variable's location chain contains LOC we have to
7880 make a copy of the shared variable. */
7881 for (node = var->var_part[pos].loc_chain; node;
7882 node = node->next)
7884 if ((REG_P (node->loc) && REG_P (loc)
7885 && REGNO (node->loc) == REGNO (loc))
7886 || rtx_equal_p (node->loc, loc))
7888 slot = unshare_variable (set, slot, var,
7889 VAR_INIT_STATUS_UNKNOWN);
7890 var = *slot;
7891 break;
7896 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7897 cur_loc = VAR_LOC_FROM (var);
7898 else
7899 cur_loc = var->var_part[pos].cur_loc;
7901 /* Delete the location part. */
7902 changed = false;
7903 nextp = &var->var_part[pos].loc_chain;
7904 for (node = *nextp; node; node = next)
7906 next = node->next;
7907 if ((REG_P (node->loc) && REG_P (loc)
7908 && REGNO (node->loc) == REGNO (loc))
7909 || rtx_equal_p (node->loc, loc))
7911 /* If we have deleted the location which was last emitted
7912 we have to emit a new location, so add the variable to the
7913 set of changed variables. */
7914 if (cur_loc == node->loc)
7916 changed = true;
7917 var->var_part[pos].cur_loc = NULL;
7918 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7919 VAR_LOC_FROM (var) = NULL;
7921 pool_free (loc_chain_pool, node);
7922 *nextp = next;
7923 break;
7925 else
7926 nextp = &node->next;
7929 if (var->var_part[pos].loc_chain == NULL)
7931 changed = true;
7932 var->n_var_parts--;
7933 while (pos < var->n_var_parts)
7935 var->var_part[pos] = var->var_part[pos + 1];
7936 pos++;
7939 if (changed)
7940 variable_was_changed (var, set);
7943 return slot;
7946 /* Delete the part of variable's location from dataflow set SET. The
7947 variable part is specified by variable's declaration or value DV
7948 and offset OFFSET and the part's location by LOC. */
7950 static void
7951 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7952 HOST_WIDE_INT offset)
7954 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7955 if (!slot)
7956 return;
7958 delete_slot_part (set, loc, slot, offset);
7962 /* Structure for passing some other parameters to function
7963 vt_expand_loc_callback. */
7964 struct expand_loc_callback_data
7966 /* The variables and values active at this point. */
7967 variable_table_type *vars;
7969 /* Stack of values and debug_exprs under expansion, and their
7970 children. */
7971 auto_vec<rtx, 4> expanding;
7973 /* Stack of values and debug_exprs whose expansion hit recursion
7974 cycles. They will have VALUE_RECURSED_INTO marked when added to
7975 this list. This flag will be cleared if any of its dependencies
7976 resolves to a valid location. So, if the flag remains set at the
7977 end of the search, we know no valid location for this one can
7978 possibly exist. */
7979 auto_vec<rtx, 4> pending;
7981 /* The maximum depth among the sub-expressions under expansion.
7982 Zero indicates no expansion so far. */
7983 expand_depth depth;
7986 /* Allocate the one-part auxiliary data structure for VAR, with enough
7987 room for COUNT dependencies. */
7989 static void
7990 loc_exp_dep_alloc (variable var, int count)
7992 size_t allocsize;
7994 gcc_checking_assert (var->onepart);
7996 /* We can be called with COUNT == 0 to allocate the data structure
7997 without any dependencies, e.g. for the backlinks only. However,
7998 if we are specifying a COUNT, then the dependency list must have
7999 been emptied before. It would be possible to adjust pointers or
8000 force it empty here, but this is better done at an earlier point
8001 in the algorithm, so we instead leave an assertion to catch
8002 errors. */
8003 gcc_checking_assert (!count
8004 || VAR_LOC_DEP_VEC (var) == NULL
8005 || VAR_LOC_DEP_VEC (var)->is_empty ());
8007 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8008 return;
8010 allocsize = offsetof (struct onepart_aux, deps)
8011 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8013 if (VAR_LOC_1PAUX (var))
8015 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8016 VAR_LOC_1PAUX (var), allocsize);
8017 /* If the reallocation moves the onepaux structure, the
8018 back-pointer to BACKLINKS in the first list member will still
8019 point to its old location. Adjust it. */
8020 if (VAR_LOC_DEP_LST (var))
8021 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8023 else
8025 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8026 *VAR_LOC_DEP_LSTP (var) = NULL;
8027 VAR_LOC_FROM (var) = NULL;
8028 VAR_LOC_DEPTH (var).complexity = 0;
8029 VAR_LOC_DEPTH (var).entryvals = 0;
8031 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8034 /* Remove all entries from the vector of active dependencies of VAR,
8035 removing them from the back-links lists too. */
8037 static void
8038 loc_exp_dep_clear (variable var)
8040 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8042 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8043 if (led->next)
8044 led->next->pprev = led->pprev;
8045 if (led->pprev)
8046 *led->pprev = led->next;
8047 VAR_LOC_DEP_VEC (var)->pop ();
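/* The backlink lists above use the classic PPREV idiom for an
   intrusive doubly linked list: LED->pprev points at whichever
   "next" field (or list head) points back at LED, so a node can
   unlink itself in O(1) without knowing the list head, as in
   *led->pprev = led->next; if (led->next) led->next->pprev
   = led->pprev;  */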
8051 /* Insert an active dependency from VAR on X to the vector of
8052 dependencies, and add the corresponding back-link to X's list of
8053 back-links in VARS. */
8055 static void
8056 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8058 decl_or_value dv;
8059 variable xvar;
8060 loc_exp_dep *led;
8062 dv = dv_from_rtx (x);
8064 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8065 an additional lookup? */
8066 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8068 if (!xvar)
8070 xvar = variable_from_dropped (dv, NO_INSERT);
8071 gcc_checking_assert (xvar);
8074 /* No point in adding the same backlink more than once. This may
8075 arise if say the same value appears in two complex expressions in
8076 the same loc_list, or even more than once in a single
8077 expression. */
8078 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8079 return;
8081 if (var->onepart == NOT_ONEPART)
8082 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8083 else
8085 loc_exp_dep empty;
8086 memset (&empty, 0, sizeof (empty));
8087 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8088 led = &VAR_LOC_DEP_VEC (var)->last ();
8090 led->dv = var->dv;
8091 led->value = x;
8093 loc_exp_dep_alloc (xvar, 0);
8094 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8095 led->next = *led->pprev;
8096 if (led->next)
8097 led->next->pprev = &led->next;
8098 *led->pprev = led;
8101 /* Create active dependencies of VAR on COUNT values starting at
8102 VALUE, and corresponding back-links to the entries in VARS. Return
8103 true if we found any pending-recursion results. */
8105 static bool
8106 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8107 variable_table_type *vars)
8109 bool pending_recursion = false;
8111 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8112 || VAR_LOC_DEP_VEC (var)->is_empty ());
8114 /* Set up all dependencies from LAST_CHILD (as computed at the end of
8115 the expansion loop in our caller) to the end. */
8116 loc_exp_dep_alloc (var, count);
8118 while (count--)
8120 rtx x = *value++;
8122 if (!pending_recursion)
8123 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8125 loc_exp_insert_dep (var, x, vars);
8128 return pending_recursion;
8131 /* Notify the back-links of IVAR that are pending recursion that we
8132 have found a non-NIL value for it, so they are cleared for another
8133 attempt to compute a current location. */
8135 static void
8136 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8138 loc_exp_dep *led, *next;
8140 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8142 decl_or_value dv = led->dv;
8143 variable var;
8145 next = led->next;
8147 if (dv_is_value_p (dv))
8149 rtx value = dv_as_value (dv);
8151 /* If we have already resolved it, leave it alone. */
8152 if (!VALUE_RECURSED_INTO (value))
8153 continue;
8155 /* Check that VALUE_RECURSED_INTO, true from the test above,
8156 implies NO_LOC_P. */
8157 gcc_checking_assert (NO_LOC_P (value));
8159 /* We won't notify variables that are being expanded,
8160 because their dependency list is cleared before
8161 recursing. */
8162 NO_LOC_P (value) = false;
8163 VALUE_RECURSED_INTO (value) = false;
8165 gcc_checking_assert (dv_changed_p (dv));
8167 else
8169 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8170 if (!dv_changed_p (dv))
8171 continue;
8174 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8176 if (!var)
8177 var = variable_from_dropped (dv, NO_INSERT);
8179 if (var)
8180 notify_dependents_of_resolved_value (var, vars);
8182 if (next)
8183 next->pprev = led->pprev;
8184 if (led->pprev)
8185 *led->pprev = next;
8186 led->next = NULL;
8187 led->pprev = NULL;
8191 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8192 int max_depth, void *data);
8194 /* Return the combined depth, when one sub-expression evaluated to
8195 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8197 static inline expand_depth
8198 update_depth (expand_depth saved_depth, expand_depth best_depth)
8200 /* If we didn't find anything, stick with what we had. */
8201 if (!best_depth.complexity)
8202 return saved_depth;
8204 /* If we hadn't found anything before, use the depth of the current
8205 expression. Do NOT add one extra level; we want to compute the
8206 maximum depth among sub-expressions. We'll increment it later,
8207 if appropriate. */
8208 if (!saved_depth.complexity)
8209 return best_depth;
8211 /* Combine the entryval count so that regardless of which one we
8212 return, the entryval count is accurate. */
8213 best_depth.entryvals = saved_depth.entryvals
8214 = best_depth.entryvals + saved_depth.entryvals;
8216 if (saved_depth.complexity < best_depth.complexity)
8217 return best_depth;
8218 else
8219 return saved_depth;
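/* As a concrete example for update_depth above: with SAVED_DEPTH
   = { complexity 2, entryvals 1 } and BEST_DEPTH = { complexity 3,
   entryvals 0 }, both entryval counts become 1 and the result is
   { complexity 3, entryvals 1 }, i.e. the deeper of the two depths
   carrying the combined ENTRY_VALUE count.  */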
8222 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8223 DATA for cselib expand callback. If PENDRECP is given, indicate in
8224 it whether any sub-expression couldn't be fully evaluated because
8225 it is pending recursion resolution. */
8227 static inline rtx
8228 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8230 struct expand_loc_callback_data *elcd
8231 = (struct expand_loc_callback_data *) data;
8232 location_chain loc, next;
8233 rtx result = NULL;
8234 int first_child, result_first_child, last_child;
8235 bool pending_recursion;
8236 rtx loc_from = NULL;
8237 struct elt_loc_list *cloc = NULL;
8238 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8239 int wanted_entryvals, found_entryvals = 0;
8241 /* Clear all backlinks pointing at this, so that we're not notified
8242 while we're active. */
8243 loc_exp_dep_clear (var);
8245 retry:
8246 if (var->onepart == ONEPART_VALUE)
8248 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8250 gcc_checking_assert (cselib_preserved_value_p (val));
8252 cloc = val->locs;
8255 first_child = result_first_child = last_child
8256 = elcd->expanding.length ();
8258 wanted_entryvals = found_entryvals;
8260 /* Attempt to expand each available location in turn. */
8261 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8262 loc || cloc; loc = next)
8264 result_first_child = last_child;
8266 if (!loc)
8268 loc_from = cloc->loc;
8269 next = loc;
8270 cloc = cloc->next;
8271 if (unsuitable_loc (loc_from))
8272 continue;
8274 else
8276 loc_from = loc->loc;
8277 next = loc->next;
8280 gcc_checking_assert (!unsuitable_loc (loc_from));
8282 elcd->depth.complexity = elcd->depth.entryvals = 0;
8283 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8284 vt_expand_loc_callback, data);
8285 last_child = elcd->expanding.length ();
8287 if (result)
8289 depth = elcd->depth;
8291 gcc_checking_assert (depth.complexity
8292 || result_first_child == last_child);
8294 if (last_child - result_first_child != 1)
8296 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8297 depth.entryvals++;
8298 depth.complexity++;
8301 if (depth.complexity <= EXPR_USE_DEPTH)
8303 if (depth.entryvals <= wanted_entryvals)
8304 break;
8305 else if (!found_entryvals || depth.entryvals < found_entryvals)
8306 found_entryvals = depth.entryvals;
8309 result = NULL;
8312 /* Set it up in case we leave the loop. */
8313 depth.complexity = depth.entryvals = 0;
8314 loc_from = NULL;
8315 result_first_child = first_child;
8318 if (!loc_from && wanted_entryvals < found_entryvals)
8320 /* We found entries with ENTRY_VALUEs and skipped them. Since
8321 we could not find any expansions without ENTRY_VALUEs, but we
8322 found at least one with them, go back and get an entry with
8323 the minimum number ENTRY_VALUE count that we found. We could
8324 avoid looping, but since each sub-loc is already resolved,
8325 the re-expansion should be trivial. ??? Should we record all
8326 attempted locs as dependencies, so that we retry the
8327 expansion should any of them change, in the hope it can give
8328 us a new entry without an ENTRY_VALUE? */
8329 elcd->expanding.truncate (first_child);
8330 goto retry;
8333 /* Register all encountered dependencies as active. */
8334 pending_recursion = loc_exp_dep_set
8335 (var, result, elcd->expanding.address () + result_first_child,
8336 last_child - result_first_child, elcd->vars);
8338 elcd->expanding.truncate (first_child);
8340 /* Record where the expansion came from. */
8341 gcc_checking_assert (!result || !pending_recursion);
8342 VAR_LOC_FROM (var) = loc_from;
8343 VAR_LOC_DEPTH (var) = depth;
8345 gcc_checking_assert (!depth.complexity == !result);
8347 elcd->depth = update_depth (saved_depth, depth);
8349 /* Indicate whether any of the dependencies are pending recursion
8350 resolution. */
8351 if (pendrecp)
8352 *pendrecp = pending_recursion;
8354 if (!pendrecp || !pending_recursion)
8355 var->var_part[0].cur_loc = result;
8357 return result;
8360 /* Callback for cselib_expand_value, that looks for expressions
8361 holding the value in the var-tracking hash tables. Return X for
8362 standard processing, anything else is to be used as-is. */
8364 static rtx
8365 vt_expand_loc_callback (rtx x, bitmap regs,
8366 int max_depth ATTRIBUTE_UNUSED,
8367 void *data)
8369 struct expand_loc_callback_data *elcd
8370 = (struct expand_loc_callback_data *) data;
8371 decl_or_value dv;
8372 variable var;
8373 rtx result, subreg;
8374 bool pending_recursion = false;
8375 bool from_empty = false;
8377 switch (GET_CODE (x))
8379 case SUBREG:
8380 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8381 EXPR_DEPTH,
8382 vt_expand_loc_callback, data);
8384 if (!subreg)
8385 return NULL;
8387 result = simplify_gen_subreg (GET_MODE (x), subreg,
8388 GET_MODE (SUBREG_REG (x)),
8389 SUBREG_BYTE (x));
8391 /* Invalid SUBREGs are ok in debug info. ??? We could try
8392 alternate expansions for the VALUE as well. */
8393 if (!result)
8394 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8396 return result;
8398 case DEBUG_EXPR:
8399 case VALUE:
8400 dv = dv_from_rtx (x);
8401 break;
8403 default:
8404 return x;
8407 elcd->expanding.safe_push (x);
8409 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8410 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8412 if (NO_LOC_P (x))
8414 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8415 return NULL;
8418 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8420 if (!var)
8422 from_empty = true;
8423 var = variable_from_dropped (dv, INSERT);
8426 gcc_checking_assert (var);
8428 if (!dv_changed_p (dv))
8430 gcc_checking_assert (!NO_LOC_P (x));
8431 gcc_checking_assert (var->var_part[0].cur_loc);
8432 gcc_checking_assert (VAR_LOC_1PAUX (var));
8433 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8435 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8437 return var->var_part[0].cur_loc;
8440 VALUE_RECURSED_INTO (x) = true;
8441 /* This is tentative, but it makes some tests simpler. */
8442 NO_LOC_P (x) = true;
8444 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8446 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8448 if (pending_recursion)
8450 gcc_checking_assert (!result);
8451 elcd->pending.safe_push (x);
8453 else
8455 NO_LOC_P (x) = !result;
8456 VALUE_RECURSED_INTO (x) = false;
8457 set_dv_changed (dv, false);
8459 if (result)
8460 notify_dependents_of_resolved_value (var, elcd->vars);
8463 return result;
8466 /* While expanding variables, we may encounter recursion cycles
8467 because of mutual (possibly indirect) dependencies between two
8468 particular variables (or values), say A and B. If we're trying to
8469 expand A when we get to B, which in turn attempts to expand A, and
8470 we can't find any other expansion for B, we'll add B to this
8471 pending-recursion stack, and tentatively return NULL for its
8472 location. This tentative value will be used for any other
8473 occurrences of B, unless A gets some other location, in which case
8474 it will notify B that it is worth another try at computing a
8475 location for it, and it will use the location computed for A then.
8476 At the end of the expansion, the tentative NULL locations become
8477 final for all members of PENDING that didn't get a notification.
8478 This function performs this finalization of NULL locations. */
8480 static void
8481 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8483 while (!pending->is_empty ())
8485 rtx x = pending->pop ();
8486 decl_or_value dv;
8488 if (!VALUE_RECURSED_INTO (x))
8489 continue;
8491 gcc_checking_assert (NO_LOC_P (x));
8492 VALUE_RECURSED_INTO (x) = false;
8493 dv = dv_from_rtx (x);
8494 gcc_checking_assert (dv_changed_p (dv));
8495 set_dv_changed (dv, false);
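/* A purely illustrative walk-through of the machinery above (the
   values and locations are hypothetical): suppose VALUE A's only
   known location is (plus B 4) and VALUE B's only known location is
   (plus A -4).  Expanding A marks it VALUE_RECURSED_INTO and
   recurses into B, which recurses back into A; A is already marked,
   so B finds no expansion, is pushed onto PENDING and tentatively
   returns NULL.  If A later resolves to, say, (reg fp),
   notify_dependents_of_resolved_value gives B another try;
   otherwise the loop above makes B's NO_LOC_P mark final.  */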
8499 /* Initialize expand_loc_callback_data D with variable hash table V.
8500 It must be a macro because of alloca (vec stack). */
8501 #define INIT_ELCD(d, v) \
8502 do \
8504 (d).vars = (v); \
8505 (d).depth.complexity = (d).depth.entryvals = 0; \
8507 while (0)
8508 /* Finalize expand_loc_callback_data D, resolved to location L. */
8509 #define FINI_ELCD(d, l) \
8510 do \
8512 resolve_expansions_pending_recursion (&(d).pending); \
8513 (d).pending.release (); \
8514 (d).expanding.release (); \
8516 if ((l) && MEM_P (l)) \
8517 (l) = targetm.delegitimize_address (l); \
8519 while (0)
8521 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8522 equivalences in VARS, updating their CUR_LOCs in the process. */
8524 static rtx
8525 vt_expand_loc (rtx loc, variable_table_type *vars)
8527 struct expand_loc_callback_data data;
8528 rtx result;
8530 if (!MAY_HAVE_DEBUG_INSNS)
8531 return loc;
8533 INIT_ELCD (data, vars);
8535 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8536 vt_expand_loc_callback, &data);
8538 FINI_ELCD (data, result);
8540 return result;
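/* For instance (hypothetical RTL): if LOC is (mem (value V)) and the
   tables in VARS record that V currently lives at (plus (reg fp)
   (const_int -16)), the expansion returns
   (mem (plus (reg fp) (const_int -16))), i.e. something a DWARF
   location expression can describe, updating V's cur_loc on the
   way.  */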
8543 /* Expand the one-part VARiable to a location, using the equivalences
8544 in VARS, updating their CUR_LOCs in the process. */
8546 static rtx
8547 vt_expand_1pvar (variable var, variable_table_type *vars)
8549 struct expand_loc_callback_data data;
8550 rtx loc;
8552 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8554 if (!dv_changed_p (var->dv))
8555 return var->var_part[0].cur_loc;
8557 INIT_ELCD (data, vars);
8559 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8561 gcc_checking_assert (data.expanding.is_empty ());
8563 FINI_ELCD (data, loc);
8565 return loc;
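/* Unlike vt_expand_loc above, this starts directly from the
   variable's own location chain rather than from an arbitrary
   expression, so the callback is only reached for the VALUEs that
   the chain depends on; hence the assertion that data.expanding is
   empty again once the top-level chain walk returns.  */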
8568 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8569 additional parameters: WHERE specifies whether the note shall be emitted
8570 before or after instruction INSN. */
8572 static int
8573 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8575 variable var = *varp;
8576 rtx_insn *insn = data->insn;
8577 enum emit_note_where where = data->where;
8578 variable_table_type *vars = data->vars;
8579 rtx_note *note;
8580 rtx note_vl;
8581 int i, j, n_var_parts;
8582 bool complete;
8583 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8584 HOST_WIDE_INT last_limit;
8585 tree type_size_unit;
8586 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8587 rtx loc[MAX_VAR_PARTS];
8588 tree decl;
8589 location_chain lc;
8591 gcc_checking_assert (var->onepart == NOT_ONEPART
8592 || var->onepart == ONEPART_VDECL);
8594 decl = dv_as_decl (var->dv);
8596 complete = true;
8597 last_limit = 0;
8598 n_var_parts = 0;
8599 if (!var->onepart)
8600 for (i = 0; i < var->n_var_parts; i++)
8601 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8602 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8603 for (i = 0; i < var->n_var_parts; i++)
8605 enum machine_mode mode, wider_mode;
8606 rtx loc2;
8607 HOST_WIDE_INT offset;
8609 if (i == 0 && var->onepart)
8611 gcc_checking_assert (var->n_var_parts == 1);
8612 offset = 0;
8613 initialized = VAR_INIT_STATUS_INITIALIZED;
8614 loc2 = vt_expand_1pvar (var, vars);
8616 else
8618 if (last_limit < VAR_PART_OFFSET (var, i))
8620 complete = false;
8621 break;
8623 else if (last_limit > VAR_PART_OFFSET (var, i))
8624 continue;
8625 offset = VAR_PART_OFFSET (var, i);
8626 loc2 = var->var_part[i].cur_loc;
8627 if (loc2 && GET_CODE (loc2) == MEM
8628 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8630 rtx depval = XEXP (loc2, 0);
8632 loc2 = vt_expand_loc (loc2, vars);
8634 if (loc2)
8635 loc_exp_insert_dep (var, depval, vars);
8637 if (!loc2)
8639 complete = false;
8640 continue;
8642 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8643 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8644 if (var->var_part[i].cur_loc == lc->loc)
8646 initialized = lc->init;
8647 break;
8649 gcc_assert (lc);
8652 offsets[n_var_parts] = offset;
8653 if (!loc2)
8655 complete = false;
8656 continue;
8658 loc[n_var_parts] = loc2;
8659 mode = GET_MODE (var->var_part[i].cur_loc);
8660 if (mode == VOIDmode && var->onepart)
8661 mode = DECL_MODE (decl);
8662 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8664 /* Attempt to merge adjacent registers or memory. */
8665 wider_mode = GET_MODE_WIDER_MODE (mode);
8666 for (j = i + 1; j < var->n_var_parts; j++)
8667 if (last_limit <= VAR_PART_OFFSET (var, j))
8668 break;
8669 if (j < var->n_var_parts
8670 && wider_mode != VOIDmode
8671 && var->var_part[j].cur_loc
8672 && mode == GET_MODE (var->var_part[j].cur_loc)
8673 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8674 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8675 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8676 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8678 rtx new_loc = NULL;
8680 if (REG_P (loc[n_var_parts])
8681 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8682 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8683 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8684 == REGNO (loc2))
8686 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8687 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8688 mode, 0);
8689 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8690 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8691 if (new_loc)
8693 if (!REG_P (new_loc)
8694 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8695 new_loc = NULL;
8696 else
8697 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8700 else if (MEM_P (loc[n_var_parts])
8701 && GET_CODE (XEXP (loc2, 0)) == PLUS
8702 && REG_P (XEXP (XEXP (loc2, 0), 0))
8703 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8705 if ((REG_P (XEXP (loc[n_var_parts], 0))
8706 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8707 XEXP (XEXP (loc2, 0), 0))
8708 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8709 == GET_MODE_SIZE (mode))
8710 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8711 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8712 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8713 XEXP (XEXP (loc2, 0), 0))
8714 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8715 + GET_MODE_SIZE (mode)
8716 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8717 new_loc = adjust_address_nv (loc[n_var_parts],
8718 wider_mode, 0);
8721 if (new_loc)
8723 loc[n_var_parts] = new_loc;
8724 mode = wider_mode;
8725 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8726 i = j;
8729 ++n_var_parts;
8731 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8732 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8733 complete = false;
8735 if (! flag_var_tracking_uninit)
8736 initialized = VAR_INIT_STATUS_INITIALIZED;
8738 note_vl = NULL_RTX;
8739 if (!complete)
8740 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8741 else if (n_var_parts == 1)
8743 rtx expr_list;
8745 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8746 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8747 else
8748 expr_list = loc[0];
8750 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8752 else if (n_var_parts)
8754 rtx parallel;
8756 for (i = 0; i < n_var_parts; i++)
8757 loc[i]
8758 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8760 parallel = gen_rtx_PARALLEL (VOIDmode,
8761 gen_rtvec_v (n_var_parts, loc));
8762 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8763 parallel, initialized);
8766 if (where != EMIT_NOTE_BEFORE_INSN)
8768 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8769 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8770 NOTE_DURING_CALL_P (note) = true;
8772 else
8774 /* Make sure that the call related notes come first. */
8775 while (NEXT_INSN (insn)
8776 && NOTE_P (insn)
8777 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8778 && NOTE_DURING_CALL_P (insn))
8779 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8780 insn = NEXT_INSN (insn);
8781 if (NOTE_P (insn)
8782 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8783 && NOTE_DURING_CALL_P (insn))
8784 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8785 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8786 else
8787 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8789 NOTE_VAR_LOCATION (note) = note_vl;
8791 set_dv_changed (var->dv, false);
8792 gcc_assert (var->in_changed_variables);
8793 var->in_changed_variables = false;
8794 changed_variables->clear_slot (varp);
8796 /* Continue traversing the hash table. */
8797 return 1;
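/* Roughly, the note payload built above looks as follows (the decl
   and registers are illustrative only): a variable D complete in
   two parts might yield
     (var_location D (parallel [(expr_list (reg ax) (const_int 0))
                                (expr_list (reg dx) (const_int 8))]))
   whereas an incomplete variable yields (var_location D (nil)),
   telling consumers that D's location is unknown from this point
   on.  */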
8800 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8801 (a stack of RTX values) entries that aren't user variables. */
8803 static int
8804 var_track_values_to_stack (variable_def **slot,
8805 vec<rtx, va_heap> *changed_values_stack)
8807 variable var = *slot;
8809 if (var->onepart == ONEPART_VALUE)
8810 changed_values_stack->safe_push (dv_as_value (var->dv));
8811 else if (var->onepart == ONEPART_DEXPR)
8812 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8814 return 1;
8817 /* Remove from changed_variables the entry whose DV corresponds to
8818 value or debug_expr VAL. */
8819 static void
8820 remove_value_from_changed_variables (rtx val)
8822 decl_or_value dv = dv_from_rtx (val);
8823 variable_def **slot;
8824 variable var;
8826 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8827 NO_INSERT);
8828 var = *slot;
8829 var->in_changed_variables = false;
8830 changed_variables->clear_slot (slot);
8833 /* If VAL (a value or debug_expr) has backlinks to variables actively
8834 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8835 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8836 have dependencies of their own to notify. */
8838 static void
8839 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8840 vec<rtx, va_heap> *changed_values_stack)
8842 variable_def **slot;
8843 variable var;
8844 loc_exp_dep *led;
8845 decl_or_value dv = dv_from_rtx (val);
8847 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8848 NO_INSERT);
8849 if (!slot)
8850 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8851 if (!slot)
8852 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8853 NO_INSERT);
8854 var = *slot;
8856 while ((led = VAR_LOC_DEP_LST (var)))
8858 decl_or_value ldv = led->dv;
8859 variable ivar;
8861 /* Deactivate and remove the backlink, as it was "used up". It
8862 makes no sense to attempt to notify the same entity again:
8863 either it will be recomputed and re-register an active
8864 dependency, or it will still have the changed mark. */
8865 if (led->next)
8866 led->next->pprev = led->pprev;
8867 if (led->pprev)
8868 *led->pprev = led->next;
8869 led->next = NULL;
8870 led->pprev = NULL;
8872 if (dv_changed_p (ldv))
8873 continue;
8875 switch (dv_onepart_p (ldv))
8877 case ONEPART_VALUE:
8878 case ONEPART_DEXPR:
8879 set_dv_changed (ldv, true);
8880 changed_values_stack->safe_push (dv_as_rtx (ldv));
8881 break;
8883 case ONEPART_VDECL:
8884 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8885 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8886 variable_was_changed (ivar, NULL);
8887 break;
8889 case NOT_ONEPART:
8890 pool_free (loc_exp_dep_pool, led);
8891 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8892 if (ivar)
8894 int i = ivar->n_var_parts;
8895 while (i--)
8897 rtx loc = ivar->var_part[i].cur_loc;
8899 if (loc && GET_CODE (loc) == MEM
8900 && XEXP (loc, 0) == val)
8902 variable_was_changed (ivar, NULL);
8903 break;
8907 break;
8909 default:
8910 gcc_unreachable ();
8915 /* Take out of changed_variables any entries that don't refer to user
8916 variables. Back-propagate change notifications from values and
8917 debug_exprs to their active dependencies in HTAB or in
8918 CHANGED_VARIABLES. */
8920 static void
8921 process_changed_values (variable_table_type *htab)
8923 int i, n;
8924 rtx val;
8925 auto_vec<rtx, 20> changed_values_stack;
8927 /* Move values from changed_variables to changed_values_stack. */
8928 changed_variables
8929 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8930 (&changed_values_stack);
8932 /* Back-propagate change notifications in values while popping
8933 them from the stack. */
8934 for (n = i = changed_values_stack.length ();
8935 i > 0; i = changed_values_stack.length ())
8937 val = changed_values_stack.pop ();
8938 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8940 /* This condition will hold when visiting each of the entries
8941 originally in changed_variables. We can't remove them
8942 earlier because this could drop the backlinks before we got a
8943 chance to use them. */
8944 if (i == n)
8946 remove_value_from_changed_variables (val);
8947 n--;
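/* A worked trace of the loop above (hypothetical values): start
   with V1 V2 V3 on the stack, so n == 3.  V3 is popped with i == n
   and therefore removed from changed_variables, and n drops to 2.
   If notifying V3's dependents pushes a new value V4, V4 is popped
   with i == 3 > n, so nothing is removed for it; V2 and V1 are then
   each popped with i == n and removed in turn.  */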
8952 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable recorded in
8953 CHANGED_VARIABLES and empty that table. WHERE specifies whether
8954 the notes shall be emitted before or after instruction INSN. */
8956 static void
8957 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8958 shared_hash vars)
8960 emit_note_data data;
8961 variable_table_type *htab = shared_hash_htab (vars);
8963 if (!changed_variables->elements ())
8964 return;
8966 if (MAY_HAVE_DEBUG_INSNS)
8967 process_changed_values (htab);
8969 data.insn = insn;
8970 data.where = where;
8971 data.vars = htab;
8973 changed_variables
8974 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8977 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8978 same variable in hash table DATA or is not there at all. */
8980 static int
8981 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
8983 variable old_var, new_var;
8985 old_var = *slot;
8986 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
8988 if (!new_var)
8990 /* Variable has disappeared. */
8991 variable empty_var = NULL;
8993 if (old_var->onepart == ONEPART_VALUE
8994 || old_var->onepart == ONEPART_DEXPR)
8996 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8997 if (empty_var)
8999 gcc_checking_assert (!empty_var->in_changed_variables);
9000 if (!VAR_LOC_1PAUX (old_var))
9002 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9003 VAR_LOC_1PAUX (empty_var) = NULL;
9005 else
9006 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9010 if (!empty_var)
9012 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
9013 empty_var->dv = old_var->dv;
9014 empty_var->refcount = 0;
9015 empty_var->n_var_parts = 0;
9016 empty_var->onepart = old_var->onepart;
9017 empty_var->in_changed_variables = false;
9020 if (empty_var->onepart)
9022 /* Propagate the auxiliary data to (ultimately)
9023 changed_variables. */
9024 empty_var->var_part[0].loc_chain = NULL;
9025 empty_var->var_part[0].cur_loc = NULL;
9026 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9027 VAR_LOC_1PAUX (old_var) = NULL;
9029 variable_was_changed (empty_var, NULL);
9030 /* Continue traversing the hash table. */
9031 return 1;
9033 /* Update cur_loc and one-part auxiliary data, before new_var goes
9034 through variable_was_changed. */
9035 if (old_var != new_var && new_var->onepart)
9037 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9038 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9039 VAR_LOC_1PAUX (old_var) = NULL;
9040 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9042 if (variable_different_p (old_var, new_var))
9043 variable_was_changed (new_var, NULL);
9045 /* Continue traversing the hash table. */
9046 return 1;
9049 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9050 table DATA. */
9052 static int
9053 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9055 variable old_var, new_var;
9057 new_var = *slot;
9058 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9059 if (!old_var)
9061 int i;
9062 for (i = 0; i < new_var->n_var_parts; i++)
9063 new_var->var_part[i].cur_loc = NULL;
9064 variable_was_changed (new_var, NULL);
9067 /* Continue traversing the hash table. */
9068 return 1;
9071 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9072 NEW_SET. */
9074 static void
9075 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9076 dataflow_set *new_set)
9078 shared_hash_htab (old_set->vars)
9079 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9080 (shared_hash_htab (new_set->vars));
9081 shared_hash_htab (new_set->vars)
9082 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9083 (shared_hash_htab (old_set->vars));
9084 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
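/* The two traversals above are complementary: the first walks
   OLD_SET and queues variables that disappeared from or changed in
   NEW_SET (scheduling an empty location note for the former), while
   the second walks NEW_SET and queues variables that were not in
   OLD_SET at all.  Together they make changed_variables describe
   the full delta between the two dataflow sets.  */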
9087 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9089 static rtx_insn *
9090 next_non_note_insn_var_location (rtx_insn *insn)
9092 while (insn)
9094 insn = NEXT_INSN (insn);
9095 if (insn == 0
9096 || !NOTE_P (insn)
9097 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9098 break;
9101 return insn;
9104 /* Emit the notes for changes of location parts in the basic block BB. */
9106 static void
9107 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9109 unsigned int i;
9110 micro_operation *mo;
9112 dataflow_set_clear (set);
9113 dataflow_set_copy (set, &VTI (bb)->in);
9115 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9117 rtx_insn *insn = mo->insn;
9118 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9120 switch (mo->type)
9122 case MO_CALL:
9123 dataflow_set_clear_at_call (set);
9124 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9126 rtx arguments = mo->u.loc, *p = &arguments;
9127 rtx_note *note;
9128 while (*p)
9130 XEXP (XEXP (*p, 0), 1)
9131 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9132 shared_hash_htab (set->vars));
9133 /* If expansion is successful, keep it in the list. */
9134 if (XEXP (XEXP (*p, 0), 1))
9135 p = &XEXP (*p, 1);
9136 /* Otherwise, if the following item is the data_value for it,
9137 drop it too. */
9138 else if (XEXP (*p, 1)
9139 && REG_P (XEXP (XEXP (*p, 0), 0))
9140 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9141 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9142 0))
9143 && REGNO (XEXP (XEXP (*p, 0), 0))
9144 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9145 0), 0)))
9146 *p = XEXP (XEXP (*p, 1), 1);
9147 /* Just drop this item. */
9148 else
9149 *p = XEXP (*p, 1);
9151 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9152 NOTE_VAR_LOCATION (note) = arguments;
9154 break;
9156 case MO_USE:
9158 rtx loc = mo->u.loc;
9160 if (REG_P (loc))
9161 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9162 else
9163 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9165 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9167 break;
9169 case MO_VAL_LOC:
9171 rtx loc = mo->u.loc;
9172 rtx val, vloc;
9173 tree var;
9175 if (GET_CODE (loc) == CONCAT)
9177 val = XEXP (loc, 0);
9178 vloc = XEXP (loc, 1);
9180 else
9182 val = NULL_RTX;
9183 vloc = loc;
9186 var = PAT_VAR_LOCATION_DECL (vloc);
9188 clobber_variable_part (set, NULL_RTX,
9189 dv_from_decl (var), 0, NULL_RTX);
9190 if (val)
9192 if (VAL_NEEDS_RESOLUTION (loc))
9193 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9194 set_variable_part (set, val, dv_from_decl (var), 0,
9195 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9196 INSERT);
9198 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9199 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9200 dv_from_decl (var), 0,
9201 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9202 INSERT);
9204 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9206 break;
9208 case MO_VAL_USE:
9210 rtx loc = mo->u.loc;
9211 rtx val, vloc, uloc;
9213 vloc = uloc = XEXP (loc, 1);
9214 val = XEXP (loc, 0);
9216 if (GET_CODE (val) == CONCAT)
9218 uloc = XEXP (val, 1);
9219 val = XEXP (val, 0);
9222 if (VAL_NEEDS_RESOLUTION (loc))
9223 val_resolve (set, val, vloc, insn);
9224 else
9225 val_store (set, val, uloc, insn, false);
9227 if (VAL_HOLDS_TRACK_EXPR (loc))
9229 if (GET_CODE (uloc) == REG)
9230 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9231 NULL);
9232 else if (GET_CODE (uloc) == MEM)
9233 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9234 NULL);
9237 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9239 break;
9241 case MO_VAL_SET:
9243 rtx loc = mo->u.loc;
9244 rtx val, vloc, uloc;
9245 rtx dstv, srcv;
9247 vloc = loc;
9248 uloc = XEXP (vloc, 1);
9249 val = XEXP (vloc, 0);
9250 vloc = uloc;
9252 if (GET_CODE (uloc) == SET)
9254 dstv = SET_DEST (uloc);
9255 srcv = SET_SRC (uloc);
9257 else
9259 dstv = uloc;
9260 srcv = NULL;
9263 if (GET_CODE (val) == CONCAT)
9265 dstv = vloc = XEXP (val, 1);
9266 val = XEXP (val, 0);
9269 if (GET_CODE (vloc) == SET)
9271 srcv = SET_SRC (vloc);
9273 gcc_assert (val != srcv);
9274 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9276 dstv = vloc = SET_DEST (vloc);
9278 if (VAL_NEEDS_RESOLUTION (loc))
9279 val_resolve (set, val, srcv, insn);
9281 else if (VAL_NEEDS_RESOLUTION (loc))
9283 gcc_assert (GET_CODE (uloc) == SET
9284 && GET_CODE (SET_SRC (uloc)) == REG);
9285 val_resolve (set, val, SET_SRC (uloc), insn);
9288 if (VAL_HOLDS_TRACK_EXPR (loc))
9290 if (VAL_EXPR_IS_CLOBBERED (loc))
9292 if (REG_P (uloc))
9293 var_reg_delete (set, uloc, true);
9294 else if (MEM_P (uloc))
9296 gcc_assert (MEM_P (dstv));
9297 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9298 var_mem_delete (set, dstv, true);
9301 else
9303 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9304 rtx src = NULL, dst = uloc;
9305 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9307 if (GET_CODE (uloc) == SET)
9309 src = SET_SRC (uloc);
9310 dst = SET_DEST (uloc);
9313 if (copied_p)
9315 status = find_src_status (set, src);
9317 src = find_src_set_src (set, src);
9320 if (REG_P (dst))
9321 var_reg_delete_and_set (set, dst, !copied_p,
9322 status, srcv);
9323 else if (MEM_P (dst))
9325 gcc_assert (MEM_P (dstv));
9326 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9327 var_mem_delete_and_set (set, dstv, !copied_p,
9328 status, srcv);
9332 else if (REG_P (uloc))
9333 var_regno_delete (set, REGNO (uloc));
9334 else if (MEM_P (uloc))
9336 gcc_checking_assert (GET_CODE (vloc) == MEM);
9337 gcc_checking_assert (vloc == dstv);
9338 if (vloc != dstv)
9339 clobber_overlapping_mems (set, vloc);
9342 val_store (set, val, dstv, insn, true);
9344 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9345 set->vars);
9347 break;
9349 case MO_SET:
9351 rtx loc = mo->u.loc;
9352 rtx set_src = NULL;
9354 if (GET_CODE (loc) == SET)
9356 set_src = SET_SRC (loc);
9357 loc = SET_DEST (loc);
9360 if (REG_P (loc))
9361 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9362 set_src);
9363 else
9364 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9365 set_src);
9367 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9368 set->vars);
9370 break;
9372 case MO_COPY:
9374 rtx loc = mo->u.loc;
9375 enum var_init_status src_status;
9376 rtx set_src = NULL;
9378 if (GET_CODE (loc) == SET)
9380 set_src = SET_SRC (loc);
9381 loc = SET_DEST (loc);
9384 src_status = find_src_status (set, set_src);
9385 set_src = find_src_set_src (set, set_src);
9387 if (REG_P (loc))
9388 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9389 else
9390 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9392 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9393 set->vars);
9395 break;
9397 case MO_USE_NO_VAR:
9399 rtx loc = mo->u.loc;
9401 if (REG_P (loc))
9402 var_reg_delete (set, loc, false);
9403 else
9404 var_mem_delete (set, loc, false);
9406 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9408 break;
9410 case MO_CLOBBER:
9412 rtx loc = mo->u.loc;
9414 if (REG_P (loc))
9415 var_reg_delete (set, loc, true);
9416 else
9417 var_mem_delete (set, loc, true);
9419 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9420 set->vars);
9422 break;
9424 case MO_ADJUST:
9425 set->stack_adjust += mo->u.adjust;
9426 break;
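/* Note the placement convention in the cases above: micro-operations
   that matter at the insn itself (MO_USE, MO_VAL_USE) emit their
   notes before INSN, MO_VAL_LOC and MO_USE_NO_VAR emit right after
   it, MO_CALL emits after the call insn, and the ones that change
   locations (MO_SET, MO_COPY, MO_CLOBBER, MO_VAL_SET) emit before
   NEXT_INSN, i.e. only after the effect of INSN has taken place.  */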
9431 /* Emit notes for the whole function. */
9433 static void
9434 vt_emit_notes (void)
9436 basic_block bb;
9437 dataflow_set cur;
9439 gcc_assert (!changed_variables->elements ());
9441 /* Free memory occupied by the out hash tables, as they aren't used
9442 anymore. */
9443 FOR_EACH_BB_FN (bb, cfun)
9444 dataflow_set_clear (&VTI (bb)->out);
9446 /* Enable emitting notes by functions (mainly by set_variable_part and
9447 delete_variable_part). */
9448 emit_notes = true;
9450 if (MAY_HAVE_DEBUG_INSNS)
9452 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9453 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9454 sizeof (loc_exp_dep), 64);
9457 dataflow_set_init (&cur);
9459 FOR_EACH_BB_FN (bb, cfun)
9461 /* Emit the notes for changes of variable locations between two
9462 consecutive basic blocks. */
9463 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9465 if (MAY_HAVE_DEBUG_INSNS)
9466 local_get_addr_cache = new hash_map<rtx, rtx>;
9468 /* Emit the notes for the changes in the basic block itself. */
9469 emit_notes_in_bb (bb, &cur);
9471 if (MAY_HAVE_DEBUG_INSNS)
9472 delete local_get_addr_cache;
9473 local_get_addr_cache = NULL;
9475 /* Free memory occupied by the in hash table, we won't need it
9476 again. */
9477 dataflow_set_clear (&VTI (bb)->in);
9479 #ifdef ENABLE_CHECKING
9480 shared_hash_htab (cur.vars)
9481 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9482 (shared_hash_htab (empty_shared_hash));
9483 #endif
9484 dataflow_set_destroy (&cur);
9486 if (MAY_HAVE_DEBUG_INSNS)
9487 delete dropped_values;
9488 dropped_values = NULL;
9490 emit_notes = false;
9493 /* If there is a declaration and offset associated with register/memory RTL
9494 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9496 static bool
9497 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9499 if (REG_P (rtl))
9501 if (REG_ATTRS (rtl))
9503 *declp = REG_EXPR (rtl);
9504 *offsetp = REG_OFFSET (rtl);
9505 return true;
9508 else if (GET_CODE (rtl) == PARALLEL)
9510 tree decl = NULL_TREE;
9511 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9512 int len = XVECLEN (rtl, 0), i;
9514 for (i = 0; i < len; i++)
9516 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9517 if (!REG_P (reg) || !REG_ATTRS (reg))
9518 break;
9519 if (!decl)
9520 decl = REG_EXPR (reg);
9521 if (REG_EXPR (reg) != decl)
9522 break;
9523 if (REG_OFFSET (reg) < offset)
9524 offset = REG_OFFSET (reg);
9527 if (i == len)
9529 *declp = decl;
9530 *offsetp = offset;
9531 return true;
9534 else if (MEM_P (rtl))
9536 if (MEM_ATTRS (rtl))
9538 *declp = MEM_EXPR (rtl);
9539 *offsetp = INT_MEM_OFFSET (rtl);
9540 return true;
9543 return false;
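/* E.g. (with hypothetical attributes): for (reg:DI 5 di [ x ]) whose
   REG_ATTRS record decl x at offset 0, this returns true with
   *DECLP == x and *OFFSETP == 0.  For a PARALLEL, every member must
   be a register whose attributes agree on the decl, and the
   smallest member offset is the one reported.  */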
9546 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9547 of VAL. */
9549 static void
9550 record_entry_value (cselib_val *val, rtx rtl)
9552 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9554 ENTRY_VALUE_EXP (ev) = rtl;
9556 cselib_add_permanent_equiv (val, ev, get_insns ());
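/* E.g., for a parameter arriving in a hard register, hypothetically
   (reg:DI 5 di), this attaches (entry_value (reg:DI 5 di)) as a
   permanent equivalence of VAL, letting the DWARF writer fall back
   to an entry-value expression when no better location survives.  */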
9559 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9561 static void
9562 vt_add_function_parameter (tree parm)
9564 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9565 rtx incoming = DECL_INCOMING_RTL (parm);
9566 tree decl;
9567 enum machine_mode mode;
9568 HOST_WIDE_INT offset;
9569 dataflow_set *out;
9570 decl_or_value dv;
9572 if (TREE_CODE (parm) != PARM_DECL)
9573 return;
9575 if (!decl_rtl || !incoming)
9576 return;
9578 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9579 return;
9581 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9582 rewrite the incoming location of parameters passed on the stack
9583 into MEMs based on the argument pointer, so that incoming doesn't
9584 depend on a pseudo. */
9585 if (MEM_P (incoming)
9586 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9587 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9588 && XEXP (XEXP (incoming, 0), 0)
9589 == crtl->args.internal_arg_pointer
9590 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9592 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9593 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9594 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9595 incoming
9596 = replace_equiv_address_nv (incoming,
9597 plus_constant (Pmode,
9598 arg_pointer_rtx, off));
9601 #ifdef HAVE_window_save
9602 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9603 If the target machine has an explicit window save instruction, the
9604 actual entry value is the corresponding OUTGOING_REGNO instead. */
9605 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9607 if (REG_P (incoming)
9608 && HARD_REGISTER_P (incoming)
9609 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9611 parm_reg_t p;
9612 p.incoming = incoming;
9613 incoming
9614 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9615 OUTGOING_REGNO (REGNO (incoming)), 0);
9616 p.outgoing = incoming;
9617 vec_safe_push (windowed_parm_regs, p);
9619 else if (GET_CODE (incoming) == PARALLEL)
9621 rtx outgoing
9622 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9623 int i;
9625 for (i = 0; i < XVECLEN (incoming, 0); i++)
9627 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9628 parm_reg_t p;
9629 p.incoming = reg;
9630 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9631 OUTGOING_REGNO (REGNO (reg)), 0);
9632 p.outgoing = reg;
9633 XVECEXP (outgoing, 0, i)
9634 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9635 XEXP (XVECEXP (incoming, 0, i), 1));
9636 vec_safe_push (windowed_parm_regs, p);
9639 incoming = outgoing;
9641 else if (MEM_P (incoming)
9642 && REG_P (XEXP (incoming, 0))
9643 && HARD_REGISTER_P (XEXP (incoming, 0)))
9645 rtx reg = XEXP (incoming, 0);
9646 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9648 parm_reg_t p;
9649 p.incoming = reg;
9650 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9651 p.outgoing = reg;
9652 vec_safe_push (windowed_parm_regs, p);
9653 incoming = replace_equiv_address_nv (incoming, reg);
9657 #endif
9659 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9661 if (MEM_P (incoming))
9663 /* This means argument is passed by invisible reference. */
9664 offset = 0;
9665 decl = parm;
9667 else
9669 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9670 return;
9671 offset += byte_lowpart_offset (GET_MODE (incoming),
9672 GET_MODE (decl_rtl));
9676 if (!decl)
9677 return;
9679 if (parm != decl)
9681 /* If that DECL_RTL wasn't a pseudo that got spilled to
9682 memory, bail out. Otherwise, the spill slot sharing code
9683 will force the memory to reference spill_slot_decl (%sfp),
9684 so we don't match above. That's ok, the pseudo must have
9685 referenced the entire parameter, so just reset OFFSET. */
9686 if (decl != get_spill_slot_decl (false))
9687 return;
9688 offset = 0;
9691 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9692 return;
9694 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9696 dv = dv_from_decl (parm);
9698 if (target_for_debug_bind (parm)
9699 /* We can't deal with these right now, because this kind of
9700 variable is single-part. ??? We could handle parallels
9701 that describe multiple locations for the same single
9702 value, but ATM we don't. */
9703 && GET_CODE (incoming) != PARALLEL)
9705 cselib_val *val;
9706 rtx lowpart;
9708 /* ??? We shouldn't ever hit this, but it may happen because
9709 arguments passed by invisible reference aren't dealt with
9710 above: incoming-rtl will have Pmode rather than the
9711 expected mode for the type. */
9712 if (offset)
9713 return;
9715 lowpart = var_lowpart (mode, incoming);
9716 if (!lowpart)
9717 return;
9719 val = cselib_lookup_from_insn (lowpart, mode, true,
9720 VOIDmode, get_insns ());
9722 /* ??? Float-typed values in memory are not handled by
9723 cselib. */
9724 if (val)
9726 preserve_value (val);
9727 set_variable_part (out, val->val_rtx, dv, offset,
9728 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9729 dv = dv_from_value (val->val_rtx);
9732 if (MEM_P (incoming))
9734 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9735 VOIDmode, get_insns ());
9736 if (val)
9738 preserve_value (val);
9739 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9744 if (REG_P (incoming))
9746 incoming = var_lowpart (mode, incoming);
9747 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9748 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9749 incoming);
9750 set_variable_part (out, incoming, dv, offset,
9751 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9752 if (dv_is_value_p (dv))
9754 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9755 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9756 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9758 enum machine_mode indmode
9759 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9760 rtx mem = gen_rtx_MEM (indmode, incoming);
9761 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9762 VOIDmode,
9763 get_insns ());
9764 if (val)
9766 preserve_value (val);
9767 record_entry_value (val, mem);
9768 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9769 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9774 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9776 int i;
9778 for (i = 0; i < XVECLEN (incoming, 0); i++)
9780 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9781 offset = REG_OFFSET (reg);
9782 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9783 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9784 set_variable_part (out, reg, dv, offset,
9785 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9788 else if (MEM_P (incoming))
9790 incoming = var_lowpart (mode, incoming);
9791 set_variable_part (out, incoming, dv, offset,
9792 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
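/* Summarizing the cases above for a typical parameter P arriving in
   a hard register R: R gets an attrs-list entry binding it to P (or
   to P's VALUE when debug insns are in use), the ENTRY_BLOCK out set
   records the location, and for a reference parameter pointing to an
   integral type the pointed-to memory is additionally tracked
   through an entry value.  Parameters arriving in a PARALLEL or in
   memory get the analogous multi-register or MEM treatment.  */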
9796 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9798 static void
9799 vt_add_function_parameters (void)
9801 tree parm;
9803 for (parm = DECL_ARGUMENTS (current_function_decl);
9804 parm; parm = DECL_CHAIN (parm))
9805 vt_add_function_parameter (parm);
9807 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9809 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9811 if (TREE_CODE (vexpr) == INDIRECT_REF)
9812 vexpr = TREE_OPERAND (vexpr, 0);
9814 if (TREE_CODE (vexpr) == PARM_DECL
9815 && DECL_ARTIFICIAL (vexpr)
9816 && !DECL_IGNORED_P (vexpr)
9817 && DECL_NAMELESS (vexpr))
9818 vt_add_function_parameter (vexpr);
9822 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9823 ensure it isn't flushed during cselib_reset_table.
9824 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9825 has been eliminated. */
9827 static void
9828 vt_init_cfa_base (void)
9830 cselib_val *val;
9832 #ifdef FRAME_POINTER_CFA_OFFSET
9833 cfa_base_rtx = frame_pointer_rtx;
9834 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9835 #else
9836 cfa_base_rtx = arg_pointer_rtx;
9837 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9838 #endif
9839 if (cfa_base_rtx == hard_frame_pointer_rtx
9840 || !fixed_regs[REGNO (cfa_base_rtx)])
9842 cfa_base_rtx = NULL_RTX;
9843 return;
9845 if (!MAY_HAVE_DEBUG_INSNS)
9846 return;
9848 /* Tell alias analysis that cfa_base_rtx should share
9849 find_base_term value with stack pointer or hard frame pointer. */
9850 if (!frame_pointer_needed)
9851 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9852 else if (!crtl->stack_realign_tried)
9853 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9855 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9856 VOIDmode, get_insns ());
9857 preserve_value (val);
9858 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9861 /* Allocate and initialize the data structures for variable tracking
9862 and parse the RTL to get the micro operations. */
9864 static bool
9865 vt_initialize (void)
9867 basic_block bb;
9868 HOST_WIDE_INT fp_cfa_offset = -1;
9870 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9872 attrs_pool = create_alloc_pool ("attrs_def pool",
9873 sizeof (struct attrs_def), 1024);
9874 var_pool = create_alloc_pool ("variable_def pool",
9875 sizeof (struct variable_def)
9876 + (MAX_VAR_PARTS - 1)
9877 * sizeof (((variable)NULL)->var_part[0]), 64);
9878 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9879 sizeof (struct location_chain_def),
9880 1024);
9881 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9882 sizeof (struct shared_hash_def), 256);
9883 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9884 empty_shared_hash->refcount = 1;
9885 empty_shared_hash->htab = new variable_table_type (1);
9886 changed_variables = new variable_table_type (10);
9888 /* Init the IN and OUT sets. */
9889 FOR_ALL_BB_FN (bb, cfun)
9891 VTI (bb)->visited = false;
9892 VTI (bb)->flooded = false;
9893 dataflow_set_init (&VTI (bb)->in);
9894 dataflow_set_init (&VTI (bb)->out);
9895 VTI (bb)->permp = NULL;
9898 if (MAY_HAVE_DEBUG_INSNS)
9900 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9901 scratch_regs = BITMAP_ALLOC (NULL);
9902 valvar_pool = create_alloc_pool ("small variable_def pool",
9903 sizeof (struct variable_def), 256);
9904 preserved_values.create (256);
9905 global_get_addr_cache = new hash_map<rtx, rtx>;
9907 else
9909 scratch_regs = NULL;
9910 valvar_pool = NULL;
9911 global_get_addr_cache = NULL;
9914 if (MAY_HAVE_DEBUG_INSNS)
9916 rtx reg, expr;
9917 int ofst;
9918 cselib_val *val;
9920 #ifdef FRAME_POINTER_CFA_OFFSET
9921 reg = frame_pointer_rtx;
9922 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9923 #else
9924 reg = arg_pointer_rtx;
9925 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9926 #endif
9928 ofst -= INCOMING_FRAME_SP_OFFSET;
9930 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9931 VOIDmode, get_insns ());
9932 preserve_value (val);
9933 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9934 cselib_preserve_cfa_base_value (val, REGNO (reg));
9935 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9936 stack_pointer_rtx, -ofst);
9937 cselib_add_permanent_equiv (val, expr, get_insns ());
9939 if (ofst)
9941 val = cselib_lookup_from_insn (stack_pointer_rtx,
9942 GET_MODE (stack_pointer_rtx), 1,
9943 VOIDmode, get_insns ());
9944 preserve_value (val);
9945 expr = plus_constant (GET_MODE (reg), reg, ofst);
9946 cselib_add_permanent_equiv (val, expr, get_insns ());
9950 /* In order to factor out the adjustments made to the stack pointer or to
9951 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9952 instead of individual location lists, we're going to rewrite MEMs based
9953 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9954 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9955 resp. arg_pointer_rtx. We can do this either when there is no frame
9956 pointer in the function and stack adjustments are consistent for all
9957 basic blocks or when there is a frame pointer and no stack realignment.
9958 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9959 has been eliminated. */
9960 if (!frame_pointer_needed)
9962 rtx reg, elim;
9964 if (!vt_stack_adjustments ())
9965 return false;
9967 #ifdef FRAME_POINTER_CFA_OFFSET
9968 reg = frame_pointer_rtx;
9969 #else
9970 reg = arg_pointer_rtx;
9971 #endif
9972 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9973 if (elim != reg)
9975 if (GET_CODE (elim) == PLUS)
9976 elim = XEXP (elim, 0);
9977 if (elim == stack_pointer_rtx)
9978 vt_init_cfa_base ();
9981 else if (!crtl->stack_realign_tried)
9983 rtx reg, elim;
9985 #ifdef FRAME_POINTER_CFA_OFFSET
9986 reg = frame_pointer_rtx;
9987 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9988 #else
9989 reg = arg_pointer_rtx;
9990 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9991 #endif
9992 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9993 if (elim != reg)
9995 if (GET_CODE (elim) == PLUS)
9997 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9998 elim = XEXP (elim, 0);
10000 if (elim != hard_frame_pointer_rtx)
10001 fp_cfa_offset = -1;
10003 else
10004 fp_cfa_offset = -1;
10007 /* If the stack is realigned and a DRAP register is used, we're going to
10008 rewrite MEMs based on it representing incoming locations of parameters
10009 passed on the stack into MEMs based on the argument pointer. Although
10010 we aren't going to rewrite other MEMs, we still need to initialize the
10011 virtual CFA pointer in order to ensure that the argument pointer will
10012 be seen as a constant throughout the function.
10014 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10015 else if (stack_realign_drap)
10017 rtx reg, elim;
10019 #ifdef FRAME_POINTER_CFA_OFFSET
10020 reg = frame_pointer_rtx;
10021 #else
10022 reg = arg_pointer_rtx;
10023 #endif
10024 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10025 if (elim != reg)
10027 if (GET_CODE (elim) == PLUS)
10028 elim = XEXP (elim, 0);
10029 if (elim == hard_frame_pointer_rtx)
10030 vt_init_cfa_base ();
10034 hard_frame_pointer_adjustment = -1;
10036 vt_add_function_parameters ();
10038 FOR_EACH_BB_FN (bb, cfun)
10040 rtx_insn *insn;
10041 HOST_WIDE_INT pre, post = 0;
10042 basic_block first_bb, last_bb;
10044 if (MAY_HAVE_DEBUG_INSNS)
10046 cselib_record_sets_hook = add_with_sets;
10047 if (dump_file && (dump_flags & TDF_DETAILS))
10048 fprintf (dump_file, "first value: %i\n",
10049 cselib_get_next_uid ());
10052 first_bb = bb;
10053 for (;;)
10055 edge e;
10056 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10057 || ! single_pred_p (bb->next_bb))
10058 break;
10059 e = find_edge (bb, bb->next_bb);
10060 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10061 break;
10062 bb = bb->next_bb;
10064 last_bb = bb;
10066 /* Add the micro-operations to the vector. */
10067 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10069 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10070 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10071 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10072 insn = NEXT_INSN (insn))
10074 if (INSN_P (insn))
10076 if (!frame_pointer_needed)
10078 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10079 if (pre)
10081 micro_operation mo;
10082 mo.type = MO_ADJUST;
10083 mo.u.adjust = pre;
10084 mo.insn = insn;
10085 if (dump_file && (dump_flags & TDF_DETAILS))
10086 log_op_type (PATTERN (insn), bb, insn,
10087 MO_ADJUST, dump_file);
10088 VTI (bb)->mos.safe_push (mo);
10089 VTI (bb)->out.stack_adjust += pre;
10093 cselib_hook_called = false;
10094 adjust_insn (bb, insn);
10095 if (MAY_HAVE_DEBUG_INSNS)
10097 if (CALL_P (insn))
10098 prepare_call_arguments (bb, insn);
10099 cselib_process_insn (insn);
10100 if (dump_file && (dump_flags & TDF_DETAILS))
10102 print_rtl_single (dump_file, insn);
10103 dump_cselib_table (dump_file);
10106 if (!cselib_hook_called)
10107 add_with_sets (insn, 0, 0);
10108 cancel_changes (0);
10110 if (!frame_pointer_needed && post)
10112 micro_operation mo;
10113 mo.type = MO_ADJUST;
10114 mo.u.adjust = post;
10115 mo.insn = insn;
10116 if (dump_file && (dump_flags & TDF_DETAILS))
10117 log_op_type (PATTERN (insn), bb, insn,
10118 MO_ADJUST, dump_file);
10119 VTI (bb)->mos.safe_push (mo);
10120 VTI (bb)->out.stack_adjust += post;
10123 if (fp_cfa_offset != -1
10124 && hard_frame_pointer_adjustment == -1
10125 && fp_setter_insn (insn))
10127 vt_init_cfa_base ();
10128 hard_frame_pointer_adjustment = fp_cfa_offset;
10129 /* Disassociate sp from fp now. */
10130 if (MAY_HAVE_DEBUG_INSNS)
10132 cselib_val *v;
10133 cselib_invalidate_rtx (stack_pointer_rtx);
10134 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10135 VOIDmode);
10136 if (v && !cselib_preserved_value_p (v))
10138 cselib_set_value_sp_based (v);
10139 preserve_value (v);
10145 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10148 bb = last_bb;
10150 if (MAY_HAVE_DEBUG_INSNS)
10152 cselib_preserve_only_values ();
10153 cselib_reset_table (cselib_get_next_uid ());
10154 cselib_record_sets_hook = NULL;
10158 hard_frame_pointer_adjustment = -1;
10159 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10160 cfa_base_rtx = NULL_RTX;
10161 return true;
10164 /* This is *not* reset after each function. It gives each
10165 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10166 a unique label number. */
10168 static int debug_label_num = 1;
10170 /* Get rid of all debug insns from the insn stream. */
10172 static void
10173 delete_debug_insns (void)
10175 basic_block bb;
10176 rtx_insn *insn, *next;
10178 if (!MAY_HAVE_DEBUG_INSNS)
10179 return;
10181 FOR_EACH_BB_FN (bb, cfun)
10183 FOR_BB_INSNS_SAFE (bb, insn, next)
10184 if (DEBUG_INSN_P (insn))
10186 tree decl = INSN_VAR_LOCATION_DECL (insn);
10187 if (TREE_CODE (decl) == LABEL_DECL
10188 && DECL_NAME (decl)
10189 && !DECL_RTL_SET_P (decl))
10191 PUT_CODE (insn, NOTE);
10192 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10193 NOTE_DELETED_LABEL_NAME (insn)
10194 = IDENTIFIER_POINTER (DECL_NAME (decl));
10195 SET_DECL_RTL (decl, insn);
10196 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10198 else
10199 delete_insn (insn);
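/* Debug insns that bind a named but otherwise unused label are not
   simply deleted above: they become NOTE_INSN_DELETED_DEBUG_LABEL
   notes, each numbered uniquely from debug_label_num, so that the
   label's name can still be emitted into the debug info.  */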
10204 /* Run a fast, BB-local-only version of var tracking, to take care of
10205 information that we don't do global analysis on, so that not all
10206 such information is lost. If SKIPPED holds, we're skipping the
10207 global pass entirely, so we should try to use information it would
10208 have handled as well. */
10210 static void
10211 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10213 /* ??? Just skip it all for now. */
10214 delete_debug_insns ();
10217 /* Free the data structures needed for variable tracking. */
10219 static void
10220 vt_finalize (void)
10222 basic_block bb;
10224 FOR_EACH_BB_FN (bb, cfun)
10226 VTI (bb)->mos.release ();
10229 FOR_ALL_BB_FN (bb, cfun)
10231 dataflow_set_destroy (&VTI (bb)->in);
10232 dataflow_set_destroy (&VTI (bb)->out);
10233 if (VTI (bb)->permp)
10235 dataflow_set_destroy (VTI (bb)->permp);
10236 XDELETE (VTI (bb)->permp);
10239 free_aux_for_blocks ();
10240 delete empty_shared_hash->htab;
10241 empty_shared_hash->htab = NULL;
10242 delete changed_variables;
10243 changed_variables = NULL;
10244 free_alloc_pool (attrs_pool);
10245 free_alloc_pool (var_pool);
10246 free_alloc_pool (loc_chain_pool);
10247 free_alloc_pool (shared_hash_pool);
10249 if (MAY_HAVE_DEBUG_INSNS)
10251 if (global_get_addr_cache)
10252 delete global_get_addr_cache;
10253 global_get_addr_cache = NULL;
10254 if (loc_exp_dep_pool)
10255 free_alloc_pool (loc_exp_dep_pool);
10256 loc_exp_dep_pool = NULL;
10257 free_alloc_pool (valvar_pool);
10258 preserved_values.release ();
10259 cselib_finish ();
10260 BITMAP_FREE (scratch_regs);
10261 scratch_regs = NULL;
10264 #ifdef HAVE_window_save
10265 vec_free (windowed_parm_regs);
10266 #endif
10268 if (vui_vec)
10269 XDELETEVEC (vui_vec);
10270 vui_vec = NULL;
10271 vui_allocated = 0;
10274 /* The entry point to variable tracking pass. */
10276 static inline unsigned int
10277 variable_tracking_main_1 (void)
10279 bool success;
10281 if (flag_var_tracking_assignments < 0)
10283 delete_debug_insns ();
10284 return 0;
10287 if (n_basic_blocks_for_fn (cfun) > 500 &&
10288 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10290 vt_debug_insns_local (true);
10291 return 0;
10294 mark_dfs_back_edges ();
10295 if (!vt_initialize ())
10297 vt_finalize ();
10298 vt_debug_insns_local (true);
10299 return 0;
10302 success = vt_find_locations ();
10304 if (!success && flag_var_tracking_assignments > 0)
10306 vt_finalize ();
10308 delete_debug_insns ();
10310 /* This is later restored by our caller. */
10311 flag_var_tracking_assignments = 0;
10313 success = vt_initialize ();
10314 gcc_assert (success);
10316 success = vt_find_locations ();
10319 if (!success)
10321 vt_finalize ();
10322 vt_debug_insns_local (false);
10323 return 0;
10326 if (dump_file && (dump_flags & TDF_DETAILS))
10328 dump_dataflow_sets ();
10329 dump_reg_info (dump_file);
10330 dump_flow_info (dump_file, dump_flags);
10333 timevar_push (TV_VAR_TRACKING_EMIT);
10334 vt_emit_notes ();
10335 timevar_pop (TV_VAR_TRACKING_EMIT);
10337 vt_finalize ();
10338 vt_debug_insns_local (false);
10339 return 0;
10342 unsigned int
10343 variable_tracking_main (void)
10345 unsigned int ret;
10346 int save = flag_var_tracking_assignments;
10348 ret = variable_tracking_main_1 ();
10350 flag_var_tracking_assignments = save;
10352 return ret;
10355 namespace {
10357 const pass_data pass_data_variable_tracking =
10359 RTL_PASS, /* type */
10360 "vartrack", /* name */
10361 OPTGROUP_NONE, /* optinfo_flags */
10362 TV_VAR_TRACKING, /* tv_id */
10363 0, /* properties_required */
10364 0, /* properties_provided */
10365 0, /* properties_destroyed */
10366 0, /* todo_flags_start */
10367 0, /* todo_flags_finish */
10370 class pass_variable_tracking : public rtl_opt_pass
10372 public:
10373 pass_variable_tracking (gcc::context *ctxt)
10374 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10377 /* opt_pass methods: */
10378 virtual bool gate (function *)
10380 return (flag_var_tracking && !targetm.delay_vartrack);
10383 virtual unsigned int execute (function *)
10385 return variable_tracking_main ();
10388 }; // class pass_variable_tracking
10390 } // anon namespace
10392 rtl_opt_pass *
10393 make_pass_variable_tracking (gcc::context *ctxt)
10395 return new pass_variable_tracking (ctxt);