2013-11-21 Edward Smith-Rowland <3dw4rd@verizon.net>
[official-gcc.git] / gcc / var-tracking.c
blob591747be516772d2e169054c532cd242fb9bf9e6
1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47      the table of structures describing the locations of parts of a variable
48      and, for each physical register, a linked list describing its contents.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
53 register.
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
59 register in CODE:
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72    are emitted to appropriate positions in RTL code.  Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "varasm.h"
95 #include "stor-layout.h"
96 #include "gimple.h"
97 #include "tm_p.h"
98 #include "hard-reg-set.h"
99 #include "basic-block.h"
100 #include "flags.h"
101 #include "insn-config.h"
102 #include "reload.h"
103 #include "sbitmap.h"
104 #include "alloc-pool.h"
105 #include "fibheap.h"
106 #include "hash-table.h"
107 #include "regs.h"
108 #include "expr.h"
109 #include "tree-pass.h"
110 #include "bitmap.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "target.h"
115 #include "params.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "pointer-set.h"
119 #include "recog.h"
120 #include "tm_p.h"
121 #include "alias.h"
123 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
124 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE, /* Use location which is associated with a value. */
137 MO_VAL_LOC, /* Use location which appears in a debug insn. */
138 MO_VAL_SET, /* Set location associated with a value. */
139 MO_SET, /* Set location. */
140 MO_COPY, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER, /* Clobber location. */
143 MO_CALL, /* Call insn. */
144 MO_ADJUST /* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name[] = {
150 "MO_USE",
151 "MO_USE_NO_VAR",
152 "MO_VAL_USE",
153 "MO_VAL_LOC",
154 "MO_VAL_SET",
155 "MO_SET",
156 "MO_COPY",
157 "MO_CLOBBER",
158 "MO_CALL",
159 "MO_ADJUST"
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
165 enum emit_note_where
167 EMIT_NOTE_BEFORE_INSN,
168 EMIT_NOTE_AFTER_INSN,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
182 and MO_CLOBBER. */
183 rtx insn;
185 union {
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
192 rtx loc;
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust;
196 } u;
197 } micro_operation;
200 /* A declaration of a variable, or an RTL value being handled like a
201 declaration. */
202 typedef void *decl_or_value;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
205 static inline bool
206 dv_is_decl_p (decl_or_value dv)
208 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
211 /* Return true if a decl_or_value is a VALUE rtl. */
212 static inline bool
213 dv_is_value_p (decl_or_value dv)
215 return dv && !dv_is_decl_p (dv);
218 /* Return the decl in the decl_or_value. */
219 static inline tree
220 dv_as_decl (decl_or_value dv)
222 gcc_checking_assert (dv_is_decl_p (dv));
223 return (tree) dv;
226 /* Return the value in the decl_or_value. */
227 static inline rtx
228 dv_as_value (decl_or_value dv)
230 gcc_checking_assert (dv_is_value_p (dv));
231 return (rtx)dv;
234 /* Return the opaque pointer in the decl_or_value. */
235 static inline void *
236 dv_as_opaque (decl_or_value dv)
238 return dv;
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 chain is the best data structure. */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def *next;
251 /* The rtx of register. */
252 rtx loc;
254 /* The declaration corresponding to LOC. */
255 decl_or_value dv;
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset;
259 } *attrs;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def *next;
267 /* The location (REG, MEM or VALUE). */
268 rtx loc;
270 /* The "value" stored in this location. */
271 rtx set_src;
273 /* Initialized? */
274 enum var_init_status init;
275 } *location_chain;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE' s linked-list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
284 decl_or_value dv;
285 /* The dependency VALUE or DECL_DEBUG. */
286 rtx value;
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s *next;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s **pprev;
292 } loc_exp_dep;
295 /* This data structure holds information about the depth of a variable
296 expansion. */
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
301 operand. */
302 int complexity;
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
305 int entryvals;
306 } expand_depth;
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
310 struct onepart_aux
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep *backlinks;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
324 rtx from;
325 /* The depth of the cur_loc expression. */
326 expand_depth depth;
327 /* Dependencies actively used when expand FROM into cur_loc. */
328 vec<loc_exp_dep, va_heap, vl_embed> deps;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain;
337 /* Location which was last emitted to location list. */
338 rtx cur_loc;
340 union variable_aux
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux *onepaux;
347 } aux;
348 } variable_part;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
354 variables. */
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
358 NOT_ONEPART = 0,
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
360 ONEPART_VDECL = 1,
361 /* A DEBUG_EXPR_DECL. */
362 ONEPART_DEXPR = 2,
363 /* A VALUE. */
364 ONEPART_VALUE = 3
365 } onepart_enum_t;
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
372 decl_or_value dv;
374 /* Reference count. */
375 int refcount;
377 /* Number of variable parts. */
378 char n_var_parts;
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables;
387 /* The variable parts. */
388 variable_part var_part[1];
389 } *variable;
390 typedef const struct variable_def *const_variable;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
401 variable. */
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
414 #else
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
417 #endif
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
421 allocated. */
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
424 : NULL)
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
427 : NULL)
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
432 : NULL)
436 typedef unsigned int dvuid;
438 /* Return the uid of DV. */
440 static inline dvuid
441 dv_uid (decl_or_value dv)
443 if (dv_is_value_p (dv))
444 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
445 else
446 return DECL_UID (dv_as_decl (dv));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid)
454 return uid;
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv)
462 return dv_uid2hash (dv_uid (dv));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type;
472 typedef void compare_type;
473 static inline hashval_t hash (const value_type *);
474 static inline bool equal (const value_type *, const compare_type *);
475 static inline void remove (value_type *);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
481 inline hashval_t
482 variable_hasher::hash (const value_type *v)
484 return dv_htab_hash (v->dv);
487 /* Compare the declaration of variable X with declaration Y. */
489 inline bool
490 variable_hasher::equal (const value_type *v, const compare_type *y)
492 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
494 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
499 inline void
500 variable_hasher::remove (value_type *var)
502 variable_htab_free (var);
505 typedef hash_table <variable_hasher> variable_table_type;
506 typedef variable_table_type::iterator variable_iterator_type;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
513 rtx insn;
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where;
518 /* The variables and values active at this point. */
519 variable_table_type vars;
520 } emit_note_data;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
527 int refcount;
529 /* Actual hash table. */
530 variable_table_type htab;
531 } *shared_hash;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs[FIRST_PSEUDO_REGISTER];
542 /* Variable locations. */
543 shared_hash vars;
545 /* Vars that is being traversed. */
546 shared_hash traversed_vars;
547 } dataflow_set;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec<micro_operation> mos;
556 /* The IN and OUT set for dataflow analysis. */
557 dataflow_set in;
558 dataflow_set out;
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
564 dataflow_set *permp;
566 /* Has the block been visited in DFS? */
567 bool visited;
569 /* Has the block been flooded in VTA? */
570 bool flooded;
572 } *variable_tracking_info;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool;
592 /* Changed variables, notes will be emitted for them. */
593 static variable_table_type changed_variables;
595 /* Shall notes be emitted? */
596 static bool emit_notes;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type dropped_values;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs = NULL;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg {
611 rtx outgoing;
612 rtx incoming;
613 } parm_reg_t;
616 /* Vector of windowed parameter registers, if any. */
617 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
618 #endif
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
625 HOST_WIDE_INT *);
626 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
627 HOST_WIDE_INT *);
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs *);
631 static void attrs_list_clear (attrs *);
632 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
633 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
634 static void attrs_list_copy (attrs *, attrs);
635 static void attrs_list_union (attrs *, attrs);
637 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
638 variable var, enum var_init_status);
639 static void vars_copy (variable_table_type, variable_table_type);
640 static tree var_debug_decl (tree);
641 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
642 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
643 enum var_init_status, rtx);
644 static void var_reg_delete (dataflow_set *, rtx, bool);
645 static void var_regno_delete (dataflow_set *, int);
646 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
647 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
648 enum var_init_status, rtx);
649 static void var_mem_delete (dataflow_set *, rtx, bool);
651 static void dataflow_set_init (dataflow_set *);
652 static void dataflow_set_clear (dataflow_set *);
653 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set *, dataflow_set *);
656 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type);
657 static bool canon_value_cmp (rtx, rtx);
658 static int loc_cmp (rtx, rtx);
659 static bool variable_part_different_p (variable_part *, variable_part *);
660 static bool onepart_variable_different_p (variable, variable);
661 static bool variable_different_p (variable, variable);
662 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
663 static void dataflow_set_destroy (dataflow_set *);
665 static bool contains_symbol_ref (rtx);
666 static bool track_expr_p (tree, bool);
667 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
668 static int add_uses (rtx *, void *);
669 static void add_uses_1 (rtx *, void *);
670 static void add_stores (rtx, const_rtx, void *);
671 static bool compute_bb_dataflow (basic_block);
672 static bool vt_find_locations (void);
674 static void dump_attrs_list (attrs);
675 static void dump_var (variable);
676 static void dump_vars (variable_table_type);
677 static void dump_dataflow_set (dataflow_set *);
678 static void dump_dataflow_sets (void);
680 static void set_dv_changed (decl_or_value, bool);
681 static void variable_was_changed (variable, dataflow_set *);
682 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
683 decl_or_value, HOST_WIDE_INT,
684 enum var_init_status, rtx);
685 static void set_variable_part (dataflow_set *, rtx,
686 decl_or_value, HOST_WIDE_INT,
687 enum var_init_status, rtx, enum insert_option);
688 static variable_def **clobber_slot_part (dataflow_set *, rtx,
689 variable_def **, HOST_WIDE_INT, rtx);
690 static void clobber_variable_part (dataflow_set *, rtx,
691 decl_or_value, HOST_WIDE_INT, rtx);
692 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
693 HOST_WIDE_INT);
694 static void delete_variable_part (dataflow_set *, rtx,
695 decl_or_value, HOST_WIDE_INT);
696 static void emit_notes_in_bb (basic_block, dataflow_set *);
697 static void vt_emit_notes (void);
699 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
700 static void vt_add_function_parameters (void);
701 static bool vt_initialize (void);
702 static void vt_finalize (void);
704 /* Given a SET, calculate the amount of stack adjustment it contains
705 PRE- and POST-modifying stack pointer.
706 This function is similar to stack_adjust_offset. */
708 static void
709 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
710 HOST_WIDE_INT *post)
712 rtx src = SET_SRC (pattern);
713 rtx dest = SET_DEST (pattern);
714 enum rtx_code code;
716 if (dest == stack_pointer_rtx)
718 /* (set (reg sp) (plus (reg sp) (const_int))) */
719 code = GET_CODE (src);
720 if (! (code == PLUS || code == MINUS)
721 || XEXP (src, 0) != stack_pointer_rtx
722 || !CONST_INT_P (XEXP (src, 1)))
723 return;
725 if (code == MINUS)
726 *post += INTVAL (XEXP (src, 1));
727 else
728 *post -= INTVAL (XEXP (src, 1));
730 else if (MEM_P (dest))
732 /* (set (mem (pre_dec (reg sp))) (foo)) */
733 src = XEXP (dest, 0);
734 code = GET_CODE (src);
736 switch (code)
738 case PRE_MODIFY:
739 case POST_MODIFY:
740 if (XEXP (src, 0) == stack_pointer_rtx)
742 rtx val = XEXP (XEXP (src, 1), 1);
743 /* We handle only adjustments by constant amount. */
744 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
745 CONST_INT_P (val));
747 if (code == PRE_MODIFY)
748 *pre -= INTVAL (val);
749 else
750 *post -= INTVAL (val);
751 break;
753 return;
755 case PRE_DEC:
756 if (XEXP (src, 0) == stack_pointer_rtx)
758 *pre += GET_MODE_SIZE (GET_MODE (dest));
759 break;
761 return;
763 case POST_DEC:
764 if (XEXP (src, 0) == stack_pointer_rtx)
766 *post += GET_MODE_SIZE (GET_MODE (dest));
767 break;
769 return;
771 case PRE_INC:
772 if (XEXP (src, 0) == stack_pointer_rtx)
774 *pre -= GET_MODE_SIZE (GET_MODE (dest));
775 break;
777 return;
779 case POST_INC:
780 if (XEXP (src, 0) == stack_pointer_rtx)
782 *post -= GET_MODE_SIZE (GET_MODE (dest));
783 break;
785 return;
787 default:
788 return;
793 /* Given an INSN, calculate the amount of stack adjustment it contains
794 PRE- and POST-modifying stack pointer. */
796 static void
797 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
798 HOST_WIDE_INT *post)
800 rtx pattern;
802 *pre = 0;
803 *post = 0;
805 pattern = PATTERN (insn);
806 if (RTX_FRAME_RELATED_P (insn))
808 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
809 if (expr)
810 pattern = XEXP (expr, 0);
813 if (GET_CODE (pattern) == SET)
814 stack_adjust_offset_pre_post (pattern, pre, post);
815 else if (GET_CODE (pattern) == PARALLEL
816 || GET_CODE (pattern) == SEQUENCE)
818 int i;
820 /* There may be stack adjustments inside compound insns. Search
821 for them. */
822 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
823 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
824 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
828 /* Compute stack adjustments for all blocks by traversing DFS tree.
829 Return true when the adjustments on all incoming edges are consistent.
830 Heavily borrowed from pre_and_rev_post_order_compute. */
832 static bool
833 vt_stack_adjustments (void)
835 edge_iterator *stack;
836 int sp;
838 /* Initialize entry block. */
839 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
840 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust =
841 INCOMING_FRAME_SP_OFFSET;
842 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust =
843 INCOMING_FRAME_SP_OFFSET;
845 /* Allocate stack for back-tracking up CFG. */
846 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
847 sp = 0;
849 /* Push the first edge on to the stack. */
850 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
852 while (sp)
854 edge_iterator ei;
855 basic_block src;
856 basic_block dest;
858 /* Look at the edge on the top of the stack. */
859 ei = stack[sp - 1];
860 src = ei_edge (ei)->src;
861 dest = ei_edge (ei)->dest;
863 /* Check if the edge destination has been visited yet. */
864 if (!VTI (dest)->visited)
866 rtx insn;
867 HOST_WIDE_INT pre, post, offset;
868 VTI (dest)->visited = true;
869 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
871 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
872 for (insn = BB_HEAD (dest);
873 insn != NEXT_INSN (BB_END (dest));
874 insn = NEXT_INSN (insn))
875 if (INSN_P (insn))
877 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
878 offset += pre + post;
881 VTI (dest)->out.stack_adjust = offset;
883 if (EDGE_COUNT (dest->succs) > 0)
884 /* Since the DEST node has been visited for the first
885 time, check its successors. */
886 stack[sp++] = ei_start (dest->succs);
888 else
890 /* Check whether the adjustments on the edges are the same. */
891 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
893 free (stack);
894 return false;
897 if (! ei_one_before_end_p (ei))
898 /* Go to the next edge. */
899 ei_next (&stack[sp - 1]);
900 else
901 /* Return to previous level if there are no more edges. */
902 sp--;
906 free (stack);
907 return true;
910 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
911 hard_frame_pointer_rtx is being mapped to it and offset for it. */
912 static rtx cfa_base_rtx;
913 static HOST_WIDE_INT cfa_base_offset;
915 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
916 or hard_frame_pointer_rtx. */
918 static inline rtx
919 compute_cfa_pointer (HOST_WIDE_INT adjustment)
921 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
924 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
925 or -1 if the replacement shouldn't be done. */
926 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
928 /* Data for adjust_mems callback. */
930 struct adjust_mem_data
932 bool store;
933 enum machine_mode mem_mode;
934 HOST_WIDE_INT stack_adjust;
935 rtx side_effects;
938 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
939 transformation of wider mode arithmetics to narrower mode,
940 -1 if it is suitable and subexpressions shouldn't be
941 traversed and 0 if it is suitable and subexpressions should
942 be traversed. Called through for_each_rtx. */
944 static int
945 use_narrower_mode_test (rtx *loc, void *data)
947 rtx subreg = (rtx) data;
949 if (CONSTANT_P (*loc))
950 return -1;
951 switch (GET_CODE (*loc))
953 case REG:
954 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
955 return 1;
956 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
957 *loc, subreg_lowpart_offset (GET_MODE (subreg),
958 GET_MODE (*loc))))
959 return 1;
960 return -1;
961 case PLUS:
962 case MINUS:
963 case MULT:
964 return 0;
965 case ASHIFT:
966 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
967 return 1;
968 else
969 return -1;
970 default:
971 return 1;
975 /* Transform X into narrower mode MODE from wider mode WMODE. */
977 static rtx
978 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
980 rtx op0, op1;
981 if (CONSTANT_P (x))
982 return lowpart_subreg (mode, x, wmode);
983 switch (GET_CODE (x))
985 case REG:
986 return lowpart_subreg (mode, x, wmode);
987 case PLUS:
988 case MINUS:
989 case MULT:
990 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
991 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
992 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
993 case ASHIFT:
994 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
995 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
996 default:
997 gcc_unreachable ();
1001 /* Helper function for adjusting used MEMs. */
/* Callback for simplify_replace_fn_rtx: rewrite LOC, replacing sp/fp with
   CFA-based expressions and stripping auto-inc/dec side effects (which are
   queued on AMD->side_effects for adjust_insn to re-add as separate sets).
   Returns the replacement rtx, or NULL_RTX to leave LOC alone.  */
1003 static rtx
1004 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1006 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1007 rtx mem, addr = loc, tem;
1008 enum machine_mode mem_mode_save;
1009 bool store_save;
1010 switch (GET_CODE (loc))
1012 case REG:
1013 /* Don't do any sp or fp replacements outside of MEM addresses
1014 on the LHS. */
1015 if (amd->mem_mode == VOIDmode && amd->store)
1016 return loc;
1017 if (loc == stack_pointer_rtx
1018 && !frame_pointer_needed
1019 && cfa_base_rtx)
1020 return compute_cfa_pointer (amd->stack_adjust);
1021 else if (loc == hard_frame_pointer_rtx
1022 && frame_pointer_needed
1023 && hard_frame_pointer_adjustment != -1
1024 && cfa_base_rtx)
1025 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1026 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1027 return loc;
1028 case MEM:
1029 mem = loc;
1030 if (!amd->store)
1032 mem = targetm.delegitimize_address (mem);
1033 if (mem != loc && !MEM_P (mem))
1034 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
/* Recurse into the address with store=false and the MEM's mode
   recorded, then restore the saved state.  */
1037 addr = XEXP (mem, 0);
1038 mem_mode_save = amd->mem_mode;
1039 amd->mem_mode = GET_MODE (mem);
1040 store_save = amd->store;
1041 amd->store = false;
1042 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1043 amd->store = store_save;
1044 amd->mem_mode = mem_mode_save;
1045 if (mem == loc)
1046 addr = targetm.delegitimize_address (addr);
1047 if (addr != XEXP (mem, 0))
1048 mem = replace_equiv_address_nv (mem, addr);
1049 if (!amd->store)
1050 mem = avoid_constant_pool_reference (mem);
1051 return mem;
1052 case PRE_INC:
1053 case PRE_DEC:
/* Pre-modify: the value used is the already-adjusted address.  */
1054 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1055 gen_int_mode (GET_CODE (loc) == PRE_INC
1056 ? GET_MODE_SIZE (amd->mem_mode)
1057 : -GET_MODE_SIZE (amd->mem_mode),
1058 GET_MODE (loc)));
/* Fall through to share the side-effect queuing below.  */
1059 case POST_INC:
1060 case POST_DEC:
1061 if (addr == loc)
1062 addr = XEXP (loc, 0);
1063 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1064 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
/* TEM is the post-adjustment value of the register; record the
   register update as a separate SET side effect.  */
1065 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1066 gen_int_mode ((GET_CODE (loc) == PRE_INC
1067 || GET_CODE (loc) == POST_INC)
1068 ? GET_MODE_SIZE (amd->mem_mode)
1069 : -GET_MODE_SIZE (amd->mem_mode),
1070 GET_MODE (loc)));
1071 amd->side_effects = alloc_EXPR_LIST (0,
1072 gen_rtx_SET (VOIDmode,
1073 XEXP (loc, 0),
1074 tem),
1075 amd->side_effects);
1076 return addr;
1077 case PRE_MODIFY:
/* Pre-modify: the used address is the new value.  */
1078 addr = XEXP (loc, 1);
/* Fall through.  */
1079 case POST_MODIFY:
1080 if (addr == loc)
1081 addr = XEXP (loc, 0);
1082 gcc_assert (amd->mem_mode != VOIDmode);
1083 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1084 amd->side_effects = alloc_EXPR_LIST (0,
1085 gen_rtx_SET (VOIDmode,
1086 XEXP (loc, 0),
1087 XEXP (loc, 1)),
1088 amd->side_effects);
1089 return addr;
1090 case SUBREG:
1091 /* First try without delegitimization of whole MEMs and
1092 avoid_constant_pool_reference, which is more likely to succeed. */
1093 store_save = amd->store;
1094 amd->store = true;
1095 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1096 data);
1097 amd->store = store_save;
1098 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1099 if (mem == SUBREG_REG (loc))
1101 tem = loc;
1102 goto finish_subreg;
1104 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1105 GET_MODE (SUBREG_REG (loc)),
1106 SUBREG_BYTE (loc));
1107 if (tem)
1108 goto finish_subreg;
1109 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1110 GET_MODE (SUBREG_REG (loc)),
1111 SUBREG_BYTE (loc));
1112 if (tem == NULL_RTX)
1113 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1114 finish_subreg:
/* A lowpart SUBREG of a narrowable integer expression can be
   rewritten to do the arithmetic directly in the narrow mode.  */
1115 if (MAY_HAVE_DEBUG_INSNS
1116 && GET_CODE (tem) == SUBREG
1117 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1118 || GET_CODE (SUBREG_REG (tem)) == MINUS
1119 || GET_CODE (SUBREG_REG (tem)) == MULT
1120 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1121 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1122 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1123 && GET_MODE_SIZE (GET_MODE (tem))
1124 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1125 && subreg_lowpart_p (tem)
1126 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1127 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1128 GET_MODE (SUBREG_REG (tem)));
1129 return tem;
1130 case ASM_OPERANDS:
1131 /* Don't do any replacements in second and following
1132 ASM_OPERANDS of inline-asm with multiple sets.
1133 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1134 and ASM_OPERANDS_LABEL_VEC need to be equal between
1135 all the ASM_OPERANDs in the insn and adjust_insn will
1136 fix this up. */
1137 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1138 return loc;
1139 break;
1140 default:
1141 break;
1143 return NULL_RTX;
1146 /* Helper function for replacement of uses. */
/* note_uses callback: rewrite *X via adjust_mems and queue the change
   (in_group, so all changes for the insn are applied together).  */
1148 static void
1149 adjust_mem_uses (rtx *x, void *data)
1151 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1152 if (new_x != *x)
1153 validate_change (NULL_RTX, x, new_x, true);
1156 /* Helper function for replacement of stores. */
/* note_stores callback: only MEM destinations are rewritten here
   (register destinations are left for the use walk).  */
1158 static void
1159 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1161 if (MEM_P (loc))
1163 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1164 adjust_mems, data);
1165 if (new_dest != SET_DEST (expr))
/* EXPR is const here; cast away constness to queue the change.  */
1167 rtx xexpr = CONST_CAST_RTX (expr);
1168 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1173 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1174 replace them with their value in the insn and add the side-effects
1175 as other sets to the insn. */
1177 static void
1178 adjust_insn (basic_block bb, rtx insn)
1180 struct adjust_mem_data amd;
1181 rtx set;
1183 #ifdef HAVE_window_save
1184 /* If the target machine has an explicit window save instruction, the
1185 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1186 if (RTX_FRAME_RELATED_P (insn)
1187 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1189 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1190 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1191 parm_reg_t *p;
1193 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1195 XVECEXP (rtl, 0, i * 2)
1196 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1197 /* Do not clobber the attached DECL, but only the REG. */
1198 XVECEXP (rtl, 0, i * 2 + 1)
1199 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1200 gen_raw_REG (GET_MODE (p->outgoing),
1201 REGNO (p->outgoing)));
1204 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1205 return;
1207 #endif
/* Walk stores first (store=true), then uses, rewriting through
   adjust_mems; side effects of auto-inc/dec accumulate in AMD.  */
1209 amd.mem_mode = VOIDmode;
1210 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1211 amd.side_effects = NULL_RTX;
1213 amd.store = true;
1214 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1216 amd.store = false;
1217 if (GET_CODE (PATTERN (insn)) == PARALLEL
1218 && asm_noperands (PATTERN (insn)) > 0
1219 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1221 rtx body, set0;
1222 int i;
1224 /* inline-asm with multiple sets is tiny bit more complicated,
1225 because the 3 vectors in ASM_OPERANDS need to be shared between
1226 all ASM_OPERANDS in the instruction. adjust_mems will
1227 not touch ASM_OPERANDS other than the first one, asm_noperands
1228 test above needs to be called before that (otherwise it would fail)
1229 and afterwards this code fixes it up. */
1230 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1231 body = PATTERN (insn);
1232 set0 = XVECEXP (body, 0, 0);
1233 gcc_checking_assert (GET_CODE (set0) == SET
1234 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1235 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1236 for (i = 1; i < XVECLEN (body, 0); i++)
1237 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1238 break;
1239 else
1241 set = XVECEXP (body, 0, i);
1242 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1243 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1244 == i);
/* Re-share the three vectors from SET0 if this ASM_OPERANDS
   diverged from it during the rewrite above.  */
1245 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1246 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1247 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1248 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1249 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1250 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1252 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1253 ASM_OPERANDS_INPUT_VEC (newsrc)
1254 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1255 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1256 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1257 ASM_OPERANDS_LABEL_VEC (newsrc)
1258 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1259 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1263 else
1264 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1266 /* For read-only MEMs containing some constant, prefer those
1267 constants. */
1268 set = single_set (insn);
1269 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1271 rtx note = find_reg_equal_equiv_note (insn);
1273 if (note && CONSTANT_P (XEXP (note, 0)))
1274 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
/* Append the queued auto-inc/dec register updates as extra sets,
   wrapping the pattern in a (possibly enlarged) PARALLEL.  */
1277 if (amd.side_effects)
1279 rtx *pat, new_pat, s;
1280 int i, oldn, newn;
1282 pat = &PATTERN (insn);
1283 if (GET_CODE (*pat) == COND_EXEC)
1284 pat = &COND_EXEC_CODE (*pat);
1285 if (GET_CODE (*pat) == PARALLEL)
1286 oldn = XVECLEN (*pat, 0);
1287 else
1288 oldn = 1;
1289 for (s = amd.side_effects, newn = 0; s; newn++)
1290 s = XEXP (s, 1);
1291 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1292 if (GET_CODE (*pat) == PARALLEL)
1293 for (i = 0; i < oldn; i++)
1294 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1295 else
1296 XVECEXP (new_pat, 0, 0) = *pat;
1297 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1298 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1299 free_EXPR_LIST_list (&amd.side_effects);
1300 validate_change (NULL_RTX, pat, new_pat, true);
1304 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1305 static inline rtx
1306 dv_as_rtx (decl_or_value dv)
1308 tree decl;
1310 if (dv_is_value_p (dv))
1311 return dv_as_value (dv);
1313 decl = dv_as_decl (dv);
/* Only DEBUG_EXPR_DECLs have a usable RTL here.  */
1315 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1316 return DECL_RTL_KNOWN_SET (decl);
1319 /* Return nonzero if a decl_or_value must not have more than one
1320 variable part. The returned value discriminates among various
1321 kinds of one-part DVs according to enum onepart_enum. */
1322 static inline onepart_enum_t
1323 dv_onepart_p (decl_or_value dv)
1325 tree decl;
/* Without debug insns nothing is tracked as one-part.  */
1327 if (!MAY_HAVE_DEBUG_INSNS)
1328 return NOT_ONEPART;
1330 if (dv_is_value_p (dv))
1331 return ONEPART_VALUE;
1333 decl = dv_as_decl (dv);
1335 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1336 return ONEPART_DEXPR;
1338 if (target_for_debug_bind (decl) != NULL_TREE)
1339 return ONEPART_VDECL;
1341 return NOT_ONEPART;
1344 /* Return the variable pool to be used for a dv of type ONEPART. */
1345 static inline alloc_pool
1346 onepart_pool (onepart_enum_t onepart)
/* Any nonzero onepart kind uses valvar_pool; NOT_ONEPART uses var_pool.  */
1348 return onepart ? valvar_pool : var_pool;
1351 /* Build a decl_or_value out of a decl. */
1352 static inline decl_or_value
1353 dv_from_decl (tree decl)
1355 decl_or_value dv;
1356 dv = decl;
/* Checked build only: verify the tag discrimination holds.  */
1357 gcc_checking_assert (dv_is_decl_p (dv));
1358 return dv;
1361 /* Build a decl_or_value out of a value. */
1362 static inline decl_or_value
1363 dv_from_value (rtx value)
1365 decl_or_value dv;
1366 dv = value;
/* Checked build only: verify the tag discrimination holds.  */
1367 gcc_checking_assert (dv_is_value_p (dv));
1368 return dv;
1371 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1372 static inline decl_or_value
1373 dv_from_rtx (rtx x)
1375 decl_or_value dv;
1377 switch (GET_CODE (x))
1379 case DEBUG_EXPR:
1380 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
/* The decl's RTL must round-trip back to X.  */
1381 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1382 break;
1384 case VALUE:
1385 dv = dv_from_value (x);
1386 break;
1388 default:
1389 gcc_unreachable ();
1392 return dv;
1395 extern void debug_dv (decl_or_value dv);
/* Dump DV to stderr; intended for use from the debugger.  */
1397 DEBUG_FUNCTION void
1398 debug_dv (decl_or_value dv)
1400 if (dv_is_value_p (dv))
1401 debug_rtx (dv_as_value (dv));
1402 else
1403 debug_generic_stmt (dv_as_decl (dv));
1406 static void loc_exp_dep_clear (variable var);
1408 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1410 static void
1411 variable_htab_free (void *elem)
1413 int i;
1414 variable var = (variable) elem;
1415 location_chain node, next;
1417 gcc_checking_assert (var->refcount > 0);
/* Only actually free when the last reference goes away.  */
1419 var->refcount--;
1420 if (var->refcount > 0)
1421 return;
1423 for (i = 0; i < var->n_var_parts; i++)
1425 for (node = var->var_part[i].loc_chain; node; node = next)
1427 next = node->next;
1428 pool_free (loc_chain_pool, node);
1430 var->var_part[i].loc_chain = NULL;
/* Release one-part auxiliary data, if any.  */
1432 if (var->onepart && VAR_LOC_1PAUX (var))
1434 loc_exp_dep_clear (var);
1435 if (VAR_LOC_DEP_LST (var))
1436 VAR_LOC_DEP_LST (var)->pprev = NULL;
1437 XDELETE (VAR_LOC_1PAUX (var));
1438 /* These may be reused across functions, so reset
1439 e.g. NO_LOC_P. */
1440 if (var->onepart == ONEPART_DEXPR)
1441 set_dv_changed (var->dv, true);
1443 pool_free (onepart_pool (var->onepart), var);
1446 /* Initialize the set (array) SET of attrs to empty lists. */
1448 static void
1449 init_attrs_list_set (attrs *set)
1451 int i;
/* One (empty) list per hard register.  */
1453 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1454 set[i] = NULL;
1457 /* Make the list *LISTP empty, returning each node to attrs_pool. */
1459 static void
1460 attrs_list_clear (attrs *listp)
1462 attrs list, next;
1464 for (list = *listp; list; list = next)
1466 next = list->next;
1467 pool_free (attrs_pool, list);
1469 *listp = NULL;
1472 /* Return the node of LIST matching DV and OFFSET, or NULL if the
   pair is not a member of the LIST. */
1474 static attrs
1475 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1477 for (; list; list = list->next)
1478 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1479 return list;
1480 return NULL;
1483 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1485 static void
1486 attrs_list_insert (attrs *listp, decl_or_value dv,
1487 HOST_WIDE_INT offset, rtx loc)
1489 attrs list;
/* Prepend a freshly pooled node.  */
1491 list = (attrs) pool_alloc (attrs_pool);
1492 list->loc = loc;
1493 list->dv = dv;
1494 list->offset = offset;
1495 list->next = *listp;
1496 *listp = list;
1499 /* Copy all nodes from SRC and create a list *DSTP of the copies.
   Note the copies are prepended one by one, so *DSTP ends up in
   reverse order relative to SRC; callers do not rely on order. */
1501 static void
1502 attrs_list_copy (attrs *dstp, attrs src)
1504 attrs n;
1506 attrs_list_clear (dstp);
1507 for (; src; src = src->next)
1509 n = (attrs) pool_alloc (attrs_pool);
1510 n->loc = src->loc;
1511 n->dv = src->dv;
1512 n->offset = src->offset;
1513 n->next = *dstp;
1514 *dstp = n;
1518 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1520 static void
1521 attrs_list_union (attrs *dstp, attrs src)
1523 for (; src; src = src->next)
/* Membership is keyed on (dv, offset); duplicates are skipped.  */
1525 if (!attrs_list_member (*dstp, src->dv, src->offset))
1526 attrs_list_insert (dstp, src->dv, src->offset, src->loc)
1530 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1531 *DSTP. */
1533 static void
1534 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
/* *DSTP must start empty, so SRC needs no duplicate check.  */
1536 gcc_assert (!*dstp);
1537 for (; src; src = src->next)
1539 if (!dv_onepart_p (src->dv))
1540 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
/* SRC2 nodes may collide with what SRC already inserted.  */
1542 for (src = src2; src; src = src->next)
1544 if (!dv_onepart_p (src->dv)
1545 && !attrs_list_member (*dstp, src->dv, src->offset))
1546 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1550 /* Shared hashtable support. */
1552 /* Return true if VARS is shared (referenced by more than one set). */
1554 static inline bool
1555 shared_hash_shared (shared_hash vars)
1557 return vars->refcount > 1;
1560 /* Return the underlying hash table for VARS. */
1562 static inline variable_table_type
1563 shared_hash_htab (shared_hash vars)
1565 return vars->htab;
1568 /* Return true if VAR is shared, or maybe because VARS is shared. */
1570 static inline bool
1571 shared_var_p (variable var, shared_hash vars)
1573 /* Don't count an entry in the changed_variables table as a duplicate. */
1574 return ((var->refcount > 1 + (int) var->in_changed_variables)
1575 || shared_hash_shared (vars));
1578 /* Copy variables into a new hash table (copy-on-write unshare). */
1580 static shared_hash
1581 shared_hash_unshare (shared_hash vars)
1583 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1584 gcc_assert (vars->refcount > 1);
1585 new_vars->refcount = 1;
/* Slightly oversize the new table relative to the current count.  */
1586 new_vars->htab.create (vars->htab.elements () + 3);
1587 vars_copy (new_vars->htab, vars->htab);
/* Drop this holder's reference on the old table.  */
1588 vars->refcount--;
1589 return new_vars;
1592 /* Increment reference counter on VARS and return it. */
1594 static inline shared_hash
1595 shared_hash_copy (shared_hash vars)
1597 vars->refcount++;
1598 return vars;
1601 /* Decrement reference counter and destroy hash table if not shared
1602 anymore. */
1604 static void
1605 shared_hash_destroy (shared_hash vars)
1607 gcc_checking_assert (vars->refcount > 0);
1608 if (--vars->refcount == 0)
1610 vars->htab.dispose ();
1611 pool_free (shared_hash_pool, vars);
1615 /* Unshare *PVARS if shared and return slot for DV. If INS is
1616 INSERT, insert it if not already present. */
1618 static inline variable_def **
1619 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1620 hashval_t dvhash, enum insert_option ins)
/* Copy-on-write: make a private table before handing out a slot.  */
1622 if (shared_hash_shared (*pvars))
1623 *pvars = shared_hash_unshare (*pvars);
1624 return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins);
/* Convenience wrapper that computes DV's hash itself.  */
1627 static inline variable_def **
1628 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1629 enum insert_option ins)
1631 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1634 /* Return slot for DV, if it is already present in the hash table.
1635 If it is not present, insert it only if VARS is not shared, otherwise
1636 return NULL. */
1638 static inline variable_def **
1639 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1641 return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash,
1642 shared_hash_shared (vars)
1643 ? NO_INSERT : INSERT);
/* Convenience wrapper that computes DV's hash itself.  */
1646 static inline variable_def **
1647 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1649 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1652 /* Return slot for DV only if it is already present in the hash table. */
1654 static inline variable_def **
1655 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1656 hashval_t dvhash)
1658 return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT);
/* Convenience wrapper that computes DV's hash itself.  */
1661 static inline variable_def **
1662 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1664 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1667 /* Return variable for DV or NULL if not already present in the hash
1668 table. */
1670 static inline variable
1671 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1673 return shared_hash_htab (vars).find_with_hash (dv, dvhash);
/* Convenience wrapper that computes DV's hash itself.  */
1676 static inline variable
1677 shared_hash_find (shared_hash vars, decl_or_value dv)
1679 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1682 /* Return true if TVAL is better than CVAL as a canonical value. We
1683 choose lowest-numbered VALUEs, using the RTX address as a
1684 tie-breaker. The idea is to arrange them into a star topology,
1685 such that all of them are at most one step away from the canonical
1686 value, and the canonical value has backlinks to all of them, in
1687 addition to all the actual locations. We don't enforce this
1688 topology throughout the entire dataflow analysis, though.
 */
1691 static inline bool
1692 canon_value_cmp (rtx tval, rtx cval)
/* A NULL CVAL is always beaten; otherwise compare cselib uids.  */
1694 return !cval
1695 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
/* Set by canonicalize_* routines; true when the destination variable
   may still share structure after a union/intersection step.  */
1698 static bool dst_can_be_shared;
1700 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1702 static variable_def **
1703 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1704 enum var_init_status initialized)
1706 variable new_var;
1707 int i;
1709 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1710 new_var->dv = var->dv;
1711 new_var->refcount = 1;
/* The copy takes over one of VAR's references.  */
1712 var->refcount--;
1713 new_var->n_var_parts = var->n_var_parts;
1714 new_var->onepart = var->onepart;
1715 new_var->in_changed_variables = false;
1717 if (! flag_var_tracking_uninit)
1718 initialized = VAR_INIT_STATUS_INITIALIZED;
1720 for (i = 0; i < var->n_var_parts; i++)
1722 location_chain node;
1723 location_chain *nextp;
1725 if (i == 0 && var->onepart)
1727 /* One-part auxiliary data is only used while emitting
1728 notes, so propagate it to the new variable in the active
1729 dataflow set. If we're not emitting notes, this will be
1730 a no-op. */
1731 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1732 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1733 VAR_LOC_1PAUX (var) = NULL;
1735 else
1736 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
/* Deep-copy the location chain, keeping its order.  */
1737 nextp = &new_var->var_part[i].loc_chain;
1738 for (node = var->var_part[i].loc_chain; node; node = node->next)
1740 location_chain new_lc;
1742 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1743 new_lc->next = NULL;
/* Keep the stronger of the node's and the requested init status.  */
1744 if (node->init > initialized)
1745 new_lc->init = node->init;
1746 else
1747 new_lc->init = initialized;
1748 if (node->set_src && !(MEM_P (node->set_src)))
1749 new_lc->set_src = node->set_src;
1750 else
1751 new_lc->set_src = NULL;
1752 new_lc->loc = node->loc;
1754 *nextp = new_lc;
1755 nextp = &new_lc->next;
1758 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1761 dst_can_be_shared = false;
/* SLOT may be stale if the table was unshared or traversal uses a
   different table; recompute it before storing the copy.  */
1762 if (shared_hash_shared (set->vars))
1763 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1764 else if (set->traversed_vars && set->vars != set->traversed_vars)
1765 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1766 *slot = new_var;
/* Keep the changed_variables table pointing at the live copy.  */
1767 if (var->in_changed_variables)
1769 variable_def **cslot
1770 = changed_variables.find_slot_with_hash (var->dv,
1771 dv_htab_hash (var->dv), NO_INSERT);
1772 gcc_assert (*cslot == (void *) var);
1773 var->in_changed_variables = false;
1774 variable_htab_free (var);
1775 *cslot = new_var;
1776 new_var->in_changed_variables = true;
1778 return slot;
1781 /* Copy all variables from hash table SRC to hash table DST. */
1783 static void
1784 vars_copy (variable_table_type dst, variable_table_type src)
1786 variable_iterator_type hi;
1787 variable var;
1789 FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi)
1791 variable_def **dstp;
/* Variables are reference counted, not deep-copied.  */
1792 var->refcount++;
1793 dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv), INSERT);
1794 *dstp = var;
1798 /* Map a decl to its main debug decl. */
1800 static inline tree
1801 var_debug_decl (tree decl)
1803 if (decl && TREE_CODE (decl) == VAR_DECL
1804 && DECL_HAS_DEBUG_EXPR_P (decl))
1806 tree debugdecl = DECL_DEBUG_EXPR (decl);
/* DEBUG_EXPR may be a non-decl expression; only follow decls.  */
1807 if (DECL_P (debugdecl))
1808 decl = debugdecl;
1811 return decl;
1814 /* Set the register LOC to contain DV, OFFSET. */
1816 static void
1817 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1818 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1819 enum insert_option iopt)
1821 attrs node;
1822 bool decl_p = dv_is_decl_p (dv);
/* Track the main debug decl rather than the original decl.  */
1824 if (decl_p)
1825 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
/* Add a reg attribute unless (dv, offset) is already recorded.  */
1827 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1828 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1829 && node->offset == offset)
1830 break;
1831 if (!node)
1832 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1833 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1836 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1838 static void
1839 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1840 rtx set_src)
1842 tree decl = REG_EXPR (loc);
1843 HOST_WIDE_INT offset = REG_OFFSET (loc);
1845 var_reg_decl_set (set, loc, initialized,
1846 dv_from_decl (decl), offset, set_src, INSERT);
/* Return the recorded initialization status of LOC for DV in SET, or
   VAR_INIT_STATUS_UNKNOWN if no matching location is found.  */
1849 static enum var_init_status
1850 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1852 variable var;
1853 int i;
1854 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
/* Without -fvar-tracking-uninit everything counts as initialized.  */
1856 if (! flag_var_tracking_uninit)
1857 return VAR_INIT_STATUS_INITIALIZED;
1859 var = shared_hash_find (set->vars, dv);
1860 if (var)
1862 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1864 location_chain nextp;
1865 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1866 if (rtx_equal_p (nextp->loc, loc))
1868 ret_val = nextp->init;
1869 break;
1874 return ret_val;
1877 /* Delete current content of register LOC in dataflow set SET and set
1878 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1879 MODIFY is true, any other live copies of the same variable part are
1880 also deleted from the dataflow set, otherwise the variable part is
1881 assumed to be copied from another location holding the same
1882 part. */
1884 static void
1885 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1886 enum var_init_status initialized, rtx set_src)
1888 tree decl = REG_EXPR (loc);
1889 HOST_WIDE_INT offset = REG_OFFSET (loc);
1890 attrs node, next;
1891 attrs *nextp;
1893 decl = var_debug_decl (decl);
1895 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1896 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Drop every attribute of the register except the one matching
   (decl, offset), which is merely refreshed.  */
1898 nextp = &set->regs[REGNO (loc)];
1899 for (node = *nextp; node; node = next)
1901 next = node->next;
1902 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1904 delete_variable_part (set, node->loc, node->dv, node->offset);
1905 pool_free (attrs_pool, node);
1906 *nextp = next;
1908 else
1910 node->loc = loc;
1911 nextp = &node->next;
1914 if (modify)
1915 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1916 var_reg_set (set, loc, initialized, set_src);
1919 /* Delete the association of register LOC in dataflow set SET with any
1920 variables that aren't onepart. If CLOBBER is true, also delete any
1921 other live copies of the same variable part, and delete the
1922 association with onepart dvs too. */
1924 static void
1925 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1927 attrs *nextp = &set->regs[REGNO (loc)];
1928 attrs node, next;
1930 if (clobber)
1932 tree decl = REG_EXPR (loc);
1933 HOST_WIDE_INT offset = REG_OFFSET (loc);
1935 decl = var_debug_decl (decl);
1937 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Walk the register's attribute list, unlinking deleted nodes.  */
1940 for (node = *nextp; node; node = next)
1942 next = node->next;
1943 if (clobber || !dv_onepart_p (node->dv))
1945 delete_variable_part (set, node->loc, node->dv, node->offset);
1946 pool_free (attrs_pool, node);
1947 *nextp = next;
1949 else
1950 nextp = &node->next;
1954 /* Delete content of register with number REGNO in dataflow set SET. */
1956 static void
1957 var_regno_delete (dataflow_set *set, int regno)
1959 attrs *reg = &set->regs[regno];
1960 attrs node, next;
1962 for (node = *reg; node; node = next)
1964 next = node->next;
1965 delete_variable_part (set, node->loc, node->dv, node->offset);
1966 pool_free (attrs_pool, node);
1968 *reg = NULL;
1971 /* Return true if I is the negated value of a power of two. */
1972 static bool
1973 negative_power_of_two_p (HOST_WIDE_INT i)
/* Negate in unsigned arithmetic to avoid signed overflow, then use
   the x == (x & -x) single-bit test.  */
1975 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1976 return x == (x & -x);
1979 /* Strip constant offsets and alignments off of LOC. Return the base
1980 expression. */
1982 static rtx
1983 vt_get_canonicalize_base (rtx loc)
/* Peel (plus X C) and (and X -2^k) wrappers; the AND form only when
   its mask is a negated power of two, i.e. an alignment.  */
1985 while ((GET_CODE (loc) == PLUS
1986 || GET_CODE (loc) == AND)
1987 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1988 && (GET_CODE (loc) != AND
1989 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
1990 loc = XEXP (loc, 0);
1992 return loc;
1995 /* This caches canonicalized addresses for VALUEs, computed using
1996 information in the global cselib table. */
1997 static struct pointer_map_t *global_get_addr_cache;
1999 /* This caches canonicalized addresses for VALUEs, computed using
2000 information from the global cache and information pertaining to a
2001 basic block being analyzed. */
2002 static struct pointer_map_t *local_get_addr_cache;
2004 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2006 /* Return the canonical address for LOC, that must be a VALUE, using a
2007 cached global equivalence or computing it and storing it in the
2008 global cache. */
2010 static rtx
2011 get_addr_from_global_cache (rtx const loc)
2013 rtx x;
2014 void **slot;
2016 gcc_checking_assert (GET_CODE (loc) == VALUE);
2018 slot = pointer_map_insert (global_get_addr_cache, loc);
2019 if (*slot)
2020 return (rtx)*slot;
2022 x = canon_rtx (get_addr (loc));
2024 /* Tentative, avoiding infinite recursion. */
2025 *slot = x;
2027 if (x != loc)
/* Recursively canonicalize (SET is NULL: global info only).  */
2029 rtx nx = vt_canonicalize_addr (NULL, x);
2030 if (nx != x)
2032 /* The table may have moved during recursion, recompute
2033 SLOT. */
2034 slot = pointer_map_contains (global_get_addr_cache, loc);
2035 *slot = x = nx;
2039 return x;
2042 /* Return the canonical address for LOC, that must be a VALUE, using a
2043 cached local equivalence or computing it and storing it in the
2044 local cache. */
2046 static rtx
2047 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2049 rtx x;
2050 void **slot;
2051 decl_or_value dv;
2052 variable var;
2053 location_chain l;
2055 gcc_checking_assert (GET_CODE (loc) == VALUE);
2057 slot = pointer_map_insert (local_get_addr_cache, loc);
2058 if (*slot)
2059 return (rtx)*slot;
/* Start from the global canonicalization.  */
2061 x = get_addr_from_global_cache (loc);
2063 /* Tentative, avoiding infinite recursion. */
2064 *slot = x;
2066 /* Recurse to cache local expansion of X, or if we need to search
2067 for a VALUE in the expansion. */
2068 if (x != loc)
2070 rtx nx = vt_canonicalize_addr (set, x);
2071 if (nx != x)
/* SLOT may be stale after recursion; look it up again.  */
2073 slot = pointer_map_contains (local_get_addr_cache, loc);
2074 *slot = x = nx;
2076 return x;
/* X is still LOC itself; consult the dataflow set for a better
   equivalent among its recorded locations.  */
2079 dv = dv_from_rtx (x);
2080 var = shared_hash_find (set->vars, dv);
2081 if (!var)
2082 return x;
2084 /* Look for an improved equivalent expression. */
2085 for (l = var->var_part[0].loc_chain; l; l = l->next)
2087 rtx base = vt_get_canonicalize_base (l->loc);
2088 if (GET_CODE (base) == VALUE
2089 && canon_value_cmp (base, loc))
2091 rtx nx = vt_canonicalize_addr (set, l->loc);
2092 if (x != nx)
2094 slot = pointer_map_contains (local_get_addr_cache, loc);
2095 *slot = x = nx;
2097 break;
2101 return x;
2104 /* Canonicalize LOC using equivalences from SET in addition to those
2105 in the cselib static table. It expects a VALUE-based expression,
2106 and it will only substitute VALUEs with other VALUEs or
2107 function-global equivalences, so that, if two addresses have base
2108 VALUEs that are locally or globally related in ways that
2109 memrefs_conflict_p cares about, they will both canonicalize to
2110 expressions that have the same base VALUE.
2112 The use of VALUEs as canonical base addresses enables the canonical
2113 RTXs to remain unchanged globally, if they resolve to a constant,
2114 or throughout a basic block otherwise, so that they can be cached
2115 and the cache needs not be invalidated when REGs, MEMs or such
2116 change. */
2118 static rtx
2119 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2121 HOST_WIDE_INT ofst = 0;
2122 enum machine_mode mode = GET_MODE (oloc);
2123 rtx loc = oloc;
2124 rtx x;
2125 bool retry = true;
2127 while (retry)
/* Peel constant offsets into OFST so the base alone is rewritten.  */
2129 while (GET_CODE (loc) == PLUS
2130 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2132 ofst += INTVAL (XEXP (loc, 1));
2133 loc = XEXP (loc, 0);
2136 /* Alignment operations can't normally be combined, so just
2137 canonicalize the base and we're done. We'll normally have
2138 only one stack alignment anyway. */
2139 if (GET_CODE (loc) == AND
2140 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2141 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2143 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2144 if (x != XEXP (loc, 0))
2145 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2146 retry = false;
/* VALUE bases resolve through the (local or global) caches; the
   result is final, modulo one more round of offset folding.  */
2149 if (GET_CODE (loc) == VALUE)
2151 if (set)
2152 loc = get_addr_from_local_cache (set, loc);
2153 else
2154 loc = get_addr_from_global_cache (loc);
2156 /* Consolidate plus_constants. */
2157 while (ofst && GET_CODE (loc) == PLUS
2158 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2160 ofst += INTVAL (XEXP (loc, 1));
2161 loc = XEXP (loc, 0);
2164 retry = false;
2166 else
/* Otherwise iterate canon_rtx until it reaches a fixed point.  */
2168 x = canon_rtx (loc);
2169 if (retry)
2170 retry = (x != loc);
2171 loc = x;
2175 /* Add OFST back in. */
2176 if (ofst)
2178 /* Don't build new RTL if we can help it. */
2179 if (GET_CODE (oloc) == PLUS
2180 && XEXP (oloc, 0) == loc
2181 && INTVAL (XEXP (oloc, 1)) == ofst)
2182 return oloc;
2184 loc = plus_constant (mode, loc, ofst);
2187 return loc;
2190 /* Return true iff there's a true dependence between MLOC and LOC.
2191 MADDR must be a canonicalized version of MLOC's address. */
2193 static inline bool
2194 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2196 if (GET_CODE (loc) != MEM)
2197 return false;
/* Canonicalize LOC's address the same way before the alias query.  */
2199 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2200 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2201 return false;
2203 return true;
2206 /* Hold parameters for the hashtab traversal function
2207 drop_overlapping_mem_locs, see below. */
struct overlapping_mems
{
  /* The dataflow set being traversed.  */
  dataflow_set *set;
  /* LOC is the (canonicalized) MEM being stored to; ADDR is the
     canonicalized form of LOC's address.  */
  rtx loc, addr;
};
2215 /* Remove all MEMs that overlap with COMS->LOC from the location list
2216 of a hash table entry for a value. COMS->ADDR must be a
2217 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2218 canonicalized itself. */
static int
drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable var = *slot;

  /* Only VALUE-bound one-part entries can hold MEM locations that
     alias the store; other entries are left alone.  */
  if (var->onepart == ONEPART_VALUE)
    {
      location_chain loc, *locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* First scan without modifying: if nothing overlaps, we
	     avoid unsharing the variable at all.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      /* Remember the location last emitted for this variable, so we
	 can tell whether removing a node requires re-emission.  */
      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Unlink and free every chain node whose MEM truly depends on
	 the stored-to MLOC.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  pool_free (loc_chain_pool, loc);
	}

      if (!var->var_part[0].loc_chain)
	{
	  /* The chain is now empty: the variable lost all locations.  */
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  /* Continue the hash-table traversal.  */
  return 1;
}
2289 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
static void
clobber_overlapping_mems (dataflow_set *set, rtx loc)
{
  struct overlapping_mems coms;

  gcc_checking_assert (GET_CODE (loc) == MEM);

  /* Precompute the canonical MEM and canonical address once; the
     traversal callback reuses them for every dependence test.  */
  coms.set = set;
  coms.loc = canon_rtx (loc);
  coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));

  /* Mark the table as being traversed so nested operations know not
     to resize/unshare it underneath us.  */
  set->traversed_vars = set->vars;
  shared_hash_htab (set->vars)
    .traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
  set->traversed_vars = NULL;
}
2308 /* Set the location of DV, OFFSET as the MEM LOC. */
2310 static void
2311 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2312 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2313 enum insert_option iopt)
2315 if (dv_is_decl_p (dv))
2316 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2318 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2321 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2322 SET to LOC.
2323 Adjust the address first if it is stack pointer based. */
2325 static void
2326 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2327 rtx set_src)
2329 tree decl = MEM_EXPR (loc);
2330 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2332 var_mem_decl_set (set, loc, initialized,
2333 dv_from_decl (decl), offset, set_src, INSERT);
2336 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2337 dataflow set SET to LOC. If MODIFY is true, any other live copies
2338 of the same variable part are also deleted from the dataflow set,
2339 otherwise the variable part is assumed to be copied from another
2340 location holding the same part.
2341 Adjust the address first if it is stack pointer based. */
2343 static void
2344 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2345 enum var_init_status initialized, rtx set_src)
2347 tree decl = MEM_EXPR (loc);
2348 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2350 clobber_overlapping_mems (set, loc);
2351 decl = var_debug_decl (decl);
2353 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2354 initialized = get_init_value (set, loc, dv_from_decl (decl));
2356 if (modify)
2357 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2358 var_mem_set (set, loc, initialized, set_src);
2361 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2362 true, also delete any other live copies of the same variable part.
2363 Adjust the address first if it is stack pointer based. */
2365 static void
2366 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2368 tree decl = MEM_EXPR (loc);
2369 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2371 clobber_overlapping_mems (set, loc);
2372 decl = var_debug_decl (decl);
2373 if (clobber)
2374 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2375 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2378 /* Return true if LOC should not be expanded for location expressions,
2379 or used in them. */
2381 static inline bool
2382 unsuitable_loc (rtx loc)
2384 switch (GET_CODE (loc))
2386 case PC:
2387 case SCRATCH:
2388 case CC0:
2389 case ASM_INPUT:
2390 case ASM_OPERANDS:
2391 return true;
2393 default:
2394 return false;
2398 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2399 bound to it. */
static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      /* A modified register drops all prior bindings first.  */
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      /* Follow a leading VALUE to its canonical value's locations.  */
      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      /* Only record the MEM if cselib does not already know it as a
	 static location of the value.  */
      if (!l)
	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  */
      gcc_unreachable ();
      /* NOTE(review): this call is dead code behind gcc_unreachable;
	 kept as in the original source.  */
      set_variable_part (set, loc, dv_from_value (val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}
2445 /* Bind a value to a location it was just stored in. If MODIFIED
2446 holds, assume the location was modified, detaching it from any
2447 values bound to it. */
2449 static void
2450 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2452 cselib_val *v = CSELIB_VAL_PTR (val);
2454 gcc_assert (cselib_preserved_value_p (v));
2456 if (dump_file)
2458 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2459 print_inline_rtx (dump_file, loc, 0);
2460 fprintf (dump_file, " evaluates to ");
2461 print_inline_rtx (dump_file, val, 0);
2462 if (v->locs)
2464 struct elt_loc_list *l;
2465 for (l = v->locs; l; l = l->next)
2467 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2468 print_inline_rtx (dump_file, l->loc, 0);
2471 fprintf (dump_file, "\n");
2474 gcc_checking_assert (!unsuitable_loc (loc));
2476 val_bind (set, val, loc, modified);
2479 /* Clear (canonical address) slots that reference X. */
2481 static bool
2482 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED,
2483 void **slot, void *x)
2485 if (vt_get_canonicalize_base ((rtx)*slot) == x)
2486 *slot = NULL;
2487 return true;
2490 /* Reset this node, detaching all its equivalences. Return the slot
2491 in the variable hash table that holds dv, if there is one. */
static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable var = shared_hash_find (set->vars, dv) ;
  location_chain node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  gcc_assert (var->n_var_parts == 1);

  if (var->onepart == ONEPART_VALUE)
    {
      rtx x = dv_as_value (dv);
      void **slot;

      /* Relationships in the global cache don't change, so reset the
	 local cache entry only.  */
      slot = pointer_map_contains (local_get_addr_cache, x);
      if (slot)
	{
	  /* If the value resolved back to itself, odds are that other
	     values may have cached it too.  These entries now refer
	     to the old X, so detach them too.  Entries that used the
	     old X but resolved to something else remain ok as long as
	     that something else isn't also reset.  */
	  if (*slot == x)
	    pointer_map_traverse (local_get_addr_cache,
				  local_get_addr_clear_given_value, x);
	  *slot = NULL;
	}
    }

  /* Pick the most-canonical VALUE in the chain as the new hub.  */
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (cval);

      /* Keep the remaining values connected, accummulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);
}
2576 /* Find the values in a given location and map the val to another
2577 value, if it is unique, or add the location as one holding the
2578 value. */
static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
{
  decl_or_value dv = dv_from_value (val);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (dump_file, "%i: ", INSN_UID (insn));
      else
	fprintf (dump_file, "head: ");
      print_inline_rtx (dump_file, val, 0);
      fputs (" is at ", dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc ('\n', dump_file);
    }

  /* Detach VAL's stale equivalences before recording the new one.  */
  val_reset (set, dv);

  gcc_checking_assert (!unsuitable_loc (loc));

  if (REG_P (loc))
    {
      attrs node, found = NULL;

      /* Look for same-mode VALUEs already bound to this register and
	 cross-link VAL with each of them.  */
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	       we just started sharing the location lists?  Maybe a
	       circular list ending at the value itself or some
	       such.  */
	    set_variable_part (set, dv_as_value (node->dv),
			       dv_from_value (val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (found)
	return;
    }
  /* ??? Attempt to find and merge equivalent MEMs or other
     expressions too.  */

  val_bind (set, val, loc, false);
}
2633 /* Initialize dataflow set SET to be empty.
2634 VARS_SIZE is the initial size of hash table VARS. */
2636 static void
2637 dataflow_set_init (dataflow_set *set)
2639 init_attrs_list_set (set->regs);
2640 set->vars = shared_hash_copy (empty_shared_hash);
2641 set->stack_adjust = 0;
2642 set->traversed_vars = NULL;
2645 /* Delete the contents of dataflow set SET. */
2647 static void
2648 dataflow_set_clear (dataflow_set *set)
2650 int i;
2652 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2653 attrs_list_clear (&set->regs[i]);
2655 shared_hash_destroy (set->vars);
2656 set->vars = shared_hash_copy (empty_shared_hash);
2659 /* Copy the contents of dataflow set SRC to DST. */
2661 static void
2662 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2664 int i;
2666 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2667 attrs_list_copy (&dst->regs[i], src->regs[i]);
2669 shared_hash_destroy (dst->vars);
2670 dst->vars = shared_hash_copy (src->vars);
2671 dst->stack_adjust = src->stack_adjust;
/* Information for merging lists of locations for a given offset of
   variable.  */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain lc;

  /* The sum of positions in the input chains.  */
  int pos;

  /* The position in the chain of DST dataflow set.  */
  int pos_dst;
};

/* Buffer for location list sorting and its allocated size.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;
2692 /* Compare function for qsort, order the structures by POS element. */
2694 static int
2695 variable_union_info_cmp_pos (const void *n1, const void *n2)
2697 const struct variable_union_info *const i1 =
2698 (const struct variable_union_info *) n1;
2699 const struct variable_union_info *const i2 =
2700 ( const struct variable_union_info *) n2;
2702 if (i1->pos != i2->pos)
2703 return i1->pos - i2->pos;
2705 return (i1->pos_dst - i2->pos_dst);
2708 /* Compute union of location parts of variable *SLOT and the same variable
2709 from hash table DATA. Compute "sorted" union of the location chains
2710 for common offsets, i.e. the locations of a variable part are sorted by
2711 a priority where the priority is the sum of the positions in the 2 chains
2712 (if a location is only in one list the position in the second list is
2713 defined to be larger than the length of the chains).
2714 When we are updating the location parts the newest location is in the
2715 beginning of the chain, so when we do the described "sorted" union
2716 we keep the newest locations in the beginning. */
static int
variable_union (variable src, dataflow_set *set)
{
  variable dst;
  variable_def **dstp;
  int i, j, k;

  dstp = shared_hash_find_slot (set->vars, src->dv);
  if (!dstp || !*dstp)
    {
      /* SRC is not yet in SET: just share SRC's entry.  */
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain *nodep, dnode, snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

      /* Merge the sorted SRC chain into the sorted DST chain,
	 restarting from the head if DST had to be unshared.  */
    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      while (snode)
	{
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;

	  if (r > 0)
	    {
	      /* SNODE->loc is missing from DST: insert it here.  */
	      location_chain nnode;

	      if (shared_var_p (dst, set->vars))
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_INITIALIZED);
		  dst = *dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      /* MEM set_srcs are not propagated across blocks.  */
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
    {
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
      dst = *dstp;
    }

  /* Merge the parts from highest offset down, filling DST's parts
     array in place from the back.  */
  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

  for (k--; k >= 0; k--)
    {
      location_chain node, node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (dst, set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      if (node2->init < node->init)
			node2->init = node->init;
		      break;
		    }
		}
	      if (node || node2)
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable)*dstp;
		}
	    }

	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	      node2 = dstnode;
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain new_node;

		    /* Copy the location from SRC.  */
		    new_node = (location_chain) pool_alloc (loc_chain_pool);
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      /* Grow the sorting buffer if needed.  */
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain new_node;

		      /* Copy the location from SRC.  */
		      new_node = (location_chain) pool_alloc (loc_chain_pool);
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special case still very common case.  For dst_l == 2
		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  /* Offset only present in DST: keep DST's part as-is.  */
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain *nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain new_lc;

	      new_lc = (location_chain) pool_alloc (loc_chain_pool);
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      dst->var_part[k].cur_loc = NULL;
    }

  /* Propagate the better (larger) initialization status for locations
     present in both sets.  */
  if (flag_var_tracking_uninit)
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain node, node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}
3085 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3087 static void
3088 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3090 int i;
3092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3093 attrs_list_union (&dst->regs[i], src->regs[i]);
3095 if (dst->vars == empty_shared_hash)
3097 shared_hash_destroy (dst->vars);
3098 dst->vars = shared_hash_copy (src->vars);
3100 else
3102 variable_iterator_type hi;
3103 variable var;
3105 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars),
3106 var, variable, hi)
3107 variable_union (var, dst);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3126 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3127 user DECLs, this means they're in changed_variables. Values and
3128 debug exprs may be left with this flag set if no user variable
3129 requires them to be evaluated. */
static inline void
set_dv_changed (decl_or_value dv, bool newv)
{
  switch (dv_onepart_p (dv))
    {
    case ONEPART_VALUE:
      /* Marking a value changed also clears its no-location flag.  */
      if (newv)
	NO_LOC_P (dv_as_value (dv)) = false;
      VALUE_CHANGED (dv_as_value (dv)) = newv;
      break;

    case ONEPART_DEXPR:
      if (newv)
	NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
      /* Fall through...  */

    default:
      DECL_CHANGED (dv_as_decl (dv)) = newv;
      break;
    }
}
3153 /* Return true if DV needs to have its cur_loc recomputed. */
3155 static inline bool
3156 dv_changed_p (decl_or_value dv)
3158 return (dv_is_value_p (dv)
3159 ? VALUE_CHANGED (dv_as_value (dv))
3160 : DECL_CHANGED (dv_as_decl (dv)));
3163 /* Return a location list node whose loc is rtx_equal to LOC, in the
3164 location list of a one-part variable or value VAR, or in that of
3165 any values recursively mentioned in the location lists. VARS must
3166 be in star-canonical form. */
static location_chain
find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
{
  location_chain node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (loc != dv_as_opaque (var->dv));

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable rvar;

      if (GET_CODE (node->loc) != loc_code)
	{
	  /* Different code: only VALUE nodes are worth recursing into.  */
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (var->dv)
	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      /* A canonical VALUE node must be the sole, first entry.  */
      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      /* Recurse into the canonical value's own location list.  */
      dv = dv_from_value (node->loc);
      rvar = vars.find_with_hash (dv, dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}
3235 /* Hash table iteration argument passed to variable_merge. */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;
  /* The set that we're iterating in.  */
  dataflow_set *cur;
  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;
  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
3248 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3249 loc_cmp order, and it is maintained as such. */
3251 static void
3252 insert_into_intersection (location_chain *nodep, rtx loc,
3253 enum var_init_status status)
3255 location_chain node;
3256 int r;
3258 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3259 if ((r = loc_cmp (node->loc, loc)) == 0)
3261 node->init = MIN (node->init, status);
3262 return;
3264 else if (r > 0)
3265 break;
3267 node = (location_chain) pool_alloc (loc_chain_pool);
3269 node->loc = loc;
3270 node->set_src = NULL;
3271 node->init = status;
3272 node->next = *nodep;
3273 *nodep = node;
3276 /* Insert in DEST the intersection of the locations present in both
3277 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3278 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3279 DSM->dst. */
static void
intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
		      location_chain s1node, variable s2var)
{
  dataflow_set *s1set = dsm->cur;
  dataflow_set *s2set = dsm->src;
  location_chain found;

  if (s2var)
    {
      location_chain s2node;

      gcc_checking_assert (s2var->onepart);

      if (s2var->n_var_parts)
	{
	  s2node = s2var->var_part[0].loc_chain;

	  /* Fast path: walk both sorted chains in lockstep while the
	     entries are pointer-identical.  */
	  for (; s1node && s2node;
	       s1node = s1node->next, s2node = s2node->next)
	    if (s1node->loc != s2node->loc)
	      break;
	    else if (s1node->loc == val)
	      continue;
	    else
	      insert_into_intersection (dest, s1node->loc,
					MIN (s1node->init, s2node->init));
	}
    }

  /* Slow path: look up each remaining S1 location in S2's (possibly
     indirect) location lists.  */
  for (; s1node; s1node = s1node->next)
    {
      if (s1node->loc == val)
	continue;

      if ((found = find_loc_in_1pdv (s1node->loc, s2var,
				     shared_hash_htab (s2set->vars))))
	{
	  insert_into_intersection (dest, s1node->loc,
				    MIN (s1node->init, found->init));
	  continue;
	}

      /* For VALUE locations not directly present in S2, recurse into
	 the value's own chain in S1, guarding against cycles.  */
      if (GET_CODE (s1node->loc) == VALUE
	  && !VALUE_RECURSED_INTO (s1node->loc))
	{
	  decl_or_value dv = dv_from_value (s1node->loc);
	  variable svar = shared_hash_find (s1set->vars, dv);
	  if (svar)
	    {
	      if (svar->n_var_parts == 1)
		{
		  VALUE_RECURSED_INTO (s1node->loc) = true;
		  intersect_loc_chains (val, dest, dsm,
					svar->var_part[0].loc_chain,
					s2var);
		  VALUE_RECURSED_INTO (s1node->loc) = false;
		}
	    }
	}

      /* ??? gotta look in cselib_val locations too.  */

      /* ??? if the location is equivalent to any location in src,
	 searched recursively

	   add to dst the values needed to represent the equivalence

     telling whether locations S is equivalent to another dv's
     location list:

       for each location D in the list

         if S and D satisfy rtx_equal_p, then it is present

	 else if D is a value, recurse without cycles

	 else if S and D have the same CODE and MODE

	   for each operand oS and the corresponding oD

	     if oS and oD are not equivalent, then S an D are not equivalent

	     else if they are RTX vectors

	       if any vector oS element is not equivalent to its respective oD,
	       then S and D are not equivalent  */
    }
}
3375 /* Return -1 if X should be before Y in a location list for a 1-part
3376 variable, 1 if Y should be before X, and 0 if they're equivalent
3377 and should not appear in the list. */
3379 static int
3380 loc_cmp (rtx x, rtx y)
/* Total order used to keep one-part location chains sorted and
   duplicate-free.  Precedence as implemented below: identical rtx
   compare equal; REGs sort before everything else (by REGNO); then
   MEMs (recursively by address); then VALUEs (by canon_value_cmp);
   ENTRY_VALUEs sort after all other codes; any remaining pair is
   compared by code, then mode, then operand-by-operand.  */
3382 int i, j, r;
3383 RTX_CODE code = GET_CODE (x);
3384 const char *fmt;
3386 if (x == y)
3387 return 0;
3389 if (REG_P (x))
3391 if (!REG_P (y))
3392 return -1;
3393 gcc_assert (GET_MODE (x) == GET_MODE (y));
3394 if (REGNO (x) == REGNO (y))
3395 return 0;
3396 else if (REGNO (x) < REGNO (y))
3397 return -1;
3398 else
3399 return 1;
3402 if (REG_P (y))
3403 return 1;
3405 if (MEM_P (x))
3407 if (!MEM_P (y))
3408 return -1;
3409 gcc_assert (GET_MODE (x) == GET_MODE (y));
3410 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3413 if (MEM_P (y))
3414 return 1;
3416 if (GET_CODE (x) == VALUE)
3418 if (GET_CODE (y) != VALUE)
3419 return -1;
3420 /* Don't assert the modes are the same, that is true only
3421 when not recursing. (subreg:QI (value:SI 1:1) 0)
3422 and (subreg:QI (value:DI 2:2) 0) can be compared,
3423 even when the modes are different. */
3424 if (canon_value_cmp (x, y))
3425 return -1;
3426 else
3427 return 1;
3430 if (GET_CODE (y) == VALUE)
3431 return 1;
3433 /* Entry value is the least preferable kind of expression. */
3434 if (GET_CODE (x) == ENTRY_VALUE)
3436 if (GET_CODE (y) != ENTRY_VALUE)
3437 return 1;
3438 gcc_assert (GET_MODE (x) == GET_MODE (y));
3439 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3442 if (GET_CODE (y) == ENTRY_VALUE)
3443 return -1;
3445 if (GET_CODE (x) == GET_CODE (y))
3446 /* Compare operands below. */;
3447 else if (GET_CODE (x) < GET_CODE (y))
3448 return -1;
3449 else
3450 return 1;
3452 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* DEBUG_EXPRs are ordered by their decl's debug-temp UID; x != y here,
   so the UIDs must differ, which the checking assert enforces.  */
3454 if (GET_CODE (x) == DEBUG_EXPR)
3456 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3457 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3458 return -1;
3459 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3460 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3461 return 1;
/* Generic fallback: walk each operand as described by the code's
   RTX format string, comparing wide ints, ints, vectors, subexprs
   and strings; the first difference decides the order.  */
3464 fmt = GET_RTX_FORMAT (code);
3465 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3466 switch (fmt[i])
3468 case 'w':
3469 if (XWINT (x, i) == XWINT (y, i))
3470 break;
3471 else if (XWINT (x, i) < XWINT (y, i))
3472 return -1;
3473 else
3474 return 1;
3476 case 'n':
3477 case 'i':
3478 if (XINT (x, i) == XINT (y, i))
3479 break;
3480 else if (XINT (x, i) < XINT (y, i))
3481 return -1;
3482 else
3483 return 1;
3485 case 'V':
3486 case 'E':
3487 /* Compare the vector length first. */
3488 if (XVECLEN (x, i) == XVECLEN (y, i))
3489 /* Compare the vectors elements. */;
3490 else if (XVECLEN (x, i) < XVECLEN (y, i))
3491 return -1;
3492 else
3493 return 1;
3495 for (j = 0; j < XVECLEN (x, i); j++)
3496 if ((r = loc_cmp (XVECEXP (x, i, j),
3497 XVECEXP (y, i, j))))
3498 return r;
3499 break;
3501 case 'e':
3502 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3503 return r;
3504 break;
3506 case 'S':
3507 case 's':
/* Strings: identical pointers are equal; NULL sorts first; otherwise
   fall back to strcmp.  */
3508 if (XSTR (x, i) == XSTR (y, i))
3509 break;
3510 if (!XSTR (x, i))
3511 return -1;
3512 if (!XSTR (y, i))
3513 return 1;
3514 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3515 break;
3516 else if (r < 0)
3517 return -1;
3518 else
3519 return 1;
3521 case 'u':
3522 /* These are just backpointers, so they don't matter. */
3523 break;
3525 case '0':
3526 case 't':
3527 break;
3529 /* It is believed that rtx's at this level will never
3530 contain anything but integers and other rtx's,
3531 except for within LABEL_REFs and SYMBOL_REFs. */
3532 default:
3533 gcc_unreachable ();
3536 return 0;
3539 #if ENABLE_CHECKING
3540 /* Check the order of entries in one-part variables. */
3543 canonicalize_loc_order_check (variable_def **slot,
3544 dataflow_set *data ATTRIBUTE_UNUSED)
3546 variable var = *slot;
3547 location_chain node, next;
3549 #ifdef ENABLE_RTL_CHECKING
3550 int i;
3551 for (i = 0; i < var->n_var_parts; i++)
/* Fix: the loop index was unused — var_part[0] was tested on every
   iteration, so parts beyond the first were never checked.  Use
   var_part[i] so each part's cur_loc is verified to be NULL.  */
3552 gcc_assert (var->var_part[i].cur_loc == NULL);
3553 gcc_assert (!var->in_changed_variables);
3554 #endif
3556 if (!var->onepart)
3557 return 1;
3559 gcc_assert (var->n_var_parts == 1);
3560 node = var->var_part[0].loc_chain;
3561 gcc_assert (node);
/* The location chain must be strictly increasing under loc_cmp,
   i.e. sorted with no duplicates.  */
3563 while ((next = node->next))
3565 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3566 node = next;
3569 return 1;
3571 #endif
3573 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3574 more likely to be chosen as canonical for an equivalence set.
3575 Ensure less likely values can reach more likely neighbors, making
3576 the connections bidirectional. */
3579 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3581 variable var = *slot;
3582 decl_or_value dv = var->dv;
3583 rtx val;
3584 location_chain node;
/* Only VALUE-keyed entries participate; decls are skipped here.  */
3586 if (!dv_is_value_p (dv))
3587 return 1;
3589 gcc_checking_assert (var->n_var_parts == 1);
3591 val = dv_as_value (dv);
3593 for (node = var->var_part[0].loc_chain; node; node = node->next)
3594 if (GET_CODE (node->loc) == VALUE)
/* A more canonical neighbor exists: mark VAL itself for the
   star-canonicalization pass.  */
3596 if (canon_value_cmp (node->loc, val))
3597 VALUE_RECURSED_INTO (val) = true;
3598 else
/* NODE is less canonical than VAL: add the reverse link (VAL into
   NODE's location list) and mark NODE instead, making the
   equivalence reachable in both directions.  */
3600 decl_or_value odv = dv_from_value (node->loc);
3601 variable_def **oslot;
3602 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3604 set_slot_part (set, val, oslot, odv, 0,
3605 node->init, NULL_RTX);
3607 VALUE_RECURSED_INTO (node->loc) = true;
3611 return 1;
3614 /* Remove redundant entries from equivalence lists in onepart
3615 variables, canonicalizing equivalence sets into star shapes. */
3618 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3620 variable var = *slot;
3621 decl_or_value dv = var->dv;
3622 location_chain node;
3623 decl_or_value cdv;
3624 rtx val, cval;
3625 variable_def **cslot;
3626 bool has_value;
3627 bool has_marks;
3629 if (!var->onepart)
3630 return 1;
3632 gcc_checking_assert (var->n_var_parts == 1);
/* Only process VALUE entries that were marked (by
   canonicalize_values_mark or a caller); clear the mark on entry.  */
3634 if (dv_is_value_p (dv))
3636 cval = dv_as_value (dv);
3637 if (!VALUE_RECURSED_INTO (cval))
3638 return 1;
3639 VALUE_RECURSED_INTO (cval) = false;
3641 else
3642 cval = NULL_RTX;
3644 restart:
3645 val = cval;
3646 has_value = false;
3647 has_marks = false;
3649 gcc_assert (var->n_var_parts == 1);
/* Scan the location chain for the most canonical VALUE (per
   canon_value_cmp), remembering whether any neighbor is still
   marked for revisiting.  */
3651 for (node = var->var_part[0].loc_chain; node; node = node->next)
3652 if (GET_CODE (node->loc) == VALUE)
3654 has_value = true;
3655 if (VALUE_RECURSED_INTO (node->loc))
3656 has_marks = true;
3657 if (canon_value_cmp (node->loc, cval))
3658 cval = node->loc;
3661 if (!has_value)
3662 return 1;
/* CVAL unchanged means this entry is already the local canonical
   choice; follow marked neighbors (if any) instead of pushing.  */
3664 if (cval == val)
3666 if (!has_marks || dv_is_decl_p (dv))
3667 return 1;
3669 /* Keep it marked so that we revisit it, either after visiting a
3670 child node, or after visiting a new parent that might be
3671 found out. */
3672 VALUE_RECURSED_INTO (val) = true;
3674 for (node = var->var_part[0].loc_chain; node; node = node->next)
3675 if (GET_CODE (node->loc) == VALUE
3676 && VALUE_RECURSED_INTO (node->loc))
3678 cval = node->loc;
3679 restart_with_cval:
3680 VALUE_RECURSED_INTO (cval) = false;
3681 dv = dv_from_value (cval);
3682 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3683 if (!slot)
3685 gcc_assert (dv_is_decl_p (var->dv));
3686 /* The canonical value was reset and dropped.
3687 Remove it. */
3688 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3689 return 1;
3691 var = *slot;
3692 gcc_assert (dv_is_value_p (var->dv));
3693 if (var->n_var_parts == 0)
3694 return 1;
3695 gcc_assert (var->n_var_parts == 1);
3696 goto restart;
/* No marked neighbor found: clear our own mark and stop.  */
3699 VALUE_RECURSED_INTO (val) = false;
3701 return 1;
3704 /* Push values to the canonical one. */
3705 cdv = dv_from_value (cval);
3706 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
/* Move every non-canonical location onto CVAL's list, and redirect
   equivalences and register attributes from DV to CDV.  */
3708 for (node = var->var_part[0].loc_chain; node; node = node->next)
3709 if (node->loc != cval)
3711 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3712 node->init, NULL_RTX);
3713 if (GET_CODE (node->loc) == VALUE)
3715 decl_or_value ndv = dv_from_value (node->loc);
3717 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3718 NO_INSERT);
3720 if (canon_value_cmp (node->loc, val))
3722 /* If it could have been a local minimum, it's not any more,
3723 since it's now neighbor to cval, so it may have to push
3724 to it. Conversely, if it wouldn't have prevailed over
3725 val, then whatever mark it has is fine: if it was to
3726 push, it will now push to a more canonical node, but if
3727 it wasn't, then it has already pushed any values it might
3728 have to. */
3729 VALUE_RECURSED_INTO (node->loc) = true;
3730 /* Make sure we visit node->loc by ensuring we cval is
3731 visited too. */
3732 VALUE_RECURSED_INTO (cval) = true;
3734 else if (!VALUE_RECURSED_INTO (node->loc))
3735 /* If we have no need to "recurse" into this node, it's
3736 already "canonicalized", so drop the link to the old
3737 parent. */
3738 clobber_variable_part (set, cval, ndv, 0, NULL);
3740 else if (GET_CODE (node->loc) == REG)
3742 attrs list = set->regs[REGNO (node->loc)], *listp;
3744 /* Change an existing attribute referring to dv so that it
3745 refers to cdv, removing any duplicate this might
3746 introduce, and checking that no previous duplicates
3747 existed, all in a single pass. */
3749 while (list)
3751 if (list->offset == 0
3752 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3753 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3754 break;
3756 list = list->next;
3759 gcc_assert (list);
3760 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3762 list->dv = cdv;
3763 for (listp = &list->next; (list = *listp); listp = &list->next)
3765 if (list->offset)
3766 continue;
3768 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3770 *listp = list->next;
3771 pool_free (attrs_pool, list);
3772 list = *listp;
3773 break;
3776 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3779 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3781 for (listp = &list->next; (list = *listp); listp = &list->next)
3783 if (list->offset)
3784 continue;
3786 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3788 *listp = list->next;
3789 pool_free (attrs_pool, list);
3790 list = *listp;
3791 break;
3794 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3797 else
3798 gcc_unreachable ();
3800 #if ENABLE_CHECKING
/* Verify no further attribute on this register refers to DV or CDV.  */
3801 while (list)
3803 if (list->offset == 0
3804 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3805 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3806 gcc_unreachable ();
3808 list = list->next;
3810 #endif
/* If the original entry was a VALUE, keep it as the sole spoke back
   to the canonical hub; then reduce the old entry's list to CVAL.  */
3814 if (val)
3815 set_slot_part (set, val, cslot, cdv, 0,
3816 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3818 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3820 /* Variable may have been unshared. */
3821 var = *slot;
3822 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3823 && var->var_part[0].loc_chain->next == NULL);
3825 if (VALUE_RECURSED_INTO (cval))
3826 goto restart_with_cval;
3828 return 1;
3831 /* Bind one-part variables to the canonical value in an equivalence
3832 set. Not doing this causes dataflow convergence failure in rare
3833 circumstances, see PR42873. Unfortunately we can't do this
3834 efficiently as part of canonicalize_values_star, since we may not
3835 have determined or even seen the canonical value of a set when we
3836 get to a variable that references another member of the set. */
3839 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3841 variable var = *slot;
3842 decl_or_value dv = var->dv;
3843 location_chain node;
3844 rtx cval;
3845 decl_or_value cdv;
3846 variable_def **cslot;
3847 variable cvar;
3848 location_chain cnode;
/* Only decl/debug-expr one-part variables; VALUE entries were
   handled by canonicalize_values_star.  */
3850 if (!var->onepart || var->onepart == ONEPART_VALUE)
3851 return 1;
3853 gcc_assert (var->n_var_parts == 1);
3855 node = var->var_part[0].loc_chain;
3857 if (GET_CODE (node->loc) != VALUE)
3858 return 1;
/* A decl bound to a VALUE holds that VALUE as its sole location.  */
3860 gcc_assert (!node->next);
3861 cval = node->loc;
3863 /* Push values to the canonical one. */
3864 cdv = dv_from_value (cval);
3865 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3866 if (!cslot)
3867 return 1;
3868 cvar = *cslot;
3869 gcc_assert (cvar->n_var_parts == 1);
3871 cnode = cvar->var_part[0].loc_chain;
3873 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3874 that are not “more canonical” than it. */
3875 if (GET_CODE (cnode->loc) != VALUE
3876 || !canon_value_cmp (cnode->loc, cval))
3877 return 1;
3879 /* CVAL was found to be non-canonical. Change the variable to point
3880 to the canonical VALUE. */
3881 gcc_assert (!cnode->next);
3882 cval = cnode->loc;
/* Rebind: add the canonical VALUE to this variable's list, then drop
   the old (non-canonical) binding.  */
3884 slot = set_slot_part (set, cval, slot, dv, 0,
3885 node->init, node->set_src);
3886 clobber_slot_part (set, cval, slot, 0, node->set_src);
3888 return 1;
3891 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3892 corresponding entry in DSM->src. Multi-part variables are combined
3893 with variable_union, whereas onepart dvs are combined with
3894 intersection. */
3896 static int
3897 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3899 dataflow_set *dst = dsm->dst;
3900 variable_def **dstslot;
3901 variable s2var, dvar = NULL;
3902 decl_or_value dv = s1var->dv;
3903 onepart_enum_t onepart = s1var->onepart;
3904 rtx val;
3905 hashval_t dvhash;
3906 location_chain node, *nodep;
3908 /* If the incoming onepart variable has an empty location list, then
3909 the intersection will be just as empty. For other variables,
3910 it's always union. */
3911 gcc_checking_assert (s1var->n_var_parts
3912 && s1var->var_part[0].loc_chain);
3914 if (!onepart)
3915 return variable_union (s1var, dst);
3917 gcc_checking_assert (s1var->n_var_parts == 1);
3919 dvhash = dv_htab_hash (dv);
3920 if (dv_is_value_p (dv))
3921 val = dv_as_value (dv);
3922 else
3923 val = NULL;
/* No counterpart in src: the intersection is empty, drop the entry.  */
3925 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3926 if (!s2var)
3928 dst_can_be_shared = false;
3929 return 1;
3932 dsm->src_onepart_cnt--;
3933 gcc_assert (s2var->var_part[0].loc_chain
3934 && s2var->onepart == onepart
3935 && s2var->n_var_parts == 1);
/* NODEP ends up pointing at the chain to intersect into: either the
   existing dst variable's chain or a local, initially empty one.  */
3937 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3938 if (dstslot)
3940 dvar = *dstslot;
3941 gcc_assert (dvar->refcount == 1
3942 && dvar->onepart == onepart
3943 && dvar->n_var_parts == 1);
3944 nodep = &dvar->var_part[0].loc_chain;
3946 else
3948 nodep = &node;
3949 node = NULL;
/* When src and cur agree on this entry, share s2var into dst rather
   than building a fresh chain.  */
3952 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3954 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3955 dvhash, INSERT);
3956 *dstslot = dvar = s2var;
3957 dvar->refcount++;
3959 else
3961 dst_can_be_shared = false;
3963 intersect_loc_chains (val, nodep, dsm,
3964 s1var->var_part[0].loc_chain, s2var);
3966 if (!dstslot)
3968 if (node)
3970 dvar = (variable) pool_alloc (onepart_pool (onepart));
3971 dvar->dv = dv;
3972 dvar->refcount = 1;
3973 dvar->n_var_parts = 1;
3974 dvar->onepart = onepart;
3975 dvar->in_changed_variables = false;
3976 dvar->var_part[0].loc_chain = node;
3977 dvar->var_part[0].cur_loc = NULL;
3978 if (onepart)
3979 VAR_LOC_1PAUX (dvar) = NULL;
3980 else
3981 VAR_PART_OFFSET (dvar, 0) = 0;
3983 dstslot
3984 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3985 INSERT);
3986 gcc_assert (!*dstslot);
3987 *dstslot = dvar;
3989 else
3990 return 1;
/* Register the surviving REG locations in dst's register attributes,
   or redirect this entry to the value already owning the register.  */
3994 nodep = &dvar->var_part[0].loc_chain;
3995 while ((node = *nodep))
3997 location_chain *nextp = &node->next;
3999 if (GET_CODE (node->loc) == REG)
4001 attrs list;
4003 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4004 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4005 && dv_is_value_p (list->dv))
4006 break;
4008 if (!list)
4009 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4010 dv, 0, node->loc);
4011 /* If this value became canonical for another value that had
4012 this register, we want to leave it alone. */
4013 else if (dv_as_value (list->dv) != val)
4015 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4016 dstslot, dv, 0,
4017 node->init, NULL_RTX);
4018 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4020 /* Since nextp points into the removed node, we can't
4021 use it. The pointer to the next node moved to nodep.
4022 However, if the variable we're walking is unshared
4023 during our walk, we'll keep walking the location list
4024 of the previously-shared variable, in which case the
4025 node won't have been removed, and we'll want to skip
4026 it. That's why we test *nodep here. */
4027 if (*nodep != node)
4028 nextp = nodep;
4031 else
4032 /* Canonicalization puts registers first, so we don't have to
4033 walk it all. */
4034 break;
4035 nodep = nextp;
4038 if (dvar != *dstslot)
4039 dvar = *dstslot;
4040 nodep = &dvar->var_part[0].loc_chain;
4042 if (val)
4044 /* Mark all referenced nodes for canonicalization, and make sure
4045 we have mutual equivalence links. */
4046 VALUE_RECURSED_INTO (val) = true;
4047 for (node = *nodep; node; node = node->next)
4048 if (GET_CODE (node->loc) == VALUE)
4050 VALUE_RECURSED_INTO (node->loc) = true;
4051 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4052 node->init, NULL, INSERT);
4055 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4056 gcc_assert (*dstslot == dvar);
4057 canonicalize_values_star (dstslot, dst);
4058 gcc_checking_assert (dstslot
4059 == shared_hash_find_slot_noinsert_1 (dst->vars,
4060 dv, dvhash));
4061 dvar = *dstslot;
4063 else
4065 bool has_value = false, has_other = false;
4067 /* If we have one value and anything else, we're going to
4068 canonicalize this, so make sure all values have an entry in
4069 the table and are marked for canonicalization. */
4070 for (node = *nodep; node; node = node->next)
4072 if (GET_CODE (node->loc) == VALUE)
4074 /* If this was marked during register canonicalization,
4075 we know we have to canonicalize values. */
4076 if (has_value)
4077 has_other = true;
4078 has_value = true;
4079 if (has_other)
4080 break;
4082 else
4084 has_other = true;
4085 if (has_value)
4086 break;
4090 if (has_value && has_other)
4092 for (node = *nodep; node; node = node->next)
4094 if (GET_CODE (node->loc) == VALUE)
4096 decl_or_value dv = dv_from_value (node->loc);
4097 variable_def **slot = NULL;
4099 if (shared_hash_shared (dst->vars))
4100 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4101 if (!slot)
4102 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4103 INSERT);
4104 if (!*slot)
4106 variable var = (variable) pool_alloc (onepart_pool
4107 (ONEPART_VALUE));
4108 var->dv = dv;
4109 var->refcount = 1;
4110 var->n_var_parts = 1;
4111 var->onepart = ONEPART_VALUE;
4112 var->in_changed_variables = false;
4113 var->var_part[0].loc_chain = NULL;
4114 var->var_part[0].cur_loc = NULL;
4115 VAR_LOC_1PAUX (var) = NULL;
4116 *slot = var;
4119 VALUE_RECURSED_INTO (node->loc) = true;
4123 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4124 gcc_assert (*dstslot == dvar);
4125 canonicalize_values_star (dstslot, dst);
4126 gcc_checking_assert (dstslot
4127 == shared_hash_find_slot_noinsert_1 (dst->vars,
4128 dv, dvhash));
4129 dvar = *dstslot;
/* Try to share the result with one of the input variables if the
   final chain turned out identical to it.  */
4133 if (!onepart_variable_different_p (dvar, s2var))
4135 variable_htab_free (dvar);
4136 *dstslot = dvar = s2var;
4137 dvar->refcount++;
4139 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4141 variable_htab_free (dvar);
4142 *dstslot = dvar = s1var;
4143 dvar->refcount++;
4144 dst_can_be_shared = false;
4146 else
4147 dst_can_be_shared = false;
4149 return 1;
4152 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4153 multi-part variable. Unions of multi-part variables and
4154 intersections of one-part ones will be handled in
4155 variable_merge_over_cur(). */
4157 static int
4158 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4160 dataflow_set *dst = dsm->dst;
4161 decl_or_value dv = s2var->dv;
/* Multi-part: share s2var into dst directly (refcounted).  */
4163 if (!s2var->onepart)
4165 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4166 *dstp = s2var;
4167 s2var->refcount++;
4168 return 1;
/* One-part: just count it; variable_merge_over_cur decrements the
   count for each entry it finds, so a nonzero remainder means some
   src entries had no cur counterpart.  */
4171 dsm->src_onepart_cnt++;
4172 return 1;
4175 /* Combine dataflow set information from SRC2 into DST, using PDST
4176 to carry over information across passes. */
4178 static void
4179 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
/* The old contents of DST are moved into the local CUR set, DST is
   reinitialized, and the merge combines CUR (src1) with SRC2 into
   the fresh DST; CUR is destroyed at the end.  */
4181 dataflow_set cur = *dst;
4182 dataflow_set *src1 = &cur;
4183 struct dfset_merge dsm;
4184 int i;
4185 size_t src1_elems, src2_elems;
4186 variable_iterator_type hi;
4187 variable var;
4189 src1_elems = shared_hash_htab (src1->vars).elements ();
4190 src2_elems = shared_hash_htab (src2->vars).elements ();
4191 dataflow_set_init (dst);
4192 dst->stack_adjust = cur.stack_adjust;
4193 shared_hash_destroy (dst->vars);
4194 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4195 dst->vars->refcount = 1;
4196 dst->vars->htab.create (MAX (src1_elems, src2_elems));
4198 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4199 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4201 dsm.dst = dst;
4202 dsm.src = src2;
4203 dsm.cur = src1;
4204 dsm.src_onepart_cnt = 0;
/* First pass copies src's multi-part vars; second pass unions
   multi-part and intersects one-part entries from cur.  */
4206 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars),
4207 var, variable, hi)
4208 variable_merge_over_src (var, &dsm);
4209 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars),
4210 var, variable, hi)
4211 variable_merge_over_cur (var, &dsm);
/* Leftover one-part entries existed only in src, so the result
   differs from both inputs and cannot be shared.  */
4213 if (dsm.src_onepart_cnt)
4214 dst_can_be_shared = false;
4216 dataflow_set_destroy (src1);
4219 /* Mark register equivalences. */
4221 static void
4222 dataflow_set_equiv_regs (dataflow_set *set)
4224 int i;
4225 attrs list, *listp;
4227 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4229 rtx canon[NUM_MACHINE_MODES];
4231 /* If the list is empty or one entry, no need to canonicalize
4232 anything. */
4233 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4234 continue;
4236 memset (canon, 0, sizeof (canon));
/* First pass: per mode, pick the most canonical VALUE bound to
   this register.  */
4238 for (list = set->regs[i]; list; list = list->next)
4239 if (list->offset == 0 && dv_is_value_p (list->dv))
4241 rtx val = dv_as_value (list->dv);
4242 rtx *cvalp = &canon[(int)GET_MODE (val)];
4243 rtx cval = *cvalp;
4245 if (canon_value_cmp (val, cval))
4246 *cvalp = val;
/* Second pass: create mutual equivalence links between each entry
   and the canonical value of its mode, marking both sides for the
   star-canonicalization pass below.  */
4249 for (list = set->regs[i]; list; list = list->next)
4250 if (list->offset == 0 && dv_onepart_p (list->dv))
4252 rtx cval = canon[(int)GET_MODE (list->loc)];
4254 if (!cval)
4255 continue;
4257 if (dv_is_value_p (list->dv))
4259 rtx val = dv_as_value (list->dv);
4261 if (val == cval)
4262 continue;
4264 VALUE_RECURSED_INTO (val) = true;
4265 set_variable_part (set, val, dv_from_value (cval), 0,
4266 VAR_INIT_STATUS_INITIALIZED,
4267 NULL, NO_INSERT);
4270 VALUE_RECURSED_INTO (cval) = true;
4271 set_variable_part (set, cval, list->dv, 0,
4272 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Third pass: star-canonicalize the marked values.  LIST is reset to
   NULL when canonicalization removed it from the attribute list, so
   the update expression restarts from LISTP in that case.  */
4275 for (listp = &set->regs[i]; (list = *listp);
4276 listp = list ? &list->next : listp)
4277 if (list->offset == 0 && dv_onepart_p (list->dv))
4279 rtx cval = canon[(int)GET_MODE (list->loc)];
4280 variable_def **slot;
4282 if (!cval)
4283 continue;
4285 if (dv_is_value_p (list->dv))
4287 rtx val = dv_as_value (list->dv);
4288 if (!VALUE_RECURSED_INTO (val))
4289 continue;
4292 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4293 canonicalize_values_star (slot, set);
4294 if (*listp != list)
4295 list = NULL;
4300 /* Remove any redundant values in the location list of VAR, which must
4301 be unshared and 1-part. */
4303 static void
4304 remove_duplicate_values (variable var)
4306 location_chain node, *nodep;
4308 gcc_assert (var->onepart);
4309 gcc_assert (var->n_var_parts == 1);
4310 gcc_assert (var->refcount == 1);
/* Use VALUE_RECURSED_INTO as a temporary "seen" bit: the first
   occurrence of each VALUE sets it, any later occurrence is a
   duplicate and gets unlinked and freed.  */
4312 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4314 if (GET_CODE (node->loc) == VALUE)
4316 if (VALUE_RECURSED_INTO (node->loc))
4318 /* Remove duplicate value node. */
4319 *nodep = node->next;
4320 pool_free (loc_chain_pool, node);
4321 continue;
4323 else
4324 VALUE_RECURSED_INTO (node->loc) = true;
4326 nodep = &node->next;
/* Second walk clears the marks so the bit is free for other uses.  */
4329 for (node = var->var_part[0].loc_chain; node; node = node->next)
4330 if (GET_CODE (node->loc) == VALUE)
4332 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4333 VALUE_RECURSED_INTO (node->loc) = false;
4338 /* Hash table iteration argument passed to variable_post_merge. */
/* Shared by the variable_post_merge_new_vals and
   variable_post_merge_perm_vals traversal callbacks.  */
4339 struct dfset_post_merge
4341 /* The new input set for the current block. */
4342 dataflow_set *set;
4343 /* Pointer to the permanent input set for the current block, or
4344 NULL. */
4345 dataflow_set **permp;
4348 /* Create values for incoming expressions associated with one-part
4349 variables that don't have value numbers for them. */
4352 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4354 dataflow_set *set = dfpm->set;
4355 variable var = *slot;
4356 location_chain node;
4358 if (!var->onepart || !var->n_var_parts)
4359 return 1;
4361 gcc_assert (var->n_var_parts == 1);
/* Only decl-keyed entries are rewritten: each REG location is
   replaced with the VALUE that owns the register, creating a
   permanent value in *dfpm->permp when none exists yet.  */
4363 if (dv_is_decl_p (var->dv))
4365 bool check_dupes = false;
4367 restart:
4368 for (node = var->var_part[0].loc_chain; node; node = node->next)
4370 if (GET_CODE (node->loc) == VALUE)
4371 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4372 else if (GET_CODE (node->loc) == REG)
4374 attrs att, *attp, *curp = NULL;
/* Unshare before mutating the location chain in place.  */
4376 if (var->refcount != 1)
4378 slot = unshare_variable (set, slot, var,
4379 VAR_INIT_STATUS_INITIALIZED);
4380 var = *slot;
4381 goto restart;
/* Look for a same-mode attribute on this register: a VALUE wins and
   replaces the REG location; otherwise remember the attribute that
   refers to this decl so it can be removed below.  */
4384 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4385 attp = &att->next)
4386 if (att->offset == 0
4387 && GET_MODE (att->loc) == GET_MODE (node->loc))
4389 if (dv_is_value_p (att->dv))
4391 rtx cval = dv_as_value (att->dv);
4392 node->loc = cval;
4393 check_dupes = true;
4394 break;
4396 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4397 curp = attp;
4400 if (!curp)
4402 curp = attp;
4403 while (*curp)
4404 if ((*curp)->offset == 0
4405 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4406 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4407 break;
4408 else
4409 curp = &(*curp)->next;
4410 gcc_assert (*curp);
/* No VALUE owns the register yet: reuse a same-mode value from the
   permanent set, or create and preserve a fresh cselib value.  */
4413 if (!att)
4415 decl_or_value cdv;
4416 rtx cval;
4418 if (!*dfpm->permp)
4420 *dfpm->permp = XNEW (dataflow_set);
4421 dataflow_set_init (*dfpm->permp);
4424 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4425 att; att = att->next)
4426 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4428 gcc_assert (att->offset == 0
4429 && dv_is_value_p (att->dv));
4430 val_reset (set, att->dv);
4431 break;
4434 if (att)
4436 cdv = att->dv;
4437 cval = dv_as_value (cdv);
4439 else
4441 /* Create a unique value to hold this register,
4442 that ought to be found and reused in
4443 subsequent rounds. */
4444 cselib_val *v;
4445 gcc_assert (!cselib_lookup (node->loc,
4446 GET_MODE (node->loc), 0,
4447 VOIDmode));
4448 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4449 VOIDmode);
4450 cselib_preserve_value (v);
4451 cselib_invalidate_rtx (node->loc);
4452 cval = v->val_rtx;
4453 cdv = dv_from_value (cval);
4454 if (dump_file)
4455 fprintf (dump_file,
4456 "Created new value %u:%u for reg %i\n",
4457 v->uid, v->hash, REGNO (node->loc));
4460 var_reg_decl_set (*dfpm->permp, node->loc,
4461 VAR_INIT_STATUS_INITIALIZED,
4462 cdv, 0, NULL, INSERT);
4464 node->loc = cval;
4465 check_dupes = true;
4468 /* Remove attribute referring to the decl, which now
4469 uses the value for the register, already existing or
4470 to be added when we bring perm in. */
4471 att = *curp;
4472 *curp = att->next;
4473 pool_free (attrs_pool, att);
/* Rewriting REGs to VALUEs may have introduced duplicates.  */
4477 if (check_dupes)
4478 remove_duplicate_values (var);
4481 return 1;
4484 /* Reset values in the permanent set that are not associated with the
4485 chosen expression. */
4488 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4490 dataflow_set *set = dfpm->set;
4491 variable pvar = *pslot, var;
4492 location_chain pnode;
4493 decl_or_value dv;
4494 attrs att;
/* Permanent-set entries are VALUEs whose single location is a REG.  */
4496 gcc_assert (dv_is_value_p (pvar->dv)
4497 && pvar->n_var_parts == 1);
4498 pnode = pvar->var_part[0].loc_chain;
4499 gcc_assert (pnode
4500 && !pnode->next
4501 && REG_P (pnode->loc));
4503 dv = pvar->dv;
4505 var = shared_hash_find (set->vars, dv);
4506 if (var)
4508 /* Although variable_post_merge_new_vals may have made decls
4509 non-star-canonical, values that pre-existed in canonical form
4510 remain canonical, and newly-created values reference a single
4511 REG, so they are canonical as well. Since VAR has the
4512 location list for a VALUE, using find_loc_in_1pdv for it is
4513 fine, since VALUEs don't map back to DECLs. */
4514 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4515 return 1;
4516 val_reset (set, dv);
4519 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4520 if (att->offset == 0
4521 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4522 && dv_is_value_p (att->dv))
4523 break;
4525 /* If there is a value associated with this register already, create
4526 an equivalence. */
4527 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4529 rtx cval = dv_as_value (att->dv);
4530 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4531 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4532 NULL, INSERT);
/* Otherwise, claim the register for this value and bring the
   permanent variable's location list into the set.  */
4534 else if (!att)
4536 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4537 dv, 0, pnode->loc);
4538 variable_union (pvar, set);
4541 return 1;
4544 /* Just checking stuff and registering register attributes for
4545 now. */
4547 static void
4548 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4550 struct dfset_post_merge dfpm;
4552 dfpm.set = set;
4553 dfpm.permp = permp;
/* Four traversal phases: (1) give incoming decl REG locations value
   numbers, (2) re-establish permanent-set values in SET, then
   (3) star-canonicalize values and (4) rebind decls to canonical
   values.  */
4555 shared_hash_htab (set->vars)
4556 .traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4557 if (*permp)
4558 shared_hash_htab ((*permp)->vars)
4559 .traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4560 shared_hash_htab (set->vars)
4561 .traverse <dataflow_set *, canonicalize_values_star> (set);
4562 shared_hash_htab (set->vars)
4563 .traverse <dataflow_set *, canonicalize_vars_star> (set);
4566 /* Return a node whose loc is a MEM that refers to EXPR in the
4567 location list of a one-part variable or value VAR, or in that of
4568 any values recursively mentioned in the location lists. */
4570 static location_chain
4571 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
4573 location_chain node;
4574 decl_or_value dv;
4575 variable var;
4576 location_chain where = NULL;
4578 if (!val)
4579 return NULL;
4581 gcc_assert (GET_CODE (val) == VALUE
4582 && !VALUE_RECURSED_INTO (val));
4584 dv = dv_from_value (val);
4585 var = vars.find_with_hash (dv, dv_htab_hash (dv));
4587 if (!var)
4588 return NULL;
4590 gcc_assert (var->onepart);
4592 if (!var->n_var_parts)
4593 return NULL;
/* VALUE_RECURSED_INTO guards against cycles in the value graph:
   set before descending, cleared on the way out.  */
4595 VALUE_RECURSED_INTO (val) = true;
/* Accept only zero-offset MEMs whose MEM_EXPR is exactly EXPR;
   otherwise descend into nested VALUEs.  */
4597 for (node = var->var_part[0].loc_chain; node; node = node->next)
4598 if (MEM_P (node->loc)
4599 && MEM_EXPR (node->loc) == expr
4600 && INT_MEM_OFFSET (node->loc) == 0)
4602 where = node;
4603 break;
4605 else if (GET_CODE (node->loc) == VALUE
4606 && !VALUE_RECURSED_INTO (node->loc)
4607 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4608 break;
4610 VALUE_RECURSED_INTO (val) = false;
4612 return where;
4615 /* Return TRUE if the value of MEM may vary across a call. */
4617 static bool
4618 mem_dies_at_call (rtx mem)
/* Conservative: answers true unless the MEM provably refers to a
   non-aliased decl that is not a writable global.  */
4620 tree expr = MEM_EXPR (mem);
4621 tree decl;
4623 if (!expr)
4624 return true;
4626 decl = get_base_address (expr);
4628 if (!decl)
4629 return true;
4631 if (!DECL_P (decl))
4632 return true;
4634 return (may_be_aliased (decl)
4635 || (!TREE_READONLY (decl) && is_global_var (decl)));
4638 /* Remove all MEMs from the location list of a hash table entry for a
4639 one-part variable, except those whose MEM attributes map back to
4640 the variable itself, directly or within a VALUE. */
4643 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4645 variable var = *slot;
4647 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4649 tree decl = dv_as_decl (var->dv);
4650 location_chain loc, *locp;
4651 bool changed = false;
4653 if (!var->n_var_parts)
4654 return 1;
4656 gcc_assert (var->n_var_parts == 1);
/* For a shared variable, first scan read-only for anything we would
   change; only unshare if a modification is actually needed.  */
4658 if (shared_var_p (var, set->vars))
4660 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4662 /* We want to remove dying MEMs that doesn't refer to DECL. */
4663 if (GET_CODE (loc->loc) == MEM
4664 && (MEM_EXPR (loc->loc) != decl
4665 || INT_MEM_OFFSET (loc->loc) != 0)
4666 && !mem_dies_at_call (loc->loc))
4667 break;
4668 /* We want to move here MEMs that do refer to DECL. */
4669 else if (GET_CODE (loc->loc) == VALUE
4670 && find_mem_expr_in_1pdv (decl, loc->loc,
4671 shared_hash_htab (set->vars)))
4672 break;
4675 if (!loc)
4676 return 1;
4678 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4679 var = *slot;
4680 gcc_assert (var->n_var_parts == 1);
/* Main walk: pull DECL's MEM out of nested VALUEs into this chain,
   keep surviving locations, and unlink MEMs that die at calls.  */
4683 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4684 loc; loc = *locp)
4686 rtx old_loc = loc->loc;
4687 if (GET_CODE (old_loc) == VALUE)
4689 location_chain mem_node
4690 = find_mem_expr_in_1pdv (decl, loc->loc,
4691 shared_hash_htab (set->vars));
4693 /* ??? This picks up only one out of multiple MEMs that
4694 refer to the same variable. Do we ever need to be
4695 concerned about dealing with more than one, or, given
4696 that they should all map to the same variable
4697 location, their addresses will have been merged and
4698 they will be regarded as equivalent? */
4699 if (mem_node)
4701 loc->loc = mem_node->loc;
4702 loc->set_src = mem_node->set_src;
4703 loc->init = MIN (loc->init, mem_node->init);
4707 if (GET_CODE (loc->loc) != MEM
4708 || (MEM_EXPR (loc->loc) == decl
4709 && INT_MEM_OFFSET (loc->loc) == 0)
4710 || !mem_dies_at_call (loc->loc))
4712 if (old_loc != loc->loc && emit_notes)
4714 if (old_loc == var->var_part[0].cur_loc)
4716 changed = true;
4717 var->var_part[0].cur_loc = NULL;
4720 locp = &loc->next;
4721 continue;
4724 if (emit_notes)
4726 if (old_loc == var->var_part[0].cur_loc)
4728 changed = true;
4729 var->var_part[0].cur_loc = NULL;
4732 *locp = loc->next;
4733 pool_free (loc_chain_pool, loc);
/* Dropping the last location empties the variable.  */
4736 if (!var->var_part[0].loc_chain)
4738 var->n_var_parts--;
4739 changed = true;
4741 if (changed)
4742 variable_was_changed (var, set);
4745 return 1;
4748 /* Remove all MEMs from the location list of a hash table entry for a
4749 value. */
/* Hash-table traversal callback, run after dataflow_set_preserve_mem_locs:
   for a one-part VALUE entry, delete every MEM location that may die at a
   call.  Always returns 1 so traversal continues.  NOTE(review):
   brace-only/blank lines and the return-type line are missing from this
   extract; the statement lines themselves are unchanged.  */
4752 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4754 variable var = *slot;
4756 if (var->onepart == ONEPART_VALUE)
4758 location_chain loc, *locp;
4759 bool changed = false;
4760 rtx cur_loc;
4762 gcc_assert (var->n_var_parts == 1);
/* For a shared variable, only unshare when at least one MEM location
   actually dies at the call.  */
4764 if (shared_var_p (var, set->vars))
4766 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4767 if (GET_CODE (loc->loc) == MEM
4768 && mem_dies_at_call (loc->loc))
4769 break;
4771 if (!loc)
4772 return 1;
4774 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4775 var = *slot;
4776 gcc_assert (var->n_var_parts == 1);
/* The "last emitted" location lives either in the 1PAUX side data or in
   cur_loc; remember it so we can tell whether a note must be re-emitted.  */
4779 if (VAR_LOC_1PAUX (var))
4780 cur_loc = VAR_LOC_FROM (var);
4781 else
4782 cur_loc = var->var_part[0].cur_loc;
/* Unlink dying MEM nodes in place via a pointer-to-pointer walk.  */
4784 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4785 loc; loc = *locp)
4787 if (GET_CODE (loc->loc) != MEM
4788 || !mem_dies_at_call (loc->loc))
4790 locp = &loc->next;
4791 continue;
4794 *locp = loc->next;
4795 /* If we have deleted the location which was last emitted
4796 we have to emit new location so add the variable to set
4797 of changed variables. */
4798 if (cur_loc == loc->loc)
4800 changed = true;
4801 var->var_part[0].cur_loc = NULL;
4802 if (VAR_LOC_1PAUX (var))
4803 VAR_LOC_FROM (var) = NULL;
4805 pool_free (loc_chain_pool, loc);
/* Removing the final location empties the variable part.  */
4808 if (!var->var_part[0].loc_chain)
4810 var->n_var_parts--;
4811 changed = true;
4813 if (changed)
4814 variable_was_changed (var, set);
4817 return 1;
4820 /* Remove all variable-location information about call-clobbered
4821 registers, as well as associations between MEMs and VALUEs. */
4823 static void
4824 dataflow_set_clear_at_call (dataflow_set *set)
4826 unsigned int r;
4827 hard_reg_set_iterator hrsi;
4829 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4830 var_regno_delete (set, r);
4832 if (MAY_HAVE_DEBUG_INSNS)
4834 set->traversed_vars = set->vars;
4835 shared_hash_htab (set->vars)
4836 .traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4837 set->traversed_vars = set->vars;
4838 shared_hash_htab (set->vars)
4839 .traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4840 set->traversed_vars = NULL;
4844 static bool
4845 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4847 location_chain lc1, lc2;
4849 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4851 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4853 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4855 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4856 break;
4858 if (rtx_equal_p (lc1->loc, lc2->loc))
4859 break;
4861 if (!lc2)
4862 return true;
4864 return false;
4867 /* Return true if one-part variables VAR1 and VAR2 are different.
4868 They must be in canonical order. */
4870 static bool
4871 onepart_variable_different_p (variable var1, variable var2)
4873 location_chain lc1, lc2;
4875 if (var1 == var2)
4876 return false;
4878 gcc_assert (var1->n_var_parts == 1
4879 && var2->n_var_parts == 1);
4881 lc1 = var1->var_part[0].loc_chain;
4882 lc2 = var2->var_part[0].loc_chain;
4884 gcc_assert (lc1 && lc2);
4886 while (lc1 && lc2)
4888 if (loc_cmp (lc1->loc, lc2->loc))
4889 return true;
4890 lc1 = lc1->next;
4891 lc2 = lc2->next;
4894 return lc1 != lc2;
4897 /* Return true if variables VAR1 and VAR2 are different. */
4899 static bool
4900 variable_different_p (variable var1, variable var2)
4902 int i;
4904 if (var1 == var2)
4905 return false;
4907 if (var1->onepart != var2->onepart)
4908 return true;
4910 if (var1->n_var_parts != var2->n_var_parts)
4911 return true;
4913 if (var1->onepart && var1->n_var_parts)
4915 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4916 && var1->n_var_parts == 1);
4917 /* One-part values have locations in a canonical order. */
4918 return onepart_variable_different_p (var1, var2);
4921 for (i = 0; i < var1->n_var_parts; i++)
4923 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4924 return true;
4925 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4926 return true;
4927 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4928 return true;
4930 return false;
4933 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4935 static bool
4936 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4938 variable_iterator_type hi;
4939 variable var1;
4941 if (old_set->vars == new_set->vars)
4942 return false;
4944 if (shared_hash_htab (old_set->vars).elements ()
4945 != shared_hash_htab (new_set->vars).elements ())
4946 return true;
4948 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars),
4949 var1, variable, hi)
4951 variable_table_type htab = shared_hash_htab (new_set->vars);
4952 variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4953 if (!var2)
4955 if (dump_file && (dump_flags & TDF_DETAILS))
4957 fprintf (dump_file, "dataflow difference found: removal of:\n");
4958 dump_var (var1);
4960 return true;
4963 if (variable_different_p (var1, var2))
4965 if (dump_file && (dump_flags & TDF_DETAILS))
4967 fprintf (dump_file, "dataflow difference found: "
4968 "old and new follow:\n");
4969 dump_var (var1);
4970 dump_var (var2);
4972 return true;
4976 /* No need to traverse the second hashtab, if both have the same number
4977 of elements and the second one had all entries found in the first one,
4978 then it can't have any extra entries. */
4979 return false;
4982 /* Free the contents of dataflow set SET. */
4984 static void
4985 dataflow_set_destroy (dataflow_set *set)
4987 int i;
4989 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4990 attrs_list_clear (&set->regs[i]);
4992 shared_hash_destroy (set->vars);
4993 set->vars = NULL;
4996 /* Return true if RTL X contains a SYMBOL_REF. */
4998 static bool
4999 contains_symbol_ref (rtx x)
5001 const char *fmt;
5002 RTX_CODE code;
5003 int i;
5005 if (!x)
5006 return false;
5008 code = GET_CODE (x);
5009 if (code == SYMBOL_REF)
5010 return true;
5012 fmt = GET_RTX_FORMAT (code);
5013 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5015 if (fmt[i] == 'e')
5017 if (contains_symbol_ref (XEXP (x, i)))
5018 return true;
5020 else if (fmt[i] == 'E')
5022 int j;
5023 for (j = 0; j < XVECLEN (x, i); j++)
5024 if (contains_symbol_ref (XVECEXP (x, i, j)))
5025 return true;
5029 return false;
5032 /* Shall EXPR be tracked? */
/* Returns nonzero (as int, used as bool) if variable or parameter decl
   EXPR should get location tracking.  When NEED_RTL is true, EXPR must
   additionally have a name and assigned RTL.  NOTE(review): brace-only
   and blank lines are missing from this extract; the statement lines
   themselves are unchanged.  */
5034 static bool
5035 track_expr_p (tree expr, bool need_rtl)
5037 rtx decl_rtl;
5038 tree realdecl;
/* Debug-expr decls are tracked exactly when they have RTL.  */
5040 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5041 return DECL_RTL_SET_P (expr);
5043 /* If EXPR is not a parameter or a variable do not track it. */
5044 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5045 return 0;
5047 /* It also must have a name... */
5048 if (!DECL_NAME (expr) && need_rtl)
5049 return 0;
5051 /* ... and a RTL assigned to it. */
5052 decl_rtl = DECL_RTL_IF_SET (expr);
5053 if (!decl_rtl && need_rtl)
5054 return 0;
5056 /* If this expression is really a debug alias of some other declaration, we
5057 don't need to track this expression if the ultimate declaration is
5058 ignored. */
5059 realdecl = expr;
5060 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5062 realdecl = DECL_DEBUG_EXPR (realdecl);
5063 if (!DECL_P (realdecl))
/* A debug expr that is a component reference (or a MEM_REF off an
   ADDR_EXPR) is acceptable only if it denotes a small, exactly-known
   bit range within a non-ignored, non-static decl.  */
5065 if (handled_component_p (realdecl)
5066 || (TREE_CODE (realdecl) == MEM_REF
5067 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5069 HOST_WIDE_INT bitsize, bitpos, maxsize;
5070 tree innerdecl
5071 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5072 &maxsize);
5073 if (!DECL_P (innerdecl)
5074 || DECL_IGNORED_P (innerdecl)
5075 || TREE_STATIC (innerdecl)
5076 || bitsize <= 0
5077 || bitpos + bitsize > 256
5078 || bitsize != maxsize)
5079 return 0;
5080 else
5081 realdecl = expr;
5083 else
5084 return 0;
5088 /* Do not track EXPR if REALDECL it should be ignored for debugging
5089 purposes. */
5090 if (DECL_IGNORED_P (realdecl))
5091 return 0;
5093 /* Do not track global variables until we are able to emit correct location
5094 list for them. */
5095 if (TREE_STATIC (realdecl))
5096 return 0;
5098 /* When the EXPR is a DECL for alias of some variable (see example)
5099 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5100 DECL_RTL contains SYMBOL_REF.
5102 Example:
5103 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5104 char **_dl_argv;
5106 if (decl_rtl && MEM_P (decl_rtl)
5107 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5108 return 0;
5110 /* If RTX is a memory it should not be very large (because it would be
5111 an array or struct). */
5112 if (decl_rtl && MEM_P (decl_rtl))
5114 /* Do not track structures and arrays. */
5115 if (GET_MODE (decl_rtl) == BLKmode
5116 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5117 return 0;
5118 if (MEM_SIZE_KNOWN_P (decl_rtl)
5119 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5120 return 0;
/* Tracking is approved: reset the change-pending flags on both decls.  */
5123 DECL_CHANGED (expr) = 0;
5124 DECL_CHANGED (realdecl) = 0;
5125 return 1;
5128 /* Determine whether a given LOC refers to the same variable part as
5129 EXPR+OFFSET. */
5131 static bool
5132 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5134 tree expr2;
5135 HOST_WIDE_INT offset2;
5137 if (! DECL_P (expr))
5138 return false;
5140 if (REG_P (loc))
5142 expr2 = REG_EXPR (loc);
5143 offset2 = REG_OFFSET (loc);
5145 else if (MEM_P (loc))
5147 expr2 = MEM_EXPR (loc);
5148 offset2 = INT_MEM_OFFSET (loc);
5150 else
5151 return false;
5153 if (! expr2 || ! DECL_P (expr2))
5154 return false;
5156 expr = var_debug_decl (expr);
5157 expr2 = var_debug_decl (expr2);
5159 return (expr == expr2 && offset == offset2);
5162 /* LOC is a REG or MEM that we would like to track if possible.
5163 If EXPR is null, we don't know what expression LOC refers to,
5164 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5165 LOC is an lvalue register.
5167 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5168 is something we can track. When returning true, store the mode of
5169 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5170 from EXPR in *OFFSET_OUT (if nonnull). */
/* NOTE(review): brace-only and blank lines are missing from this
   extract; the statement lines themselves are unchanged.  */
5172 static bool
5173 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5174 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5176 enum machine_mode mode;
5178 if (expr == NULL || !track_expr_p (expr, true))
5179 return false;
5181 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5182 whole subreg, but only the old inner part is really relevant. */
5183 mode = GET_MODE (loc);
5184 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5186 enum machine_mode pseudo_mode;
5188 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
/* Narrow to the original pseudo's mode, adjusting OFFSET to keep
   pointing at the same lowpart bytes.  */
5189 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5191 offset += byte_lowpart_offset (pseudo_mode, mode);
5192 mode = pseudo_mode;
5196 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5197 Do the same if we are storing to a register and EXPR occupies
5198 the whole of register LOC; in that case, the whole of EXPR is
5199 being changed. We exclude complex modes from the second case
5200 because the real and imaginary parts are represented as separate
5201 pseudo registers, even if the whole complex value fits into one
5202 hard register. */
5203 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5204 || (store_reg_p
5205 && !COMPLEX_MODE_P (DECL_MODE (expr))
5206 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5207 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5209 mode = DECL_MODE (expr);
5210 offset = 0;
/* Each variable part must live at a representable byte offset.  */
5213 if (offset < 0 || offset >= MAX_VAR_PARTS)
5214 return false;
5216 if (mode_out)
5217 *mode_out = mode;
5218 if (offset_out)
5219 *offset_out = offset;
5220 return true;
5223 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5224 want to track. When returning nonnull, make sure that the attributes
5225 on the returned value are updated. */
5227 static rtx
5228 var_lowpart (enum machine_mode mode, rtx loc)
5230 unsigned int offset, reg_offset, regno;
5232 if (GET_MODE (loc) == mode)
5233 return loc;
5235 if (!REG_P (loc) && !MEM_P (loc))
5236 return NULL;
5238 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5240 if (MEM_P (loc))
5241 return adjust_address_nv (loc, mode, offset);
5243 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5244 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5245 reg_offset, mode);
5246 return gen_rtx_REG_offset (loc, mode, regno, offset);
5249 /* Carry information about uses and stores while walking rtx. */
/* NOTE(review): the opening/closing brace lines of this struct are
   missing from this extract; the member lines are unchanged.  */
5251 struct count_use_info
5253 /* The insn where the RTX is. */
5254 rtx insn;
5256 /* The basic block where insn is. */
5257 basic_block bb;
5259 /* The array of n_sets sets in the insn, as determined by cselib. */
5260 struct cselib_set *sets;
/* Number of valid entries in SETS.  */
5261 int n_sets;
5263 /* True if we're counting stores, false otherwise. */
5264 bool store_p;
5267 /* Find a VALUE corresponding to X. */
5269 static inline cselib_val *
5270 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5272 int i;
5274 if (cui->sets)
5276 /* This is called after uses are set up and before stores are
5277 processed by cselib, so it's safe to look up srcs, but not
5278 dsts. So we look up expressions that appear in srcs or in
5279 dest expressions, but we search the sets array for dests of
5280 stores. */
5281 if (cui->store_p)
5283 /* Some targets represent memset and memcpy patterns
5284 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5285 (set (mem:BLK ...) (const_int ...)) or
5286 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5287 in that case, otherwise we end up with mode mismatches. */
5288 if (mode == BLKmode && MEM_P (x))
5289 return NULL;
5290 for (i = 0; i < cui->n_sets; i++)
5291 if (cui->sets[i].dest == x)
5292 return cui->sets[i].src_elt;
5294 else
5295 return cselib_lookup (x, mode, 0, VOIDmode);
5298 return NULL;
5301 /* Replace all registers and addresses in an expression with VALUE
5302 expressions that map back to them, unless the expression is a
5303 register. If no mapping is or can be performed, returns NULL. */
5305 static rtx
5306 replace_expr_with_values (rtx loc)
5308 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5309 return NULL;
5310 else if (MEM_P (loc))
5312 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5313 get_address_mode (loc), 0,
5314 GET_MODE (loc));
5315 if (addr)
5316 return replace_equiv_address_nv (loc, addr->val_rtx);
5317 else
5318 return NULL;
5320 else
5321 return cselib_subst_to_values (loc, VOIDmode);
5324 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5325 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5326 RTX. */
5328 static int
5329 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5331 rtx loc = *x;
5333 return GET_CODE (loc) == DEBUG_EXPR;
5336 /* Determine what kind of micro operation to choose for a USE. Return
5337 MO_CLOBBER if no micro operation is to be generated. */
/* CUI may be null (when re-classifying a location outside the cselib
   walk); MODEP, if nonnull, receives the tracked mode.  NOTE(review):
   brace-only and blank lines are missing from this extract; the
   statement lines themselves are unchanged.  */
5339 static enum micro_operation_type
5340 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5342 tree expr;
/* Value-tracking classification first: only applies while cselib set
   information is available.  */
5344 if (cui && cui->sets)
5346 if (GET_CODE (loc) == VAR_LOCATION)
5348 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5350 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5351 if (! VAR_LOC_UNKNOWN_P (ploc))
5353 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5354 VOIDmode);
5356 /* ??? flag_float_store and volatile mems are never
5357 given values, but we could in theory use them for
5358 locations. */
5359 gcc_assert (val || 1);
5361 return MO_VAL_LOC;
5363 else
5364 return MO_CLOBBER;
5367 if (REG_P (loc) || MEM_P (loc))
5369 if (modep)
5370 *modep = GET_MODE (loc);
5371 if (cui->store_p)
/* A store is value-tracked if the destination is a register, or a MEM
   whose value and address both have cselib VALUEs.  */
5373 if (REG_P (loc)
5374 || (find_use_val (loc, GET_MODE (loc), cui)
5375 && cselib_lookup (XEXP (loc, 0),
5376 get_address_mode (loc), 0,
5377 GET_MODE (loc))))
5378 return MO_VAL_SET;
5380 else
5382 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5384 if (val && !cselib_preserved_value_p (val))
5385 return MO_VAL_USE;
/* Fall through to the plain (non-VALUE) classification below.  */
5390 if (REG_P (loc))
5392 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5394 if (loc == cfa_base_rtx)
5395 return MO_CLOBBER;
5396 expr = REG_EXPR (loc);
5398 if (!expr)
5399 return MO_USE_NO_VAR;
5400 else if (target_for_debug_bind (var_debug_decl (expr)))
5401 return MO_CLOBBER;
5402 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5403 false, modep, NULL))
5404 return MO_USE;
5405 else
5406 return MO_USE_NO_VAR;
5408 else if (MEM_P (loc))
5410 expr = MEM_EXPR (loc);
5412 if (!expr)
5413 return MO_CLOBBER;
5414 else if (target_for_debug_bind (var_debug_decl (expr)))
5415 return MO_CLOBBER;
5416 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5417 false, modep, NULL)
5418 /* Multi-part variables shouldn't refer to one-part
5419 variable names such as VALUEs (never happens) or
5420 DEBUG_EXPRs (only happens in the presence of debug
5421 insns). */
5422 && (!MAY_HAVE_DEBUG_INSNS
5423 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5424 return MO_USE;
5425 else
5426 return MO_CLOBBER;
5429 return MO_CLOBBER;
5432 /* Log to OUT information about micro-operation MOPT involving X in
5433 INSN of BB. */
5435 static inline void
5436 log_op_type (rtx x, basic_block bb, rtx insn,
5437 enum micro_operation_type mopt, FILE *out)
5439 fprintf (out, "bb %i op %i insn %i %s ",
5440 bb->index, VTI (bb)->mos.length (),
5441 INSN_UID (insn), micro_operation_type_name[mopt]);
5442 print_inline_rtx (out, x, 2);
5443 fputc ('\n', out);
/* The following four accessors reuse otherwise-free rtx flag bits
   (volatil, used, jump, unchanging) on the CONCAT rtxes built by the
   micro-operation recorders below; RTL_FLAG_CHECK1 enforces that they
   are only applied to CONCATs.  */
5446 /* Tell whether the CONCAT used to holds a VALUE and its location
5447 needs value resolution, i.e., an attempt of mapping the location
5448 back to other incoming values. */
5449 #define VAL_NEEDS_RESOLUTION(x) \
5450 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5451 /* Whether the location in the CONCAT is a tracked expression, that
5452 should also be handled like a MO_USE. */
5453 #define VAL_HOLDS_TRACK_EXPR(x) \
5454 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5455 /* Whether the location in the CONCAT should be handled like a MO_COPY
5456 as well. */
5457 #define VAL_EXPR_IS_COPIED(x) \
5458 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5459 /* Whether the location in the CONCAT should be handled like a
5460 MO_CLOBBER as well. */
5461 #define VAL_EXPR_IS_CLOBBERED(x) \
5462 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5464 /* All preserved VALUEs. */
/* Filled by preserve_value; consumed during note emission.  */
5465 static vec<rtx> preserved_values;
5467 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5469 static void
5470 preserve_value (cselib_val *val)
5472 cselib_preserve_value (val);
5473 preserved_values.safe_push (val->val_rtx);
5476 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5477 any rtxes not suitable for CONST use not replaced by VALUEs
5478 are discovered. */
5480 static int
5481 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5483 if (*x == NULL_RTX)
5484 return 0;
5486 switch (GET_CODE (*x))
5488 case REG:
5489 case DEBUG_EXPR:
5490 case PC:
5491 case SCRATCH:
5492 case CC0:
5493 case ASM_INPUT:
5494 case ASM_OPERANDS:
5495 return 1;
5496 case MEM:
5497 return !MEM_READONLY_P (*x);
5498 default:
5499 return 0;
5503 /* Add uses (register and memory references) LOC which will be tracked
5504 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
/* for_each_rtx callback; DATA is a struct count_use_info *.  Always
   returns 0 so the walk continues.  NOTE(review): brace-only and blank
   lines are missing from this extract; the statement lines themselves
   are unchanged.  */
5506 static int
5507 add_uses (rtx *ploc, void *data)
5509 rtx loc = *ploc;
5510 enum machine_mode mode = VOIDmode;
5511 struct count_use_info *cui = (struct count_use_info *)data;
5512 enum micro_operation_type type = use_type (loc, cui, &mode);
5514 if (type != MO_CLOBBER)
5516 basic_block bb = cui->bb;
5517 micro_operation mo;
5519 mo.type = type;
5520 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5521 mo.insn = cui->insn;
5523 if (type == MO_VAL_LOC)
5525 rtx oloc = loc;
5526 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5527 cselib_val *val;
5529 gcc_assert (cui->sets);
/* For a MEM with a non-trivial address, preserve the VALUE of the
   address so it survives to note emission.  */
5531 if (MEM_P (vloc)
5532 && !REG_P (XEXP (vloc, 0))
5533 && !MEM_P (XEXP (vloc, 0)))
5535 rtx mloc = vloc;
5536 enum machine_mode address_mode = get_address_mode (mloc);
5537 cselib_val *val
5538 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5539 GET_MODE (mloc));
5541 if (val && !cselib_preserved_value_p (val))
5542 preserve_value (val);
5545 if (CONSTANT_P (vloc)
5546 && (GET_CODE (vloc) != CONST
5547 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5548 /* For constants don't look up any value. */;
5549 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5550 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5552 enum machine_mode mode2;
5553 enum micro_operation_type type2;
5554 rtx nloc = NULL;
5555 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5557 if (resolvable)
5558 nloc = replace_expr_with_values (vloc);
/* Rewrite the VAR_LOCATION's location in terms of VALUEs, copying the
   pattern rather than mutating the shared original.  */
5560 if (nloc)
5562 oloc = shallow_copy_rtx (oloc);
5563 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5566 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5568 type2 = use_type (vloc, 0, &mode2);
5570 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5571 || type2 == MO_CLOBBER);
5573 if (type2 == MO_CLOBBER
5574 && !cselib_preserved_value_p (val))
5576 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5577 preserve_value (val);
5580 else if (!VAR_LOC_UNKNOWN_P (vloc))
/* No usable VALUE: record the binding with an unknown location.  */
5582 oloc = shallow_copy_rtx (oloc);
5583 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5586 mo.u.loc = oloc;
5588 else if (type == MO_VAL_USE)
5590 enum machine_mode mode2 = VOIDmode;
5591 enum micro_operation_type type2;
5592 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5593 rtx vloc, oloc = loc, nloc;
5595 gcc_assert (cui->sets);
5597 if (MEM_P (oloc)
5598 && !REG_P (XEXP (oloc, 0))
5599 && !MEM_P (XEXP (oloc, 0)))
5601 rtx mloc = oloc;
5602 enum machine_mode address_mode = get_address_mode (mloc);
5603 cselib_val *val
5604 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5605 GET_MODE (mloc));
5607 if (val && !cselib_preserved_value_p (val))
5608 preserve_value (val);
5611 type2 = use_type (loc, 0, &mode2);
5613 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5614 || type2 == MO_CLOBBER);
5616 if (type2 == MO_USE)
5617 vloc = var_lowpart (mode2, loc);
5618 else
5619 vloc = oloc;
5621 /* The loc of a MO_VAL_USE may have two forms:
5623 (concat val src): val is at src, a value-based
5624 representation.
5626 (concat (concat val use) src): same as above, with use as
5627 the MO_USE tracked value, if it differs from src.
5631 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5632 nloc = replace_expr_with_values (loc);
5633 if (!nloc)
5634 nloc = oloc;
5636 if (vloc != nloc)
5637 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5638 else
5639 oloc = val->val_rtx;
5641 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5643 if (type2 == MO_USE)
5644 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5645 if (!cselib_preserved_value_p (val))
5647 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5648 preserve_value (val);
5651 else
5652 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5654 if (dump_file && (dump_flags & TDF_DETAILS))
5655 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
/* Queue the recorded micro-operation on the basic block.  */
5656 VTI (bb)->mos.safe_push (mo);
5659 return 0;
5662 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5664 static void
5665 add_uses_1 (rtx *x, void *cui)
5667 for_each_rtx (x, add_uses, cui);
5670 /* This is the value used during expansion of locations. We want it
5671 to be unbounded, so that variables expanded deep in a recursion
5672 nest are fully evaluated, so that their values are cached
5673 correctly. We avoid recursion cycles through other means, and we
5674 don't unshare RTL, so excess complexity is not a problem. */
5675 #define EXPR_DEPTH (INT_MAX)
5676 /* We use this to keep too-complex expressions from being emitted as
5677 location notes, and then to debug information. Users can trade
5678 compile time for ridiculously complex expressions, although they're
5679 seldom useful, and they may often have to be discarded as not
5680 representable anyway. */
/* Runtime-tunable via the PARAM_MAX_VARTRACK_EXPR_DEPTH --param knob.  */
5681 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5683 /* Attempt to reverse the EXPR operation in the debug info and record
5684 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5685 no longer live we can express its value as VAL - 6. */
/* VAL is the VALUE of SET_DEST (EXPR); INSN is the insn performing the
   set.  Has no effect unless EXPR is an invertible unary/binary set of
   a register.  NOTE(review): brace-only and blank lines are missing
   from this extract; the statement lines themselves are unchanged.  */
5687 static void
5688 reverse_op (rtx val, const_rtx expr, rtx insn)
5690 rtx src, arg, ret;
5691 cselib_val *v;
5692 struct elt_loc_list *l;
5693 enum rtx_code code;
5694 int count;
5696 if (GET_CODE (expr) != SET)
5697 return;
5699 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5700 return;
/* First switch: filter for operations whose operand 0 shape we can
   invert.  */
5702 src = SET_SRC (expr);
5703 switch (GET_CODE (src))
5705 case PLUS:
5706 case MINUS:
5707 case XOR:
5708 case NOT:
5709 case NEG:
5710 if (!REG_P (XEXP (src, 0)))
5711 return;
5712 break;
5713 case SIGN_EXTEND:
5714 case ZERO_EXTEND:
5715 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5716 return;
5717 break;
5718 default:
5719 return;
5722 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5723 return;
5725 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5726 if (!v || !cselib_preserved_value_p (v))
5727 return;
5729 /* Use canonical V to avoid creating multiple redundant expressions
5730 for different VALUES equivalent to V. */
5731 v = canonical_cselib_val (v);
5733 /* Adding a reverse op isn't useful if V already has an always valid
5734 location. Ignore ENTRY_VALUE, while it is always constant, we should
5735 prefer non-ENTRY_VALUE locations whenever possible. */
5736 for (l = v->locs, count = 0; l; l = l->next, count++)
5737 if (CONSTANT_P (l->loc)
5738 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5739 return;
5740 /* Avoid creating too large locs lists. */
5741 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5742 return;
/* Second switch: build RET, the inverse expression of SRC in terms of
   VAL (e.g. PLUS becomes MINUS of the same constant).  */
5744 switch (GET_CODE (src))
5746 case NOT:
5747 case NEG:
5748 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5749 return;
5750 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5751 break;
5752 case SIGN_EXTEND:
5753 case ZERO_EXTEND:
5754 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5755 break;
5756 case XOR:
5757 code = XOR;
5758 goto binary;
5759 case PLUS:
5760 code = MINUS;
5761 goto binary;
5762 case MINUS:
5763 code = PLUS;
5764 goto binary;
5765 binary:
5766 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5767 return;
/* The second operand must be (or expand to) a constant for the reverse
   expression to be always-valid.  */
5768 arg = XEXP (src, 1);
5769 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5771 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5772 if (arg == NULL_RTX)
5773 return;
5774 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5775 return;
5777 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5778 if (ret == val)
5779 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5780 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5781 breaks a lot of routines during var-tracking. */
5782 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5783 break;
5784 default:
5785 gcc_unreachable ();
/* Record the inverse as a permanent equivalence of V.  */
5788 cselib_add_permanent_equiv (v, ret, insn);
5791 /* Add stores (register and memory references) LOC which will be tracked
5792 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5793 CUIP->insn is instruction which the LOC is part of. */
5795 static void
5796 add_stores (rtx loc, const_rtx expr, void *cuip)
5798 enum machine_mode mode = VOIDmode, mode2;
5799 struct count_use_info *cui = (struct count_use_info *)cuip;
5800 basic_block bb = cui->bb;
5801 micro_operation mo;
5802 rtx oloc = loc, nloc, src = NULL;
5803 enum micro_operation_type type = use_type (loc, cui, &mode);
5804 bool track_p = false;
5805 cselib_val *v;
5806 bool resolve, preserve;
5808 if (type == MO_CLOBBER)
5809 return;
5811 mode2 = mode;
5813 if (REG_P (loc))
5815 gcc_assert (loc != cfa_base_rtx);
5816 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5817 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5818 || GET_CODE (expr) == CLOBBER)
5820 mo.type = MO_CLOBBER;
5821 mo.u.loc = loc;
5822 if (GET_CODE (expr) == SET
5823 && SET_DEST (expr) == loc
5824 && !unsuitable_loc (SET_SRC (expr))
5825 && find_use_val (loc, mode, cui))
5827 gcc_checking_assert (type == MO_VAL_SET);
5828 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5831 else
5833 if (GET_CODE (expr) == SET
5834 && SET_DEST (expr) == loc
5835 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5836 src = var_lowpart (mode2, SET_SRC (expr));
5837 loc = var_lowpart (mode2, loc);
5839 if (src == NULL)
5841 mo.type = MO_SET;
5842 mo.u.loc = loc;
5844 else
5846 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5847 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5849 /* If this is an instruction copying (part of) a parameter
5850 passed by invisible reference to its register location,
5851 pretend it's a SET so that the initial memory location
5852 is discarded, as the parameter register can be reused
5853 for other purposes and we do not track locations based
5854 on generic registers. */
5855 if (MEM_P (src)
5856 && REG_EXPR (loc)
5857 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5858 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5859 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5860 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5861 != arg_pointer_rtx)
5862 mo.type = MO_SET;
5863 else
5864 mo.type = MO_COPY;
5866 else
5867 mo.type = MO_SET;
5868 mo.u.loc = xexpr;
5871 mo.insn = cui->insn;
5873 else if (MEM_P (loc)
5874 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5875 || cui->sets))
5877 if (MEM_P (loc) && type == MO_VAL_SET
5878 && !REG_P (XEXP (loc, 0))
5879 && !MEM_P (XEXP (loc, 0)))
5881 rtx mloc = loc;
5882 enum machine_mode address_mode = get_address_mode (mloc);
5883 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5884 address_mode, 0,
5885 GET_MODE (mloc));
5887 if (val && !cselib_preserved_value_p (val))
5888 preserve_value (val);
5891 if (GET_CODE (expr) == CLOBBER || !track_p)
5893 mo.type = MO_CLOBBER;
5894 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5896 else
5898 if (GET_CODE (expr) == SET
5899 && SET_DEST (expr) == loc
5900 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5901 src = var_lowpart (mode2, SET_SRC (expr));
5902 loc = var_lowpart (mode2, loc);
5904 if (src == NULL)
5906 mo.type = MO_SET;
5907 mo.u.loc = loc;
5909 else
5911 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5912 if (same_variable_part_p (SET_SRC (xexpr),
5913 MEM_EXPR (loc),
5914 INT_MEM_OFFSET (loc)))
5915 mo.type = MO_COPY;
5916 else
5917 mo.type = MO_SET;
5918 mo.u.loc = xexpr;
5921 mo.insn = cui->insn;
5923 else
5924 return;
5926 if (type != MO_VAL_SET)
5927 goto log_and_return;
5929 v = find_use_val (oloc, mode, cui);
5931 if (!v)
5932 goto log_and_return;
5934 resolve = preserve = !cselib_preserved_value_p (v);
5936 if (loc == stack_pointer_rtx
5937 && hard_frame_pointer_adjustment != -1
5938 && preserve)
5939 cselib_set_value_sp_based (v);
5941 nloc = replace_expr_with_values (oloc);
5942 if (nloc)
5943 oloc = nloc;
5945 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5947 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5949 gcc_assert (oval != v);
5950 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5952 if (oval && !cselib_preserved_value_p (oval))
5954 micro_operation moa;
5956 preserve_value (oval);
5958 moa.type = MO_VAL_USE;
5959 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5960 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5961 moa.insn = cui->insn;
5963 if (dump_file && (dump_flags & TDF_DETAILS))
5964 log_op_type (moa.u.loc, cui->bb, cui->insn,
5965 moa.type, dump_file);
5966 VTI (bb)->mos.safe_push (moa);
5969 resolve = false;
5971 else if (resolve && GET_CODE (mo.u.loc) == SET)
5973 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5974 nloc = replace_expr_with_values (SET_SRC (expr));
5975 else
5976 nloc = NULL_RTX;
5978 /* Avoid the mode mismatch between oexpr and expr. */
5979 if (!nloc && mode != mode2)
5981 nloc = SET_SRC (expr);
5982 gcc_assert (oloc == SET_DEST (expr));
5985 if (nloc && nloc != SET_SRC (mo.u.loc))
5986 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5987 else
5989 if (oloc == SET_DEST (mo.u.loc))
5990 /* No point in duplicating. */
5991 oloc = mo.u.loc;
5992 if (!REG_P (SET_SRC (mo.u.loc)))
5993 resolve = false;
5996 else if (!resolve)
5998 if (GET_CODE (mo.u.loc) == SET
5999 && oloc == SET_DEST (mo.u.loc))
6000 /* No point in duplicating. */
6001 oloc = mo.u.loc;
6003 else
6004 resolve = false;
6006 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6008 if (mo.u.loc != oloc)
6009 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6011 /* The loc of a MO_VAL_SET may have various forms:
6013 (concat val dst): dst now holds val
6015 (concat val (set dst src)): dst now holds val, copied from src
6017 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6018 after replacing mems and non-top-level regs with values.
6020 (concat (concat val dstv) (set dst src)): dst now holds val,
6021 copied from src. dstv is a value-based representation of dst, if
6022 it differs from dst. If resolution is needed, src is a REG, and
6023 its mode is the same as that of val.
6025 (concat (concat val (set dstv srcv)) (set dst src)): src
6026 copied to dst, holding val. dstv and srcv are value-based
6027 representations of dst and src, respectively.
6031 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6032 reverse_op (v->val_rtx, expr, cui->insn);
6034 mo.u.loc = loc;
6036 if (track_p)
6037 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6038 if (preserve)
6040 VAL_NEEDS_RESOLUTION (loc) = resolve;
6041 preserve_value (v);
6043 if (mo.type == MO_CLOBBER)
6044 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6045 if (mo.type == MO_COPY)
6046 VAL_EXPR_IS_COPIED (loc) = 1;
6048 mo.type = MO_VAL_SET;
6050 log_and_return:
6051 if (dump_file && (dump_flags & TDF_DETAILS))
6052 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6053 VTI (bb)->mos.safe_push (mo);
6056 /* Arguments to the current call, accumulated (in reverse) by
     prepare_call_arguments and consumed — then reset to NULL_RTX — by
     add_with_sets when it pushes the MO_CALL micro operation.  */
6057 static rtx call_arguments;
6059 /* Compute call_arguments for call INSN in basic block BB: an
     EXPR_LIST pairing each argument location mentioned in
     CALL_INSN_FUNCTION_USAGE with a preserved cselib value when one is
     known, plus entries describing the call target and, for virtual
     calls, the memory slot the target was loaded from.  */
6061 static void
6062 prepare_call_arguments (basic_block bb, rtx insn)
6064 rtx link, x, call;
6065 rtx prev, cur, next;
6066 rtx this_arg = NULL_RTX;
6067 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6068 tree obj_type_ref = NULL_TREE;
6069 CUMULATIVE_ARGS args_so_far_v;
6070 cumulative_args_t args_so_far;
6072 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6073 args_so_far = pack_cumulative_args (&args_so_far_v);
6074 call = get_call_rtx_from (insn);
6075 if (call)
     /* Identify the callee FUNCTION_DECL — from the SYMBOL_REF itself or
	from the MEM_EXPR of the call address — so the target's
	argument-passing hooks can be consulted below.  An INDIRECT_REF
	of an OBJ_TYPE_REF marks a virtual call.  */
6077 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6079 rtx symbol = XEXP (XEXP (call, 0), 0);
6080 if (SYMBOL_REF_DECL (symbol))
6081 fndecl = SYMBOL_REF_DECL (symbol);
6083 if (fndecl == NULL_TREE)
6084 fndecl = MEM_EXPR (XEXP (call, 0));
6085 if (fndecl
6086 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6087 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6088 fndecl = NULL_TREE;
6089 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6090 type = TREE_TYPE (fndecl);
6091 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6093 if (TREE_CODE (fndecl) == INDIRECT_REF
6094 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6095 obj_type_ref = TREE_OPERAND (fndecl, 0);
6096 fndecl = NULL_TREE;
     /* The CUMULATIVE_ARGS walk below is only needed to spot by-reference
	integral arguments (whose pointed-to value can be read back) or
	the `this' slot of a virtual call; otherwise drop TYPE.  */
6098 if (type)
6100 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6101 t = TREE_CHAIN (t))
6102 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6103 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6104 break;
6105 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6106 type = NULL;
6107 else
6109 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6110 link = CALL_INSN_FUNCTION_USAGE (insn);
     /* A hidden struct-return pointer occupies the first argument slot;
	initialize the cumulative args to account for it and skip the
	corresponding USE when it is passed on the stack.  */
6111 #ifndef PCC_STATIC_STRUCT_RETURN
6112 if (aggregate_value_p (TREE_TYPE (type), type)
6113 && targetm.calls.struct_value_rtx (type, 0) == 0)
6115 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6116 enum machine_mode mode = TYPE_MODE (struct_addr);
6117 rtx reg;
6118 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6119 nargs + 1);
6120 reg = targetm.calls.function_arg (args_so_far, mode,
6121 struct_addr, true);
6122 targetm.calls.function_arg_advance (args_so_far, mode,
6123 struct_addr, true);
6124 if (reg == NULL_RTX)
6126 for (; link; link = XEXP (link, 1))
6127 if (GET_CODE (XEXP (link, 0)) == USE
6128 && MEM_P (XEXP (XEXP (link, 0), 0)))
6130 link = XEXP (link, 1);
6131 break;
6135 else
6136 #endif
6137 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6138 nargs);
     /* For a virtual call, find the register (or stack slot) that
	carries the `this' pointer, the first prototype argument.  */
6139 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6141 enum machine_mode mode;
6142 t = TYPE_ARG_TYPES (type);
6143 mode = TYPE_MODE (TREE_VALUE (t));
6144 this_arg = targetm.calls.function_arg (args_so_far, mode,
6145 TREE_VALUE (t), true);
6146 if (this_arg && !REG_P (this_arg))
6147 this_arg = NULL_RTX;
6148 else if (this_arg == NULL_RTX)
6150 for (; link; link = XEXP (link, 1))
6151 if (GET_CODE (XEXP (link, 0)) == USE
6152 && MEM_P (XEXP (XEXP (link, 0), 0)))
6154 this_arg = XEXP (XEXP (link, 0), 0);
6155 break;
6162 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
     /* Main loop: for every argument USE in CALL_INSN_FUNCTION_USAGE,
	try to pair the location with a preserved cselib value, walking
	the prototype argument list T in parallel when available.  */
6164 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6165 if (GET_CODE (XEXP (link, 0)) == USE)
6167 rtx item = NULL_RTX;
6168 x = XEXP (XEXP (link, 0), 0);
6169 if (GET_MODE (link) == VOIDmode
6170 || GET_MODE (link) == BLKmode
6171 || (GET_MODE (link) != GET_MODE (x)
6172 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6173 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6174 /* Can't do anything for these, if the original type mode
6175 isn't known or can't be converted. */;
6176 else if (REG_P (x))
6178 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6179 if (val && cselib_preserved_value_p (val))
6180 item = val->val_rtx;
6181 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
     /* No preserved value in the argument's own mode; retry in
	successively wider integer modes up to a word.  */
6183 enum machine_mode mode = GET_MODE (x);
6185 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6186 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6188 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6190 if (reg == NULL_RTX || !REG_P (reg))
6191 continue;
6192 val = cselib_lookup (reg, mode, 0, VOIDmode);
6193 if (val && cselib_preserved_value_p (val))
6195 item = val->val_rtx;
6196 break;
6201 else if (MEM_P (x))
6203 rtx mem = x;
6204 cselib_val *val;
     /* Without a frame pointer, rewrite sp-relative addresses to the
	form cselib recorded them in (accounting for the block's
	outgoing stack adjustment).  */
6206 if (!frame_pointer_needed)
6208 struct adjust_mem_data amd;
6209 amd.mem_mode = VOIDmode;
6210 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6211 amd.side_effects = NULL_RTX;
6212 amd.store = true;
6213 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6214 &amd);
6215 gcc_assert (amd.side_effects == NULL_RTX);
6217 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6218 if (val && cselib_preserved_value_p (val))
6219 item = val->val_rtx;
6220 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6222 /* For non-integer stack argument see also if they weren't
6223 initialized by integers. */
6224 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6225 if (imode != GET_MODE (mem) && imode != BLKmode)
6227 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6228 imode, 0, VOIDmode);
6229 if (val && cselib_preserved_value_p (val))
6230 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6231 imode);
     /* Record (concat location value), converted to the mode the
	argument had in the source (GET_MODE (link)).  */
6235 if (item)
6237 rtx x2 = x;
6238 if (GET_MODE (item) != GET_MODE (link))
6239 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6240 if (GET_MODE (x2) != GET_MODE (link))
6241 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6242 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6243 call_arguments
6244 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
     /* If the prototype says this argument is a reference to an
	integer, additionally try to record the pointed-to value.  */
6246 if (t && t != void_list_node)
6248 tree argtype = TREE_VALUE (t);
6249 enum machine_mode mode = TYPE_MODE (argtype);
6250 rtx reg;
6251 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6253 argtype = build_pointer_type (argtype);
6254 mode = TYPE_MODE (argtype);
6256 reg = targetm.calls.function_arg (args_so_far, mode,
6257 argtype, true);
6258 if (TREE_CODE (argtype) == REFERENCE_TYPE
6259 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6260 && reg
6261 && REG_P (reg)
6262 && GET_MODE (reg) == mode
6263 && GET_MODE_CLASS (mode) == MODE_INT
6264 && REG_P (x)
6265 && REGNO (x) == REGNO (reg)
6266 && GET_MODE (x) == mode
6267 && item)
6269 enum machine_mode indmode
6270 = TYPE_MODE (TREE_TYPE (argtype));
6271 rtx mem = gen_rtx_MEM (indmode, x);
6272 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6273 if (val && cselib_preserved_value_p (val))
6275 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6276 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6277 call_arguments);
6279 else
6281 struct elt_loc_list *l;
6282 tree initial;
6284 /* Try harder, when passing address of a constant
6285 pool integer it can be easily read back. */
6286 item = XEXP (item, 1);
6287 if (GET_CODE (item) == SUBREG)
6288 item = SUBREG_REG (item);
6289 gcc_assert (GET_CODE (item) == VALUE);
6290 val = CSELIB_VAL_PTR (item);
6291 for (l = val->locs; l; l = l->next)
6292 if (GET_CODE (l->loc) == SYMBOL_REF
6293 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6294 && SYMBOL_REF_DECL (l->loc)
6295 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6297 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6298 if (tree_fits_shwi_p (initial))
6300 item = GEN_INT (tree_to_shwi (initial));
6301 item = gen_rtx_CONCAT (indmode, mem, item);
6302 call_arguments
6303 = gen_rtx_EXPR_LIST (VOIDmode, item,
6304 call_arguments);
6306 break;
6310 targetm.calls.function_arg_advance (args_so_far, mode,
6311 argtype, true);
6312 t = TREE_CHAIN (t);
6316 /* Add debug arguments. */
6317 if (fndecl
6318 && TREE_CODE (fndecl) == FUNCTION_DECL
6319 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6321 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6322 if (debug_args)
6324 unsigned int ix;
6325 tree param;
6326 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6328 rtx item;
6329 tree dtemp = (**debug_args)[ix + 1];
6330 enum machine_mode mode = DECL_MODE (dtemp);
6331 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6332 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6333 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6334 call_arguments);
6339 /* Reverse call_arguments chain.  The list was built by prepending,
     so this restores source argument order.  */
6340 prev = NULL_RTX;
6341 for (cur = call_arguments; cur; cur = next)
6343 next = XEXP (cur, 1);
6344 XEXP (cur, 1) = prev;
6345 prev = cur;
6347 call_arguments = prev;
     /* Record the call target: nothing for a direct SYMBOL_REF call,
	(concat pc constant) for other constants, otherwise the
	preserved value of the target register/memory, if any.  */
6349 x = get_call_rtx_from (insn);
6350 if (x)
6352 x = XEXP (XEXP (x, 0), 0);
6353 if (GET_CODE (x) == SYMBOL_REF)
6354 /* Don't record anything. */;
6355 else if (CONSTANT_P (x))
6357 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6358 pc_rtx, x);
6359 call_arguments
6360 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6362 else
6364 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6365 if (val && cselib_preserved_value_p (val))
6367 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6368 call_arguments
6369 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
     /* For a virtual call, record the slot the target was fetched from:
	the OBJ_TYPE_REF token indexes the table pointed to by *this_arg
	(presumably the vtable — confirm against the front end).  */
6373 if (this_arg)
6375 enum machine_mode mode
6376 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6377 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6378 HOST_WIDE_INT token
6379 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6380 if (token)
6381 clobbered = plus_constant (mode, clobbered,
6382 token * GET_MODE_SIZE (mode));
6383 clobbered = gen_rtx_MEM (mode, clobbered);
6384 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6385 call_arguments
6386 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6390 /* Callback for cselib_record_sets_hook, that records as micro
6391 operations uses and stores in an insn after cselib_record_sets has
6392 analyzed the sets in an insn, but before it modifies the stored
6393 values in the internal tables, unless cselib_record_sets doesn't
6394 call it directly (perhaps because we're not doing cselib in the
6395 first place, in which case sets and n_sets will be 0). */
6397 static void
6398 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6400 basic_block bb = BLOCK_FOR_INSN (insn);
6401 int n1, n2;
6402 struct count_use_info cui;
6403 micro_operation *mos;
6405 cselib_hook_called = true;
6407 cui.insn = insn;
6408 cui.bb = bb;
6409 cui.sets = sets;
6410 cui.n_sets = n_sets;
     /* First collect the uses of INSN.  */
6412 n1 = VTI (bb)->mos.length ();
6413 cui.store_p = false;
6414 note_uses (&PATTERN (insn), add_uses_1, &cui);
6415 n2 = VTI (bb)->mos.length () - 1;
6416 mos = VTI (bb)->mos.address ();
6418 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6419 MO_VAL_LOC last. */
     /* Two-index partition over mos[n1..n2]: advance N1 past entries
	already in place, retreat N2 likewise, swap when both stop.  */
6420 while (n1 < n2)
6422 while (n1 < n2 && mos[n1].type == MO_USE)
6423 n1++;
6424 while (n1 < n2 && mos[n2].type != MO_USE)
6425 n2--;
6426 if (n1 < n2)
6428 micro_operation sw;
6430 sw = mos[n1];
6431 mos[n1] = mos[n2];
6432 mos[n2] = sw;
6436 n2 = VTI (bb)->mos.length () - 1;
6437 while (n1 < n2)
6439 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6440 n1++;
6441 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6442 n2--;
6443 if (n1 < n2)
6445 micro_operation sw;
6447 sw = mos[n1];
6448 mos[n1] = mos[n2];
6449 mos[n2] = sw;
     /* For a call, push the MO_CALL operation, taking ownership of the
	argument list that prepare_call_arguments built.  */
6453 if (CALL_P (insn))
6455 micro_operation mo;
6457 mo.type = MO_CALL;
6458 mo.insn = insn;
6459 mo.u.loc = call_arguments;
6460 call_arguments = NULL_RTX;
6462 if (dump_file && (dump_flags & TDF_DETAILS))
6463 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6464 VTI (bb)->mos.safe_push (mo);
     /* Now collect the stores.  */
6467 n1 = VTI (bb)->mos.length ();
6468 /* This will record NEXT_INSN (insn), such that we can
6469 insert notes before it without worrying about any
6470 notes that MO_USEs might emit after the insn. */
6471 cui.store_p = true;
6472 note_stores (PATTERN (insn), add_stores, &cui);
6473 n2 = VTI (bb)->mos.length () - 1;
6474 mos = VTI (bb)->mos.address ();
6476 /* Order the MO_VAL_USEs first (note_stores does nothing
6477 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6478 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6479 while (n1 < n2)
6481 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6482 n1++;
6483 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6484 n2--;
6485 if (n1 < n2)
6487 micro_operation sw;
6489 sw = mos[n1];
6490 mos[n1] = mos[n2];
6491 mos[n2] = sw;
6495 n2 = VTI (bb)->mos.length () - 1;
6496 while (n1 < n2)
6498 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6499 n1++;
6500 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6501 n2--;
6502 if (n1 < n2)
6504 micro_operation sw;
6506 sw = mos[n1];
6507 mos[n1] = mos[n2];
6508 mos[n2] = sw;
6513 static enum var_init_status
6514 find_src_status (dataflow_set *in, rtx src)
6516 tree decl = NULL_TREE;
6517 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6519 if (! flag_var_tracking_uninit)
6520 status = VAR_INIT_STATUS_INITIALIZED;
6522 if (src && REG_P (src))
6523 decl = var_debug_decl (REG_EXPR (src));
6524 else if (src && MEM_P (src))
6525 decl = var_debug_decl (MEM_EXPR (src));
6527 if (src && decl)
6528 status = get_init_value (in, src, dv_from_decl (decl));
6530 return status;
6533 /* SRC is the source of an assignment. Use SET to try to find what
6534 was ultimately assigned to SRC. Return that value if known,
6535 otherwise return SRC itself. */
6537 static rtx
6538 find_src_set_src (dataflow_set *set, rtx src)
6540 tree decl = NULL_TREE; /* The variable being copied around. */
6541 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6542 variable var;
6543 location_chain nextp;
6544 int i;
6545 bool found;
6547 if (src && REG_P (src))
6548 decl = var_debug_decl (REG_EXPR (src));
6549 else if (src && MEM_P (src))
6550 decl = var_debug_decl (MEM_EXPR (src));
6552 if (src && decl)
6554 decl_or_value dv = dv_from_decl (decl);
6556 var = shared_hash_find (set->vars, dv);
6557 if (var)
6559 found = false;
6560 for (i = 0; i < var->n_var_parts && !found; i++)
6561 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6562 nextp = nextp->next)
6563 if (rtx_equal_p (nextp->loc, src))
6565 set_src = nextp->set_src;
6566 found = true;
6572 return set_src;
6575 /* Compute the changes of variable locations in the basic block BB:
     replay BB's micro operations on a copy of the IN set to recompute
     OUT.  Return true if the OUT set changed.  */
6577 static bool
6578 compute_bb_dataflow (basic_block bb)
6580 unsigned int i;
6581 micro_operation *mo;
6582 bool changed;
6583 dataflow_set old_out;
6584 dataflow_set *in = &VTI (bb)->in;
6585 dataflow_set *out = &VTI (bb)->out;
     /* Save the previous OUT (for the change test) and restart from IN.  */
6587 dataflow_set_init (&old_out);
6588 dataflow_set_copy (&old_out, out);
6589 dataflow_set_copy (out, in);
6591 if (MAY_HAVE_DEBUG_INSNS)
6592 local_get_addr_cache = pointer_map_create ();
6594 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6596 rtx insn = mo->insn;
6598 switch (mo->type)
6600 case MO_CALL:
6601 dataflow_set_clear_at_call (out);
6602 break;
6604 case MO_USE:
6606 rtx loc = mo->u.loc;
6608 if (REG_P (loc))
6609 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6610 else if (MEM_P (loc))
6611 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6613 break;
6615 case MO_VAL_LOC:
     /* A debug-insn VAR_LOCATION: u.loc is either the location pattern
	itself or (concat value pattern).  */
6617 rtx loc = mo->u.loc;
6618 rtx val, vloc;
6619 tree var;
6621 if (GET_CODE (loc) == CONCAT)
6623 val = XEXP (loc, 0);
6624 vloc = XEXP (loc, 1);
6626 else
6628 val = NULL_RTX;
6629 vloc = loc;
6632 var = PAT_VAR_LOCATION_DECL (vloc);
6634 clobber_variable_part (out, NULL_RTX,
6635 dv_from_decl (var), 0, NULL_RTX);
6636 if (val)
6638 if (VAL_NEEDS_RESOLUTION (loc))
6639 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6640 set_variable_part (out, val, dv_from_decl (var), 0,
6641 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6642 INSERT);
6644 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6645 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6646 dv_from_decl (var), 0,
6647 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6648 INSERT);
6650 break;
6652 case MO_VAL_USE:
     /* u.loc is (concat val loc) or (concat (concat val uloc) vloc).  */
6654 rtx loc = mo->u.loc;
6655 rtx val, vloc, uloc;
6657 vloc = uloc = XEXP (loc, 1);
6658 val = XEXP (loc, 0);
6660 if (GET_CODE (val) == CONCAT)
6662 uloc = XEXP (val, 1);
6663 val = XEXP (val, 0);
6666 if (VAL_NEEDS_RESOLUTION (loc))
6667 val_resolve (out, val, vloc, insn);
6668 else
6669 val_store (out, val, uloc, insn, false);
6671 if (VAL_HOLDS_TRACK_EXPR (loc))
6673 if (GET_CODE (uloc) == REG)
6674 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6675 NULL);
6676 else if (GET_CODE (uloc) == MEM)
6677 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6678 NULL);
6681 break;
6683 case MO_VAL_SET:
     /* Decode the forms built in add_stores: u.loc is
	(concat val dst), (concat val (set dst src)),
	(concat (concat val dstv) dst/(set dst src)), or
	(concat (concat val (set dstv srcv)) (set dst src)),
	where dstv/srcv are value-based representations.  */
6685 rtx loc = mo->u.loc;
6686 rtx val, vloc, uloc;
6687 rtx dstv, srcv;
6689 vloc = loc;
6690 uloc = XEXP (vloc, 1);
6691 val = XEXP (vloc, 0);
6692 vloc = uloc;
6694 if (GET_CODE (uloc) == SET)
6696 dstv = SET_DEST (uloc);
6697 srcv = SET_SRC (uloc);
6699 else
6701 dstv = uloc;
6702 srcv = NULL;
6705 if (GET_CODE (val) == CONCAT)
6707 dstv = vloc = XEXP (val, 1);
6708 val = XEXP (val, 0);
6711 if (GET_CODE (vloc) == SET)
6713 srcv = SET_SRC (vloc);
6715 gcc_assert (val != srcv);
6716 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6718 dstv = vloc = SET_DEST (vloc);
6720 if (VAL_NEEDS_RESOLUTION (loc))
6721 val_resolve (out, val, srcv, insn);
6723 else if (VAL_NEEDS_RESOLUTION (loc))
6725 gcc_assert (GET_CODE (uloc) == SET
6726 && GET_CODE (SET_SRC (uloc)) == REG);
6727 val_resolve (out, val, SET_SRC (uloc), insn);
6730 if (VAL_HOLDS_TRACK_EXPR (loc))
6732 if (VAL_EXPR_IS_CLOBBERED (loc))
6734 if (REG_P (uloc))
6735 var_reg_delete (out, uloc, true);
6736 else if (MEM_P (uloc))
6738 gcc_assert (MEM_P (dstv));
6739 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6740 var_mem_delete (out, dstv, true);
6743 else
6745 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6746 rtx src = NULL, dst = uloc;
6747 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6749 if (GET_CODE (uloc) == SET)
6751 src = SET_SRC (uloc);
6752 dst = SET_DEST (uloc);
6755 if (copied_p)
6757 if (flag_var_tracking_uninit)
6759 status = find_src_status (in, src);
6761 if (status == VAR_INIT_STATUS_UNKNOWN)
6762 status = find_src_status (out, src);
6765 src = find_src_set_src (in, src);
6768 if (REG_P (dst))
6769 var_reg_delete_and_set (out, dst, !copied_p,
6770 status, srcv);
6771 else if (MEM_P (dst))
6773 gcc_assert (MEM_P (dstv));
6774 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6775 var_mem_delete_and_set (out, dstv, !copied_p,
6776 status, srcv);
6780 else if (REG_P (uloc))
6781 var_regno_delete (out, REGNO (uloc));
6782 else if (MEM_P (uloc))
6784 gcc_checking_assert (GET_CODE (vloc) == MEM);
6785 gcc_checking_assert (dstv == vloc);
6786 if (dstv != vloc)
6787 clobber_overlapping_mems (out, vloc);
6790 val_store (out, val, dstv, insn, true);
6792 break;
6794 case MO_SET:
6796 rtx loc = mo->u.loc;
6797 rtx set_src = NULL;
6799 if (GET_CODE (loc) == SET)
6801 set_src = SET_SRC (loc);
6802 loc = SET_DEST (loc);
6805 if (REG_P (loc))
6806 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6807 set_src);
6808 else if (MEM_P (loc))
6809 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6810 set_src);
6812 break;
6814 case MO_COPY:
6816 rtx loc = mo->u.loc;
6817 enum var_init_status src_status;
6818 rtx set_src = NULL;
6820 if (GET_CODE (loc) == SET)
6822 set_src = SET_SRC (loc);
6823 loc = SET_DEST (loc);
6826 if (! flag_var_tracking_uninit)
6827 src_status = VAR_INIT_STATUS_INITIALIZED;
6828 else
6830 src_status = find_src_status (in, set_src);
6832 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6833 src_status = find_src_status (out, set_src);
6836 set_src = find_src_set_src (in, set_src);
6838 if (REG_P (loc))
6839 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6840 else if (MEM_P (loc))
6841 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6843 break;
6845 case MO_USE_NO_VAR:
6847 rtx loc = mo->u.loc;
6849 if (REG_P (loc))
6850 var_reg_delete (out, loc, false);
6851 else if (MEM_P (loc))
6852 var_mem_delete (out, loc, false);
6854 break;
6856 case MO_CLOBBER:
6858 rtx loc = mo->u.loc;
6860 if (REG_P (loc))
6861 var_reg_delete (out, loc, true);
6862 else if (MEM_P (loc))
6863 var_mem_delete (out, loc, true);
6865 break;
6867 case MO_ADJUST:
6868 out->stack_adjust += mo->u.adjust;
6869 break;
     /* Canonicalize the recomputed OUT set before comparing.  */
6873 if (MAY_HAVE_DEBUG_INSNS)
6875 pointer_map_destroy (local_get_addr_cache);
6876 local_get_addr_cache = NULL;
6878 dataflow_set_equiv_regs (out);
6879 shared_hash_htab (out->vars)
6880 .traverse <dataflow_set *, canonicalize_values_mark> (out);
6881 shared_hash_htab (out->vars)
6882 .traverse <dataflow_set *, canonicalize_values_star> (out);
6883 #if ENABLE_CHECKING
6884 shared_hash_htab (out->vars)
6885 .traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6886 #endif
6888 changed = dataflow_set_different (&old_out, out);
6889 dataflow_set_destroy (&old_out);
6890 return changed;
6893 /* Find the locations of variables in the whole function: iterate the
     forward dataflow problem to a fixed point, visiting blocks in
     reverse-completion order.  Return false (abandoning the analysis)
     if the hash tables grow past PARAM_MAX_VARTRACK_SIZE.  */
6895 static bool
6896 vt_find_locations (void)
6898 fibheap_t worklist, pending, fibheap_swap;
6899 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6900 basic_block bb;
6901 edge e;
6902 int *bb_order;
6903 int *rc_order;
6904 int i;
6905 int htabsz = 0;
6906 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6907 bool success = true;
6909 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6910 /* Compute reverse completion order of depth first search of the CFG
6911 so that the data-flow runs faster. */
6912 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6913 bb_order = XNEWVEC (int, last_basic_block);
6914 pre_and_rev_post_order_compute (NULL, rc_order, false);
6915 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6916 bb_order[rc_order[i]] = i;
6917 free (rc_order);
     /* All blocks start out in PENDING; each outer round moves PENDING
	into WORKLIST and drains it, re-queuing changed successors either
	into the current round (WORKLIST) or the next one (PENDING).  */
6919 worklist = fibheap_new ();
6920 pending = fibheap_new ();
6921 visited = sbitmap_alloc (last_basic_block);
6922 in_worklist = sbitmap_alloc (last_basic_block);
6923 in_pending = sbitmap_alloc (last_basic_block);
6924 bitmap_clear (in_worklist);
6926 FOR_EACH_BB (bb)
6927 fibheap_insert (pending, bb_order[bb->index], bb);
6928 bitmap_ones (in_pending);
6930 while (success && !fibheap_empty (pending))
6932 fibheap_swap = pending;
6933 pending = worklist;
6934 worklist = fibheap_swap;
6935 sbitmap_swap = in_pending;
6936 in_pending = in_worklist;
6937 in_worklist = sbitmap_swap;
6939 bitmap_clear (visited);
6941 while (!fibheap_empty (worklist))
6943 bb = (basic_block) fibheap_extract_min (worklist);
6944 bitmap_clear_bit (in_worklist, bb->index);
6945 gcc_assert (!bitmap_bit_p (visited, bb->index));
6946 if (!bitmap_bit_p (visited, bb->index))
6948 bool changed;
6949 edge_iterator ei;
6950 int oldinsz, oldoutsz;
6952 bitmap_set_bit (visited, bb->index);
     /* Track the total hash-table footprint so the size limit below
	can be enforced; remember old sizes for the dump.  */
6954 if (VTI (bb)->in.vars)
6956 htabsz
6957 -= shared_hash_htab (VTI (bb)->in.vars).size ()
6958 + shared_hash_htab (VTI (bb)->out.vars).size ();
6959 oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements ();
6960 oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements ();
6962 else
6963 oldinsz = oldoutsz = 0;
6965 if (MAY_HAVE_DEBUG_INSNS)
6967 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6968 bool first = true, adjust = false;
6970 /* Calculate the IN set as the intersection of
6971 predecessor OUT sets. */
6973 dataflow_set_clear (in);
6974 dst_can_be_shared = true;
6976 FOR_EACH_EDGE (e, ei, bb->preds)
6977 if (!VTI (e->src)->flooded)
6978 gcc_assert (bb_order[bb->index]
6979 <= bb_order[e->src->index]);
6980 else if (first)
6982 dataflow_set_copy (in, &VTI (e->src)->out);
6983 first_out = &VTI (e->src)->out;
6984 first = false;
6986 else
6988 dataflow_set_merge (in, &VTI (e->src)->out);
6989 adjust = true;
6992 if (adjust)
6994 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6995 #if ENABLE_CHECKING
6996 /* Merge and merge_adjust should keep entries in
6997 canonical order. */
6998 shared_hash_htab (in->vars)
6999 .traverse <dataflow_set *,
7000 canonicalize_loc_order_check> (in);
7001 #endif
7002 if (dst_can_be_shared)
7004 shared_hash_destroy (in->vars);
7005 in->vars = shared_hash_copy (first_out->vars);
7009 VTI (bb)->flooded = true;
7011 else
7013 /* Calculate the IN set as union of predecessor OUT sets. */
7014 dataflow_set_clear (&VTI (bb)->in);
7015 FOR_EACH_EDGE (e, ei, bb->preds)
7016 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7019 changed = compute_bb_dataflow (bb);
7020 htabsz += shared_hash_htab (VTI (bb)->in.vars).size ()
7021 + shared_hash_htab (VTI (bb)->out.vars).size ();
7023 if (htabmax && htabsz > htabmax)
7025 if (MAY_HAVE_DEBUG_INSNS)
7026 inform (DECL_SOURCE_LOCATION (cfun->decl),
7027 "variable tracking size limit exceeded with "
7028 "-fvar-tracking-assignments, retrying without");
7029 else
7030 inform (DECL_SOURCE_LOCATION (cfun->decl),
7031 "variable tracking size limit exceeded");
7032 success = false;
7033 break;
     /* OUT changed: successors must be recomputed — this round if not
	yet visited, otherwise the next round.  */
7036 if (changed)
7038 FOR_EACH_EDGE (e, ei, bb->succs)
7040 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7041 continue;
7043 if (bitmap_bit_p (visited, e->dest->index))
7045 if (!bitmap_bit_p (in_pending, e->dest->index))
7047 /* Send E->DEST to next round. */
7048 bitmap_set_bit (in_pending, e->dest->index);
7049 fibheap_insert (pending,
7050 bb_order[e->dest->index],
7051 e->dest);
7054 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7056 /* Add E->DEST to current round. */
7057 bitmap_set_bit (in_worklist, e->dest->index);
7058 fibheap_insert (worklist, bb_order[e->dest->index],
7059 e->dest);
7064 if (dump_file)
7065 fprintf (dump_file,
7066 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7067 bb->index,
7068 (int)shared_hash_htab (VTI (bb)->in.vars).size (),
7069 oldinsz,
7070 (int)shared_hash_htab (VTI (bb)->out.vars).size (),
7071 oldoutsz,
7072 (int)worklist->nodes, (int)pending->nodes, htabsz);
7074 if (dump_file && (dump_flags & TDF_DETAILS))
7076 fprintf (dump_file, "BB %i IN:\n", bb->index);
7077 dump_dataflow_set (&VTI (bb)->in);
7078 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7079 dump_dataflow_set (&VTI (bb)->out);
7085 if (success && MAY_HAVE_DEBUG_INSNS)
7086 FOR_EACH_BB (bb)
7087 gcc_assert (VTI (bb)->flooded);
7089 free (bb_order);
7090 fibheap_delete (worklist);
7091 fibheap_delete (pending);
7092 sbitmap_free (visited);
7093 sbitmap_free (in_worklist);
7094 sbitmap_free (in_pending);
7096 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7097 return success;
7100 /* Print the content of the LIST to dump file. */
7102 static void
7103 dump_attrs_list (attrs list)
7105 for (; list; list = list->next)
7107 if (dv_is_decl_p (list->dv))
7108 print_mem_expr (dump_file, dv_as_decl (list->dv));
7109 else
7110 print_rtl_single (dump_file, dv_as_value (list->dv));
7111 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7113 fprintf (dump_file, "\n");
7116 /* Print the information about variable *SLOT to dump file. */
7119 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7121 variable var = *slot;
7123 dump_var (var);
7125 /* Continue traversing the hash table. */
7126 return 1;
7129 /* Print the information about variable VAR to dump file: its name
     (or value RTL), then every recorded location of every part.  */
7131 static void
7132 dump_var (variable var)
7134 int i;
7135 location_chain node;
7137 if (dv_is_decl_p (var->dv))
7139 const_tree decl = dv_as_decl (var->dv);
7141 if (DECL_NAME (decl))
7143 fprintf (dump_file, " name: %s",
7144 IDENTIFIER_POINTER (DECL_NAME (decl)));
7145 if (dump_flags & TDF_UID)
7146 fprintf (dump_file, "D.%u", DECL_UID (decl));
7148 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7149 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7150 else
7151 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7152 fprintf (dump_file, "\n");
7154 else
     /* Value-based variable: print the VALUE RTL instead of a name.  */
7156 fputc (' ', dump_file);
7157 print_rtl_single (dump_file, dv_as_value (var->dv));
     /* One-part variables always report offset 0.  */
7160 for (i = 0; i < var->n_var_parts; i++)
7162 fprintf (dump_file, " offset %ld\n",
7163 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7164 for (node = var->var_part[i].loc_chain; node; node = node->next)
7166 fprintf (dump_file, " ");
7167 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7168 fprintf (dump_file, "[uninit]");
7169 print_rtl_single (dump_file, node->loc);
7174 /* Print the information about variables from hash table VARS to dump file. */
7176 static void
7177 dump_vars (variable_table_type vars)
7179 if (vars.elements () > 0)
7181 fprintf (dump_file, "Variables:\n");
7182 vars.traverse <void *, dump_var_tracking_slot> (NULL);
7186 /* Print the dataflow set SET to dump file. */
7188 static void
7189 dump_dataflow_set (dataflow_set *set)
7191 int i;
7193 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7194 set->stack_adjust);
7195 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7197 if (set->regs[i])
7199 fprintf (dump_file, "Reg %d:", i);
7200 dump_attrs_list (set->regs[i]);
7203 dump_vars (shared_hash_htab (set->vars));
7204 fprintf (dump_file, "\n");
7207 /* Print the IN and OUT sets for each basic block to dump file. */
7209 static void
7210 dump_dataflow_sets (void)
7212 basic_block bb;
7214 FOR_EACH_BB (bb)
7216 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7217 fprintf (dump_file, "IN:\n");
7218 dump_dataflow_set (&VTI (bb)->in);
7219 fprintf (dump_file, "OUT:\n");
7220 dump_dataflow_set (&VTI (bb)->out);
7224 /* Return the variable for DV in dropped_values, inserting one if
7225 requested with INSERT. */
7227 static inline variable
7228 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7230 variable_def **slot;
7231 variable empty_var;
7232 onepart_enum_t onepart;
7234 slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7236 if (!slot)
7237 return NULL;
7239 if (*slot)
7240 return *slot;
7242 gcc_checking_assert (insert == INSERT);
7244 onepart = dv_onepart_p (dv);
7246 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7248 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7249 empty_var->dv = dv;
7250 empty_var->refcount = 1;
7251 empty_var->n_var_parts = 0;
7252 empty_var->onepart = onepart;
7253 empty_var->in_changed_variables = false;
7254 empty_var->var_part[0].loc_chain = NULL;
7255 empty_var->var_part[0].cur_loc = NULL;
7256 VAR_LOC_1PAUX (empty_var) = NULL;
7257 set_dv_changed (dv, true);
7259 *slot = empty_var;
7261 return empty_var;
7264 /* Recover the one-part aux from dropped_values. */
7266 static struct onepart_aux *
7267 recover_dropped_1paux (variable var)
7269 variable dvar;
7271 gcc_checking_assert (var->onepart);
7273 if (VAR_LOC_1PAUX (var))
7274 return VAR_LOC_1PAUX (var);
7276 if (var->onepart == ONEPART_VDECL)
7277 return NULL;
7279 dvar = variable_from_dropped (var->dv, NO_INSERT);
7281 if (!dvar)
7282 return NULL;
7284 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7285 VAR_LOC_1PAUX (dvar) = NULL;
7287 return VAR_LOC_1PAUX (var);
7290 /* Add variable VAR to the hash table of changed variables and
7291 if it has no locations delete it from SET's hash table. */
7293 static void
7294 variable_was_changed (variable var, dataflow_set *set)
7296 hashval_t hash = dv_htab_hash (var->dv);
7298 if (emit_notes)
7300 variable_def **slot;
7302 /* Remember this decl or VALUE has been added to changed_variables. */
7303 set_dv_changed (var->dv, true);
7305 slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT);
7307 if (*slot)
7309 variable old_var = *slot;
7310 gcc_assert (old_var->in_changed_variables);
7311 old_var->in_changed_variables = false;
7312 if (var != old_var && var->onepart)
7314 /* Restore the auxiliary info from an empty variable
7315 previously created for changed_variables, so it is
7316 not lost. */
7317 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7318 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7319 VAR_LOC_1PAUX (old_var) = NULL;
7321 variable_htab_free (*slot);
7324 if (set && var->n_var_parts == 0)
7326 onepart_enum_t onepart = var->onepart;
7327 variable empty_var = NULL;
7328 variable_def **dslot = NULL;
7330 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7332 dslot = dropped_values.find_slot_with_hash (var->dv,
7333 dv_htab_hash (var->dv),
7334 INSERT);
7335 empty_var = *dslot;
7337 if (empty_var)
7339 gcc_checking_assert (!empty_var->in_changed_variables);
7340 if (!VAR_LOC_1PAUX (var))
7342 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7343 VAR_LOC_1PAUX (empty_var) = NULL;
7345 else
7346 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7350 if (!empty_var)
7352 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7353 empty_var->dv = var->dv;
7354 empty_var->refcount = 1;
7355 empty_var->n_var_parts = 0;
7356 empty_var->onepart = onepart;
7357 if (dslot)
7359 empty_var->refcount++;
7360 *dslot = empty_var;
7363 else
7364 empty_var->refcount++;
7365 empty_var->in_changed_variables = true;
7366 *slot = empty_var;
7367 if (onepart)
7369 empty_var->var_part[0].loc_chain = NULL;
7370 empty_var->var_part[0].cur_loc = NULL;
7371 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7372 VAR_LOC_1PAUX (var) = NULL;
7374 goto drop_var;
7376 else
7378 if (var->onepart && !VAR_LOC_1PAUX (var))
7379 recover_dropped_1paux (var);
7380 var->refcount++;
7381 var->in_changed_variables = true;
7382 *slot = var;
7385 else
7387 gcc_assert (set);
7388 if (var->n_var_parts == 0)
7390 variable_def **slot;
7392 drop_var:
7393 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7394 if (slot)
7396 if (shared_hash_shared (set->vars))
7397 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7398 NO_INSERT);
7399 shared_hash_htab (set->vars).clear_slot (slot);
7405 /* Look for the index in VAR->var_part corresponding to OFFSET.
7406 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7407 referenced int will be set to the index that the part has or should
7408 have, if it should be inserted. */
7410 static inline int
7411 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7412 int *insertion_point)
7414 int pos, low, high;
7416 if (var->onepart)
7418 if (offset != 0)
7419 return -1;
7421 if (insertion_point)
7422 *insertion_point = 0;
7424 return var->n_var_parts - 1;
7427 /* Find the location part. */
7428 low = 0;
7429 high = var->n_var_parts;
7430 while (low != high)
7432 pos = (low + high) / 2;
7433 if (VAR_PART_OFFSET (var, pos) < offset)
7434 low = pos + 1;
7435 else
7436 high = pos;
7438 pos = low;
7440 if (insertion_point)
7441 *insertion_point = pos;
7443 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7444 return pos;
7446 return -1;
7449 static variable_def **
7450 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7451 decl_or_value dv, HOST_WIDE_INT offset,
7452 enum var_init_status initialized, rtx set_src)
7454 int pos;
7455 location_chain node, next;
7456 location_chain *nextp;
7457 variable var;
7458 onepart_enum_t onepart;
7460 var = *slot;
7462 if (var)
7463 onepart = var->onepart;
7464 else
7465 onepart = dv_onepart_p (dv);
7467 gcc_checking_assert (offset == 0 || !onepart);
7468 gcc_checking_assert (loc != dv_as_opaque (dv));
7470 if (! flag_var_tracking_uninit)
7471 initialized = VAR_INIT_STATUS_INITIALIZED;
7473 if (!var)
7475 /* Create new variable information. */
7476 var = (variable) pool_alloc (onepart_pool (onepart));
7477 var->dv = dv;
7478 var->refcount = 1;
7479 var->n_var_parts = 1;
7480 var->onepart = onepart;
7481 var->in_changed_variables = false;
7482 if (var->onepart)
7483 VAR_LOC_1PAUX (var) = NULL;
7484 else
7485 VAR_PART_OFFSET (var, 0) = offset;
7486 var->var_part[0].loc_chain = NULL;
7487 var->var_part[0].cur_loc = NULL;
7488 *slot = var;
7489 pos = 0;
7490 nextp = &var->var_part[0].loc_chain;
7492 else if (onepart)
7494 int r = -1, c = 0;
7496 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7498 pos = 0;
7500 if (GET_CODE (loc) == VALUE)
7502 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7503 nextp = &node->next)
7504 if (GET_CODE (node->loc) == VALUE)
7506 if (node->loc == loc)
7508 r = 0;
7509 break;
7511 if (canon_value_cmp (node->loc, loc))
7512 c++;
7513 else
7515 r = 1;
7516 break;
7519 else if (REG_P (node->loc) || MEM_P (node->loc))
7520 c++;
7521 else
7523 r = 1;
7524 break;
7527 else if (REG_P (loc))
7529 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7530 nextp = &node->next)
7531 if (REG_P (node->loc))
7533 if (REGNO (node->loc) < REGNO (loc))
7534 c++;
7535 else
7537 if (REGNO (node->loc) == REGNO (loc))
7538 r = 0;
7539 else
7540 r = 1;
7541 break;
7544 else
7546 r = 1;
7547 break;
7550 else if (MEM_P (loc))
7552 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7553 nextp = &node->next)
7554 if (REG_P (node->loc))
7555 c++;
7556 else if (MEM_P (node->loc))
7558 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7559 break;
7560 else
7561 c++;
7563 else
7565 r = 1;
7566 break;
7569 else
7570 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7571 nextp = &node->next)
7572 if ((r = loc_cmp (node->loc, loc)) >= 0)
7573 break;
7574 else
7575 c++;
7577 if (r == 0)
7578 return slot;
7580 if (shared_var_p (var, set->vars))
7582 slot = unshare_variable (set, slot, var, initialized);
7583 var = *slot;
7584 for (nextp = &var->var_part[0].loc_chain; c;
7585 nextp = &(*nextp)->next)
7586 c--;
7587 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7590 else
7592 int inspos = 0;
7594 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7596 pos = find_variable_location_part (var, offset, &inspos);
7598 if (pos >= 0)
7600 node = var->var_part[pos].loc_chain;
7602 if (node
7603 && ((REG_P (node->loc) && REG_P (loc)
7604 && REGNO (node->loc) == REGNO (loc))
7605 || rtx_equal_p (node->loc, loc)))
7607 /* LOC is in the beginning of the chain so we have nothing
7608 to do. */
7609 if (node->init < initialized)
7610 node->init = initialized;
7611 if (set_src != NULL)
7612 node->set_src = set_src;
7614 return slot;
7616 else
7618 /* We have to make a copy of a shared variable. */
7619 if (shared_var_p (var, set->vars))
7621 slot = unshare_variable (set, slot, var, initialized);
7622 var = *slot;
7626 else
7628 /* We have not found the location part, new one will be created. */
7630 /* We have to make a copy of the shared variable. */
7631 if (shared_var_p (var, set->vars))
7633 slot = unshare_variable (set, slot, var, initialized);
7634 var = *slot;
7637 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7638 thus there are at most MAX_VAR_PARTS different offsets. */
7639 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7640 && (!var->n_var_parts || !onepart));
7642 /* We have to move the elements of array starting at index
7643 inspos to the next position. */
7644 for (pos = var->n_var_parts; pos > inspos; pos--)
7645 var->var_part[pos] = var->var_part[pos - 1];
7647 var->n_var_parts++;
7648 gcc_checking_assert (!onepart);
7649 VAR_PART_OFFSET (var, pos) = offset;
7650 var->var_part[pos].loc_chain = NULL;
7651 var->var_part[pos].cur_loc = NULL;
7654 /* Delete the location from the list. */
7655 nextp = &var->var_part[pos].loc_chain;
7656 for (node = var->var_part[pos].loc_chain; node; node = next)
7658 next = node->next;
7659 if ((REG_P (node->loc) && REG_P (loc)
7660 && REGNO (node->loc) == REGNO (loc))
7661 || rtx_equal_p (node->loc, loc))
7663 /* Save these values, to assign to the new node, before
7664 deleting this one. */
7665 if (node->init > initialized)
7666 initialized = node->init;
7667 if (node->set_src != NULL && set_src == NULL)
7668 set_src = node->set_src;
7669 if (var->var_part[pos].cur_loc == node->loc)
7670 var->var_part[pos].cur_loc = NULL;
7671 pool_free (loc_chain_pool, node);
7672 *nextp = next;
7673 break;
7675 else
7676 nextp = &node->next;
7679 nextp = &var->var_part[pos].loc_chain;
7682 /* Add the location to the beginning. */
7683 node = (location_chain) pool_alloc (loc_chain_pool);
7684 node->loc = loc;
7685 node->init = initialized;
7686 node->set_src = set_src;
7687 node->next = *nextp;
7688 *nextp = node;
7690 /* If no location was emitted do so. */
7691 if (var->var_part[pos].cur_loc == NULL)
7692 variable_was_changed (var, set);
7694 return slot;
7697 /* Set the part of variable's location in the dataflow set SET. The
7698 variable part is specified by variable's declaration in DV and
7699 offset OFFSET and the part's location by LOC. IOPT should be
7700 NO_INSERT if the variable is known to be in SET already and the
7701 variable hash table must not be resized, and INSERT otherwise. */
7703 static void
7704 set_variable_part (dataflow_set *set, rtx loc,
7705 decl_or_value dv, HOST_WIDE_INT offset,
7706 enum var_init_status initialized, rtx set_src,
7707 enum insert_option iopt)
7709 variable_def **slot;
7711 if (iopt == NO_INSERT)
7712 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7713 else
7715 slot = shared_hash_find_slot (set->vars, dv);
7716 if (!slot)
7717 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7719 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7722 /* Remove all recorded register locations for the given variable part
7723 from dataflow set SET, except for those that are identical to loc.
7724 The variable part is specified by variable's declaration or value
7725 DV and offset OFFSET. */
7727 static variable_def **
7728 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7729 HOST_WIDE_INT offset, rtx set_src)
7731 variable var = *slot;
7732 int pos = find_variable_location_part (var, offset, NULL);
7734 if (pos >= 0)
7736 location_chain node, next;
7738 /* Remove the register locations from the dataflow set. */
7739 next = var->var_part[pos].loc_chain;
7740 for (node = next; node; node = next)
7742 next = node->next;
7743 if (node->loc != loc
7744 && (!flag_var_tracking_uninit
7745 || !set_src
7746 || MEM_P (set_src)
7747 || !rtx_equal_p (set_src, node->set_src)))
7749 if (REG_P (node->loc))
7751 attrs anode, anext;
7752 attrs *anextp;
7754 /* Remove the variable part from the register's
7755 list, but preserve any other variable parts
7756 that might be regarded as live in that same
7757 register. */
7758 anextp = &set->regs[REGNO (node->loc)];
7759 for (anode = *anextp; anode; anode = anext)
7761 anext = anode->next;
7762 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7763 && anode->offset == offset)
7765 pool_free (attrs_pool, anode);
7766 *anextp = anext;
7768 else
7769 anextp = &anode->next;
7773 slot = delete_slot_part (set, node->loc, slot, offset);
7778 return slot;
7781 /* Remove all recorded register locations for the given variable part
7782 from dataflow set SET, except for those that are identical to loc.
7783 The variable part is specified by variable's declaration or value
7784 DV and offset OFFSET. */
7786 static void
7787 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7788 HOST_WIDE_INT offset, rtx set_src)
7790 variable_def **slot;
7792 if (!dv_as_opaque (dv)
7793 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7794 return;
7796 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7797 if (!slot)
7798 return;
7800 clobber_slot_part (set, loc, slot, offset, set_src);
7803 /* Delete the part of variable's location from dataflow set SET. The
7804 variable part is specified by its SET->vars slot SLOT and offset
7805 OFFSET and the part's location by LOC. */
7807 static variable_def **
7808 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7809 HOST_WIDE_INT offset)
7811 variable var = *slot;
7812 int pos = find_variable_location_part (var, offset, NULL);
7814 if (pos >= 0)
7816 location_chain node, next;
7817 location_chain *nextp;
7818 bool changed;
7819 rtx cur_loc;
7821 if (shared_var_p (var, set->vars))
7823 /* If the variable contains the location part we have to
7824 make a copy of the variable. */
7825 for (node = var->var_part[pos].loc_chain; node;
7826 node = node->next)
7828 if ((REG_P (node->loc) && REG_P (loc)
7829 && REGNO (node->loc) == REGNO (loc))
7830 || rtx_equal_p (node->loc, loc))
7832 slot = unshare_variable (set, slot, var,
7833 VAR_INIT_STATUS_UNKNOWN);
7834 var = *slot;
7835 break;
7840 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7841 cur_loc = VAR_LOC_FROM (var);
7842 else
7843 cur_loc = var->var_part[pos].cur_loc;
7845 /* Delete the location part. */
7846 changed = false;
7847 nextp = &var->var_part[pos].loc_chain;
7848 for (node = *nextp; node; node = next)
7850 next = node->next;
7851 if ((REG_P (node->loc) && REG_P (loc)
7852 && REGNO (node->loc) == REGNO (loc))
7853 || rtx_equal_p (node->loc, loc))
7855 /* If we have deleted the location which was last emitted
7856 we have to emit new location so add the variable to set
7857 of changed variables. */
7858 if (cur_loc == node->loc)
7860 changed = true;
7861 var->var_part[pos].cur_loc = NULL;
7862 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7863 VAR_LOC_FROM (var) = NULL;
7865 pool_free (loc_chain_pool, node);
7866 *nextp = next;
7867 break;
7869 else
7870 nextp = &node->next;
7873 if (var->var_part[pos].loc_chain == NULL)
7875 changed = true;
7876 var->n_var_parts--;
7877 while (pos < var->n_var_parts)
7879 var->var_part[pos] = var->var_part[pos + 1];
7880 pos++;
7883 if (changed)
7884 variable_was_changed (var, set);
7887 return slot;
7890 /* Delete the part of variable's location from dataflow set SET. The
7891 variable part is specified by variable's declaration or value DV
7892 and offset OFFSET and the part's location by LOC. */
7894 static void
7895 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7896 HOST_WIDE_INT offset)
7898 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7899 if (!slot)
7900 return;
7902 delete_slot_part (set, loc, slot, offset);
7906 /* Structure for passing some other parameters to function
7907 vt_expand_loc_callback. */
7908 struct expand_loc_callback_data
7910 /* The variables and values active at this point. */
7911 variable_table_type vars;
7913 /* Stack of values and debug_exprs under expansion, and their
7914 children. */
7915 stack_vec<rtx, 4> expanding;
7917 /* Stack of values and debug_exprs whose expansion hit recursion
7918 cycles. They will have VALUE_RECURSED_INTO marked when added to
7919 this list. This flag will be cleared if any of its dependencies
7920 resolves to a valid location. So, if the flag remains set at the
7921 end of the search, we know no valid location for this one can
7922 possibly exist. */
7923 stack_vec<rtx, 4> pending;
7925 /* The maximum depth among the sub-expressions under expansion.
7926 Zero indicates no expansion so far. */
7927 expand_depth depth;
7930 /* Allocate the one-part auxiliary data structure for VAR, with enough
7931 room for COUNT dependencies. */
7933 static void
7934 loc_exp_dep_alloc (variable var, int count)
7936 size_t allocsize;
7938 gcc_checking_assert (var->onepart);
7940 /* We can be called with COUNT == 0 to allocate the data structure
7941 without any dependencies, e.g. for the backlinks only. However,
7942 if we are specifying a COUNT, then the dependency list must have
7943 been emptied before. It would be possible to adjust pointers or
7944 force it empty here, but this is better done at an earlier point
7945 in the algorithm, so we instead leave an assertion to catch
7946 errors. */
7947 gcc_checking_assert (!count
7948 || VAR_LOC_DEP_VEC (var) == NULL
7949 || VAR_LOC_DEP_VEC (var)->is_empty ());
7951 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
7952 return;
7954 allocsize = offsetof (struct onepart_aux, deps)
7955 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
7957 if (VAR_LOC_1PAUX (var))
7959 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7960 VAR_LOC_1PAUX (var), allocsize);
7961 /* If the reallocation moves the onepaux structure, the
7962 back-pointer to BACKLINKS in the first list member will still
7963 point to its old location. Adjust it. */
7964 if (VAR_LOC_DEP_LST (var))
7965 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7967 else
7969 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7970 *VAR_LOC_DEP_LSTP (var) = NULL;
7971 VAR_LOC_FROM (var) = NULL;
7972 VAR_LOC_DEPTH (var).complexity = 0;
7973 VAR_LOC_DEPTH (var).entryvals = 0;
7975 VAR_LOC_DEP_VEC (var)->embedded_init (count);
7978 /* Remove all entries from the vector of active dependencies of VAR,
7979 removing them from the back-links lists too. */
7981 static void
7982 loc_exp_dep_clear (variable var)
7984 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
7986 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
7987 if (led->next)
7988 led->next->pprev = led->pprev;
7989 if (led->pprev)
7990 *led->pprev = led->next;
7991 VAR_LOC_DEP_VEC (var)->pop ();
7995 /* Insert an active dependency from VAR on X to the vector of
7996 dependencies, and add the corresponding back-link to X's list of
7997 back-links in VARS. */
7999 static void
8000 loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)
8002 decl_or_value dv;
8003 variable xvar;
8004 loc_exp_dep *led;
8006 dv = dv_from_rtx (x);
8008 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8009 an additional look up? */
8010 xvar = vars.find_with_hash (dv, dv_htab_hash (dv));
8012 if (!xvar)
8014 xvar = variable_from_dropped (dv, NO_INSERT);
8015 gcc_checking_assert (xvar);
8018 /* No point in adding the same backlink more than once. This may
8019 arise if say the same value appears in two complex expressions in
8020 the same loc_list, or even more than once in a single
8021 expression. */
8022 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8023 return;
8025 if (var->onepart == NOT_ONEPART)
8026 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8027 else
8029 loc_exp_dep empty;
8030 memset (&empty, 0, sizeof (empty));
8031 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8032 led = &VAR_LOC_DEP_VEC (var)->last ();
8034 led->dv = var->dv;
8035 led->value = x;
8037 loc_exp_dep_alloc (xvar, 0);
8038 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8039 led->next = *led->pprev;
8040 if (led->next)
8041 led->next->pprev = &led->next;
8042 *led->pprev = led;
8045 /* Create active dependencies of VAR on COUNT values starting at
8046 VALUE, and corresponding back-links to the entries in VARS. Return
8047 true if we found any pending-recursion results. */
8049 static bool
8050 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8051 variable_table_type vars)
8053 bool pending_recursion = false;
8055 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8056 || VAR_LOC_DEP_VEC (var)->is_empty ());
8058 /* Set up all dependencies from last_child (as set up at the end of
8059 the loop above) to the end. */
8060 loc_exp_dep_alloc (var, count);
8062 while (count--)
8064 rtx x = *value++;
8066 if (!pending_recursion)
8067 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8069 loc_exp_insert_dep (var, x, vars);
8072 return pending_recursion;
8075 /* Notify the back-links of IVAR that are pending recursion that we
8076 have found a non-NIL value for it, so they are cleared for another
8077 attempt to compute a current location. */
8079 static void
8080 notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
8082 loc_exp_dep *led, *next;
8084 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8086 decl_or_value dv = led->dv;
8087 variable var;
8089 next = led->next;
8091 if (dv_is_value_p (dv))
8093 rtx value = dv_as_value (dv);
8095 /* If we have already resolved it, leave it alone. */
8096 if (!VALUE_RECURSED_INTO (value))
8097 continue;
8099 /* Check that VALUE_RECURSED_INTO, true from the test above,
8100 implies NO_LOC_P. */
8101 gcc_checking_assert (NO_LOC_P (value));
8103 /* We won't notify variables that are being expanded,
8104 because their dependency list is cleared before
8105 recursing. */
8106 NO_LOC_P (value) = false;
8107 VALUE_RECURSED_INTO (value) = false;
8109 gcc_checking_assert (dv_changed_p (dv));
8111 else
8113 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8114 if (!dv_changed_p (dv))
8115 continue;
8118 var = vars.find_with_hash (dv, dv_htab_hash (dv));
8120 if (!var)
8121 var = variable_from_dropped (dv, NO_INSERT);
8123 if (var)
8124 notify_dependents_of_resolved_value (var, vars);
8126 if (next)
8127 next->pprev = led->pprev;
8128 if (led->pprev)
8129 *led->pprev = next;
8130 led->next = NULL;
8131 led->pprev = NULL;
8135 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8136 int max_depth, void *data);
8138 /* Return the combined depth, when one sub-expression evaluated to
8139 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8141 static inline expand_depth
8142 update_depth (expand_depth saved_depth, expand_depth best_depth)
8144 /* If we didn't find anything, stick with what we had. */
8145 if (!best_depth.complexity)
8146 return saved_depth;
8148 /* If we found hadn't found anything, use the depth of the current
8149 expression. Do NOT add one extra level, we want to compute the
8150 maximum depth among sub-expressions. We'll increment it later,
8151 if appropriate. */
8152 if (!saved_depth.complexity)
8153 return best_depth;
8155 /* Combine the entryval count so that regardless of which one we
8156 return, the entryval count is accurate. */
8157 best_depth.entryvals = saved_depth.entryvals
8158 = best_depth.entryvals + saved_depth.entryvals;
8160 if (saved_depth.complexity < best_depth.complexity)
8161 return best_depth;
8162 else
8163 return saved_depth;
8166 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8167 DATA for cselib expand callback. If PENDRECP is given, indicate in
8168 it whether any sub-expression couldn't be fully evaluated because
8169 it is pending recursion resolution. */
8171 static inline rtx
8172 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8174 struct expand_loc_callback_data *elcd
8175 = (struct expand_loc_callback_data *) data;
8176 location_chain loc, next;
8177 rtx result = NULL;
8178 int first_child, result_first_child, last_child;
8179 bool pending_recursion;
8180 rtx loc_from = NULL;
8181 struct elt_loc_list *cloc = NULL;
8182 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8183 int wanted_entryvals, found_entryvals = 0;
8185 /* Clear all backlinks pointing at this, so that we're not notified
8186 while we're active. */
8187 loc_exp_dep_clear (var);
8189 retry:
8190 if (var->onepart == ONEPART_VALUE)
8192 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8194 gcc_checking_assert (cselib_preserved_value_p (val));
8196 cloc = val->locs;
8199 first_child = result_first_child = last_child
8200 = elcd->expanding.length ();
8202 wanted_entryvals = found_entryvals;
8204 /* Attempt to expand each available location in turn. */
8205 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8206 loc || cloc; loc = next)
8208 result_first_child = last_child;
8210 if (!loc)
8212 loc_from = cloc->loc;
8213 next = loc;
8214 cloc = cloc->next;
8215 if (unsuitable_loc (loc_from))
8216 continue;
8218 else
8220 loc_from = loc->loc;
8221 next = loc->next;
8224 gcc_checking_assert (!unsuitable_loc (loc_from));
8226 elcd->depth.complexity = elcd->depth.entryvals = 0;
8227 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8228 vt_expand_loc_callback, data);
8229 last_child = elcd->expanding.length ();
8231 if (result)
8233 depth = elcd->depth;
8235 gcc_checking_assert (depth.complexity
8236 || result_first_child == last_child);
8238 if (last_child - result_first_child != 1)
8240 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8241 depth.entryvals++;
8242 depth.complexity++;
8245 if (depth.complexity <= EXPR_USE_DEPTH)
8247 if (depth.entryvals <= wanted_entryvals)
8248 break;
8249 else if (!found_entryvals || depth.entryvals < found_entryvals)
8250 found_entryvals = depth.entryvals;
8253 result = NULL;
8256 /* Set it up in case we leave the loop. */
8257 depth.complexity = depth.entryvals = 0;
8258 loc_from = NULL;
8259 result_first_child = first_child;
8262 if (!loc_from && wanted_entryvals < found_entryvals)
8264 /* We found entries with ENTRY_VALUEs and skipped them. Since
8265 we could not find any expansions without ENTRY_VALUEs, but we
8266 found at least one with them, go back and get an entry with
8267 the minimum number ENTRY_VALUE count that we found. We could
8268 avoid looping, but since each sub-loc is already resolved,
8269 the re-expansion should be trivial. ??? Should we record all
8270 attempted locs as dependencies, so that we retry the
8271 expansion should any of them change, in the hope it can give
8272 us a new entry without an ENTRY_VALUE? */
8273 elcd->expanding.truncate (first_child);
8274 goto retry;
8277 /* Register all encountered dependencies as active. */
8278 pending_recursion = loc_exp_dep_set
8279 (var, result, elcd->expanding.address () + result_first_child,
8280 last_child - result_first_child, elcd->vars);
8282 elcd->expanding.truncate (first_child);
8284 /* Record where the expansion came from. */
8285 gcc_checking_assert (!result || !pending_recursion);
8286 VAR_LOC_FROM (var) = loc_from;
8287 VAR_LOC_DEPTH (var) = depth;
8289 gcc_checking_assert (!depth.complexity == !result);
8291 elcd->depth = update_depth (saved_depth, depth);
8293 /* Indicate whether any of the dependencies are pending recursion
8294 resolution. */
8295 if (pendrecp)
8296 *pendrecp = pending_recursion;
8298 if (!pendrecp || !pending_recursion)
8299 var->var_part[0].cur_loc = result;
8301 return result;
8304 /* Callback for cselib_expand_value, that looks for expressions
8305 holding the value in the var-tracking hash tables. Return X for
8306 standard processing, anything else is to be used as-is. */
8308 static rtx
8309 vt_expand_loc_callback (rtx x, bitmap regs,
8310 int max_depth ATTRIBUTE_UNUSED,
8311 void *data)
8313 struct expand_loc_callback_data *elcd
8314 = (struct expand_loc_callback_data *) data;
8315 decl_or_value dv;
8316 variable var;
8317 rtx result, subreg;
8318 bool pending_recursion = false;
8319 bool from_empty = false;
8321 switch (GET_CODE (x))
8323 case SUBREG:
8324 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8325 EXPR_DEPTH,
8326 vt_expand_loc_callback, data);
8328 if (!subreg)
8329 return NULL;
8331 result = simplify_gen_subreg (GET_MODE (x), subreg,
8332 GET_MODE (SUBREG_REG (x)),
8333 SUBREG_BYTE (x));
8335 /* Invalid SUBREGs are ok in debug info. ??? We could try
8336 alternate expansions for the VALUE as well. */
8337 if (!result)
8338 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8340 return result;
8342 case DEBUG_EXPR:
8343 case VALUE:
8344 dv = dv_from_rtx (x);
8345 break;
8347 default:
8348 return x;
8351 elcd->expanding.safe_push (x);
8353 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8354 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8356 if (NO_LOC_P (x))
8358 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8359 return NULL;
8362 var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv));
8364 if (!var)
8366 from_empty = true;
8367 var = variable_from_dropped (dv, INSERT);
8370 gcc_checking_assert (var);
8372 if (!dv_changed_p (dv))
8374 gcc_checking_assert (!NO_LOC_P (x));
8375 gcc_checking_assert (var->var_part[0].cur_loc);
8376 gcc_checking_assert (VAR_LOC_1PAUX (var));
8377 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8379 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8381 return var->var_part[0].cur_loc;
8384 VALUE_RECURSED_INTO (x) = true;
8385 /* This is tentative, but it makes some tests simpler. */
8386 NO_LOC_P (x) = true;
8388 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8390 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8392 if (pending_recursion)
8394 gcc_checking_assert (!result);
8395 elcd->pending.safe_push (x);
8397 else
8399 NO_LOC_P (x) = !result;
8400 VALUE_RECURSED_INTO (x) = false;
8401 set_dv_changed (dv, false);
8403 if (result)
8404 notify_dependents_of_resolved_value (var, elcd->vars);
8407 return result;
8410 /* While expanding variables, we may encounter recursion cycles
8411 because of mutual (possibly indirect) dependencies between two
8412 particular variables (or values), say A and B. If we're trying to
8413 expand A when we get to B, which in turn attempts to expand A, if
8414 we can't find any other expansion for B, we'll add B to this
8415 pending-recursion stack, and tentatively return NULL for its
8416 location. This tentative value will be used for any other
8417 occurrences of B, unless A gets some other location, in which case
8418 it will notify B that it is worth another try at computing a
8419 location for it, and it will use the location computed for A then.
8420 At the end of the expansion, the tentative NULL locations become
8421 final for all members of PENDING that didn't get a notification.
8422 This function performs this finalization of NULL locations. */
8424 static void
8425 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8427 while (!pending->is_empty ())
8429 rtx x = pending->pop ();
8430 decl_or_value dv;
8432 if (!VALUE_RECURSED_INTO (x))
8433 continue;
8435 gcc_checking_assert (NO_LOC_P (x));
8436 VALUE_RECURSED_INTO (x) = false;
8437 dv = dv_from_rtx (x);
8438 gcc_checking_assert (dv_changed_p (dv));
8439 set_dv_changed (dv, false);
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack): the callback
   data, including its stack-allocated vecs, must live in the caller's
   frame.  Pair every INIT_ELCD with a FINI_ELCD below.  */
8445 #define INIT_ELCD(d, v) \
8446 do \
8448 (d).vars = (v); \
8449 (d).depth.complexity = (d).depth.entryvals = 0; \
8451 while (0)
/* Finalize expand_loc_callback_data D, resolved to location L.
   Finalizes any values left on the pending-recursion stack, releases
   the vecs, and, when the resolved location L is a MEM, gives the
   target a chance to delegitimize its address.  */
8453 #define FINI_ELCD(d, l) \
8454 do \
8456 resolve_expansions_pending_recursion (&(d).pending); \
8457 (d).pending.release (); \
8458 (d).expanding.release (); \
8460 if ((l) && MEM_P (l)) \
8461 (l) = targetm.delegitimize_address (l); \
8463 while (0)
8465 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8466 equivalences in VARS, updating their CUR_LOCs in the process. */
8468 static rtx
8469 vt_expand_loc (rtx loc, variable_table_type vars)
8471 struct expand_loc_callback_data data;
8472 rtx result;
8474 if (!MAY_HAVE_DEBUG_INSNS)
8475 return loc;
8477 INIT_ELCD (data, vars);
8479 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8480 vt_expand_loc_callback, &data);
8482 FINI_ELCD (data, result);
8484 return result;
8487 /* Expand the one-part VARiable to a location, using the equivalences
8488 in VARS, updating their CUR_LOCs in the process. */
8490 static rtx
8491 vt_expand_1pvar (variable var, variable_table_type vars)
8493 struct expand_loc_callback_data data;
8494 rtx loc;
8496 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8498 if (!dv_changed_p (var->dv))
8499 return var->var_part[0].cur_loc;
8501 INIT_ELCD (data, vars);
8503 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8505 gcc_checking_assert (data.expanding.is_empty ());
8507 FINI_ELCD (data, loc);
8509 return loc;
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
   additional parameters: WHERE specifies whether the note shall be
   emitted before or after instruction INSN.  Returns 1 so the
   hash-table traversal continues.
   NOTE(review): this region was extracted with blank and brace-only
   lines dropped and the original file's line numbers fused into the
   text; the code tokens below are left untouched.  */
8517 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8519 variable var = *varp;
8520 rtx insn = data->insn;
8521 enum emit_note_where where = data->where;
8522 variable_table_type vars = data->vars;
8523 rtx note, note_vl;
8524 int i, j, n_var_parts;
8525 bool complete;
8526 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8527 HOST_WIDE_INT last_limit;
8528 tree type_size_unit;
8529 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8530 rtx loc[MAX_VAR_PARTS];
8531 tree decl;
8532 location_chain lc;
8534 gcc_checking_assert (var->onepart == NOT_ONEPART
8535 || var->onepart == ONEPART_VDECL);
8537 decl = dv_as_decl (var->dv);
8539 complete = true;
8540 last_limit = 0;
8541 n_var_parts = 0;
/* For multi-part variables, refresh any missing cached locations
   from the head of each part's location chain before scanning.  */
8542 if (!var->onepart)
8543 for (i = 0; i < var->n_var_parts; i++)
8544 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8545 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8546 for (i = 0; i < var->n_var_parts; i++)
8548 enum machine_mode mode, wider_mode;
8549 rtx loc2;
8550 HOST_WIDE_INT offset;
8552 if (i == 0 && var->onepart)
8554 gcc_checking_assert (var->n_var_parts == 1);
8555 offset = 0;
8556 initialized = VAR_INIT_STATUS_INITIALIZED;
8557 loc2 = vt_expand_1pvar (var, vars);
8559 else
/* A gap before this part's offset means part of the variable has
   no known location: the note will be marked incomplete.  */
8561 if (last_limit < VAR_PART_OFFSET (var, i))
8563 complete = false;
8564 break;
8566 else if (last_limit > VAR_PART_OFFSET (var, i))
8567 continue;
8568 offset = VAR_PART_OFFSET (var, i);
8569 loc2 = var->var_part[i].cur_loc;
8570 if (loc2 && GET_CODE (loc2) == MEM
8571 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8573 rtx depval = XEXP (loc2, 0);
8575 loc2 = vt_expand_loc (loc2, vars);
8577 if (loc2)
8578 loc_exp_insert_dep (var, depval, vars);
8580 if (!loc2)
8582 complete = false;
8583 continue;
8585 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8586 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8587 if (var->var_part[i].cur_loc == lc->loc)
8589 initialized = lc->init;
8590 break;
8592 gcc_assert (lc);
8595 offsets[n_var_parts] = offset;
8596 if (!loc2)
8598 complete = false;
8599 continue;
8601 loc[n_var_parts] = loc2;
8602 mode = GET_MODE (var->var_part[i].cur_loc);
8603 if (mode == VOIDmode && var->onepart)
8604 mode = DECL_MODE (decl);
8605 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
/* Attempt to merge adjacent registers or memory: two consecutive
   parts that form a wider register pair or contiguous memory are
   coalesced into a single wider location.  */
8608 wider_mode = GET_MODE_WIDER_MODE (mode);
8609 for (j = i + 1; j < var->n_var_parts; j++)
8610 if (last_limit <= VAR_PART_OFFSET (var, j))
8611 break;
8612 if (j < var->n_var_parts
8613 && wider_mode != VOIDmode
8614 && var->var_part[j].cur_loc
8615 && mode == GET_MODE (var->var_part[j].cur_loc)
8616 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8617 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8618 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8619 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8621 rtx new_loc = NULL;
8623 if (REG_P (loc[n_var_parts])
8624 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8625 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8626 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8627 == REGNO (loc2))
8629 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8630 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8631 mode, 0);
8632 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8633 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8634 if (new_loc)
8636 if (!REG_P (new_loc)
8637 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8638 new_loc = NULL;
8639 else
8640 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8643 else if (MEM_P (loc[n_var_parts])
8644 && GET_CODE (XEXP (loc2, 0)) == PLUS
8645 && REG_P (XEXP (XEXP (loc2, 0), 0))
8646 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8648 if ((REG_P (XEXP (loc[n_var_parts], 0))
8649 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8650 XEXP (XEXP (loc2, 0), 0))
8651 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8652 == GET_MODE_SIZE (mode))
8653 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8654 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8655 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8656 XEXP (XEXP (loc2, 0), 0))
8657 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8658 + GET_MODE_SIZE (mode)
8659 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8660 new_loc = adjust_address_nv (loc[n_var_parts],
8661 wider_mode, 0);
8664 if (new_loc)
8666 loc[n_var_parts] = new_loc;
8667 mode = wider_mode;
8668 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8669 i = j;
8672 ++n_var_parts;
/* If the located parts don't cover the whole declared size, the
   location description is incomplete.  */
8674 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8675 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8676 complete = false;
8678 if (! flag_var_tracking_uninit)
8679 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Build the VAR_LOCATION body: NULL for incomplete coverage, a
   single location (possibly offset-wrapped) for one part, or a
   PARALLEL of (location, offset) pairs for several parts.  */
8681 note_vl = NULL_RTX;
8682 if (!complete)
8683 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8684 (int) initialized);
8685 else if (n_var_parts == 1)
8687 rtx expr_list;
8689 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8690 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8691 else
8692 expr_list = loc[0];
8694 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8695 (int) initialized);
8697 else if (n_var_parts)
8699 rtx parallel;
8701 for (i = 0; i < n_var_parts; i++)
8702 loc[i]
8703 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8705 parallel = gen_rtx_PARALLEL (VOIDmode,
8706 gen_rtvec_v (n_var_parts, loc));
8707 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8708 parallel, (int) initialized);
8711 if (where != EMIT_NOTE_BEFORE_INSN)
8713 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8714 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8715 NOTE_DURING_CALL_P (note) = true;
8717 else
/* Make sure that the call related notes come first.  */
8720 while (NEXT_INSN (insn)
8721 && NOTE_P (insn)
8722 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8723 && NOTE_DURING_CALL_P (insn))
8724 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8725 insn = NEXT_INSN (insn);
8726 if (NOTE_P (insn)
8727 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8728 && NOTE_DURING_CALL_P (insn))
8729 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8730 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8731 else
8732 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8734 NOTE_VAR_LOCATION (note) = note_vl;
/* The note has been emitted; take VAR out of changed_variables.  */
8736 set_dv_changed (var->dv, false);
8737 gcc_assert (var->in_changed_variables);
8738 var->in_changed_variables = false;
8739 changed_variables.clear_slot (varp);
/* Continue traversing the hash table.  */
8742 return 1;
8745 /* While traversing changed_variables, push onto DATA (a stack of RTX
8746 values) entries that aren't user variables. */
8749 var_track_values_to_stack (variable_def **slot,
8750 vec<rtx, va_heap> *changed_values_stack)
8752 variable var = *slot;
8754 if (var->onepart == ONEPART_VALUE)
8755 changed_values_stack->safe_push (dv_as_value (var->dv));
8756 else if (var->onepart == ONEPART_DEXPR)
8757 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8759 return 1;
8762 /* Remove from changed_variables the entry whose DV corresponds to
8763 value or debug_expr VAL. */
8764 static void
8765 remove_value_from_changed_variables (rtx val)
8767 decl_or_value dv = dv_from_rtx (val);
8768 variable_def **slot;
8769 variable var;
8771 slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8772 NO_INSERT);
8773 var = *slot;
8774 var->in_changed_variables = false;
8775 changed_variables.clear_slot (slot);
/* If VAL (a value or debug_expr) has backlinks to variables actively
   dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
   changed, adding to CHANGED_VALUES_STACK any dependencies that may
   have dependencies of their own to notify.
   NOTE(review): this region was extracted with blank and brace-only
   lines dropped; code tokens below are left untouched.  */
8783 static void
8784 notify_dependents_of_changed_value (rtx val, variable_table_type htab,
8785 vec<rtx, va_heap> *changed_values_stack)
8787 variable_def **slot;
8788 variable var;
8789 loc_exp_dep *led;
8790 decl_or_value dv = dv_from_rtx (val);
/* Find VAL's own entry; it may live in changed_variables, in the
   current set's table, or among dropped values.  */
8792 slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8793 NO_INSERT);
8794 if (!slot)
8795 slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8796 if (!slot)
8797 slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
8798 NO_INSERT);
8799 var = *slot;
/* Consume the backlink list: each LED names an entity waiting on
   VAL's location.  */
8801 while ((led = VAR_LOC_DEP_LST (var)))
8803 decl_or_value ldv = led->dv;
8804 variable ivar;
/* Deactivate and remove the backlink, as it was "used up".  It
   makes no sense to attempt to notify the same entity again:
   either it will be recomputed and re-register an active
   dependency, or it will still have the changed mark.  */
8810 if (led->next)
8811 led->next->pprev = led->pprev;
8812 if (led->pprev)
8813 *led->pprev = led->next;
8814 led->next = NULL;
8815 led->pprev = NULL;
8817 if (dv_changed_p (ldv))
8818 continue;
8820 switch (dv_onepart_p (ldv))
8822 case ONEPART_VALUE:
8823 case ONEPART_DEXPR:
8824 set_dv_changed (ldv, true);
8825 changed_values_stack->safe_push (dv_as_rtx (ldv));
8826 break;
8828 case ONEPART_VDECL:
8829 ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8830 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8831 variable_was_changed (ivar, NULL);
8832 break;
8834 case NOT_ONEPART:
8835 pool_free (loc_exp_dep_pool, led);
8836 ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8837 if (ivar)
8839 int i = ivar->n_var_parts;
/* Only notify if one of the multi-part variable's cached
   locations actually mentions VAL as a MEM address.  */
8840 while (i--)
8842 rtx loc = ivar->var_part[i].cur_loc;
8844 if (loc && GET_CODE (loc) == MEM
8845 && XEXP (loc, 0) == val)
8847 variable_was_changed (ivar, NULL);
8848 break;
8852 break;
8854 default:
8855 gcc_unreachable ();
8860 /* Take out of changed_variables any entries that don't refer to use
8861 variables. Back-propagate change notifications from values and
8862 debug_exprs to their active dependencies in HTAB or in
8863 CHANGED_VARIABLES. */
8865 static void
8866 process_changed_values (variable_table_type htab)
8868 int i, n;
8869 rtx val;
8870 stack_vec<rtx, 20> changed_values_stack;
8872 /* Move values from changed_variables to changed_values_stack. */
8873 changed_variables
8874 .traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8875 (&changed_values_stack);
8877 /* Back-propagate change notifications in values while popping
8878 them from the stack. */
8879 for (n = i = changed_values_stack.length ();
8880 i > 0; i = changed_values_stack.length ())
8882 val = changed_values_stack.pop ();
8883 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8885 /* This condition will hold when visiting each of the entries
8886 originally in changed_variables. We can't remove them
8887 earlier because this could drop the backlinks before we got a
8888 chance to use them. */
8889 if (i == n)
8891 remove_value_from_changed_variables (val);
8892 n--;
8897 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8898 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8899 the notes shall be emitted before of after instruction INSN. */
8901 static void
8902 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8903 shared_hash vars)
8905 emit_note_data data;
8906 variable_table_type htab = shared_hash_htab (vars);
8908 if (!changed_variables.elements ())
8909 return;
8911 if (MAY_HAVE_DEBUG_INSNS)
8912 process_changed_values (htab);
8914 data.insn = insn;
8915 data.where = where;
8916 data.vars = htab;
8918 changed_variables
8919 .traverse <emit_note_data*, emit_note_insn_var_location> (&data);
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from
   the same variable in hash table NEW_VARS or is not there at all.
   Returns 1 so the traversal continues.
   NOTE(review): this region was extracted with blank and brace-only
   lines (and the `static int` return-type line) dropped; code tokens
   below are left untouched.  */
8926 emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
8928 variable old_var, new_var;
8930 old_var = *slot;
8931 new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
8933 if (!new_var)
/* Variable has disappeared.  Queue an empty placeholder so a
   "location unknown" note gets emitted for it.  */
8936 variable empty_var = NULL;
8938 if (old_var->onepart == ONEPART_VALUE
8939 || old_var->onepart == ONEPART_DEXPR)
8941 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8942 if (empty_var)
8944 gcc_checking_assert (!empty_var->in_changed_variables);
8945 if (!VAR_LOC_1PAUX (old_var))
8947 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8948 VAR_LOC_1PAUX (empty_var) = NULL;
8950 else
8951 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8955 if (!empty_var)
8957 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8958 empty_var->dv = old_var->dv;
8959 empty_var->refcount = 0;
8960 empty_var->n_var_parts = 0;
8961 empty_var->onepart = old_var->onepart;
8962 empty_var->in_changed_variables = false;
8965 if (empty_var->onepart)
/* Propagate the auxiliary data to (ultimately)
   changed_variables.  */
8969 empty_var->var_part[0].loc_chain = NULL;
8970 empty_var->var_part[0].cur_loc = NULL;
8971 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8972 VAR_LOC_1PAUX (old_var) = NULL;
8974 variable_was_changed (empty_var, NULL);
/* Continue traversing the hash table.  */
8976 return 1;
/* Update cur_loc and one-part auxiliary data, before new_var goes
   through variable_was_changed.  */
8980 if (old_var != new_var && new_var->onepart)
8982 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8983 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8984 VAR_LOC_1PAUX (old_var) = NULL;
8985 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8987 if (variable_different_p (old_var, new_var))
8988 variable_was_changed (new_var, NULL);
/* Continue traversing the hash table.  */
8991 return 1;
8994 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8995 table DATA. */
8998 emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
9000 variable old_var, new_var;
9002 new_var = *slot;
9003 old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9004 if (!old_var)
9006 int i;
9007 for (i = 0; i < new_var->n_var_parts; i++)
9008 new_var->var_part[i].cur_loc = NULL;
9009 variable_was_changed (new_var, NULL);
9012 /* Continue traversing the hash table. */
9013 return 1;
9016 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9017 NEW_SET. */
9019 static void
9020 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
9021 dataflow_set *new_set)
9023 shared_hash_htab (old_set->vars)
9024 .traverse <variable_table_type, emit_notes_for_differences_1>
9025 (shared_hash_htab (new_set->vars));
9026 shared_hash_htab (new_set->vars)
9027 .traverse <variable_table_type, emit_notes_for_differences_2>
9028 (shared_hash_htab (old_set->vars));
9029 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9032 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9034 static rtx
9035 next_non_note_insn_var_location (rtx insn)
9037 while (insn)
9039 insn = NEXT_INSN (insn);
9040 if (insn == 0
9041 || !NOTE_P (insn)
9042 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9043 break;
9046 return insn;
/* Emit the notes for changes of location parts in the basic block BB.
   SET is seeded from BB's IN set and updated micro-operation by
   micro-operation, emitting notes as locations change.
   NOTE(review): this region was extracted with blank and brace-only
   lines dropped and the original file's line numbers fused into the
   text (one continuation line near 9086 also appears lost); code
   tokens below are left untouched.  */
9051 static void
9052 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9054 unsigned int i;
9055 micro_operation *mo;
9057 dataflow_set_clear (set);
9058 dataflow_set_copy (set, &VTI (bb)->in);
9060 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9062 rtx insn = mo->insn;
9063 rtx next_insn = next_non_note_insn_var_location (insn);
9065 switch (mo->type)
9067 case MO_CALL:
9068 dataflow_set_clear_at_call (set);
9069 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
/* Expand each recorded call argument location; prune entries
   whose expansion failed from the argument list.  */
9071 rtx arguments = mo->u.loc, *p = &arguments, note;
9072 while (*p)
9074 XEXP (XEXP (*p, 0), 1)
9075 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9076 shared_hash_htab (set->vars));
/* If expansion is successful, keep it in the list.  */
9078 if (XEXP (XEXP (*p, 0), 1))
9079 p = &XEXP (*p, 1);
/* Otherwise, if the following item is data_value for it,
   drop it too.  */
9082 else if (XEXP (*p, 1)
9083 && REG_P (XEXP (XEXP (*p, 0), 0))
9084 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9085 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9087 && REGNO (XEXP (XEXP (*p, 0), 0))
9088 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9089 0), 0)))
9090 *p = XEXP (XEXP (*p, 1), 1);
/* Just drop this item.  */
9092 else
9093 *p = XEXP (*p, 1);
9095 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9096 NOTE_VAR_LOCATION (note) = arguments;
9098 break;
9100 case MO_USE:
9102 rtx loc = mo->u.loc;
9104 if (REG_P (loc))
9105 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9106 else
9107 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9109 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9111 break;
9113 case MO_VAL_LOC:
9115 rtx loc = mo->u.loc;
9116 rtx val, vloc;
9117 tree var;
9119 if (GET_CODE (loc) == CONCAT)
9121 val = XEXP (loc, 0);
9122 vloc = XEXP (loc, 1);
9124 else
9126 val = NULL_RTX;
9127 vloc = loc;
9130 var = PAT_VAR_LOCATION_DECL (vloc);
9132 clobber_variable_part (set, NULL_RTX,
9133 dv_from_decl (var), 0, NULL_RTX);
9134 if (val)
9136 if (VAL_NEEDS_RESOLUTION (loc))
9137 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9138 set_variable_part (set, val, dv_from_decl (var), 0,
9139 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9140 INSERT);
9142 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9143 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9144 dv_from_decl (var), 0,
9145 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9146 INSERT);
9148 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9150 break;
9152 case MO_VAL_USE:
9154 rtx loc = mo->u.loc;
9155 rtx val, vloc, uloc;
9157 vloc = uloc = XEXP (loc, 1);
9158 val = XEXP (loc, 0);
9160 if (GET_CODE (val) == CONCAT)
9162 uloc = XEXP (val, 1);
9163 val = XEXP (val, 0);
9166 if (VAL_NEEDS_RESOLUTION (loc))
9167 val_resolve (set, val, vloc, insn);
9168 else
9169 val_store (set, val, uloc, insn, false);
9171 if (VAL_HOLDS_TRACK_EXPR (loc))
9173 if (GET_CODE (uloc) == REG)
9174 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9175 NULL);
9176 else if (GET_CODE (uloc) == MEM)
9177 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9178 NULL);
9181 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9183 break;
9185 case MO_VAL_SET:
/* Unpack the (possibly CONCATenated) value/location pair into
   VAL plus destination/source locations before updating SET.  */
9187 rtx loc = mo->u.loc;
9188 rtx val, vloc, uloc;
9189 rtx dstv, srcv;
9191 vloc = loc;
9192 uloc = XEXP (vloc, 1);
9193 val = XEXP (vloc, 0);
9194 vloc = uloc;
9196 if (GET_CODE (uloc) == SET)
9198 dstv = SET_DEST (uloc);
9199 srcv = SET_SRC (uloc);
9201 else
9203 dstv = uloc;
9204 srcv = NULL;
9207 if (GET_CODE (val) == CONCAT)
9209 dstv = vloc = XEXP (val, 1);
9210 val = XEXP (val, 0);
9213 if (GET_CODE (vloc) == SET)
9215 srcv = SET_SRC (vloc);
9217 gcc_assert (val != srcv);
9218 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9220 dstv = vloc = SET_DEST (vloc);
9222 if (VAL_NEEDS_RESOLUTION (loc))
9223 val_resolve (set, val, srcv, insn);
9225 else if (VAL_NEEDS_RESOLUTION (loc))
9227 gcc_assert (GET_CODE (uloc) == SET
9228 && GET_CODE (SET_SRC (uloc)) == REG);
9229 val_resolve (set, val, SET_SRC (uloc), insn);
9232 if (VAL_HOLDS_TRACK_EXPR (loc))
9234 if (VAL_EXPR_IS_CLOBBERED (loc))
9236 if (REG_P (uloc))
9237 var_reg_delete (set, uloc, true);
9238 else if (MEM_P (uloc))
9240 gcc_assert (MEM_P (dstv));
9241 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9242 var_mem_delete (set, dstv, true);
9245 else
9247 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9248 rtx src = NULL, dst = uloc;
9249 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9251 if (GET_CODE (uloc) == SET)
9253 src = SET_SRC (uloc);
9254 dst = SET_DEST (uloc);
9257 if (copied_p)
9259 status = find_src_status (set, src);
9261 src = find_src_set_src (set, src);
9264 if (REG_P (dst))
9265 var_reg_delete_and_set (set, dst, !copied_p,
9266 status, srcv);
9267 else if (MEM_P (dst))
9269 gcc_assert (MEM_P (dstv));
9270 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9271 var_mem_delete_and_set (set, dstv, !copied_p,
9272 status, srcv);
9276 else if (REG_P (uloc))
9277 var_regno_delete (set, REGNO (uloc));
9278 else if (MEM_P (uloc))
9280 gcc_checking_assert (GET_CODE (vloc) == MEM);
9281 gcc_checking_assert (vloc == dstv);
9282 if (vloc != dstv)
9283 clobber_overlapping_mems (set, vloc);
9286 val_store (set, val, dstv, insn, true);
9288 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9289 set->vars);
9291 break;
9293 case MO_SET:
9295 rtx loc = mo->u.loc;
9296 rtx set_src = NULL;
9298 if (GET_CODE (loc) == SET)
9300 set_src = SET_SRC (loc);
9301 loc = SET_DEST (loc);
9304 if (REG_P (loc))
9305 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9306 set_src);
9307 else
9308 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9309 set_src);
9311 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9312 set->vars);
9314 break;
9316 case MO_COPY:
9318 rtx loc = mo->u.loc;
9319 enum var_init_status src_status;
9320 rtx set_src = NULL;
9322 if (GET_CODE (loc) == SET)
9324 set_src = SET_SRC (loc);
9325 loc = SET_DEST (loc);
9328 src_status = find_src_status (set, set_src);
9329 set_src = find_src_set_src (set, set_src);
9331 if (REG_P (loc))
9332 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9333 else
9334 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9336 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9337 set->vars);
9339 break;
9341 case MO_USE_NO_VAR:
9343 rtx loc = mo->u.loc;
9345 if (REG_P (loc))
9346 var_reg_delete (set, loc, false);
9347 else
9348 var_mem_delete (set, loc, false);
9350 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9352 break;
9354 case MO_CLOBBER:
9356 rtx loc = mo->u.loc;
9358 if (REG_P (loc))
9359 var_reg_delete (set, loc, true);
9360 else
9361 var_mem_delete (set, loc, true);
9363 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9364 set->vars);
9366 break;
9368 case MO_ADJUST:
9369 set->stack_adjust += mo->u.adjust;
9370 break;
9375 /* Emit notes for the whole function. */
9377 static void
9378 vt_emit_notes (void)
9380 basic_block bb;
9381 dataflow_set cur;
9383 gcc_assert (!changed_variables.elements ());
9385 /* Free memory occupied by the out hash tables, as they aren't used
9386 anymore. */
9387 FOR_EACH_BB (bb)
9388 dataflow_set_clear (&VTI (bb)->out);
9390 /* Enable emitting notes by functions (mainly by set_variable_part and
9391 delete_variable_part). */
9392 emit_notes = true;
9394 if (MAY_HAVE_DEBUG_INSNS)
9396 dropped_values.create (cselib_get_next_uid () * 2);
9397 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9398 sizeof (loc_exp_dep), 64);
9401 dataflow_set_init (&cur);
9403 FOR_EACH_BB (bb)
9405 /* Emit the notes for changes of variable locations between two
9406 subsequent basic blocks. */
9407 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9409 if (MAY_HAVE_DEBUG_INSNS)
9410 local_get_addr_cache = pointer_map_create ();
9412 /* Emit the notes for the changes in the basic block itself. */
9413 emit_notes_in_bb (bb, &cur);
9415 if (MAY_HAVE_DEBUG_INSNS)
9416 pointer_map_destroy (local_get_addr_cache);
9417 local_get_addr_cache = NULL;
9419 /* Free memory occupied by the in hash table, we won't need it
9420 again. */
9421 dataflow_set_clear (&VTI (bb)->in);
9423 #ifdef ENABLE_CHECKING
9424 shared_hash_htab (cur.vars)
9425 .traverse <variable_table_type, emit_notes_for_differences_1>
9426 (shared_hash_htab (empty_shared_hash));
9427 #endif
9428 dataflow_set_destroy (&cur);
9430 if (MAY_HAVE_DEBUG_INSNS)
9431 dropped_values.dispose ();
9433 emit_notes = false;
9436 /* If there is a declaration and offset associated with register/memory RTL
9437 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9439 static bool
9440 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9442 if (REG_P (rtl))
9444 if (REG_ATTRS (rtl))
9446 *declp = REG_EXPR (rtl);
9447 *offsetp = REG_OFFSET (rtl);
9448 return true;
9451 else if (MEM_P (rtl))
9453 if (MEM_ATTRS (rtl))
9455 *declp = MEM_EXPR (rtl);
9456 *offsetp = INT_MEM_OFFSET (rtl);
9457 return true;
9460 return false;
9463 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9464 of VAL. */
9466 static void
9467 record_entry_value (cselib_val *val, rtx rtl)
9469 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9471 ENTRY_VALUE_EXP (ev) = rtl;
9473 cselib_add_permanent_equiv (val, ev, get_insns ());
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.
   NOTE(review): this region was extracted with blank and brace-only
   lines dropped and the original file's line numbers fused into the
   text; code tokens below are left untouched.  */
9478 static void
9479 vt_add_function_parameter (tree parm)
9481 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9482 rtx incoming = DECL_INCOMING_RTL (parm);
9483 tree decl;
9484 enum machine_mode mode;
9485 HOST_WIDE_INT offset;
9486 dataflow_set *out;
9487 decl_or_value dv;
9489 if (TREE_CODE (parm) != PARM_DECL)
9490 return;
9492 if (!decl_rtl || !incoming)
9493 return;
9495 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9496 return;
/* If there is a DRAP register or a pseudo in internal_arg_pointer,
   rewrite the incoming location of parameters passed on the stack
   into MEMs based on the argument pointer, so that incoming doesn't
   depend on a pseudo.  */
9502 if (MEM_P (incoming)
9503 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9504 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9505 && XEXP (XEXP (incoming, 0), 0)
9506 == crtl->args.internal_arg_pointer
9507 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9509 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9510 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9511 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9512 incoming
9513 = replace_equiv_address_nv (incoming,
9514 plus_constant (Pmode,
9515 arg_pointer_rtx, off));
9518 #ifdef HAVE_window_save
/* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
   If the target machine has an explicit window save instruction, the
   actual entry value is the corresponding OUTGOING_REGNO instead.  */
9522 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9524 if (REG_P (incoming)
9525 && HARD_REGISTER_P (incoming)
9526 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9528 parm_reg_t p;
9529 p.incoming = incoming;
9530 incoming
9531 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9532 OUTGOING_REGNO (REGNO (incoming)), 0);
9533 p.outgoing = incoming;
9534 vec_safe_push (windowed_parm_regs, p);
9536 else if (MEM_P (incoming)
9537 && REG_P (XEXP (incoming, 0))
9538 && HARD_REGISTER_P (XEXP (incoming, 0)))
9540 rtx reg = XEXP (incoming, 0);
9541 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9543 parm_reg_t p;
9544 p.incoming = reg;
9545 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9546 p.outgoing = reg;
9547 vec_safe_push (windowed_parm_regs, p);
9548 incoming = replace_equiv_address_nv (incoming, reg);
9552 #endif
/* Work out which decl and offset the incoming location describes;
   fall back to DECL_RTL, and bail out if nothing usable is found.  */
9554 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9556 if (MEM_P (incoming))
/* This means argument is passed by invisible reference.  */
9559 offset = 0;
9560 decl = parm;
9562 else
9564 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9565 return;
9566 offset += byte_lowpart_offset (GET_MODE (incoming),
9567 GET_MODE (decl_rtl));
9571 if (!decl)
9572 return;
9574 if (parm != decl)
/* If that DECL_RTL wasn't a pseudo that got spilled to
   memory, bail out.  Otherwise, the spill slot sharing code
   will force the memory to reference spill_slot_decl (%sfp),
   so we don't match above.  That's ok, the pseudo must have
   referenced the entire parameter, so just reset OFFSET.  */
9581 if (decl != get_spill_slot_decl (false))
9582 return;
9583 offset = 0;
9586 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9587 return;
9589 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9591 dv = dv_from_decl (parm);
9593 if (target_for_debug_bind (parm)
/* We can't deal with these right now, because this kind of
   variable is single-part.  ??? We could handle parallels
   that describe multiple locations for the same single
   value, but ATM we don't.  */
9598 && GET_CODE (incoming) != PARALLEL)
9600 cselib_val *val;
9601 rtx lowpart;
/* ??? We shouldn't ever hit this, but it may happen because
   arguments passed by invisible reference aren't dealt with
   above: incoming-rtl will have Pmode rather than the
   expected mode for the type.  */
9607 if (offset)
9608 return;
9610 lowpart = var_lowpart (mode, incoming);
9611 if (!lowpart)
9612 return;
9614 val = cselib_lookup_from_insn (lowpart, mode, true,
9615 VOIDmode, get_insns ());
/* ??? Float-typed values in memory are not handled by
   cselib.  */
9619 if (val)
9621 preserve_value (val);
9622 set_variable_part (out, val->val_rtx, dv, offset,
9623 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9624 dv = dv_from_value (val->val_rtx);
9627 if (MEM_P (incoming))
9629 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9630 VOIDmode, get_insns ());
9631 if (val)
9633 preserve_value (val);
9634 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
/* Finally record the location in the ENTRY_BLOCK's OUT set, for
   a register or a memory location respectively.  */
9639 if (REG_P (incoming))
9641 incoming = var_lowpart (mode, incoming);
9642 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9643 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9644 incoming);
9645 set_variable_part (out, incoming, dv, offset,
9646 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9647 if (dv_is_value_p (dv))
9649 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9650 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9651 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9653 enum machine_mode indmode
9654 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9655 rtx mem = gen_rtx_MEM (indmode, incoming);
9656 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9657 VOIDmode,
9658 get_insns ());
9659 if (val)
9661 preserve_value (val);
9662 record_entry_value (val, mem);
9663 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9664 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9669 else if (MEM_P (incoming))
9671 incoming = var_lowpart (mode, incoming);
9672 set_variable_part (out, incoming, dv, offset,
9673 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9677 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9679 static void
9680 vt_add_function_parameters (void)
9682 tree parm;
9684 for (parm = DECL_ARGUMENTS (current_function_decl);
9685 parm; parm = DECL_CHAIN (parm))
9686 vt_add_function_parameter (parm);
9688 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9690 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9692 if (TREE_CODE (vexpr) == INDIRECT_REF)
9693 vexpr = TREE_OPERAND (vexpr, 0);
9695 if (TREE_CODE (vexpr) == PARM_DECL
9696 && DECL_ARTIFICIAL (vexpr)
9697 && !DECL_IGNORED_P (vexpr)
9698 && DECL_NAMELESS (vexpr))
9699 vt_add_function_parameter (vexpr);
9703 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9704 ensure it isn't flushed during cselib_reset_table.
9705 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9706 has been eliminated. */
9708 static void
9709 vt_init_cfa_base (void)
9711 cselib_val *val;
9713 #ifdef FRAME_POINTER_CFA_OFFSET
9714 cfa_base_rtx = frame_pointer_rtx;
9715 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9716 #else
9717 cfa_base_rtx = arg_pointer_rtx;
9718 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9719 #endif
9720 if (cfa_base_rtx == hard_frame_pointer_rtx
9721 || !fixed_regs[REGNO (cfa_base_rtx)])
9723 cfa_base_rtx = NULL_RTX;
9724 return;
9726 if (!MAY_HAVE_DEBUG_INSNS)
9727 return;
9729 /* Tell alias analysis that cfa_base_rtx should share
9730 find_base_term value with stack pointer or hard frame pointer. */
9731 if (!frame_pointer_needed)
9732 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9733 else if (!crtl->stack_realign_tried)
9734 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9736 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9737 VOIDmode, get_insns ());
9738 preserve_value (val);
9739 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9742 /* Allocate and initialize the data structures for variable tracking
9743 and parse the RTL to get the micro operations. */
9745 static bool
9746 vt_initialize (void)
9748 basic_block bb;
9749 HOST_WIDE_INT fp_cfa_offset = -1;
9751 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9753 attrs_pool = create_alloc_pool ("attrs_def pool",
9754 sizeof (struct attrs_def), 1024);
9755 var_pool = create_alloc_pool ("variable_def pool",
9756 sizeof (struct variable_def)
9757 + (MAX_VAR_PARTS - 1)
9758 * sizeof (((variable)NULL)->var_part[0]), 64);
9759 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9760 sizeof (struct location_chain_def),
9761 1024);
9762 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9763 sizeof (struct shared_hash_def), 256);
9764 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9765 empty_shared_hash->refcount = 1;
9766 empty_shared_hash->htab.create (1);
9767 changed_variables.create (10);
9769 /* Init the IN and OUT sets. */
9770 FOR_ALL_BB (bb)
9772 VTI (bb)->visited = false;
9773 VTI (bb)->flooded = false;
9774 dataflow_set_init (&VTI (bb)->in);
9775 dataflow_set_init (&VTI (bb)->out);
9776 VTI (bb)->permp = NULL;
9779 if (MAY_HAVE_DEBUG_INSNS)
9781 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9782 scratch_regs = BITMAP_ALLOC (NULL);
9783 valvar_pool = create_alloc_pool ("small variable_def pool",
9784 sizeof (struct variable_def), 256);
9785 preserved_values.create (256);
9786 global_get_addr_cache = pointer_map_create ();
9788 else
9790 scratch_regs = NULL;
9791 valvar_pool = NULL;
9792 global_get_addr_cache = NULL;
9795 if (MAY_HAVE_DEBUG_INSNS)
9797 rtx reg, expr;
9798 int ofst;
9799 cselib_val *val;
9801 #ifdef FRAME_POINTER_CFA_OFFSET
9802 reg = frame_pointer_rtx;
9803 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9804 #else
9805 reg = arg_pointer_rtx;
9806 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9807 #endif
9809 ofst -= INCOMING_FRAME_SP_OFFSET;
9811 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9812 VOIDmode, get_insns ());
9813 preserve_value (val);
9814 cselib_preserve_cfa_base_value (val, REGNO (reg));
9815 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9816 stack_pointer_rtx, -ofst);
9817 cselib_add_permanent_equiv (val, expr, get_insns ());
9819 if (ofst)
9821 val = cselib_lookup_from_insn (stack_pointer_rtx,
9822 GET_MODE (stack_pointer_rtx), 1,
9823 VOIDmode, get_insns ());
9824 preserve_value (val);
9825 expr = plus_constant (GET_MODE (reg), reg, ofst);
9826 cselib_add_permanent_equiv (val, expr, get_insns ());
9830 /* In order to factor out the adjustments made to the stack pointer or to
9831 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9832 instead of individual location lists, we're going to rewrite MEMs based
9833 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9834 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9835 resp. arg_pointer_rtx. We can do this either when there is no frame
9836 pointer in the function and stack adjustments are consistent for all
9837 basic blocks or when there is a frame pointer and no stack realignment.
9838 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9839 has been eliminated. */
9840 if (!frame_pointer_needed)
9842 rtx reg, elim;
9844 if (!vt_stack_adjustments ())
9845 return false;
9847 #ifdef FRAME_POINTER_CFA_OFFSET
9848 reg = frame_pointer_rtx;
9849 #else
9850 reg = arg_pointer_rtx;
9851 #endif
9852 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9853 if (elim != reg)
9855 if (GET_CODE (elim) == PLUS)
9856 elim = XEXP (elim, 0);
9857 if (elim == stack_pointer_rtx)
9858 vt_init_cfa_base ();
9861 else if (!crtl->stack_realign_tried)
9863 rtx reg, elim;
9865 #ifdef FRAME_POINTER_CFA_OFFSET
9866 reg = frame_pointer_rtx;
9867 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9868 #else
9869 reg = arg_pointer_rtx;
9870 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9871 #endif
9872 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9873 if (elim != reg)
9875 if (GET_CODE (elim) == PLUS)
9877 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9878 elim = XEXP (elim, 0);
9880 if (elim != hard_frame_pointer_rtx)
9881 fp_cfa_offset = -1;
9883 else
9884 fp_cfa_offset = -1;
9887 /* If the stack is realigned and a DRAP register is used, we're going to
9888 rewrite MEMs based on it representing incoming locations of parameters
9889 passed on the stack into MEMs based on the argument pointer. Although
9890 we aren't going to rewrite other MEMs, we still need to initialize the
9891 virtual CFA pointer in order to ensure that the argument pointer will
9892 be seen as a constant throughout the function.
9894 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9895 else if (stack_realign_drap)
9897 rtx reg, elim;
9899 #ifdef FRAME_POINTER_CFA_OFFSET
9900 reg = frame_pointer_rtx;
9901 #else
9902 reg = arg_pointer_rtx;
9903 #endif
9904 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9905 if (elim != reg)
9907 if (GET_CODE (elim) == PLUS)
9908 elim = XEXP (elim, 0);
9909 if (elim == hard_frame_pointer_rtx)
9910 vt_init_cfa_base ();
9914 hard_frame_pointer_adjustment = -1;
9916 vt_add_function_parameters ();
9918 FOR_EACH_BB (bb)
9920 rtx insn;
9921 HOST_WIDE_INT pre, post = 0;
9922 basic_block first_bb, last_bb;
9924 if (MAY_HAVE_DEBUG_INSNS)
9926 cselib_record_sets_hook = add_with_sets;
9927 if (dump_file && (dump_flags & TDF_DETAILS))
9928 fprintf (dump_file, "first value: %i\n",
9929 cselib_get_next_uid ());
9932 first_bb = bb;
9933 for (;;)
9935 edge e;
9936 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
9937 || ! single_pred_p (bb->next_bb))
9938 break;
9939 e = find_edge (bb, bb->next_bb);
9940 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9941 break;
9942 bb = bb->next_bb;
9944 last_bb = bb;
9946 /* Add the micro-operations to the vector. */
9947 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9949 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9950 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9951 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9952 insn = NEXT_INSN (insn))
9954 if (INSN_P (insn))
9956 if (!frame_pointer_needed)
9958 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9959 if (pre)
9961 micro_operation mo;
9962 mo.type = MO_ADJUST;
9963 mo.u.adjust = pre;
9964 mo.insn = insn;
9965 if (dump_file && (dump_flags & TDF_DETAILS))
9966 log_op_type (PATTERN (insn), bb, insn,
9967 MO_ADJUST, dump_file);
9968 VTI (bb)->mos.safe_push (mo);
9969 VTI (bb)->out.stack_adjust += pre;
9973 cselib_hook_called = false;
9974 adjust_insn (bb, insn);
9975 if (MAY_HAVE_DEBUG_INSNS)
9977 if (CALL_P (insn))
9978 prepare_call_arguments (bb, insn);
9979 cselib_process_insn (insn);
9980 if (dump_file && (dump_flags & TDF_DETAILS))
9982 print_rtl_single (dump_file, insn);
9983 dump_cselib_table (dump_file);
9986 if (!cselib_hook_called)
9987 add_with_sets (insn, 0, 0);
9988 cancel_changes (0);
9990 if (!frame_pointer_needed && post)
9992 micro_operation mo;
9993 mo.type = MO_ADJUST;
9994 mo.u.adjust = post;
9995 mo.insn = insn;
9996 if (dump_file && (dump_flags & TDF_DETAILS))
9997 log_op_type (PATTERN (insn), bb, insn,
9998 MO_ADJUST, dump_file);
9999 VTI (bb)->mos.safe_push (mo);
10000 VTI (bb)->out.stack_adjust += post;
10003 if (fp_cfa_offset != -1
10004 && hard_frame_pointer_adjustment == -1
10005 && fp_setter_insn (insn))
10007 vt_init_cfa_base ();
10008 hard_frame_pointer_adjustment = fp_cfa_offset;
10009 /* Disassociate sp from fp now. */
10010 if (MAY_HAVE_DEBUG_INSNS)
10012 cselib_val *v;
10013 cselib_invalidate_rtx (stack_pointer_rtx);
10014 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10015 VOIDmode);
10016 if (v && !cselib_preserved_value_p (v))
10018 cselib_set_value_sp_based (v);
10019 preserve_value (v);
10025 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10028 bb = last_bb;
10030 if (MAY_HAVE_DEBUG_INSNS)
10032 cselib_preserve_only_values ();
10033 cselib_reset_table (cselib_get_next_uid ());
10034 cselib_record_sets_hook = NULL;
10038 hard_frame_pointer_adjustment = -1;
10039 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10040 cfa_base_rtx = NULL_RTX;
10041 return true;
10044 /* This is *not* reset after each function. It gives each
10045 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10046 a unique label number. */
10048 static int debug_label_num = 1;
10050 /* Get rid of all debug insns from the insn stream. */
10052 static void
10053 delete_debug_insns (void)
10055 basic_block bb;
10056 rtx insn, next;
10058 if (!MAY_HAVE_DEBUG_INSNS)
10059 return;
10061 FOR_EACH_BB (bb)
10063 FOR_BB_INSNS_SAFE (bb, insn, next)
10064 if (DEBUG_INSN_P (insn))
10066 tree decl = INSN_VAR_LOCATION_DECL (insn);
10067 if (TREE_CODE (decl) == LABEL_DECL
10068 && DECL_NAME (decl)
10069 && !DECL_RTL_SET_P (decl))
10071 PUT_CODE (insn, NOTE);
10072 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10073 NOTE_DELETED_LABEL_NAME (insn)
10074 = IDENTIFIER_POINTER (DECL_NAME (decl));
10075 SET_DECL_RTL (decl, insn);
10076 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10078 else
10079 delete_insn (insn);
10084 /* Run a fast, BB-local only version of var tracking, to take care of
10085 information that we don't do global analysis on, such that not all
10086 information is lost. If SKIPPED holds, we're skipping the global
10087 pass entirely, so we should try to use information it would have
10088 handled as well.. */
10090 static void
10091 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10093 /* ??? Just skip it all for now. */
10094 delete_debug_insns ();
10097 /* Free the data structures needed for variable tracking. */
10099 static void
10100 vt_finalize (void)
10102 basic_block bb;
10104 FOR_EACH_BB (bb)
10106 VTI (bb)->mos.release ();
10109 FOR_ALL_BB (bb)
10111 dataflow_set_destroy (&VTI (bb)->in);
10112 dataflow_set_destroy (&VTI (bb)->out);
10113 if (VTI (bb)->permp)
10115 dataflow_set_destroy (VTI (bb)->permp);
10116 XDELETE (VTI (bb)->permp);
10119 free_aux_for_blocks ();
10120 empty_shared_hash->htab.dispose ();
10121 changed_variables.dispose ();
10122 free_alloc_pool (attrs_pool);
10123 free_alloc_pool (var_pool);
10124 free_alloc_pool (loc_chain_pool);
10125 free_alloc_pool (shared_hash_pool);
10127 if (MAY_HAVE_DEBUG_INSNS)
10129 if (global_get_addr_cache)
10130 pointer_map_destroy (global_get_addr_cache);
10131 global_get_addr_cache = NULL;
10132 if (loc_exp_dep_pool)
10133 free_alloc_pool (loc_exp_dep_pool);
10134 loc_exp_dep_pool = NULL;
10135 free_alloc_pool (valvar_pool);
10136 preserved_values.release ();
10137 cselib_finish ();
10138 BITMAP_FREE (scratch_regs);
10139 scratch_regs = NULL;
10142 #ifdef HAVE_window_save
10143 vec_free (windowed_parm_regs);
10144 #endif
10146 if (vui_vec)
10147 XDELETEVEC (vui_vec);
10148 vui_vec = NULL;
10149 vui_allocated = 0;
10152 /* The entry point to variable tracking pass. */
10154 static inline unsigned int
10155 variable_tracking_main_1 (void)
10157 bool success;
10159 if (flag_var_tracking_assignments < 0)
10161 delete_debug_insns ();
10162 return 0;
10165 if (n_basic_blocks_for_fn (cfun) > 500 &&
10166 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10168 vt_debug_insns_local (true);
10169 return 0;
10172 mark_dfs_back_edges ();
10173 if (!vt_initialize ())
10175 vt_finalize ();
10176 vt_debug_insns_local (true);
10177 return 0;
10180 success = vt_find_locations ();
10182 if (!success && flag_var_tracking_assignments > 0)
10184 vt_finalize ();
10186 delete_debug_insns ();
10188 /* This is later restored by our caller. */
10189 flag_var_tracking_assignments = 0;
10191 success = vt_initialize ();
10192 gcc_assert (success);
10194 success = vt_find_locations ();
10197 if (!success)
10199 vt_finalize ();
10200 vt_debug_insns_local (false);
10201 return 0;
10204 if (dump_file && (dump_flags & TDF_DETAILS))
10206 dump_dataflow_sets ();
10207 dump_reg_info (dump_file);
10208 dump_flow_info (dump_file, dump_flags);
10211 timevar_push (TV_VAR_TRACKING_EMIT);
10212 vt_emit_notes ();
10213 timevar_pop (TV_VAR_TRACKING_EMIT);
10215 vt_finalize ();
10216 vt_debug_insns_local (false);
10217 return 0;
10220 unsigned int
10221 variable_tracking_main (void)
10223 unsigned int ret;
10224 int save = flag_var_tracking_assignments;
10226 ret = variable_tracking_main_1 ();
10228 flag_var_tracking_assignments = save;
10230 return ret;
10233 static bool
10234 gate_handle_var_tracking (void)
10236 return (flag_var_tracking && !targetm.delay_vartrack);
10241 namespace {
10243 const pass_data pass_data_variable_tracking =
10245 RTL_PASS, /* type */
10246 "vartrack", /* name */
10247 OPTGROUP_NONE, /* optinfo_flags */
10248 true, /* has_gate */
10249 true, /* has_execute */
10250 TV_VAR_TRACKING, /* tv_id */
10251 0, /* properties_required */
10252 0, /* properties_provided */
10253 0, /* properties_destroyed */
10254 0, /* todo_flags_start */
10255 ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
10258 class pass_variable_tracking : public rtl_opt_pass
10260 public:
10261 pass_variable_tracking (gcc::context *ctxt)
10262 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10265 /* opt_pass methods: */
10266 bool gate () { return gate_handle_var_tracking (); }
10267 unsigned int execute () { return variable_tracking_main (); }
10269 }; // class pass_variable_tracking
10271 } // anon namespace
10273 rtl_opt_pass *
10274 make_pass_variable_tracking (gcc::context *ctxt)
10276 return new pass_variable_tracking (ctxt);