[official-gcc.git] / gcc / var-tracking.c
/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offsets of variables addressed using the stack
   pointer), the table of structures describing the locations of parts of a
   variable, and, for each physical register, a linked list of attributes.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for effectively deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each
   variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
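/* A hedged illustration (the exact printed form depends on the target
   and on dump flags): for an int variable "x" living in a register,
   the emitted note carries a VAR_LOCATION rtx that dumps roughly as

     (note (var_location x (reg:SI ax)))

   while a long long split across two registers would carry a parallel
   of location/offset pairs instead of the single REG.  */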
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic.h"
#include "varasm.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "reload.h"
#include "calls.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "tree-pretty-print.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"
#include "print-rtl.h"
#include "function-abi.h"
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about a micro operation.  */
struct micro_operation
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
};
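/* A hedged sketch of how this is used: scanning a simple insn such as
   (set (reg A) (mem B)) records a use micro operation for the memory
   reference before the micro operation for the store, matching the
   per-insn ordering described at the top of the file (uses sort
   before sets).  */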
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
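/* A minimal usage sketch (illustrative only): decl_or_value is an
   untagged pointer, discriminated by the TREE_CODE/VALUE trick above,
   so

     decl_or_value dv1 = dv_from_decl (decl);    // dv_is_decl_p (dv1)
     decl_or_value dv2 = dv_from_value (value);  // dv_is_value_p (dv2)

   where dv_from_decl and dv_from_value are defined later in this file
   and assert the corresponding predicate.  */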
/* Description of the location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is the best data structure here.  */
struct attrs
{
  /* Pointer to next member of the list.  */
  attrs *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
};
/* Structure for chaining the locations.  */
struct location_chain
{
  /* Next element in the chain.  */
  location_chain *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
};
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
struct loc_exp_dep
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep **pprev;
};
/* This data structure holds information about the depth of a variable
   expansion.  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};

/* Type for dependencies actively used when expanding FROM into cur_loc.  */
typedef vec<loc_exp_dep, va_heap, vl_embed> deps_vec;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  deps_vec deps;
};
/* Structure describing one part of a variable.  */
struct variable_part
{
  /* Chain of locations of the part.  */
  location_chain *loc_chain;

  /* Location which was last emitted to the location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
};

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16
/* Enumeration type used to discriminate various types of one-part
   variables.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};
/* Structure describing where the variable is located.  */
struct variable
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
};

/* Pointer to the BB's information specific to the variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info *) (BB)->aux)
/* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't.  */

static inline HOST_WIDE_INT
int_mem_offset (const_rtx mem)
{
  HOST_WIDE_INT offset;
  if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
    return offset;
  return 0;
}

#if CHECKING_P && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable *const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)	(VAR_LOC_1PAUX (var)		  \
				 ? VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_DEP_LSTP(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_FROM(var)	(VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)	(VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)	var_loc_dep_vec (var)

/* Implements the VAR_LOC_DEP_VEC above as a function to work around
   a bogus -Wnonnull (PR c/95554).  */

static inline deps_vec*
var_loc_dep_vec (variable *var)
{
  return VAR_LOC_1PAUX (var) ? &VAR_LOC_1PAUX (var)->deps : NULL;
}
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable *);
  static inline bool equal (const variable *, const void *);
  static inline void remove (variable *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const variable *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const variable *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (variable *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
struct emit_note_data
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
};

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
struct shared_hash
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
};
/* Structure holding the IN or OUT set for a basic block.  */
struct dataflow_set
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs *regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash *vars;

  /* Vars that are being traversed.  */
  shared_hash *traversed_vars;
};
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
struct variable_tracking_info
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;
};
/* Alloc pool for struct attrs_def.  */
object_allocator<attrs> attrs_pool ("attrs pool");

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */

static pool_allocator var_pool
  ("variable_def pool", sizeof (variable) +
   (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0]));

/* Alloc pool for struct variable_def with a single var_part entry.  */
static pool_allocator valvar_pool
  ("small variable_def pool", sizeof (variable));

/* Alloc pool for struct location_chain.  */
static object_allocator<location_chain> location_chain_pool
  ("location_chain pool");

/* Alloc pool for struct shared_hash.  */
static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool");

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool");

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash *empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
};

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs **);
static void attrs_list_clear (attrs **);
static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs **, attrs *);
static void attrs_list_union (attrs **, attrs *);

static variable **unshare_variable (dataflow_set *set, variable **slot,
				    variable *var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain *find_loc_in_1pdv (rtx, variable *,
					 variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable *, variable *);
static bool variable_different_p (variable *, variable *);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool track_expr_p (tree, bool);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs *);
static void dump_var (variable *);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable *, dataflow_set *);
static variable **set_slot_part (dataflow_set *, rtx, variable **,
				 decl_or_value, HOST_WIDE_INT,
				 enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable **clobber_slot_part (dataflow_set *, rtx,
				     variable **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable **delete_slot_part (dataflow_set *, rtx, variable **,
				    HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
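/* Worked example (illustrative, not from the original sources): for

     (set (reg sp) (plus (reg sp) (const_int -16)))

   the function above takes the PLUS branch and does *post -= -16,
   i.e. it records a post-modifying adjustment of +16.  Auto-modified
   addresses such as (pre_dec (reg sp)) are instead accumulated through
   stack_adjust_offset_pre_post_cb via for_each_inc_dec.  */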
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing the DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it, and the offset for it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (poly_int64 adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static poly_int64 hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

class adjust_mem_data
{
public:
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  auto_vec<rtx> side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    if (GET_MODE (XEXP (x, 1)) != VOIDmode)
	      {
		enum machine_mode mode = GET_MODE (subreg);
		rtx op1 = XEXP (x, 1);
		enum machine_mode op1_mode = GET_MODE (op1);
		if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
		    < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
		  {
		    poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
		    if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
		      {
			if (!simplify_subreg (mode, op1, op1_mode, byte))
			  return false;
		      }
		    else if (!validate_subreg (mode, op1_mode, op1, byte))
		      return false;
		  }
	      }
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
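/* Illustrative example (not part of the original sources): on a
   little-endian target, narrowing (plus:DI (reg:DI r) (const_int 1))
   from DImode to SImode with the function above yields

     (plus:SI (subreg:SI (reg:DI r) 0) (const_int 1))

   since the REG operand is rewritten via lowpart_subreg and the
   constant is reduced to the narrower mode.  */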
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  class adjust_mem_data *amd = (class adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  poly_int64 size;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && maybe_ne (hard_frame_pointer_adjustment, -1)
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      size = GET_MODE_SIZE (amd->mem_mode);
      addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			    GET_CODE (loc) == PRE_INC ? size : -size);
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      size = GET_MODE_SIZE (amd->mem_mode);
      tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			   (GET_CODE (loc) == PRE_INC
			    || GET_CODE (loc) == POST_INC) ? size : -size);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     &tem_subreg_mode)
	  && (GET_MODE_PRECISION (tem_mode)
	      < GET_MODE_PRECISION (tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
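/* Illustrative sketch (not from the original sources) of the auto-inc
   elimination above: given a push such as

     (set (mem:SI (pre_dec:P (reg sp))) (reg r))

   adjust_mems rewrites the address to (plus:P (reg sp) (const_int -4))
   and queues the side effect

     (set (reg sp) (plus:P (reg sp) (const_int -4)))

   which adjust_insn later appends to the insn's PARALLEL, so the
   adjusted pattern no longer contains {PRE,POST}_{INC,DEC,MODIFY}
   rtxes.  */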
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  amd.store = true;
  note_stores (insn, adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will not
	 touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be done before that
	 (otherwise it would fail), and afterwards this code fixes
	 things up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (!amd.side_effects.is_empty ())
    {
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator &
onepart_pool (onepart_enum onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Allocate a variable_def from the corresponding variable pool.  */
static inline variable *
onepart_pool_allocate (onepart_enum onepart)
{
  return (variable*) onepart_pool (onepart).allocate ();
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable *var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs **set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs **listp)
{
  attrs *list, *next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}
/* Return the element of LIST whose DV and OFFSET match, or NULL if
   there is none.  */

static attrs *
attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DV, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs **listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs *list = new attrs;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs **dstp, attrs *src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs *n = new attrs;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs **dstp, attrs *src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash *vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash *vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable *var, shared_hash *vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash *
shared_hash_unshare (shared_hash *vars)
{
  shared_hash *new_vars = new shared_hash;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash *
shared_hash_copy (shared_hash *vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash *vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable **
shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable **
shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable **
shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable **
shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable **
shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable **
shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable *
shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable *
shared_hash_find (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
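/* For example (illustrative): given VALUEs v1 and v2 with cselib uids
   1 and 2, canon_value_cmp (v1, v2) is true, so v1 becomes the
   canonical value; the equivalence is then recorded as v2 -> v1, with
   v1 keeping backlinks to v2 alongside its actual locations.  */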
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  new_var = onepart_pool_allocate (var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type *dst, variable_table_type *src)
{
  variable_iterator_type hi;
  variable *var;

  FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
    {
      variable **dstp;
      var->refcount++;
      dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}
/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs *node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}
/* Return true if we should track a location that is OFFSET bytes from
   a variable.  Store the constant offset in *OFFSET_OUT if so.  */

static bool
track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
{
  HOST_WIDE_INT const_offset;
  if (!offset.is_constant (&const_offset)
      || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
    return false;
  *offset_out = const_offset;
  return true;
}

/* Return the offset of a register that track_offset_p says we
   should track.  */

static HOST_WIDE_INT
get_tracked_reg_offset (rtx loc)
{
  HOST_WIDE_INT offset;
  if (!track_offset_p (REG_OFFSET (loc), &offset))
    gcc_unreachable ();
  return offset;
}
1904 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1906 static void
1907 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1908 rtx set_src)
1910 tree decl = REG_EXPR (loc);
1911 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1913 var_reg_decl_set (set, loc, initialized,
1914 dv_from_decl (decl), offset, set_src, INSERT);
1917 static enum var_init_status
1918 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1920 variable *var;
1921 int i;
1922 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1924 if (! flag_var_tracking_uninit)
1925 return VAR_INIT_STATUS_INITIALIZED;
1927 var = shared_hash_find (set->vars, dv);
1928 if (var)
1930 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1932 location_chain *nextp;
1933 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1934 if (rtx_equal_p (nextp->loc, loc))
1936 ret_val = nextp->init;
1937 break;
1942 return ret_val;
1945 /* Delete current content of register LOC in dataflow set SET and set
1946 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1947 MODIFY is true, any other live copies of the same variable part are
1948 also deleted from the dataflow set, otherwise the variable part is
1949 assumed to be copied from another location holding the same
1950 part. */
1952 static void
1953 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1954 enum var_init_status initialized, rtx set_src)
1956 tree decl = REG_EXPR (loc);
1957 HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
1958 attrs *node, *next;
1959 attrs **nextp;
1961 decl = var_debug_decl (decl);
1963 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1964 initialized = get_init_value (set, loc, dv_from_decl (decl));
1966 nextp = &set->regs[REGNO (loc)];
1967 for (node = *nextp; node; node = next)
1969 next = node->next;
1970 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1972 delete_variable_part (set, node->loc, node->dv, node->offset);
1973 delete node;
1974 *nextp = next;
1976 else
1978 node->loc = loc;
1979 nextp = &node->next;
1982 if (modify)
1983 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1984 var_reg_set (set, loc, initialized, set_src);
1987 /* Delete the association of register LOC in dataflow set SET with any
1988 variables that aren't onepart. If CLOBBER is true, also delete any
1989 other live copies of the same variable part, and delete the
1990 association with onepart dvs too. */
1992 static void
1993 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1995 attrs **nextp = &set->regs[REGNO (loc)];
1996 attrs *node, *next;
1998 HOST_WIDE_INT offset;
1999 if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
2001 tree decl = REG_EXPR (loc);
2003 decl = var_debug_decl (decl);
2005 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2008 for (node = *nextp; node; node = next)
2010 next = node->next;
2011 if (clobber || !dv_onepart_p (node->dv))
2013 delete_variable_part (set, node->loc, node->dv, node->offset);
2014 delete node;
2015 *nextp = next;
2017 else
2018 nextp = &node->next;
2022 /* Delete content of register with number REGNO in dataflow set SET. */
2024 static void
2025 var_regno_delete (dataflow_set *set, int regno)
2027 attrs **reg = &set->regs[regno];
2028 attrs *node, *next;
2030 for (node = *reg; node; node = next)
2032 next = node->next;
2033 delete_variable_part (set, node->loc, node->dv, node->offset);
2034 delete node;
2036 *reg = NULL;
2039 /* Return true if I is the negated value of a power of two. */
2040 static bool
2041 negative_power_of_two_p (HOST_WIDE_INT i)
2043 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2044 return pow2_or_zerop (x);
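/* E.g. i == -8 yields x == 8, a power of two, so the predicate holds;
   it also holds for i == 0, courtesy of pow2_or_zerop.  This matches
   the masks of stack-alignment ANDs such as (and ... (const_int -16)).  */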
2047 /* Strip constant offsets and alignments off of LOC. Return the base
2048 expression. */
2050 static rtx
2051 vt_get_canonicalize_base (rtx loc)
2053 while ((GET_CODE (loc) == PLUS
2054 || GET_CODE (loc) == AND)
2055 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2056 && (GET_CODE (loc) != AND
2057 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2058 loc = XEXP (loc, 0);
2060 return loc;
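/* A worked example (a sketch): given
     (and (plus (value V) (const_int 16)) (const_int -8))
   the AND mask is a negated power of two and the PLUS addend is a
   CONST_INT, so both wrappers are stripped and (value V) is returned
   as the base.  An AND with a mask like -6 would stop the stripping.  */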
2063 /* This caches canonicalized addresses for VALUEs, computed using
2064 information in the global cselib table. */
2065 static hash_map<rtx, rtx> *global_get_addr_cache;
2067 /* This caches canonicalized addresses for VALUEs, computed using
2068 information from the global cache and information pertaining to a
2069 basic block being analyzed. */
2070 static hash_map<rtx, rtx> *local_get_addr_cache;
2072 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2074 /* Return the canonical address for LOC, which must be a VALUE, using a
2075 cached global equivalence or computing it and storing it in the
2076 global cache. */
2078 static rtx
2079 get_addr_from_global_cache (rtx const loc)
2081 rtx x;
2083 gcc_checking_assert (GET_CODE (loc) == VALUE);
2085 bool existed;
2086 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2087 if (existed)
2088 return *slot;
2090 x = canon_rtx (get_addr (loc));
2092 /* Tentative, avoiding infinite recursion. */
2093 *slot = x;
2095 if (x != loc)
2097 rtx nx = vt_canonicalize_addr (NULL, x);
2098 if (nx != x)
2100 /* The table may have moved during recursion, recompute
2101 SLOT. */
2102 *global_get_addr_cache->get (loc) = x = nx;
2106 return x;
2109 /* Return the canonical address for LOC, which must be a VALUE, using a
2110 cached local equivalence or computing it and storing it in the
2111 local cache. */
2113 static rtx
2114 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2116 rtx x;
2117 decl_or_value dv;
2118 variable *var;
2119 location_chain *l;
2121 gcc_checking_assert (GET_CODE (loc) == VALUE);
2123 bool existed;
2124 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2125 if (existed)
2126 return *slot;
2128 x = get_addr_from_global_cache (loc);
2130 /* Tentative, avoiding infinite recursion. */
2131 *slot = x;
2133 /* Recurse to cache the local expansion of X, or to search for a
2134 VALUE in the expansion. */
2135 if (x != loc)
2137 rtx nx = vt_canonicalize_addr (set, x);
2138 if (nx != x)
2140 slot = local_get_addr_cache->get (loc);
2141 *slot = x = nx;
2143 return x;
2146 dv = dv_from_rtx (x);
2147 var = shared_hash_find (set->vars, dv);
2148 if (!var)
2149 return x;
2151 /* Look for an improved equivalent expression. */
2152 for (l = var->var_part[0].loc_chain; l; l = l->next)
2154 rtx base = vt_get_canonicalize_base (l->loc);
2155 if (GET_CODE (base) == VALUE
2156 && canon_value_cmp (base, loc))
2158 rtx nx = vt_canonicalize_addr (set, l->loc);
2159 if (x != nx)
2161 slot = local_get_addr_cache->get (loc);
2162 *slot = x = nx;
2164 break;
2168 return x;
2171 /* Canonicalize LOC using equivalences from SET in addition to those
2172 in the cselib static table. It expects a VALUE-based expression,
2173 and it will only substitute VALUEs with other VALUEs or
2174 function-global equivalences, so that, if two addresses have base
2175 VALUEs that are locally or globally related in ways that
2176 memrefs_conflict_p cares about, they will both canonicalize to
2177 expressions that have the same base VALUE.
2179 The use of VALUEs as canonical base addresses enables the canonical
2180 RTXs to remain unchanged globally, if they resolve to a constant,
2181 or throughout a basic block otherwise, so that they can be cached
2182 and the cache need not be invalidated when REGs, MEMs or such
2183 change. */
2185 static rtx
2186 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2188 poly_int64 ofst = 0, term;
2189 machine_mode mode = GET_MODE (oloc);
2190 rtx loc = oloc;
2191 rtx x;
2192 bool retry = true;
2194 while (retry)
2196 while (GET_CODE (loc) == PLUS
2197 && poly_int_rtx_p (XEXP (loc, 1), &term))
2199 ofst += term;
2200 loc = XEXP (loc, 0);
2203 /* Alignment operations can't normally be combined, so just
2204 canonicalize the base and we're done. We'll normally have
2205 only one stack alignment anyway. */
2206 if (GET_CODE (loc) == AND
2207 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2208 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2210 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2211 if (x != XEXP (loc, 0))
2212 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2213 retry = false;
2216 if (GET_CODE (loc) == VALUE)
2218 if (set)
2219 loc = get_addr_from_local_cache (set, loc);
2220 else
2221 loc = get_addr_from_global_cache (loc);
2223 /* Consolidate plus_constants. */
2224 while (maybe_ne (ofst, 0)
2225 && GET_CODE (loc) == PLUS
2226 && poly_int_rtx_p (XEXP (loc, 1), &term))
2228 ofst += term;
2229 loc = XEXP (loc, 0);
2232 retry = false;
2234 else
2236 x = canon_rtx (loc);
2237 if (retry)
2238 retry = (x != loc);
2239 loc = x;
2243 /* Add OFST back in. */
2244 if (maybe_ne (ofst, 0))
2246 /* Don't build new RTL if we can help it. */
2247 if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
2248 return oloc;
2250 loc = plus_constant (mode, loc, ofst);
2253 return loc;
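/* A worked example (values hypothetical): starting from
     (plus (plus (value V1) (const_int 8)) (const_int -4))
   the first inner loop accumulates OFST = 4 and leaves (value V1); if
   the caches expand V1 to (plus (value V0) (const_int 16)), the
   consolidation loop folds that in as well, giving OFST = 20, and the
   final result is (plus (value V0) (const_int 20)).  */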
2256 /* Return true iff there's a true dependence between MLOC and LOC.
2257 MADDR must be a canonicalized version of MLOC's address. */
2259 static inline bool
2260 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2262 if (GET_CODE (loc) != MEM)
2263 return false;
2265 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2266 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2267 return false;
2269 return true;
2272 /* Hold parameters for the hashtab traversal function
2273 drop_overlapping_mem_locs, see below. */
2275 struct overlapping_mems
2277 dataflow_set *set;
2278 rtx loc, addr;
2281 /* Remove all MEMs that overlap with COMS->LOC from the location list
2282 of a hash table entry for a onepart variable. COMS->ADDR must be a
2283 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2284 canonicalized itself. */
2286 int
2287 drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
2289 dataflow_set *set = coms->set;
2290 rtx mloc = coms->loc, addr = coms->addr;
2291 variable *var = *slot;
2293 if (var->onepart != NOT_ONEPART)
2295 location_chain *loc, **locp;
2296 bool changed = false;
2297 rtx cur_loc;
2299 gcc_assert (var->n_var_parts == 1);
2301 if (shared_var_p (var, set->vars))
2303 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2304 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2305 break;
2307 if (!loc)
2308 return 1;
2310 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2311 var = *slot;
2312 gcc_assert (var->n_var_parts == 1);
2315 if (VAR_LOC_1PAUX (var))
2316 cur_loc = VAR_LOC_FROM (var);
2317 else
2318 cur_loc = var->var_part[0].cur_loc;
2320 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2321 loc; loc = *locp)
2323 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2325 locp = &loc->next;
2326 continue;
2329 *locp = loc->next;
2330 /* If we have deleted the location which was last emitted,
2331 we have to emit a new location, so add the variable to the
2332 set of changed variables. */
2333 if (cur_loc == loc->loc)
2335 changed = true;
2336 var->var_part[0].cur_loc = NULL;
2337 if (VAR_LOC_1PAUX (var))
2338 VAR_LOC_FROM (var) = NULL;
2340 delete loc;
2343 if (!var->var_part[0].loc_chain)
2345 var->n_var_parts--;
2346 changed = true;
2348 if (changed)
2349 variable_was_changed (var, set);
2352 return 1;
2355 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2357 static void
2358 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2360 struct overlapping_mems coms;
2362 gcc_checking_assert (GET_CODE (loc) == MEM);
2364 coms.set = set;
2365 coms.loc = canon_rtx (loc);
2366 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2368 set->traversed_vars = set->vars;
2369 shared_hash_htab (set->vars)
2370 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2371 set->traversed_vars = NULL;
2374 /* Set the location of DV, OFFSET as the MEM LOC. */
2376 static void
2377 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2378 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2379 enum insert_option iopt)
2381 if (dv_is_decl_p (dv))
2382 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2384 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2387 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2388 SET to LOC.
2389 Adjust the address first if it is stack pointer based. */
2391 static void
2392 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2393 rtx set_src)
2395 tree decl = MEM_EXPR (loc);
2396 HOST_WIDE_INT offset = int_mem_offset (loc);
2398 var_mem_decl_set (set, loc, initialized,
2399 dv_from_decl (decl), offset, set_src, INSERT);
2402 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2403 dataflow set SET to LOC. If MODIFY is true, any other live copies
2404 of the same variable part are also deleted from the dataflow set,
2405 otherwise the variable part is assumed to be copied from another
2406 location holding the same part.
2407 Adjust the address first if it is stack pointer based. */
2409 static void
2410 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2411 enum var_init_status initialized, rtx set_src)
2413 tree decl = MEM_EXPR (loc);
2414 HOST_WIDE_INT offset = int_mem_offset (loc);
2416 clobber_overlapping_mems (set, loc);
2417 decl = var_debug_decl (decl);
2419 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2420 initialized = get_init_value (set, loc, dv_from_decl (decl));
2422 if (modify)
2423 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2424 var_mem_set (set, loc, initialized, set_src);
2427 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2428 true, also delete any other live copies of the same variable part.
2429 Adjust the address first if it is stack pointer based. */
2431 static void
2432 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2434 tree decl = MEM_EXPR (loc);
2435 HOST_WIDE_INT offset = int_mem_offset (loc);
2437 clobber_overlapping_mems (set, loc);
2438 decl = var_debug_decl (decl);
2439 if (clobber)
2440 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2441 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2444 /* Return true if LOC should not be expanded for location expressions,
2445 or used in them. */
2447 static inline bool
2448 unsuitable_loc (rtx loc)
2450 switch (GET_CODE (loc))
2452 case PC:
2453 case SCRATCH:
2454 case CC0:
2455 case ASM_INPUT:
2456 case ASM_OPERANDS:
2457 return true;
2459 default:
2460 return false;
2464 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2465 bound to it. */
2467 static inline void
2468 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2470 if (REG_P (loc))
2472 if (modified)
2473 var_regno_delete (set, REGNO (loc));
2474 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2475 dv_from_value (val), 0, NULL_RTX, INSERT);
2477 else if (MEM_P (loc))
2479 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2481 if (modified)
2482 clobber_overlapping_mems (set, loc);
2484 if (l && GET_CODE (l->loc) == VALUE)
2485 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2487 /* If this MEM is a global constant, we don't need it in the
2488 dynamic tables. ??? We should test this before emitting the
2489 micro-op in the first place. */
2490 while (l)
2491 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2492 break;
2493 else
2494 l = l->next;
2496 if (!l)
2497 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2498 dv_from_value (val), 0, NULL_RTX, INSERT);
2500 else
2502 /* Other kinds of equivalences are necessarily static, at least
2503 so long as we do not perform substitutions while merging
2504 expressions. */
2505 gcc_unreachable ();
2506 set_variable_part (set, loc, dv_from_value (val), 0,
2507 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2511 /* Bind a value to a location it was just stored in. If MODIFIED
2512 holds, assume the location was modified, detaching it from any
2513 values bound to it. */
2515 static void
2516 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2517 bool modified)
2519 cselib_val *v = CSELIB_VAL_PTR (val);
2521 gcc_assert (cselib_preserved_value_p (v));
2523 if (dump_file)
2525 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2526 print_inline_rtx (dump_file, loc, 0);
2527 fprintf (dump_file, " evaluates to ");
2528 print_inline_rtx (dump_file, val, 0);
2529 if (v->locs)
2531 struct elt_loc_list *l;
2532 for (l = v->locs; l; l = l->next)
2534 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2535 print_inline_rtx (dump_file, l->loc, 0);
2538 fprintf (dump_file, "\n");
2541 gcc_checking_assert (!unsuitable_loc (loc));
2543 val_bind (set, val, loc, modified);
2546 /* Clear (canonical address) slots that reference X. */
2548 bool
2549 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2551 if (vt_get_canonicalize_base (*slot) == x)
2552 *slot = NULL;
2553 return true;
2556 /* Reset this node, detaching all its equivalences. */
2559 static void
2560 val_reset (dataflow_set *set, decl_or_value dv)
2562 variable *var = shared_hash_find (set->vars, dv) ;
2563 location_chain *node;
2564 rtx cval;
2566 if (!var || !var->n_var_parts)
2567 return;
2569 gcc_assert (var->n_var_parts == 1);
2571 if (var->onepart == ONEPART_VALUE)
2573 rtx x = dv_as_value (dv);
2575 /* Relationships in the global cache don't change, so reset the
2576 local cache entry only. */
2577 rtx *slot = local_get_addr_cache->get (x);
2578 if (slot)
2580 /* If the value resolved back to itself, odds are that other
2581 values may have cached it too. These entries now refer
2582 to the old X, so detach them too. Entries that used the
2583 old X but resolved to something else remain ok as long as
2584 that something else isn't also reset. */
2585 if (*slot == x)
2586 local_get_addr_cache
2587 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2588 *slot = NULL;
2592 cval = NULL;
2593 for (node = var->var_part[0].loc_chain; node; node = node->next)
2594 if (GET_CODE (node->loc) == VALUE
2595 && canon_value_cmp (node->loc, cval))
2596 cval = node->loc;
2598 for (node = var->var_part[0].loc_chain; node; node = node->next)
2599 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2601 /* Redirect the equivalence link to the new canonical
2602 value, or simply remove it if it would point at
2603 itself. */
2604 if (cval)
2605 set_variable_part (set, cval, dv_from_value (node->loc),
2606 0, node->init, node->set_src, NO_INSERT);
2607 delete_variable_part (set, dv_as_value (dv),
2608 dv_from_value (node->loc), 0);
2611 if (cval)
2613 decl_or_value cdv = dv_from_value (cval);
2615 /* Keep the remaining values connected, accumulating links
2616 in the canonical value. */
2617 for (node = var->var_part[0].loc_chain; node; node = node->next)
2619 if (node->loc == cval)
2620 continue;
2621 else if (GET_CODE (node->loc) == REG)
2622 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2623 node->set_src, NO_INSERT);
2624 else if (GET_CODE (node->loc) == MEM)
2625 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2626 node->set_src, NO_INSERT);
2627 else
2628 set_variable_part (set, node->loc, cdv, 0,
2629 node->init, node->set_src, NO_INSERT);
2633 /* We remove this last, to make sure that the canonical value is not
2634 removed to the point of requiring reinsertion. */
2635 if (cval)
2636 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2638 clobber_variable_part (set, NULL, dv, 0, NULL);
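/* A sketch of the net effect (values hypothetical): if V's list holds
   equivalences V1 and V2 with V2 the most canonical, the loops above
   redirect V1's equivalence to V2, reattach V's remaining REG and MEM
   locations under V2, and only then cut the V <-> V2 links, so the
   equivalence set stays star-shaped around V2 once V is reset.  */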
2641 /* Find the values in a given location and map the val to another
2642 value, if it is unique, or add the location as one holding the
2643 value. */
2645 static void
2646 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2648 decl_or_value dv = dv_from_value (val);
2650 if (dump_file && (dump_flags & TDF_DETAILS))
2652 if (insn)
2653 fprintf (dump_file, "%i: ", INSN_UID (insn));
2654 else
2655 fprintf (dump_file, "head: ");
2656 print_inline_rtx (dump_file, val, 0);
2657 fputs (" is at ", dump_file);
2658 print_inline_rtx (dump_file, loc, 0);
2659 fputc ('\n', dump_file);
2662 val_reset (set, dv);
2664 gcc_checking_assert (!unsuitable_loc (loc));
2666 if (REG_P (loc))
2668 attrs *node, *found = NULL;
2670 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2671 if (dv_is_value_p (node->dv)
2672 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2674 found = node;
2676 /* Map incoming equivalences. ??? Wouldn't it be nice if
2677 we just started sharing the location lists? Maybe a
2678 circular list ending at the value itself or some
2679 such. */
2680 set_variable_part (set, dv_as_value (node->dv),
2681 dv_from_value (val), node->offset,
2682 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2683 set_variable_part (set, val, node->dv, node->offset,
2684 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2687 /* If we didn't find any equivalence, we need to remember that
2688 this value is held in the named register. */
2689 if (found)
2690 return;
2692 /* ??? Attempt to find and merge equivalent MEMs or other
2693 expressions too. */
2695 val_bind (set, val, loc, false);
2698 /* Initialize dataflow set SET to be empty. */
2701 static void
2702 dataflow_set_init (dataflow_set *set)
2704 init_attrs_list_set (set->regs);
2705 set->vars = shared_hash_copy (empty_shared_hash);
2706 set->stack_adjust = 0;
2707 set->traversed_vars = NULL;
2710 /* Delete the contents of dataflow set SET. */
2712 static void
2713 dataflow_set_clear (dataflow_set *set)
2715 int i;
2717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2718 attrs_list_clear (&set->regs[i]);
2720 shared_hash_destroy (set->vars);
2721 set->vars = shared_hash_copy (empty_shared_hash);
2724 /* Copy the contents of dataflow set SRC to DST. */
2726 static void
2727 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2729 int i;
2731 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2732 attrs_list_copy (&dst->regs[i], src->regs[i]);
2734 shared_hash_destroy (dst->vars);
2735 dst->vars = shared_hash_copy (src->vars);
2736 dst->stack_adjust = src->stack_adjust;
2739 /* Information for merging lists of locations for a given offset of variable. */
2741 struct variable_union_info
2743 /* Node of the location chain. */
2744 location_chain *lc;
2746 /* The sum of positions in the input chains. */
2747 int pos;
2749 /* The position in the chain of DST dataflow set. */
2750 int pos_dst;
2753 /* Buffer for location list sorting and its allocated size. */
2754 static struct variable_union_info *vui_vec;
2755 static int vui_allocated;
2757 /* Compare function for qsort; order the structures by their POS element. */
2759 static int
2760 variable_union_info_cmp_pos (const void *n1, const void *n2)
2762 const struct variable_union_info *const i1 =
2763 (const struct variable_union_info *) n1;
2764 const struct variable_union_info *const i2 =
2765 (const struct variable_union_info *) n2;
2767 if (i1->pos != i2->pos)
2768 return i1->pos - i2->pos;
2770 return (i1->pos_dst - i2->pos_dst);
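/* E.g. with DST chain [A, B] and SRC chain [B, C] (src_l == dst_l == 2):
   B appears in both, so B.pos == 1 + 0 == 1; A is DST-only, so A.pos ==
   0 + 4 == 4; C is SRC-only, so C.pos == 1 + 4 == 5.  qsort thus yields
   the merged chain [B, A, C], shared locations first.  */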
2773 /* Compute union of location parts of variable *SLOT and the same variable
2774 from hash table DATA. Compute "sorted" union of the location chains
2775 for common offsets, i.e. the locations of a variable part are sorted by
2776 a priority where the priority is the sum of the positions in the 2 chains
2777 (if a location is only in one list the position in the second list is
2778 defined to be larger than the length of the chains).
2779 When we are updating the location parts the newest location is at the
2780 beginning of the chain, so the described "sorted" union keeps the
2781 newest locations at the beginning. */
2783 static int
2784 variable_union (variable *src, dataflow_set *set)
2786 variable *dst;
2787 variable **dstp;
2788 int i, j, k;
2790 dstp = shared_hash_find_slot (set->vars, src->dv);
2791 if (!dstp || !*dstp)
2793 src->refcount++;
2795 dst_can_be_shared = false;
2796 if (!dstp)
2797 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2799 *dstp = src;
2801 /* Continue traversing the hash table. */
2802 return 1;
2804 else
2805 dst = *dstp;
2807 gcc_assert (src->n_var_parts);
2808 gcc_checking_assert (src->onepart == dst->onepart);
2810 /* We can combine one-part variables very efficiently, because their
2811 entries are in canonical order. */
2812 if (src->onepart)
2814 location_chain **nodep, *dnode, *snode;
2816 gcc_assert (src->n_var_parts == 1
2817 && dst->n_var_parts == 1);
2819 snode = src->var_part[0].loc_chain;
2820 gcc_assert (snode);
2822 restart_onepart_unshared:
2823 nodep = &dst->var_part[0].loc_chain;
2824 dnode = *nodep;
2825 gcc_assert (dnode);
2827 while (snode)
2829 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2831 if (r > 0)
2833 location_chain *nnode;
2835 if (shared_var_p (dst, set->vars))
2837 dstp = unshare_variable (set, dstp, dst,
2838 VAR_INIT_STATUS_INITIALIZED);
2839 dst = *dstp;
2840 goto restart_onepart_unshared;
2843 *nodep = nnode = new location_chain;
2844 nnode->loc = snode->loc;
2845 nnode->init = snode->init;
2846 if (!snode->set_src || MEM_P (snode->set_src))
2847 nnode->set_src = NULL;
2848 else
2849 nnode->set_src = snode->set_src;
2850 nnode->next = dnode;
2851 dnode = nnode;
2853 else if (r == 0)
2854 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2856 if (r >= 0)
2857 snode = snode->next;
2859 nodep = &dnode->next;
2860 dnode = *nodep;
2863 return 1;
2866 gcc_checking_assert (!src->onepart);
2868 /* Count the number of location parts, result is K. */
2869 for (i = 0, j = 0, k = 0;
2870 i < src->n_var_parts && j < dst->n_var_parts; k++)
2872 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2874 i++;
2875 j++;
2877 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2878 i++;
2879 else
2880 j++;
2882 k += src->n_var_parts - i;
2883 k += dst->n_var_parts - j;
2885 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2886 thus there are at most MAX_VAR_PARTS different offsets. */
2887 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2889 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2891 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2892 dst = *dstp;
2895 i = src->n_var_parts - 1;
2896 j = dst->n_var_parts - 1;
2897 dst->n_var_parts = k;
2899 for (k--; k >= 0; k--)
2901 location_chain *node, *node2;
2903 if (i >= 0 && j >= 0
2904 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2906 /* Compute the "sorted" union of the chains, i.e. the locations which
2907 are in both chains go first, they are sorted by the sum of
2908 positions in the chains. */
2909 int dst_l, src_l;
2910 int ii, jj, n;
2911 struct variable_union_info *vui;
2913 /* If DST is shared, compare the location chains.
2914 If they are different we will most likely modify the chain
2915 in DST, so make a copy of DST. */
2916 if (shared_var_p (dst, set->vars))
2918 for (node = src->var_part[i].loc_chain,
2919 node2 = dst->var_part[j].loc_chain; node && node2;
2920 node = node->next, node2 = node2->next)
2922 if (!((REG_P (node2->loc)
2923 && REG_P (node->loc)
2924 && REGNO (node2->loc) == REGNO (node->loc))
2925 || rtx_equal_p (node2->loc, node->loc)))
2927 if (node2->init < node->init)
2928 node2->init = node->init;
2929 break;
2932 if (node || node2)
2934 dstp = unshare_variable (set, dstp, dst,
2935 VAR_INIT_STATUS_UNKNOWN);
2936 dst = (variable *)*dstp;
2940 src_l = 0;
2941 for (node = src->var_part[i].loc_chain; node; node = node->next)
2942 src_l++;
2943 dst_l = 0;
2944 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2945 dst_l++;
2947 if (dst_l == 1)
2949 /* The most common case, much simpler, no qsort is needed. */
2950 location_chain *dstnode = dst->var_part[j].loc_chain;
2951 dst->var_part[k].loc_chain = dstnode;
2952 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2953 node2 = dstnode;
2954 for (node = src->var_part[i].loc_chain; node; node = node->next)
2955 if (!((REG_P (dstnode->loc)
2956 && REG_P (node->loc)
2957 && REGNO (dstnode->loc) == REGNO (node->loc))
2958 || rtx_equal_p (dstnode->loc, node->loc)))
2960 location_chain *new_node;
2962 /* Copy the location from SRC. */
2963 new_node = new location_chain;
2964 new_node->loc = node->loc;
2965 new_node->init = node->init;
2966 if (!node->set_src || MEM_P (node->set_src))
2967 new_node->set_src = NULL;
2968 else
2969 new_node->set_src = node->set_src;
2970 node2->next = new_node;
2971 node2 = new_node;
2973 node2->next = NULL;
2975 else
2977 if (src_l + dst_l > vui_allocated)
2979 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2980 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2981 vui_allocated);
2983 vui = vui_vec;
2985 /* Fill in the locations from DST. */
2986 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2987 node = node->next, jj++)
2989 vui[jj].lc = node;
2990 vui[jj].pos_dst = jj;
2992 /* The DST position plus a value larger than any sum of two valid positions. */
2993 vui[jj].pos = jj + src_l + dst_l;
2996 /* Fill in the locations from SRC. */
2997 n = dst_l;
2998 for (node = src->var_part[i].loc_chain, ii = 0; node;
2999 node = node->next, ii++)
3001 /* Find location from NODE. */
3002 for (jj = 0; jj < dst_l; jj++)
3004 if ((REG_P (vui[jj].lc->loc)
3005 && REG_P (node->loc)
3006 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3007 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3009 vui[jj].pos = jj + ii;
3010 break;
3013 if (jj >= dst_l) /* The location has not been found. */
3015 location_chain *new_node;
3017 /* Copy the location from SRC. */
3018 new_node = new location_chain;
3019 new_node->loc = node->loc;
3020 new_node->init = node->init;
3021 if (!node->set_src || MEM_P (node->set_src))
3022 new_node->set_src = NULL;
3023 else
3024 new_node->set_src = node->set_src;
3025 vui[n].lc = new_node;
3026 vui[n].pos_dst = src_l + dst_l;
3027 vui[n].pos = ii + src_l + dst_l;
3028 n++;
3032 if (dst_l == 2)
3034 /* Special case of a still very common case. For dst_l == 2
3035 all entries dst_l ... n-1 are sorted, with vui[i].pos == i + src_l + dst_l
3036 for i >= dst_l. */
3037 if (vui[0].pos > vui[1].pos)
3039 /* Order should be 1, 0, 2... */
3040 dst->var_part[k].loc_chain = vui[1].lc;
3041 vui[1].lc->next = vui[0].lc;
3042 if (n >= 3)
3044 vui[0].lc->next = vui[2].lc;
3045 vui[n - 1].lc->next = NULL;
3047 else
3048 vui[0].lc->next = NULL;
3049 ii = 3;
3051 else
3053 dst->var_part[k].loc_chain = vui[0].lc;
3054 if (n >= 3 && vui[2].pos < vui[1].pos)
3056 /* Order should be 0, 2, 1, 3... */
3057 vui[0].lc->next = vui[2].lc;
3058 vui[2].lc->next = vui[1].lc;
3059 if (n >= 4)
3061 vui[1].lc->next = vui[3].lc;
3062 vui[n - 1].lc->next = NULL;
3064 else
3065 vui[1].lc->next = NULL;
3066 ii = 4;
3068 else
3070 /* Order should be 0, 1, 2... */
3071 ii = 1;
3072 vui[n - 1].lc->next = NULL;
3075 for (; ii < n; ii++)
3076 vui[ii - 1].lc->next = vui[ii].lc;
3078 else
3080 qsort (vui, n, sizeof (struct variable_union_info),
3081 variable_union_info_cmp_pos);
3083 /* Reconnect the nodes in sorted order. */
3084 for (ii = 1; ii < n; ii++)
3085 vui[ii - 1].lc->next = vui[ii].lc;
3086 vui[n - 1].lc->next = NULL;
3087 dst->var_part[k].loc_chain = vui[0].lc;
3090 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3092 i--;
3093 j--;
3095 else if ((i >= 0 && j >= 0
3096 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3097 || i < 0)
3099 dst->var_part[k] = dst->var_part[j];
3100 j--;
3102 else if ((i >= 0 && j >= 0
3103 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3104 || j < 0)
3106 location_chain **nextp;
3108 /* Copy the chain from SRC. */
3109 nextp = &dst->var_part[k].loc_chain;
3110 for (node = src->var_part[i].loc_chain; node; node = node->next)
3112 location_chain *new_lc;
3114 new_lc = new location_chain;
3115 new_lc->next = NULL;
3116 new_lc->init = node->init;
3117 if (!node->set_src || MEM_P (node->set_src))
3118 new_lc->set_src = NULL;
3119 else
3120 new_lc->set_src = node->set_src;
3121 new_lc->loc = node->loc;
3123 *nextp = new_lc;
3124 nextp = &new_lc->next;
3127 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3128 i--;
3130 dst->var_part[k].cur_loc = NULL;
3133 if (flag_var_tracking_uninit)
3134 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3136 location_chain *node, *node2;
3137 for (node = src->var_part[i].loc_chain; node; node = node->next)
3138 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3139 if (rtx_equal_p (node->loc, node2->loc))
3141 if (node->init > node2->init)
3142 node2->init = node->init;
3146 /* Continue traversing the hash table. */
3147 return 1;
3150 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3152 static void
3153 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3155 int i;
3157 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3158 attrs_list_union (&dst->regs[i], src->regs[i]);
3160 if (dst->vars == empty_shared_hash)
3162 shared_hash_destroy (dst->vars);
3163 dst->vars = shared_hash_copy (src->vars);
3165 else
3167 variable_iterator_type hi;
3168 variable *var;
3170 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3171 var, variable, hi)
3172 variable_union (var, dst);
3176 /* Whether the value is currently being expanded. */
3177 #define VALUE_RECURSED_INTO(x) \
3178 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3180 /* Whether no expansion was found, saving useless lookups.
3181 It must only be set when VALUE_CHANGED is clear. */
3182 #define NO_LOC_P(x) \
3183 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3185 /* Whether cur_loc in the value needs to be (re)computed. */
3186 #define VALUE_CHANGED(x) \
3187 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3188 /* Whether cur_loc in the decl needs to be (re)computed. */
3189 #define DECL_CHANGED(x) TREE_VISITED (x)
3191 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3192 user DECLs, this means they're in changed_variables. Values and
3193 debug exprs may be left with this flag set if no user variable
3194 requires them to be evaluated. */
3196 static inline void
3197 set_dv_changed (decl_or_value dv, bool newv)
3199 switch (dv_onepart_p (dv))
3201 case ONEPART_VALUE:
3202 if (newv)
3203 NO_LOC_P (dv_as_value (dv)) = false;
3204 VALUE_CHANGED (dv_as_value (dv)) = newv;
3205 break;
3207 case ONEPART_DEXPR:
3208 if (newv)
3209 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3210 /* Fall through. */
3212 default:
3213 DECL_CHANGED (dv_as_decl (dv)) = newv;
3214 break;
3218 /* Return true if DV needs to have its cur_loc recomputed. */
3220 static inline bool
3221 dv_changed_p (decl_or_value dv)
3223 return (dv_is_value_p (dv)
3224 ? VALUE_CHANGED (dv_as_value (dv))
3225 : DECL_CHANGED (dv_as_decl (dv)));
3228 /* Return a location list node whose loc is rtx_equal to LOC, in the
3229 location list of a one-part variable or value VAR, or in that of
3230 any values recursively mentioned in the location lists. VARS must
3231 be in star-canonical form. */
3233 static location_chain *
3234 find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
3236 location_chain *node;
3237 enum rtx_code loc_code;
3239 if (!var)
3240 return NULL;
3242 gcc_checking_assert (var->onepart);
3244 if (!var->n_var_parts)
3245 return NULL;
3247 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3249 loc_code = GET_CODE (loc);
3250 for (node = var->var_part[0].loc_chain; node; node = node->next)
3252 decl_or_value dv;
3253 variable *rvar;
3255 if (GET_CODE (node->loc) != loc_code)
3257 if (GET_CODE (node->loc) != VALUE)
3258 continue;
3260 else if (loc == node->loc)
3261 return node;
3262 else if (loc_code != VALUE)
3264 if (rtx_equal_p (loc, node->loc))
3265 return node;
3266 continue;
3269 /* Since we're in star-canonical form, we don't need to visit
3270 non-canonical nodes: one-part variables and non-canonical
3271 values would only point back to the canonical node. */
3272 if (dv_is_value_p (var->dv)
3273 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3275 /* Skip all subsequent VALUEs. */
3276 while (node->next && GET_CODE (node->next->loc) == VALUE)
3278 node = node->next;
3279 gcc_checking_assert (!canon_value_cmp (node->loc,
3280 dv_as_value (var->dv)));
3281 if (loc == node->loc)
3282 return node;
3284 continue;
3287 gcc_checking_assert (node == var->var_part[0].loc_chain);
3288 gcc_checking_assert (!node->next);
3290 dv = dv_from_value (node->loc);
3291 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3292 return find_loc_in_1pdv (loc, rvar, vars);
3295 /* ??? Gotta look in cselib_val locations too. */
3297 return NULL;
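/* An illustration (hypothetical): in star-canonical form a
   non-canonical value V1 keeps a single location, its canonical value
   V0, so looking up (reg R) under V1 matches nothing directly and
   tail-recurses exactly once into V0's chain, where R would live if it
   is bound at all.  */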
3300 /* Hash table iteration argument passed to variable_merge. */
3301 struct dfset_merge
3303 /* The set in which the merge is to be inserted. */
3304 dataflow_set *dst;
3305 /* The set that we're iterating in. */
3306 dataflow_set *cur;
3307 /* The set that may contain the other dv we are to merge with. */
3308 dataflow_set *src;
3309 /* Number of onepart dvs in src. */
3310 int src_onepart_cnt;
3313 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3314 loc_cmp order, and it is maintained as such. */
3316 static void
3317 insert_into_intersection (location_chain **nodep, rtx loc,
3318 enum var_init_status status)
3320 location_chain *node;
3321 int r;
3323 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3324 if ((r = loc_cmp (node->loc, loc)) == 0)
3326 node->init = MIN (node->init, status);
3327 return;
3329 else if (r > 0)
3330 break;
3332 node = new location_chain;
3334 node->loc = loc;
3335 node->set_src = NULL;
3336 node->init = status;
3337 node->next = *nodep;
3338 *nodep = node;
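/* E.g. inserting (reg R) into a list already containing it merely
   lowers that node's status to MIN (node->init, status); inserting a
   location that compares after every existing node links a fresh node
   at the tail, keeping the list in loc_cmp order.  */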
3341 /* Insert in DEST the intersection of the locations present in both
3342 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3343 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3344 DSM->dst. */
3346 static void
3347 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
3348 location_chain *s1node, variable *s2var)
3350 dataflow_set *s1set = dsm->cur;
3351 dataflow_set *s2set = dsm->src;
3352 location_chain *found;
3354 if (s2var)
3356 location_chain *s2node;
3358 gcc_checking_assert (s2var->onepart);
3360 if (s2var->n_var_parts)
3362 s2node = s2var->var_part[0].loc_chain;
3364 for (; s1node && s2node;
3365 s1node = s1node->next, s2node = s2node->next)
3366 if (s1node->loc != s2node->loc)
3367 break;
3368 else if (s1node->loc == val)
3369 continue;
3370 else
3371 insert_into_intersection (dest, s1node->loc,
3372 MIN (s1node->init, s2node->init));
3376 for (; s1node; s1node = s1node->next)
3378 if (s1node->loc == val)
3379 continue;
3381 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3382 shared_hash_htab (s2set->vars))))
3384 insert_into_intersection (dest, s1node->loc,
3385 MIN (s1node->init, found->init));
3386 continue;
3389 if (GET_CODE (s1node->loc) == VALUE
3390 && !VALUE_RECURSED_INTO (s1node->loc))
3392 decl_or_value dv = dv_from_value (s1node->loc);
3393 variable *svar = shared_hash_find (s1set->vars, dv);
3394 if (svar)
3396 if (svar->n_var_parts == 1)
3398 VALUE_RECURSED_INTO (s1node->loc) = true;
3399 intersect_loc_chains (val, dest, dsm,
3400 svar->var_part[0].loc_chain,
3401 s2var);
3402 VALUE_RECURSED_INTO (s1node->loc) = false;
3407 /* ??? gotta look in cselib_val locations too. */
3409 /* ??? if the location is equivalent to any location in src,
3410 searched recursively
3412 add to dst the values needed to represent the equivalence
3414 telling whether location S is equivalent to another dv's
3415 location list:
3417 for each location D in the list
3419 if S and D satisfy rtx_equal_p, then it is present
3421 else if D is a value, recurse without cycles
3423 else if S and D have the same CODE and MODE
3425 for each operand oS and the corresponding oD
3427 if oS and oD are not equivalent, then S and D are not equivalent
3429 else if they are RTX vectors
3431 if any vector oS element is not equivalent to its respective oD,
3432 then S and D are not equivalent
3440 /* Return -1 if X should be before Y in a location list for a 1-part
3441 variable, 1 if Y should be before X, and 0 if they're equivalent
3442 and should not appear in the list. */
3444 static int
3445 loc_cmp (rtx x, rtx y)
3447 int i, j, r;
3448 RTX_CODE code = GET_CODE (x);
3449 const char *fmt;
3451 if (x == y)
3452 return 0;
3454 if (REG_P (x))
3456 if (!REG_P (y))
3457 return -1;
3458 gcc_assert (GET_MODE (x) == GET_MODE (y));
3459 if (REGNO (x) == REGNO (y))
3460 return 0;
3461 else if (REGNO (x) < REGNO (y))
3462 return -1;
3463 else
3464 return 1;
3467 if (REG_P (y))
3468 return 1;
3470 if (MEM_P (x))
3472 if (!MEM_P (y))
3473 return -1;
3474 gcc_assert (GET_MODE (x) == GET_MODE (y));
3475 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3478 if (MEM_P (y))
3479 return 1;
3481 if (GET_CODE (x) == VALUE)
3483 if (GET_CODE (y) != VALUE)
3484 return -1;
3485 /* Don't assert the modes are the same, that is true only
3486 when not recursing. (subreg:QI (value:SI 1:1) 0)
3487 and (subreg:QI (value:DI 2:2) 0) can be compared,
3488 even when the modes are different. */
3489 if (canon_value_cmp (x, y))
3490 return -1;
3491 else
3492 return 1;
3495 if (GET_CODE (y) == VALUE)
3496 return 1;
3498 /* Entry value is the least preferable kind of expression. */
3499 if (GET_CODE (x) == ENTRY_VALUE)
3501 if (GET_CODE (y) != ENTRY_VALUE)
3502 return 1;
3503 gcc_assert (GET_MODE (x) == GET_MODE (y));
3504 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3507 if (GET_CODE (y) == ENTRY_VALUE)
3508 return -1;
3510 if (GET_CODE (x) == GET_CODE (y))
3511 /* Compare operands below. */;
3512 else if (GET_CODE (x) < GET_CODE (y))
3513 return -1;
3514 else
3515 return 1;
3517 gcc_assert (GET_MODE (x) == GET_MODE (y));
3519 if (GET_CODE (x) == DEBUG_EXPR)
3521 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3522 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3523 return -1;
3524 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3525 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3526 return 1;
3529 fmt = GET_RTX_FORMAT (code);
3530 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3531 switch (fmt[i])
3533 case 'w':
3534 if (XWINT (x, i) == XWINT (y, i))
3535 break;
3536 else if (XWINT (x, i) < XWINT (y, i))
3537 return -1;
3538 else
3539 return 1;
3541 case 'n':
3542 case 'i':
3543 if (XINT (x, i) == XINT (y, i))
3544 break;
3545 else if (XINT (x, i) < XINT (y, i))
3546 return -1;
3547 else
3548 return 1;
3550 case 'p':
3551 r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
3552 if (r != 0)
3553 return r;
3554 break;
3556 case 'V':
3557 case 'E':
3558 /* Compare the vector length first. */
3559 if (XVECLEN (x, i) == XVECLEN (y, i))
3560 /* Compare the vector's elements. */;
3561 else if (XVECLEN (x, i) < XVECLEN (y, i))
3562 return -1;
3563 else
3564 return 1;
3566 for (j = 0; j < XVECLEN (x, i); j++)
3567 if ((r = loc_cmp (XVECEXP (x, i, j),
3568 XVECEXP (y, i, j))))
3569 return r;
3570 break;
3572 case 'e':
3573 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3574 return r;
3575 break;
3577 case 'S':
3578 case 's':
3579 if (XSTR (x, i) == XSTR (y, i))
3580 break;
3581 if (!XSTR (x, i))
3582 return -1;
3583 if (!XSTR (y, i))
3584 return 1;
3585 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3586 break;
3587 else if (r < 0)
3588 return -1;
3589 else
3590 return 1;
3592 case 'u':
3593 /* These are just backpointers, so they don't matter. */
3594 break;
3596 case '0':
3597 case 't':
3598 break;
3600 /* It is believed that rtx's at this level will never
3601 contain anything but integers and other rtx's,
3602 except for within LABEL_REFs and SYMBOL_REFs. */
3603 default:
3604 gcc_unreachable ();
3606 if (CONST_WIDE_INT_P (x))
3608 /* Compare the vector length first. */
3609 if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3610 return 1;
3611 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3612 return -1;
3614 /* Compare the vector's elements. */
3615 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3617 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3618 return -1;
3619 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3620 return 1;
3624 return 0;
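/* The resulting total order, informally: REGs first (by REGNO), then
   MEMs (by address), then VALUEs (more canonical first), then all
   other codes, with ENTRY_VALUEs ordered after everything else.  So,
   e.g., loc_cmp ((reg 1), (mem ...)) < 0 and
   loc_cmp ((value V), (reg 1)) > 0.  */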
3627 /* Check the order of entries in one-part variables. */
3629 int
3630 canonicalize_loc_order_check (variable **slot,
3631 dataflow_set *data ATTRIBUTE_UNUSED)
3633 variable *var = *slot;
3634 location_chain *node, *next;
3636 #ifdef ENABLE_RTL_CHECKING
3637 int i;
3638 for (i = 0; i < var->n_var_parts; i++)
3639 gcc_assert (var->var_part[i].cur_loc == NULL);
3640 gcc_assert (!var->in_changed_variables);
3641 #endif
3643 if (!var->onepart)
3644 return 1;
3646 gcc_assert (var->n_var_parts == 1);
3647 node = var->var_part[0].loc_chain;
3648 gcc_assert (node);
3650 while ((next = node->next))
3652 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3653 node = next;
3656 return 1;
3659 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3660 more likely to be chosen as canonical for an equivalence set.
3661 Ensure less likely values can reach more likely neighbors, making
3662 the connections bidirectional. */
3664 int
3665 canonicalize_values_mark (variable **slot, dataflow_set *set)
3667 variable *var = *slot;
3668 decl_or_value dv = var->dv;
3669 rtx val;
3670 location_chain *node;
3672 if (!dv_is_value_p (dv))
3673 return 1;
3675 gcc_checking_assert (var->n_var_parts == 1);
3677 val = dv_as_value (dv);
3679 for (node = var->var_part[0].loc_chain; node; node = node->next)
3680 if (GET_CODE (node->loc) == VALUE)
3682 if (canon_value_cmp (node->loc, val))
3683 VALUE_RECURSED_INTO (val) = true;
3684 else
3686 decl_or_value odv = dv_from_value (node->loc);
3687 variable **oslot;
3688 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3690 set_slot_part (set, val, oslot, odv, 0,
3691 node->init, NULL_RTX);
3693 VALUE_RECURSED_INTO (node->loc) = true;
3697 return 1;
3700 /* Remove redundant entries from equivalence lists in onepart
3701 variables, canonicalizing equivalence sets into star shapes. */
3703 int
3704 canonicalize_values_star (variable **slot, dataflow_set *set)
3706 variable *var = *slot;
3707 decl_or_value dv = var->dv;
3708 location_chain *node;
3709 decl_or_value cdv;
3710 rtx val, cval;
3711 variable **cslot;
3712 bool has_value;
3713 bool has_marks;
3715 if (!var->onepart)
3716 return 1;
3718 gcc_checking_assert (var->n_var_parts == 1);
3720 if (dv_is_value_p (dv))
3722 cval = dv_as_value (dv);
3723 if (!VALUE_RECURSED_INTO (cval))
3724 return 1;
3725 VALUE_RECURSED_INTO (cval) = false;
3727 else
3728 cval = NULL_RTX;
3730 restart:
3731 val = cval;
3732 has_value = false;
3733 has_marks = false;
3735 gcc_assert (var->n_var_parts == 1);
3737 for (node = var->var_part[0].loc_chain; node; node = node->next)
3738 if (GET_CODE (node->loc) == VALUE)
3740 has_value = true;
3741 if (VALUE_RECURSED_INTO (node->loc))
3742 has_marks = true;
3743 if (canon_value_cmp (node->loc, cval))
3744 cval = node->loc;
3747 if (!has_value)
3748 return 1;
3750 if (cval == val)
3752 if (!has_marks || dv_is_decl_p (dv))
3753 return 1;
3755 /* Keep it marked so that we revisit it, either after visiting a
3756 child node, or after visiting a new parent that might be
3757 found out. */
3758 VALUE_RECURSED_INTO (val) = true;
3760 for (node = var->var_part[0].loc_chain; node; node = node->next)
3761 if (GET_CODE (node->loc) == VALUE
3762 && VALUE_RECURSED_INTO (node->loc))
3764 cval = node->loc;
3765 restart_with_cval:
3766 VALUE_RECURSED_INTO (cval) = false;
3767 dv = dv_from_value (cval);
3768 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3769 if (!slot)
3771 gcc_assert (dv_is_decl_p (var->dv));
3772 /* The canonical value was reset and dropped.
3773 Remove it. */
3774 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3775 return 1;
3777 var = *slot;
3778 gcc_assert (dv_is_value_p (var->dv));
3779 if (var->n_var_parts == 0)
3780 return 1;
3781 gcc_assert (var->n_var_parts == 1);
3782 goto restart;
3785 VALUE_RECURSED_INTO (val) = false;
3787 return 1;
3790 /* Push values to the canonical one. */
3791 cdv = dv_from_value (cval);
3792 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3794 for (node = var->var_part[0].loc_chain; node; node = node->next)
3795 if (node->loc != cval)
3797 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3798 node->init, NULL_RTX);
3799 if (GET_CODE (node->loc) == VALUE)
3801 decl_or_value ndv = dv_from_value (node->loc);
3803 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3804 NO_INSERT);
3806 if (canon_value_cmp (node->loc, val))
3808 /* If it could have been a local minimum, it's not any more,
3809 since it's now neighbor to cval, so it may have to push
3810 to it. Conversely, if it wouldn't have prevailed over
3811 val, then whatever mark it has is fine: if it was to
3812 push, it will now push to a more canonical node, but if
3813 it wasn't, then it has already pushed any values it might
3814 have to. */
3815 VALUE_RECURSED_INTO (node->loc) = true;
3816 /* Make sure we visit node->loc by ensuring that cval is
3817 visited too. */
3818 VALUE_RECURSED_INTO (cval) = true;
3820 else if (!VALUE_RECURSED_INTO (node->loc))
3821 /* If we have no need to "recurse" into this node, it's
3822 already "canonicalized", so drop the link to the old
3823 parent. */
3824 clobber_variable_part (set, cval, ndv, 0, NULL);
3826 else if (GET_CODE (node->loc) == REG)
3828 attrs *list = set->regs[REGNO (node->loc)], **listp;
3830 /* Change an existing attribute referring to dv so that it
3831 refers to cdv, removing any duplicate this might
3832 introduce, and checking that no previous duplicates
3833 existed, all in a single pass. */
3835 while (list)
3837 if (list->offset == 0
3838 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3839 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3840 break;
3842 list = list->next;
3845 gcc_assert (list);
3846 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3848 list->dv = cdv;
3849 for (listp = &list->next; (list = *listp); listp = &list->next)
3851 if (list->offset)
3852 continue;
3854 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3856 *listp = list->next;
3857 delete list;
3858 list = *listp;
3859 break;
3862 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3865 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3867 for (listp = &list->next; (list = *listp); listp = &list->next)
3869 if (list->offset)
3870 continue;
3872 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3874 *listp = list->next;
3875 delete list;
3876 list = *listp;
3877 break;
3880 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3883 else
3884 gcc_unreachable ();
3886 if (flag_checking)
3887 while (list)
3889 if (list->offset == 0
3890 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3891 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3892 gcc_unreachable ();
3894 list = list->next;
3899 if (val)
3900 set_slot_part (set, val, cslot, cdv, 0,
3901 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3903 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3905 /* Variable may have been unshared. */
3906 var = *slot;
3907 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3908 && var->var_part[0].loc_chain->next == NULL);
3910 if (VALUE_RECURSED_INTO (cval))
3911 goto restart_with_cval;
3913 return 1;
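/* A sketch of one round (values hypothetical): if V3's list mentions
   V1 and V1 is the more canonical of the two, the pass above pushes
   V3's other locations onto V1's list, rewrites any register attrs
   from V3 to V1, and leaves V3 with the single location V1 -- one
   spoke of the star rooted at the canonical value.  */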
3916 /* Bind one-part variables to the canonical value in an equivalence
3917 set. Not doing this causes dataflow convergence failure in rare
3918 circumstances, see PR42873. Unfortunately we can't do this
3919 efficiently as part of canonicalize_values_star, since we may not
3920 have determined or even seen the canonical value of a set when we
3921 get to a variable that references another member of the set. */
3923 int
3924 canonicalize_vars_star (variable **slot, dataflow_set *set)
3926 variable *var = *slot;
3927 decl_or_value dv = var->dv;
3928 location_chain *node;
3929 rtx cval;
3930 decl_or_value cdv;
3931 variable **cslot;
3932 variable *cvar;
3933 location_chain *cnode;
3935 if (!var->onepart || var->onepart == ONEPART_VALUE)
3936 return 1;
3938 gcc_assert (var->n_var_parts == 1);
3940 node = var->var_part[0].loc_chain;
3942 if (GET_CODE (node->loc) != VALUE)
3943 return 1;
3945 gcc_assert (!node->next);
3946 cval = node->loc;
3948 /* Push values to the canonical one. */
3949 cdv = dv_from_value (cval);
3950 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3951 if (!cslot)
3952 return 1;
3953 cvar = *cslot;
3954 gcc_assert (cvar->n_var_parts == 1);
3956 cnode = cvar->var_part[0].loc_chain;
3958 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3959 that are not more canonical than it. */
3960 if (GET_CODE (cnode->loc) != VALUE
3961 || !canon_value_cmp (cnode->loc, cval))
3962 return 1;
3964 /* CVAL was found to be non-canonical. Change the variable to point
3965 to the canonical VALUE. */
3966 gcc_assert (!cnode->next);
3967 cval = cnode->loc;
3969 slot = set_slot_part (set, cval, slot, dv, 0,
3970 node->init, node->set_src);
3971 clobber_slot_part (set, cval, slot, 0, node->set_src);
3973 return 1;
3976 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3977 corresponding entry in DSM->src. Multi-part variables are combined
3978 with variable_union, whereas onepart dvs are combined with
3979 intersection. */
3981 static int
3982 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
3984 dataflow_set *dst = dsm->dst;
3985 variable **dstslot;
3986 variable *s2var, *dvar = NULL;
3987 decl_or_value dv = s1var->dv;
3988 onepart_enum onepart = s1var->onepart;
3989 rtx val;
3990 hashval_t dvhash;
3991 location_chain *node, **nodep;
3993 /* If the incoming onepart variable has an empty location list, then
3994 the intersection will be just as empty. For other variables,
3995 it's always union. */
3996 gcc_checking_assert (s1var->n_var_parts
3997 && s1var->var_part[0].loc_chain);
3999 if (!onepart)
4000 return variable_union (s1var, dst);
4002 gcc_checking_assert (s1var->n_var_parts == 1);
4004 dvhash = dv_htab_hash (dv);
4005 if (dv_is_value_p (dv))
4006 val = dv_as_value (dv);
4007 else
4008 val = NULL;
4010 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4011 if (!s2var)
4013 dst_can_be_shared = false;
4014 return 1;
4017 dsm->src_onepart_cnt--;
4018 gcc_assert (s2var->var_part[0].loc_chain
4019 && s2var->onepart == onepart
4020 && s2var->n_var_parts == 1);
4022 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4023 if (dstslot)
4025 dvar = *dstslot;
4026 gcc_assert (dvar->refcount == 1
4027 && dvar->onepart == onepart
4028 && dvar->n_var_parts == 1);
4029 nodep = &dvar->var_part[0].loc_chain;
4031 else
4033 nodep = &node;
4034 node = NULL;
4037 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4039 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4040 dvhash, INSERT);
4041 *dstslot = dvar = s2var;
4042 dvar->refcount++;
4044 else
4046 dst_can_be_shared = false;
4048 intersect_loc_chains (val, nodep, dsm,
4049 s1var->var_part[0].loc_chain, s2var);
4051 if (!dstslot)
4053 if (node)
4055 dvar = onepart_pool_allocate (onepart);
4056 dvar->dv = dv;
4057 dvar->refcount = 1;
4058 dvar->n_var_parts = 1;
4059 dvar->onepart = onepart;
4060 dvar->in_changed_variables = false;
4061 dvar->var_part[0].loc_chain = node;
4062 dvar->var_part[0].cur_loc = NULL;
4063 if (onepart)
4064 VAR_LOC_1PAUX (dvar) = NULL;
4065 else
4066 VAR_PART_OFFSET (dvar, 0) = 0;
4068 dstslot
4069 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4070 INSERT);
4071 gcc_assert (!*dstslot);
4072 *dstslot = dvar;
4074 else
4075 return 1;
4079 nodep = &dvar->var_part[0].loc_chain;
4080 while ((node = *nodep))
4082 location_chain **nextp = &node->next;
4084 if (GET_CODE (node->loc) == REG)
4086 attrs *list;
4088 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4089 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4090 && dv_is_value_p (list->dv))
4091 break;
4093 if (!list)
4094 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4095 dv, 0, node->loc);
4096 /* If this value became canonical for another value that had
4097 this register, we want to leave it alone. */
4098 else if (dv_as_value (list->dv) != val)
4100 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4101 dstslot, dv, 0,
4102 node->init, NULL_RTX);
4103 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4105 /* Since nextp points into the removed node, we can't
4106 use it. The pointer to the next node moved to nodep.
4107 However, if the variable we're walking is unshared
4108 during our walk, we'll keep walking the location list
4109 of the previously-shared variable, in which case the
4110 node won't have been removed, and we'll want to skip
4111 it. That's why we test *nodep here. */
4112 if (*nodep != node)
4113 nextp = nodep;
4116 else
4117 /* Canonicalization puts registers first, so we don't have to
4118 walk it all. */
4119 break;
4120 nodep = nextp;
4123 if (dvar != *dstslot)
4124 dvar = *dstslot;
4125 nodep = &dvar->var_part[0].loc_chain;
4127 if (val)
4129 /* Mark all referenced nodes for canonicalization, and make sure
4130 we have mutual equivalence links. */
4131 VALUE_RECURSED_INTO (val) = true;
4132 for (node = *nodep; node; node = node->next)
4133 if (GET_CODE (node->loc) == VALUE)
4135 VALUE_RECURSED_INTO (node->loc) = true;
4136 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4137 node->init, NULL, INSERT);
4140 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4141 gcc_assert (*dstslot == dvar);
4142 canonicalize_values_star (dstslot, dst);
4143 gcc_checking_assert (dstslot
4144 == shared_hash_find_slot_noinsert_1 (dst->vars,
4145 dv, dvhash));
4146 dvar = *dstslot;
4148 else
4150 bool has_value = false, has_other = false;
4152 /* If we have one value and anything else, we're going to
4153 canonicalize this, so make sure all values have an entry in
4154 the table and are marked for canonicalization. */
4155 for (node = *nodep; node; node = node->next)
4157 if (GET_CODE (node->loc) == VALUE)
4159 /* If this was marked during register canonicalization,
4160 we know we have to canonicalize values. */
4161 if (has_value)
4162 has_other = true;
4163 has_value = true;
4164 if (has_other)
4165 break;
4167 else
4169 has_other = true;
4170 if (has_value)
4171 break;
4175 if (has_value && has_other)
4177 for (node = *nodep; node; node = node->next)
4179 if (GET_CODE (node->loc) == VALUE)
4181 decl_or_value dv = dv_from_value (node->loc);
4182 variable **slot = NULL;
4184 if (shared_hash_shared (dst->vars))
4185 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4186 if (!slot)
4187 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4188 INSERT);
4189 if (!*slot)
4191 variable *var = onepart_pool_allocate (ONEPART_VALUE);
4192 var->dv = dv;
4193 var->refcount = 1;
4194 var->n_var_parts = 1;
4195 var->onepart = ONEPART_VALUE;
4196 var->in_changed_variables = false;
4197 var->var_part[0].loc_chain = NULL;
4198 var->var_part[0].cur_loc = NULL;
4199 VAR_LOC_1PAUX (var) = NULL;
4200 *slot = var;
4203 VALUE_RECURSED_INTO (node->loc) = true;
4207 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4208 gcc_assert (*dstslot == dvar);
4209 canonicalize_values_star (dstslot, dst);
4210 gcc_checking_assert (dstslot
4211 == shared_hash_find_slot_noinsert_1 (dst->vars,
4212 dv, dvhash));
4213 dvar = *dstslot;
4217 if (!onepart_variable_different_p (dvar, s2var))
4219 variable_htab_free (dvar);
4220 *dstslot = dvar = s2var;
4221 dvar->refcount++;
4223 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4225 variable_htab_free (dvar);
4226 *dstslot = dvar = s1var;
4227 dvar->refcount++;
4228 dst_can_be_shared = false;
4230 else
4231 dst_can_be_shared = false;
4233 return 1;
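/* Merge sketch (illustrative; registers and value numbers are
   hypothetical): if a one-part variable is at {r1, VAL7} on one incoming
   path and at {r1, (mem)} on the other, the intersection keeps r1
   directly, while VAL7 and the MEM can still meet through value
   equivalences recorded in the sets.  */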
4236 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4237 multi-part variable. Unions of multi-part variables and
4238 intersections of one-part ones will be handled in
4239 variable_merge_over_cur(). */
4241 static int
4242 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm)
4244 dataflow_set *dst = dsm->dst;
4245 decl_or_value dv = s2var->dv;
4247 if (!s2var->onepart)
4249 variable **dstp = shared_hash_find_slot (dst->vars, dv);
4250 *dstp = s2var;
4251 s2var->refcount++;
4252 return 1;
4255 dsm->src_onepart_cnt++;
4256 return 1;
4259 /* Combine dataflow set information from SRC2 into DST. */
4262 static void
4263 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4265 dataflow_set cur = *dst;
4266 dataflow_set *src1 = &cur;
4267 struct dfset_merge dsm;
4268 int i;
4269 size_t src1_elems, src2_elems;
4270 variable_iterator_type hi;
4271 variable *var;
4273 src1_elems = shared_hash_htab (src1->vars)->elements ();
4274 src2_elems = shared_hash_htab (src2->vars)->elements ();
4275 dataflow_set_init (dst);
4276 dst->stack_adjust = cur.stack_adjust;
4277 shared_hash_destroy (dst->vars);
4278 dst->vars = new shared_hash;
4279 dst->vars->refcount = 1;
4280 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4282 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4283 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4285 dsm.dst = dst;
4286 dsm.src = src2;
4287 dsm.cur = src1;
4288 dsm.src_onepart_cnt = 0;
4290 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4291 var, variable, hi)
4292 variable_merge_over_src (var, &dsm);
4293 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4294 var, variable, hi)
4295 variable_merge_over_cur (var, &dsm);
4297 if (dsm.src_onepart_cnt)
4298 dst_can_be_shared = false;
4300 dataflow_set_destroy (src1);
4303 /* For each register, record equivalences between the canonical VALUE it holds and the other values or decls bound to it, then canonicalize the affected entries. */
4305 static void
4306 dataflow_set_equiv_regs (dataflow_set *set)
4308 int i;
4309 attrs *list, **listp;
4311 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4313 rtx canon[NUM_MACHINE_MODES];
4315 /* If the list is empty or one entry, no need to canonicalize
4316 anything. */
4317 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4318 continue;
4320 memset (canon, 0, sizeof (canon));
4322 for (list = set->regs[i]; list; list = list->next)
4323 if (list->offset == 0 && dv_is_value_p (list->dv))
4325 rtx val = dv_as_value (list->dv);
4326 rtx *cvalp = &canon[(int)GET_MODE (val)];
4327 rtx cval = *cvalp;
4329 if (canon_value_cmp (val, cval))
4330 *cvalp = val;
4333 for (list = set->regs[i]; list; list = list->next)
4334 if (list->offset == 0 && dv_onepart_p (list->dv))
4336 rtx cval = canon[(int)GET_MODE (list->loc)];
4338 if (!cval)
4339 continue;
4341 if (dv_is_value_p (list->dv))
4343 rtx val = dv_as_value (list->dv);
4345 if (val == cval)
4346 continue;
4348 VALUE_RECURSED_INTO (val) = true;
4349 set_variable_part (set, val, dv_from_value (cval), 0,
4350 VAR_INIT_STATUS_INITIALIZED,
4351 NULL, NO_INSERT);
4354 VALUE_RECURSED_INTO (cval) = true;
4355 set_variable_part (set, cval, list->dv, 0,
4356 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4359 for (listp = &set->regs[i]; (list = *listp);
4360 listp = list ? &list->next : listp)
4361 if (list->offset == 0 && dv_onepart_p (list->dv))
4363 rtx cval = canon[(int)GET_MODE (list->loc)];
4364 variable **slot;
4366 if (!cval)
4367 continue;
4369 if (dv_is_value_p (list->dv))
4371 rtx val = dv_as_value (list->dv);
4372 if (!VALUE_RECURSED_INTO (val))
4373 continue;
4376 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4377 canonicalize_values_star (slot, set);
4378 if (*listp != list)
4379 list = NULL;
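/* Example (illustrative; register and value numbers are hypothetical):
   if register r1 holds both VAL5 and VAL9 at offset 0 in SImode and
   canon_value_cmp prefers VAL5, an equivalence VAL9 == VAL5 is recorded
   and the canonicalization above leaves VAL5 as the preferred value for
   r1 in that mode.  */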
4384 /* Remove any redundant values in the location list of VAR, which must
4385 be unshared and 1-part. */
4387 static void
4388 remove_duplicate_values (variable *var)
4390 location_chain *node, **nodep;
4392 gcc_assert (var->onepart);
4393 gcc_assert (var->n_var_parts == 1);
4394 gcc_assert (var->refcount == 1);
4396 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4398 if (GET_CODE (node->loc) == VALUE)
4400 if (VALUE_RECURSED_INTO (node->loc))
4402 /* Remove duplicate value node. */
4403 *nodep = node->next;
4404 delete node;
4405 continue;
4407 else
4408 VALUE_RECURSED_INTO (node->loc) = true;
4410 nodep = &node->next;
4413 for (node = var->var_part[0].loc_chain; node; node = node->next)
4414 if (GET_CODE (node->loc) == VALUE)
4416 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4417 VALUE_RECURSED_INTO (node->loc) = false;
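/* Note that VALUE_RECURSED_INTO doubles as a temporary "seen" bit in the
   first walk above; the second walk clears it again so the flag remains
   available to the canonicalization machinery.  */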
4422 /* Hash table iteration argument passed to the variable_post_merge_* callbacks below. */
4423 struct dfset_post_merge
4425 /* The new input set for the current block. */
4426 dataflow_set *set;
4427 /* Pointer to the permanent input set for the current block, or
4428 NULL. */
4429 dataflow_set **permp;
4430 };
4432 /* Create values for incoming expressions associated with one-part
4433 variables that don't have value numbers for them. */
4435 int
4436 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
4438 dataflow_set *set = dfpm->set;
4439 variable *var = *slot;
4440 location_chain *node;
4442 if (!var->onepart || !var->n_var_parts)
4443 return 1;
4445 gcc_assert (var->n_var_parts == 1);
4447 if (dv_is_decl_p (var->dv))
4449 bool check_dupes = false;
4451 restart:
4452 for (node = var->var_part[0].loc_chain; node; node = node->next)
4454 if (GET_CODE (node->loc) == VALUE)
4455 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4456 else if (GET_CODE (node->loc) == REG)
4458 attrs *att, **attp, **curp = NULL;
4460 if (var->refcount != 1)
4462 slot = unshare_variable (set, slot, var,
4463 VAR_INIT_STATUS_INITIALIZED);
4464 var = *slot;
4465 goto restart;
4468 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4469 attp = &att->next)
4470 if (att->offset == 0
4471 && GET_MODE (att->loc) == GET_MODE (node->loc))
4473 if (dv_is_value_p (att->dv))
4475 rtx cval = dv_as_value (att->dv);
4476 node->loc = cval;
4477 check_dupes = true;
4478 break;
4480 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4481 curp = attp;
4484 if (!curp)
4486 curp = attp;
4487 while (*curp)
4488 if ((*curp)->offset == 0
4489 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4490 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4491 break;
4492 else
4493 curp = &(*curp)->next;
4494 gcc_assert (*curp);
4497 if (!att)
4499 decl_or_value cdv;
4500 rtx cval;
4502 if (!*dfpm->permp)
4504 *dfpm->permp = XNEW (dataflow_set);
4505 dataflow_set_init (*dfpm->permp);
4508 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4509 att; att = att->next)
4510 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4512 gcc_assert (att->offset == 0
4513 && dv_is_value_p (att->dv));
4514 val_reset (set, att->dv);
4515 break;
4518 if (att)
4520 cdv = att->dv;
4521 cval = dv_as_value (cdv);
4523 else
4525 /* Create a unique value to hold this register,
4526 that ought to be found and reused in
4527 subsequent rounds. */
4528 cselib_val *v;
4529 gcc_assert (!cselib_lookup (node->loc,
4530 GET_MODE (node->loc), 0,
4531 VOIDmode));
4532 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4533 VOIDmode);
4534 cselib_preserve_value (v);
4535 cselib_invalidate_rtx (node->loc);
4536 cval = v->val_rtx;
4537 cdv = dv_from_value (cval);
4538 if (dump_file)
4539 fprintf (dump_file,
4540 "Created new value %u:%u for reg %i\n",
4541 v->uid, v->hash, REGNO (node->loc));
4544 var_reg_decl_set (*dfpm->permp, node->loc,
4545 VAR_INIT_STATUS_INITIALIZED,
4546 cdv, 0, NULL, INSERT);
4548 node->loc = cval;
4549 check_dupes = true;
4552 /* Remove the attribute referring to the decl, which now
4553 uses the value for the register; that value either already
4554 exists or will be added when we bring perm in. */
4555 att = *curp;
4556 *curp = att->next;
4557 delete att;
4561 if (check_dupes)
4562 remove_duplicate_values (var);
4565 return 1;
4568 /* Reset values in the permanent set that are not associated with the
4569 chosen expression. */
4571 int
4572 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
4574 dataflow_set *set = dfpm->set;
4575 variable *pvar = *pslot, *var;
4576 location_chain *pnode;
4577 decl_or_value dv;
4578 attrs *att;
4580 gcc_assert (dv_is_value_p (pvar->dv)
4581 && pvar->n_var_parts == 1);
4582 pnode = pvar->var_part[0].loc_chain;
4583 gcc_assert (pnode
4584 && !pnode->next
4585 && REG_P (pnode->loc));
4587 dv = pvar->dv;
4589 var = shared_hash_find (set->vars, dv);
4590 if (var)
4592 /* Although variable_post_merge_new_vals may have made decls
4593 non-star-canonical, values that pre-existed in canonical form
4594 remain canonical, and newly-created values reference a single
4595 REG, so they are canonical as well. Since VAR has the
4596 location list for a VALUE, using find_loc_in_1pdv for it is
4597 fine, since VALUEs don't map back to DECLs. */
4598 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4599 return 1;
4600 val_reset (set, dv);
4603 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4604 if (att->offset == 0
4605 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4606 && dv_is_value_p (att->dv))
4607 break;
4609 /* If there is a value associated with this register already, create
4610 an equivalence. */
4611 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4613 rtx cval = dv_as_value (att->dv);
4614 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4615 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4616 NULL, INSERT);
4618 else if (!att)
4620 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4621 dv, 0, pnode->loc);
4622 variable_union (pvar, set);
4625 return 1;
4628 /* Adjust SET after merging: create values for incoming expressions that
4629 lack them, reset stale permanent-set entries, and re-canonicalize. */
4631 static void
4632 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4634 struct dfset_post_merge dfpm;
4636 dfpm.set = set;
4637 dfpm.permp = permp;
4639 shared_hash_htab (set->vars)
4640 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4641 if (*permp)
4642 shared_hash_htab ((*permp)->vars)
4643 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4644 shared_hash_htab (set->vars)
4645 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4646 shared_hash_htab (set->vars)
4647 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4650 /* Return a node whose loc is a MEM that refers to EXPR in the
4651 location list of a one-part variable or value VAR, or in that of
4652 any values recursively mentioned in the location lists. */
4654 static location_chain *
4655 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4657 location_chain *node;
4658 decl_or_value dv;
4659 variable *var;
4660 location_chain *where = NULL;
4662 if (!val)
4663 return NULL;
4665 gcc_assert (GET_CODE (val) == VALUE
4666 && !VALUE_RECURSED_INTO (val));
4668 dv = dv_from_value (val);
4669 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4671 if (!var)
4672 return NULL;
4674 gcc_assert (var->onepart);
4676 if (!var->n_var_parts)
4677 return NULL;
4679 VALUE_RECURSED_INTO (val) = true;
4681 for (node = var->var_part[0].loc_chain; node; node = node->next)
4682 if (MEM_P (node->loc)
4683 && MEM_EXPR (node->loc) == expr
4684 && int_mem_offset (node->loc) == 0)
4686 where = node;
4687 break;
4689 else if (GET_CODE (node->loc) == VALUE
4690 && !VALUE_RECURSED_INTO (node->loc)
4691 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4692 break;
4694 VALUE_RECURSED_INTO (val) = false;
4696 return where;
4699 /* Return TRUE if the value of MEM may vary across a call. */
4701 static bool
4702 mem_dies_at_call (rtx mem)
4704 tree expr = MEM_EXPR (mem);
4705 tree decl;
4707 if (!expr)
4708 return true;
4710 decl = get_base_address (expr);
4712 if (!decl)
4713 return true;
4715 if (!DECL_P (decl))
4716 return true;
4718 return (may_be_aliased (decl)
4719 || (!TREE_READONLY (decl) && is_global_var (decl)));
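/* Rough intuition (illustrative sketch; the decls and the use() helper
   below are hypothetical): a MEM for a local whose address never escapes
   survives calls, whereas one based on an escaped local or a writable
   global may be rewritten by the callee:

     int g;                   // is_global_var: dies at calls
     int l; use (&l);         // may_be_aliased: dies at calls
     int k = 0;               // neither: survives calls  */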
4722 /* Remove all MEMs from the location list of a hash table entry for a
4723 one-part variable, except those whose MEM attributes map back to
4724 the variable itself, directly or within a VALUE. */
4726 int
4727 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
4729 variable *var = *slot;
4731 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4733 tree decl = dv_as_decl (var->dv);
4734 location_chain *loc, **locp;
4735 bool changed = false;
4737 if (!var->n_var_parts)
4738 return 1;
4740 gcc_assert (var->n_var_parts == 1);
4742 if (shared_var_p (var, set->vars))
4744 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4746 /* We want to remove dying MEMs that don't refer to DECL. */
4747 if (GET_CODE (loc->loc) == MEM
4748 && (MEM_EXPR (loc->loc) != decl
4749 || int_mem_offset (loc->loc) != 0)
4750 && mem_dies_at_call (loc->loc))
4751 break;
4752 /* We want to move MEMs that do refer to DECL into this list. */
4753 else if (GET_CODE (loc->loc) == VALUE
4754 && find_mem_expr_in_1pdv (decl, loc->loc,
4755 shared_hash_htab (set->vars)))
4756 break;
4759 if (!loc)
4760 return 1;
4762 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4763 var = *slot;
4764 gcc_assert (var->n_var_parts == 1);
4767 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4768 loc; loc = *locp)
4770 rtx old_loc = loc->loc;
4771 if (GET_CODE (old_loc) == VALUE)
4773 location_chain *mem_node
4774 = find_mem_expr_in_1pdv (decl, loc->loc,
4775 shared_hash_htab (set->vars));
4777 /* ??? This picks up only one out of multiple MEMs that
4778 refer to the same variable. Do we ever need to be
4779 concerned about dealing with more than one, or, given
4780 that they should all map to the same variable
4781 location, their addresses will have been merged and
4782 they will be regarded as equivalent? */
4783 if (mem_node)
4785 loc->loc = mem_node->loc;
4786 loc->set_src = mem_node->set_src;
4787 loc->init = MIN (loc->init, mem_node->init);
4791 if (GET_CODE (loc->loc) != MEM
4792 || (MEM_EXPR (loc->loc) == decl
4793 && int_mem_offset (loc->loc) == 0)
4794 || !mem_dies_at_call (loc->loc))
4796 if (old_loc != loc->loc && emit_notes)
4798 if (old_loc == var->var_part[0].cur_loc)
4800 changed = true;
4801 var->var_part[0].cur_loc = NULL;
4804 locp = &loc->next;
4805 continue;
4808 if (emit_notes)
4810 if (old_loc == var->var_part[0].cur_loc)
4812 changed = true;
4813 var->var_part[0].cur_loc = NULL;
4816 *locp = loc->next;
4817 delete loc;
4820 if (!var->var_part[0].loc_chain)
4822 var->n_var_parts--;
4823 changed = true;
4825 if (changed)
4826 variable_was_changed (var, set);
4829 return 1;
4832 /* Remove all MEMs from the location list of a hash table entry for a
4833 onepart variable. */
4835 int
4836 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
4838 variable *var = *slot;
4840 if (var->onepart != NOT_ONEPART)
4842 location_chain *loc, **locp;
4843 bool changed = false;
4844 rtx cur_loc;
4846 gcc_assert (var->n_var_parts == 1);
4848 if (shared_var_p (var, set->vars))
4850 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4851 if (GET_CODE (loc->loc) == MEM
4852 && mem_dies_at_call (loc->loc))
4853 break;
4855 if (!loc)
4856 return 1;
4858 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4859 var = *slot;
4860 gcc_assert (var->n_var_parts == 1);
4863 if (VAR_LOC_1PAUX (var))
4864 cur_loc = VAR_LOC_FROM (var);
4865 else
4866 cur_loc = var->var_part[0].cur_loc;
4868 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4869 loc; loc = *locp)
4871 if (GET_CODE (loc->loc) != MEM
4872 || !mem_dies_at_call (loc->loc))
4874 locp = &loc->next;
4875 continue;
4878 *locp = loc->next;
4879 /* If we have deleted the location which was last emitted,
4880 we have to emit a new location, so add the variable to the
4881 set of changed variables. */
4882 if (cur_loc == loc->loc)
4884 changed = true;
4885 var->var_part[0].cur_loc = NULL;
4886 if (VAR_LOC_1PAUX (var))
4887 VAR_LOC_FROM (var) = NULL;
4889 delete loc;
4892 if (!var->var_part[0].loc_chain)
4894 var->n_var_parts--;
4895 changed = true;
4897 if (changed)
4898 variable_was_changed (var, set);
4901 return 1;
4904 /* Remove all variable-location information about call-clobbered
4905 registers, as well as associations between MEMs and VALUEs. */
4907 static void
4908 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
4910 unsigned int r;
4911 hard_reg_set_iterator hrsi;
4913 HARD_REG_SET callee_clobbers
4914 = insn_callee_abi (call_insn).full_reg_clobbers ();
4916 EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
4917 var_regno_delete (set, r);
4919 if (MAY_HAVE_DEBUG_BIND_INSNS)
4921 set->traversed_vars = set->vars;
4922 shared_hash_htab (set->vars)
4923 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4924 set->traversed_vars = set->vars;
4925 shared_hash_htab (set->vars)
4926 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4927 set->traversed_vars = NULL;
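/* Return true if some location in VP1's chain has no match in VP2's
   chain.  Note the check is one-directional; variable_different_p below
   compares both ways.  */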
4931 static bool
4932 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4934 location_chain *lc1, *lc2;
4936 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4938 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4940 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4942 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4943 break;
4945 if (rtx_equal_p (lc1->loc, lc2->loc))
4946 break;
4948 if (!lc2)
4949 return true;
4951 return false;
4954 /* Return true if one-part variables VAR1 and VAR2 are different.
4955 They must be in canonical order. */
4957 static bool
4958 onepart_variable_different_p (variable *var1, variable *var2)
4960 location_chain *lc1, *lc2;
4962 if (var1 == var2)
4963 return false;
4965 gcc_assert (var1->n_var_parts == 1
4966 && var2->n_var_parts == 1);
4968 lc1 = var1->var_part[0].loc_chain;
4969 lc2 = var2->var_part[0].loc_chain;
4971 gcc_assert (lc1 && lc2);
4973 while (lc1 && lc2)
4975 if (loc_cmp (lc1->loc, lc2->loc))
4976 return true;
4977 lc1 = lc1->next;
4978 lc2 = lc2->next;
4981 return lc1 != lc2;
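/* Because both chains are sorted by loc_cmp, the single linear walk above
   suffices: the first mismatching pair proves a difference, and leftover
   nodes on either side mean the lists differ in length.  */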
4984 /* Dump to the dump file the differences between the location lists
4985 of one-part variables VAR1 and VAR2. They must be in canonical order. */
4987 static void
4988 dump_onepart_variable_differences (variable *var1, variable *var2)
4990 location_chain *lc1, *lc2;
4992 gcc_assert (var1 != var2);
4993 gcc_assert (dump_file);
4994 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4995 gcc_assert (var1->n_var_parts == 1
4996 && var2->n_var_parts == 1);
4998 lc1 = var1->var_part[0].loc_chain;
4999 lc2 = var2->var_part[0].loc_chain;
5001 gcc_assert (lc1 && lc2);
5003 while (lc1 && lc2)
5005 switch (loc_cmp (lc1->loc, lc2->loc))
5007 case -1:
5008 fprintf (dump_file, "removed: ");
5009 print_rtl_single (dump_file, lc1->loc);
5010 lc1 = lc1->next;
5011 continue;
5012 case 0:
5013 break;
5014 case 1:
5015 fprintf (dump_file, "added: ");
5016 print_rtl_single (dump_file, lc2->loc);
5017 lc2 = lc2->next;
5018 continue;
5019 default:
5020 gcc_unreachable ();
5022 lc1 = lc1->next;
5023 lc2 = lc2->next;
5026 while (lc1)
5028 fprintf (dump_file, "removed: ");
5029 print_rtl_single (dump_file, lc1->loc);
5030 lc1 = lc1->next;
5033 while (lc2)
5035 fprintf (dump_file, "added: ");
5036 print_rtl_single (dump_file, lc2->loc);
5037 lc2 = lc2->next;
5041 /* Return true if variables VAR1 and VAR2 are different. */
5043 static bool
5044 variable_different_p (variable *var1, variable *var2)
5046 int i;
5048 if (var1 == var2)
5049 return false;
5051 if (var1->onepart != var2->onepart)
5052 return true;
5054 if (var1->n_var_parts != var2->n_var_parts)
5055 return true;
5057 if (var1->onepart && var1->n_var_parts)
5059 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5060 && var1->n_var_parts == 1);
5061 /* One-part values have locations in a canonical order. */
5062 return onepart_variable_different_p (var1, var2);
5065 for (i = 0; i < var1->n_var_parts; i++)
5067 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5068 return true;
5069 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5070 return true;
5071 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5072 return true;
5074 return false;
5077 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5079 static bool
5080 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5082 variable_iterator_type hi;
5083 variable *var1;
5084 bool diffound = false;
5085 bool details = (dump_file && (dump_flags & TDF_DETAILS));
5087 #define RETRUE \
5088 do \
5089 { \
5090 if (!details) \
5091 return true; \
5092 else \
5093 diffound = true; \
5094 } \
5095 while (0)
5097 if (old_set->vars == new_set->vars)
5098 return false;
5100 if (shared_hash_htab (old_set->vars)->elements ()
5101 != shared_hash_htab (new_set->vars)->elements ())
5102 RETRUE;
5104 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5105 var1, variable, hi)
5107 variable_table_type *htab = shared_hash_htab (new_set->vars);
5108 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5110 if (!var2)
5112 if (dump_file && (dump_flags & TDF_DETAILS))
5114 fprintf (dump_file, "dataflow difference found: removal of:\n");
5115 dump_var (var1);
5117 RETRUE;
5119 else if (variable_different_p (var1, var2))
5121 if (details)
5123 fprintf (dump_file, "dataflow difference found: "
5124 "old and new follow:\n");
5125 dump_var (var1);
5126 if (dv_onepart_p (var1->dv))
5127 dump_onepart_variable_differences (var1, var2);
5128 dump_var (var2);
5130 RETRUE;
5134 /* There's no need to traverse the second hashtab unless we want to
5135 print the details. If both have the same number of elements and
5136 the second one had all entries found in the first one, then the
5137 second can't have any extra entries. */
5138 if (!details)
5139 return diffound;
5141 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
5142 var1, variable, hi)
5144 variable_table_type *htab = shared_hash_htab (old_set->vars);
5145 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5146 if (!var2)
5148 if (details)
5150 fprintf (dump_file, "dataflow difference found: addition of:\n");
5151 dump_var (var1);
5153 RETRUE;
5157 #undef RETRUE
5159 return diffound;
5162 /* Free the contents of dataflow set SET. */
5164 static void
5165 dataflow_set_destroy (dataflow_set *set)
5167 int i;
5169 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5170 attrs_list_clear (&set->regs[i]);
5172 shared_hash_destroy (set->vars);
5173 set->vars = NULL;
5176 /* Return true if T is a tracked parameter with non-degenerate record type. */
5178 static bool
5179 tracked_record_parameter_p (tree t)
5181 if (TREE_CODE (t) != PARM_DECL)
5182 return false;
5184 if (DECL_MODE (t) == BLKmode)
5185 return false;
5187 tree type = TREE_TYPE (t);
5188 if (TREE_CODE (type) != RECORD_TYPE)
5189 return false;
5191 if (TYPE_FIELDS (type) == NULL_TREE
5192 || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE)
5193 return false;
5195 return true;
5198 /* Shall EXPR be tracked? */
5200 static bool
5201 track_expr_p (tree expr, bool need_rtl)
5203 rtx decl_rtl;
5204 tree realdecl;
5206 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5207 return DECL_RTL_SET_P (expr);
5209 /* If EXPR is not a parameter or a variable do not track it. */
5210 if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
5211 return 0;
5213 /* It also must have a name... */
5214 if (!DECL_NAME (expr) && need_rtl)
5215 return 0;
5217 /* ... and an RTL assigned to it. */
5218 decl_rtl = DECL_RTL_IF_SET (expr);
5219 if (!decl_rtl && need_rtl)
5220 return 0;
5222 /* If this expression is really a debug alias of some other declaration, we
5223 don't need to track this expression if the ultimate declaration is
5224 ignored. */
5225 realdecl = expr;
5226 if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
5228 realdecl = DECL_DEBUG_EXPR (realdecl);
5229 if (!DECL_P (realdecl))
5231 if (handled_component_p (realdecl)
5232 || (TREE_CODE (realdecl) == MEM_REF
5233 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5235 HOST_WIDE_INT bitsize, bitpos;
5236 bool reverse;
5237 tree innerdecl
5238 = get_ref_base_and_extent_hwi (realdecl, &bitpos,
5239 &bitsize, &reverse);
5240 if (!innerdecl
5241 || !DECL_P (innerdecl)
5242 || DECL_IGNORED_P (innerdecl)
5243 /* Do not track declarations for parts of tracked record
5244 parameters since we want to track them as a whole. */
5245 || tracked_record_parameter_p (innerdecl)
5246 || TREE_STATIC (innerdecl)
5247 || bitsize == 0
5248 || bitpos + bitsize > 256)
5249 return 0;
5250 else
5251 realdecl = expr;
5253 else
5254 return 0;
5258 /* Do not track EXPR if REALDECL should be ignored for debugging
5259 purposes. */
5260 if (DECL_IGNORED_P (realdecl))
5261 return 0;
5263 /* Do not track global variables until we are able to emit correct location
5264 list for them. */
5265 if (TREE_STATIC (realdecl))
5266 return 0;
5268 /* When EXPR is a DECL that is an alias of some other variable (see the
5269 example below), the TREE_STATIC flag is not used. Disable tracking of
5270 all DECLs whose DECL_RTL contains a SYMBOL_REF.
5272 Example:
5273 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5274 char **_dl_argv;
5275 */
5276 if (decl_rtl && MEM_P (decl_rtl)
5277 && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
5278 return 0;
5280 /* If the RTL is a memory, it should not be very large (because it would
5281 be an array or a struct). */
5282 if (decl_rtl && MEM_P (decl_rtl))
5284 /* Do not track structures and arrays. */
5285 if ((GET_MODE (decl_rtl) == BLKmode
5286 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5287 && !tracked_record_parameter_p (realdecl))
5288 return 0;
5289 if (MEM_SIZE_KNOWN_P (decl_rtl)
5290 && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
5291 return 0;
5294 DECL_CHANGED (expr) = 0;
5295 DECL_CHANGED (realdecl) = 0;
5296 return 1;
5299 /* Determine whether a given LOC refers to the same variable part as
5300 EXPR+OFFSET. */
5302 static bool
5303 same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
5305 tree expr2;
5306 poly_int64 offset2;
5308 if (! DECL_P (expr))
5309 return false;
5311 if (REG_P (loc))
5313 expr2 = REG_EXPR (loc);
5314 offset2 = REG_OFFSET (loc);
5316 else if (MEM_P (loc))
5318 expr2 = MEM_EXPR (loc);
5319 offset2 = int_mem_offset (loc);
5321 else
5322 return false;
5324 if (! expr2 || ! DECL_P (expr2))
5325 return false;
5327 expr = var_debug_decl (expr);
5328 expr2 = var_debug_decl (expr2);
5330 return (expr == expr2 && known_eq (offset, offset2));
5333 /* LOC is a REG or MEM that we would like to track if possible.
5334 If EXPR is null, we don't know what expression LOC refers to,
5335 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5336 LOC is an lvalue register.
5338 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5339 is something we can track. When returning true, store the mode of
5340 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5341 from EXPR in *OFFSET_OUT (if nonnull). */
5343 static bool
5344 track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
5345 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5347 machine_mode mode;
5349 if (expr == NULL || !track_expr_p (expr, true))
5350 return false;
5352 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5353 whole subreg, but only the old inner part is really relevant. */
5354 mode = GET_MODE (loc);
5355 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5357 machine_mode pseudo_mode;
5359 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5360 if (paradoxical_subreg_p (mode, pseudo_mode))
5362 offset += byte_lowpart_offset (pseudo_mode, mode);
5363 mode = pseudo_mode;
5367 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5368 Do the same if we are storing to a register and EXPR occupies
5369 the whole of register LOC; in that case, the whole of EXPR is
5370 being changed. We exclude complex modes from the second case
5371 because the real and imaginary parts are represented as separate
5372 pseudo registers, even if the whole complex value fits into one
5373 hard register. */
5374 if ((paradoxical_subreg_p (mode, DECL_MODE (expr))
5375 || (store_reg_p
5376 && !COMPLEX_MODE_P (DECL_MODE (expr))
5377 && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
5378 && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
5380 mode = DECL_MODE (expr);
5381 offset = 0;
5384 HOST_WIDE_INT const_offset;
5385 if (!track_offset_p (offset, &const_offset))
5386 return false;
5388 if (mode_out)
5389 *mode_out = mode;
5390 if (offset_out)
5391 *offset_out = const_offset;
5392 return true;
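/* Worked example (illustrative): suppose pseudo 42 had QImode but was
   assigned an SImode hard register via a paradoxical subreg.  The
   REG_ATTRS then describe the whole SImode register, so the code above
   narrows MODE back to QImode and adjusts OFFSET by byte_lowpart_offset,
   letting the tracked lowpart line up with the decl again.  */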
5395 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5396 want to track. When returning nonnull, make sure that the attributes
5397 on the returned value are updated. */
5399 static rtx
5400 var_lowpart (machine_mode mode, rtx loc)
5402 unsigned int regno;
5404 if (GET_MODE (loc) == mode)
5405 return loc;
5407 if (!REG_P (loc) && !MEM_P (loc))
5408 return NULL;
5410 poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5412 if (MEM_P (loc))
5413 return adjust_address_nv (loc, mode, offset);
5415 poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5416 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5417 reg_offset, mode);
5418 return gen_rtx_REG_offset (loc, mode, regno, offset);
5421 /* Carry information about uses and stores while walking rtx. */
5423 struct count_use_info
5425 /* The insn where the RTX is. */
5426 rtx_insn *insn;
5428 /* The basic block where insn is. */
5429 basic_block bb;
5431 /* The array of n_sets sets in the insn, as determined by cselib. */
5432 struct cselib_set *sets;
5433 int n_sets;
5435 /* True if we're counting stores, false otherwise. */
5436 bool store_p;
5437 };
5439 /* Find a VALUE corresponding to X. */
5441 static inline cselib_val *
5442 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5444 int i;
5446 if (cui->sets)
5448 /* This is called after uses are set up and before stores are
5449 processed by cselib, so it's safe to look up srcs, but not
5450 dsts. So we look up expressions that appear in srcs or in
5451 dest expressions, but we search the sets array for dests of
5452 stores. */
5453 if (cui->store_p)
5455 /* Some targets represent memset and memcpy patterns
5456 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5457 (set (mem:BLK ...) (const_int ...)) or
5458 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5459 in that case, otherwise we end up with mode mismatches. */
5460 if (mode == BLKmode && MEM_P (x))
5461 return NULL;
5462 for (i = 0; i < cui->n_sets; i++)
5463 if (cui->sets[i].dest == x)
5464 return cui->sets[i].src_elt;
5466 else
5467 return cselib_lookup (x, mode, 0, VOIDmode);
5470 return NULL;
5473 /* Replace all registers and addresses in an expression with VALUE
5474 expressions that map back to them, unless the expression is a
5475 register. If no mapping is or can be performed, returns NULL. */
5477 static rtx
5478 replace_expr_with_values (rtx loc)
5480 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5481 return NULL;
5482 else if (MEM_P (loc))
5484 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5485 get_address_mode (loc), 0,
5486 GET_MODE (loc));
5487 if (addr)
5488 return replace_equiv_address_nv (loc, addr->val_rtx);
5489 else
5490 return NULL;
5492 else
5493 return cselib_subst_to_values (loc, VOIDmode);
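/* For instance (sketch; the value number is hypothetical):
   (mem:SI (plus:P (reg:P fp) (const_int -4))) becomes
   (mem:SI (value:P V17)) once cselib knows the address as VALUE V17,
   while a bare REG is deliberately left unmapped and yields NULL.  */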
5496 /* Return true if X contains a DEBUG_EXPR. */
5498 static bool
5499 rtx_debug_expr_p (const_rtx x)
5501 subrtx_iterator::array_type array;
5502 FOR_EACH_SUBRTX (iter, array, x, ALL)
5503 if (GET_CODE (*iter) == DEBUG_EXPR)
5504 return true;
5505 return false;
5508 /* Determine what kind of micro operation to choose for a USE. Return
5509 MO_CLOBBER if no micro operation is to be generated. */
5511 static enum micro_operation_type
5512 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5514 tree expr;
5516 if (cui && cui->sets)
5518 if (GET_CODE (loc) == VAR_LOCATION)
5520 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5522 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5523 if (! VAR_LOC_UNKNOWN_P (ploc))
5525 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5526 VOIDmode);
5528 /* ??? flag_float_store and volatile mems are never
5529 given values, but we could in theory use them for
5530 locations. */
5531 gcc_assert (val || 1);
5533 return MO_VAL_LOC;
5535 else
5536 return MO_CLOBBER;
5539 if (REG_P (loc) || MEM_P (loc))
5541 if (modep)
5542 *modep = GET_MODE (loc);
5543 if (cui->store_p)
5545 if (REG_P (loc)
5546 || (find_use_val (loc, GET_MODE (loc), cui)
5547 && cselib_lookup (XEXP (loc, 0),
5548 get_address_mode (loc), 0,
5549 GET_MODE (loc))))
5550 return MO_VAL_SET;
5552 else
5554 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5556 if (val && !cselib_preserved_value_p (val))
5557 return MO_VAL_USE;
5562 if (REG_P (loc))
5564 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5566 if (loc == cfa_base_rtx)
5567 return MO_CLOBBER;
5568 expr = REG_EXPR (loc);
5570 if (!expr)
5571 return MO_USE_NO_VAR;
5572 else if (target_for_debug_bind (var_debug_decl (expr)))
5573 return MO_CLOBBER;
5574 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5575 false, modep, NULL))
5576 return MO_USE;
5577 else
5578 return MO_USE_NO_VAR;
5580 else if (MEM_P (loc))
5582 expr = MEM_EXPR (loc);
5584 if (!expr)
5585 return MO_CLOBBER;
5586 else if (target_for_debug_bind (var_debug_decl (expr)))
5587 return MO_CLOBBER;
5588 else if (track_loc_p (loc, expr, int_mem_offset (loc),
5589 false, modep, NULL)
5590 /* Multi-part variables shouldn't refer to one-part
5591 variable names such as VALUEs (never happens) or
5592 DEBUG_EXPRs (only happens in the presence of debug
5593 insns). */
5594 && (!MAY_HAVE_DEBUG_BIND_INSNS
5595 || !rtx_debug_expr_p (XEXP (loc, 0))))
5596 return MO_USE;
5597 else
5598 return MO_CLOBBER;
5601 return MO_CLOBBER;
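/* Classification examples (illustrative): a hard register whose REG_EXPR
   is a tracked decl yields MO_USE; one with no REG_EXPR yields
   MO_USE_NO_VAR; a MEM whose MEM_EXPR is untracked yields MO_CLOBBER;
   and when cselib sets are available, stores become MO_VAL_SET and uses
   of not-yet-preserved values MO_VAL_USE.  */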
5604 /* Log to OUT information about micro-operation MOPT involving X in
5605 INSN of BB. */
5607 static inline void
5608 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5609 enum micro_operation_type mopt, FILE *out)
5611 fprintf (out, "bb %i op %i insn %i %s ",
5612 bb->index, VTI (bb)->mos.length (),
5613 INSN_UID (insn), micro_operation_type_name[mopt]);
5614 print_inline_rtx (out, x, 2);
5615 fputc ('\n', out);
5618 /* Tell whether the CONCAT used to hold a VALUE and its location
5619 needs value resolution, i.e., an attempt to map the location
5620 back to other incoming values. */
5621 #define VAL_NEEDS_RESOLUTION(x) \
5622 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5623 /* Whether the location in the CONCAT is a tracked expression, that
5624 should also be handled like a MO_USE. */
5625 #define VAL_HOLDS_TRACK_EXPR(x) \
5626 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5627 /* Whether the location in the CONCAT should be handled like a MO_COPY
5628 as well. */
5629 #define VAL_EXPR_IS_COPIED(x) \
5630 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5631 /* Whether the location in the CONCAT should be handled like a
5632 MO_CLOBBER as well. */
5633 #define VAL_EXPR_IS_CLOBBERED(x) \
5634 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5636 /* All preserved VALUEs. */
5637 static vec<rtx> preserved_values;
5639 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5641 static void
5642 preserve_value (cselib_val *val)
5644 cselib_preserve_value (val);
5645 preserved_values.safe_push (val->val_rtx);
5648 /* Helper function for MO_VAL_LOC handling. Return true if X
5649 contains any rtx that is unsuitable for CONST use and has not
5650 been replaced by a VALUE. */
5652 static bool
5653 non_suitable_const (const_rtx x)
5655 subrtx_iterator::array_type array;
5656 FOR_EACH_SUBRTX (iter, array, x, ALL)
5658 const_rtx x = *iter;
5659 switch (GET_CODE (x))
5661 case REG:
5662 case DEBUG_EXPR:
5663 case PC:
5664 case SCRATCH:
5665 case CC0:
5666 case ASM_INPUT:
5667 case ASM_OPERANDS:
5668 return true;
5669 case MEM:
5670 if (!MEM_READONLY_P (x))
5671 return true;
5672 break;
5673 default:
5674 break;
5677 return false;
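/* E.g. (const (plus (symbol_ref "x") (const_int 8))) is acceptable,
   while anything mentioning a REG, an ASM_OPERANDS or a writable MEM is
   reported as unsuitable.  (Illustrative sketch only.)  */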
5680 /* Add the uses (register and memory references) in LOC that will be
5681 tracked to VTI (bb)->mos. */
5683 static void
5684 add_uses (rtx loc, struct count_use_info *cui)
5686 machine_mode mode = VOIDmode;
5687 enum micro_operation_type type = use_type (loc, cui, &mode);
5689 if (type != MO_CLOBBER)
5691 basic_block bb = cui->bb;
5692 micro_operation mo;
5694 mo.type = type;
5695 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5696 mo.insn = cui->insn;
5698 if (type == MO_VAL_LOC)
5700 rtx oloc = loc;
5701 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5702 cselib_val *val;
5704 gcc_assert (cui->sets);
5706 if (MEM_P (vloc)
5707 && !REG_P (XEXP (vloc, 0))
5708 && !MEM_P (XEXP (vloc, 0)))
5710 rtx mloc = vloc;
5711 machine_mode address_mode = get_address_mode (mloc);
5712 cselib_val *val
5713 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5714 GET_MODE (mloc));
5716 if (val && !cselib_preserved_value_p (val))
5717 preserve_value (val);
5720 if (CONSTANT_P (vloc)
5721 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5722 /* For constants don't look up any value. */;
5723 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5724 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5726 machine_mode mode2;
5727 enum micro_operation_type type2;
5728 rtx nloc = NULL;
5729 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5731 if (resolvable)
5732 nloc = replace_expr_with_values (vloc);
5734 if (nloc)
5736 oloc = shallow_copy_rtx (oloc);
5737 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5740 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5742 type2 = use_type (vloc, 0, &mode2);
5744 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5745 || type2 == MO_CLOBBER);
5747 if (type2 == MO_CLOBBER
5748 && !cselib_preserved_value_p (val))
5750 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5751 preserve_value (val);
5754 else if (!VAR_LOC_UNKNOWN_P (vloc))
5756 oloc = shallow_copy_rtx (oloc);
5757 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5760 mo.u.loc = oloc;
5762 else if (type == MO_VAL_USE)
5764 machine_mode mode2 = VOIDmode;
5765 enum micro_operation_type type2;
5766 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5767 rtx vloc, oloc = loc, nloc;
5769 gcc_assert (cui->sets);
5771 if (MEM_P (oloc)
5772 && !REG_P (XEXP (oloc, 0))
5773 && !MEM_P (XEXP (oloc, 0)))
5775 rtx mloc = oloc;
5776 machine_mode address_mode = get_address_mode (mloc);
5777 cselib_val *val
5778 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5779 GET_MODE (mloc));
5781 if (val && !cselib_preserved_value_p (val))
5782 preserve_value (val);
5785 type2 = use_type (loc, 0, &mode2);
5787 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5788 || type2 == MO_CLOBBER);
5790 if (type2 == MO_USE)
5791 vloc = var_lowpart (mode2, loc);
5792 else
5793 vloc = oloc;
5795 /* The loc of a MO_VAL_USE may have two forms:
5797 (concat val src): val is at src, a value-based
5798 representation.
5800 (concat (concat val use) src): same as above, with use as
5801 the MO_USE tracked value, if it differs from src.  */
5805 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5806 nloc = replace_expr_with_values (loc);
5807 if (!nloc)
5808 nloc = oloc;
5810 if (vloc != nloc)
5811 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5812 else
5813 oloc = val->val_rtx;
5815 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5817 if (type2 == MO_USE)
5818 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5819 if (!cselib_preserved_value_p (val))
5821 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5822 preserve_value (val);
5825 else
5826 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5828 if (dump_file && (dump_flags & TDF_DETAILS))
5829 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5830 VTI (bb)->mos.safe_push (mo);
5834 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5836 static void
5837 add_uses_1 (rtx *x, void *cui)
5839 subrtx_var_iterator::array_type array;
5840 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5841 add_uses (*iter, (struct count_use_info *) cui);
5844 /* This is the value used during expansion of locations. We want it
5845 to be unbounded, so that variables expanded deep in a recursion
5846 nest are fully evaluated, so that their values are cached
5847 correctly. We avoid recursion cycles through other means, and we
5848 don't unshare RTL, so excess complexity is not a problem. */
5849 #define EXPR_DEPTH (INT_MAX)
5850 /* We use this to keep too-complex expressions from being emitted as
5851 location notes, and from there into debug information. Users can trade
5852 compile time for ridiculously complex expressions, although they're
5853 seldom useful, and they may often have to be discarded as not
5854 representable anyway. */
5855 #define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)
5857 /* Attempt to reverse the EXPR operation in the debug info and record
5858 it in the cselib table. For example, for reg1 = reg2 + 6, even when
5859 reg2 is no longer live we can express its value as VAL - 6. */
5861 static void
5862 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5864 rtx src, arg, ret;
5865 cselib_val *v;
5866 struct elt_loc_list *l;
5867 enum rtx_code code;
5868 int count;
5870 if (GET_CODE (expr) != SET)
5871 return;
5873 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5874 return;
5876 src = SET_SRC (expr);
5877 switch (GET_CODE (src))
5879 case PLUS:
5880 case MINUS:
5881 case XOR:
5882 case NOT:
5883 case NEG:
5884 if (!REG_P (XEXP (src, 0)))
5885 return;
5886 break;
5887 case SIGN_EXTEND:
5888 case ZERO_EXTEND:
5889 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5890 return;
5891 break;
5892 default:
5893 return;
5896 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5897 return;
5899 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5900 if (!v || !cselib_preserved_value_p (v))
5901 return;
5903 /* Use canonical V to avoid creating multiple redundant expressions
5904 for different VALUES equivalent to V. */
5905 v = canonical_cselib_val (v);
5907 /* Adding a reverse op isn't useful if V already has an always valid
5908 location. Ignore ENTRY_VALUE, while it is always constant, we should
5909 prefer non-ENTRY_VALUE locations whenever possible. */
5910 for (l = v->locs, count = 0; l; l = l->next, count++)
5911 if (CONSTANT_P (l->loc)
5912 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5913 return;
5914 /* Avoid creating too large locs lists. */
5915 else if (count == param_max_vartrack_reverse_op_size)
5916 return;
5918 switch (GET_CODE (src))
5920 case NOT:
5921 case NEG:
5922 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5923 return;
5924 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5925 break;
5926 case SIGN_EXTEND:
5927 case ZERO_EXTEND:
5928 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5929 break;
5930 case XOR:
5931 code = XOR;
5932 goto binary;
5933 case PLUS:
5934 code = MINUS;
5935 goto binary;
5936 case MINUS:
5937 code = PLUS;
5938 goto binary;
5939 binary:
5940 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5941 return;
5942 arg = XEXP (src, 1);
5943 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5945 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5946 if (arg == NULL_RTX)
5947 return;
5948 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5949 return;
5951 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5952 break;
5953 default:
5954 gcc_unreachable ();
5957 cselib_add_permanent_equiv (v, ret, insn);
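/* Worked example (illustrative): for
   (set (reg:SI r1) (plus:SI (reg:SI r2) (const_int 6)))
   the switch above picks MINUS, so r2's canonical value V is given the
   permanent equivalence (minus:SI VAL (const_int 6)), where VAL is the
   VALUE of r1; r2's value thus stays expressible after r2 itself is
   overwritten.  */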
5960 /* Add the stores (register and memory references) in LOC that will be
5961 tracked to VTI (bb)->mos. EXPR is the RTL expression containing the
5962 store. CUIP->insn is the instruction that LOC is part of. */
5964 static void
5965 add_stores (rtx loc, const_rtx expr, void *cuip)
5967 machine_mode mode = VOIDmode, mode2;
5968 struct count_use_info *cui = (struct count_use_info *)cuip;
5969 basic_block bb = cui->bb;
5970 micro_operation mo;
5971 rtx oloc = loc, nloc, src = NULL;
5972 enum micro_operation_type type = use_type (loc, cui, &mode);
5973 bool track_p = false;
5974 cselib_val *v;
5975 bool resolve, preserve;
5977 if (type == MO_CLOBBER)
5978 return;
5980 mode2 = mode;
5982 if (REG_P (loc))
5984 gcc_assert (loc != cfa_base_rtx);
5985 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5986 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5987 || GET_CODE (expr) == CLOBBER)
5989 mo.type = MO_CLOBBER;
5990 mo.u.loc = loc;
5991 if (GET_CODE (expr) == SET
5992 && (SET_DEST (expr) == loc
5993 || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
5994 && XEXP (SET_DEST (expr), 0) == loc))
5995 && !unsuitable_loc (SET_SRC (expr))
5996 && find_use_val (loc, mode, cui))
5998 gcc_checking_assert (type == MO_VAL_SET);
5999 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
6002 else
6004 if (GET_CODE (expr) == SET
6005 && SET_DEST (expr) == loc
6006 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6007 src = var_lowpart (mode2, SET_SRC (expr));
6008 loc = var_lowpart (mode2, loc);
6010 if (src == NULL)
6012 mo.type = MO_SET;
6013 mo.u.loc = loc;
6015 else
6017 rtx xexpr = gen_rtx_SET (loc, src);
6018 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
6020 /* If this is an instruction copying (part of) a parameter
6021 passed by invisible reference to its register location,
6022 pretend it's a SET so that the initial memory location
6023 is discarded, as the parameter register can be reused
6024 for other purposes and we do not track locations based
6025 on generic registers. */
6026 if (MEM_P (src)
6027 && REG_EXPR (loc)
6028 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6029 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6030 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6031 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
6032 != arg_pointer_rtx)
6033 mo.type = MO_SET;
6034 else
6035 mo.type = MO_COPY;
6037 else
6038 mo.type = MO_SET;
6039 mo.u.loc = xexpr;
6042 mo.insn = cui->insn;
6044 else if (MEM_P (loc)
6045 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
6046 || cui->sets))
6048 if (MEM_P (loc) && type == MO_VAL_SET
6049 && !REG_P (XEXP (loc, 0))
6050 && !MEM_P (XEXP (loc, 0)))
6052 rtx mloc = loc;
6053 machine_mode address_mode = get_address_mode (mloc);
6054 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
6055 address_mode, 0,
6056 GET_MODE (mloc));
6058 if (val && !cselib_preserved_value_p (val))
6059 preserve_value (val);
6062 if (GET_CODE (expr) == CLOBBER || !track_p)
6064 mo.type = MO_CLOBBER;
6065 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
6067 else
6069 if (GET_CODE (expr) == SET
6070 && SET_DEST (expr) == loc
6071 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
6072 src = var_lowpart (mode2, SET_SRC (expr));
6073 loc = var_lowpart (mode2, loc);
6075 if (src == NULL)
6077 mo.type = MO_SET;
6078 mo.u.loc = loc;
6080 else
6082 rtx xexpr = gen_rtx_SET (loc, src);
6083 if (same_variable_part_p (SET_SRC (xexpr),
6084 MEM_EXPR (loc),
6085 int_mem_offset (loc)))
6086 mo.type = MO_COPY;
6087 else
6088 mo.type = MO_SET;
6089 mo.u.loc = xexpr;
6092 mo.insn = cui->insn;
6094 else
6095 return;
6097 if (type != MO_VAL_SET)
6098 goto log_and_return;
6100 v = find_use_val (oloc, mode, cui);
6102 if (!v)
6103 goto log_and_return;
6105 resolve = preserve = !cselib_preserved_value_p (v);
6107 /* We cannot track values for multiple-part variables, so we track only
6108 locations for tracked record parameters. */
6109 if (track_p
6110 && REG_P (loc)
6111 && REG_EXPR (loc)
6112 && tracked_record_parameter_p (REG_EXPR (loc)))
6114 /* Although we don't use the value here, it could be used later by the
6115 mere virtue of its existence as the operand of the reverse operation
6116 that gave rise to it (typically extension/truncation). Make sure it
6117 is preserved as required by vt_expand_var_loc_chain. */
6118 if (preserve)
6119 preserve_value (v);
6120 goto log_and_return;
6123 if (loc == stack_pointer_rtx
6124 && (maybe_ne (hard_frame_pointer_adjustment, -1)
6125 || (!frame_pointer_needed && !ACCUMULATE_OUTGOING_ARGS))
6126 && preserve)
6127 cselib_set_value_sp_based (v);
6129 /* Don't record MO_VAL_SET for VALUEs that can be described using
6130 cfa_base_rtx or cfa_base_rtx + CONST_INT, cselib already knows
6131 all the needed equivalences and they shouldn't change depending
6132 on which register holds that VALUE in some instruction. */
6133 if (!frame_pointer_needed
6134 && cfa_base_rtx
6135 && cselib_sp_derived_value_p (v))
6137 if (preserve)
6138 preserve_value (v);
6139 return;
6142 nloc = replace_expr_with_values (oloc);
6143 if (nloc)
6144 oloc = nloc;
6146 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6148 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6150 if (oval == v)
6151 return;
6152 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6154 if (oval && !cselib_preserved_value_p (oval))
6156 micro_operation moa;
6158 preserve_value (oval);
6160 moa.type = MO_VAL_USE;
6161 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6162 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6163 moa.insn = cui->insn;
6165 if (dump_file && (dump_flags & TDF_DETAILS))
6166 log_op_type (moa.u.loc, cui->bb, cui->insn,
6167 moa.type, dump_file);
6168 VTI (bb)->mos.safe_push (moa);
6171 resolve = false;
6173 else if (resolve && GET_CODE (mo.u.loc) == SET)
6175 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6176 nloc = replace_expr_with_values (SET_SRC (expr));
6177 else
6178 nloc = NULL_RTX;
6180 /* Avoid the mode mismatch between oexpr and expr. */
6181 if (!nloc && mode != mode2)
6183 nloc = SET_SRC (expr);
6184 gcc_assert (oloc == SET_DEST (expr));
6187 if (nloc && nloc != SET_SRC (mo.u.loc))
6188 oloc = gen_rtx_SET (oloc, nloc);
6189 else
6191 if (oloc == SET_DEST (mo.u.loc))
6192 /* No point in duplicating. */
6193 oloc = mo.u.loc;
6194 if (!REG_P (SET_SRC (mo.u.loc)))
6195 resolve = false;
6198 else if (!resolve)
6200 if (GET_CODE (mo.u.loc) == SET
6201 && oloc == SET_DEST (mo.u.loc))
6202 /* No point in duplicating. */
6203 oloc = mo.u.loc;
6205 else
6206 resolve = false;
6208 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6210 if (mo.u.loc != oloc)
6211 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6213 /* The loc of a MO_VAL_SET may have various forms:
6215 (concat val dst): dst now holds val
6217 (concat val (set dst src)): dst now holds val, copied from src
6219 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6220 after replacing mems and non-top-level regs with values.
6222 (concat (concat val dstv) (set dst src)): dst now holds val,
6223 copied from src. dstv is a value-based representation of dst, if
6224 it differs from dst. If resolution is needed, src is a REG, and
6225 its mode is the same as that of val.
6227 (concat (concat val (set dstv srcv)) (set dst src)): src
6228 copied to dst, holding val. dstv and srcv are value-based
6229 representations of dst and src, respectively.  */
6233 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6234 reverse_op (v->val_rtx, expr, cui->insn);
6236 mo.u.loc = loc;
6238 if (track_p)
6239 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6240 if (preserve)
6242 VAL_NEEDS_RESOLUTION (loc) = resolve;
6243 preserve_value (v);
6245 if (mo.type == MO_CLOBBER)
6246 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6247 if (mo.type == MO_COPY)
6248 VAL_EXPR_IS_COPIED (loc) = 1;
6250 mo.type = MO_VAL_SET;
6252 log_and_return:
6253 if (dump_file && (dump_flags & TDF_DETAILS))
6254 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6255 VTI (bb)->mos.safe_push (mo);
6258 /* Arguments to the call. */
6259 static rtx call_arguments;
6261 /* Compute call_arguments. */
6263 static void
6264 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6266 rtx link, x, call;
6267 rtx prev, cur, next;
6268 rtx this_arg = NULL_RTX;
6269 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6270 tree obj_type_ref = NULL_TREE;
6271 CUMULATIVE_ARGS args_so_far_v;
6272 cumulative_args_t args_so_far;
6274 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6275 args_so_far = pack_cumulative_args (&args_so_far_v);
6276 call = get_call_rtx_from (insn);
6277 if (call)
6279 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6281 rtx symbol = XEXP (XEXP (call, 0), 0);
6282 if (SYMBOL_REF_DECL (symbol))
6283 fndecl = SYMBOL_REF_DECL (symbol);
6285 if (fndecl == NULL_TREE)
6286 fndecl = MEM_EXPR (XEXP (call, 0));
6287 if (fndecl
6288 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6289 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6290 fndecl = NULL_TREE;
6291 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6292 type = TREE_TYPE (fndecl);
6293 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6295 if (TREE_CODE (fndecl) == INDIRECT_REF
6296 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6297 obj_type_ref = TREE_OPERAND (fndecl, 0);
6298 fndecl = NULL_TREE;
6300 if (type)
6302 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6303 t = TREE_CHAIN (t))
6304 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6305 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6306 break;
6307 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6308 type = NULL;
6309 else
6311 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6312 link = CALL_INSN_FUNCTION_USAGE (insn);
6313 #ifndef PCC_STATIC_STRUCT_RETURN
6314 if (aggregate_value_p (TREE_TYPE (type), type)
6315 && targetm.calls.struct_value_rtx (type, 0) == 0)
6317 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6318 function_arg_info arg (struct_addr, /*named=*/true);
6319 rtx reg;
6320 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6321 nargs + 1);
6322 reg = targetm.calls.function_arg (args_so_far, arg);
6323 targetm.calls.function_arg_advance (args_so_far, arg);
6324 if (reg == NULL_RTX)
6326 for (; link; link = XEXP (link, 1))
6327 if (GET_CODE (XEXP (link, 0)) == USE
6328 && MEM_P (XEXP (XEXP (link, 0), 0)))
6330 link = XEXP (link, 1);
6331 break;
6335 else
6336 #endif
6337 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6338 nargs);
6339 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6341 t = TYPE_ARG_TYPES (type);
6342 function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6343 this_arg = targetm.calls.function_arg (args_so_far, arg);
6344 if (this_arg && !REG_P (this_arg))
6345 this_arg = NULL_RTX;
6346 else if (this_arg == NULL_RTX)
6348 for (; link; link = XEXP (link, 1))
6349 if (GET_CODE (XEXP (link, 0)) == USE
6350 && MEM_P (XEXP (XEXP (link, 0), 0)))
6352 this_arg = XEXP (XEXP (link, 0), 0);
6353 break;
6360 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6362 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6363 if (GET_CODE (XEXP (link, 0)) == USE)
6365 rtx item = NULL_RTX;
6366 x = XEXP (XEXP (link, 0), 0);
6367 if (GET_MODE (link) == VOIDmode
6368 || GET_MODE (link) == BLKmode
6369 || (GET_MODE (link) != GET_MODE (x)
6370 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6371 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6372 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6373 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6374 /* Can't do anything for these, if the original type mode
6375 isn't known or can't be converted. */;
6376 else if (REG_P (x))
6378 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6379 scalar_int_mode mode;
6380 if (val && cselib_preserved_value_p (val))
6381 item = val->val_rtx;
6382 else if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
6384 opt_scalar_int_mode mode_iter;
6385 FOR_EACH_WIDER_MODE (mode_iter, mode)
6387 mode = mode_iter.require ();
6388 if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
6389 break;
6391 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6392 if (reg == NULL_RTX || !REG_P (reg))
6393 continue;
6394 val = cselib_lookup (reg, mode, 0, VOIDmode);
6395 if (val && cselib_preserved_value_p (val))
6397 item = val->val_rtx;
6398 break;
6403 else if (MEM_P (x))
6405 rtx mem = x;
6406 cselib_val *val;
6408 if (!frame_pointer_needed)
6410 class adjust_mem_data amd;
6411 amd.mem_mode = VOIDmode;
6412 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6413 amd.store = true;
6414 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6415 &amd);
6416 gcc_assert (amd.side_effects.is_empty ());
6418 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6419 if (val && cselib_preserved_value_p (val))
6420 item = val->val_rtx;
6421 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6422 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6424 /* For non-integer stack arguments, also check whether they
6425 were initialized by integers. */
6426 scalar_int_mode imode;
6427 if (int_mode_for_mode (GET_MODE (mem)).exists (&imode)
6428 && imode != GET_MODE (mem))
6430 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6431 imode, 0, VOIDmode);
6432 if (val && cselib_preserved_value_p (val))
6433 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6434 imode);
6438 if (item)
6440 rtx x2 = x;
6441 if (GET_MODE (item) != GET_MODE (link))
6442 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6443 if (GET_MODE (x2) != GET_MODE (link))
6444 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6445 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6446 call_arguments
6447 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6449 if (t && t != void_list_node)
6451 rtx reg;
6452 function_arg_info arg (TREE_VALUE (t), /*named=*/true);
6453 apply_pass_by_reference_rules (&args_so_far_v, arg);
6454 reg = targetm.calls.function_arg (args_so_far, arg);
6455 if (TREE_CODE (arg.type) == REFERENCE_TYPE
6456 && INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
6457 && reg
6458 && REG_P (reg)
6459 && GET_MODE (reg) == arg.mode
6460 && (GET_MODE_CLASS (arg.mode) == MODE_INT
6461 || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
6462 && REG_P (x)
6463 && REGNO (x) == REGNO (reg)
6464 && GET_MODE (x) == arg.mode
6465 && item)
6467 machine_mode indmode
6468 = TYPE_MODE (TREE_TYPE (arg.type));
6469 rtx mem = gen_rtx_MEM (indmode, x);
6470 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6471 if (val && cselib_preserved_value_p (val))
6473 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6474 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6475 call_arguments);
6477 else
6479 struct elt_loc_list *l;
6480 tree initial;
6482 /* Try harder: when passing the address of a constant-pool
6483 integer, the value can easily be read back. */
6484 item = XEXP (item, 1);
6485 if (GET_CODE (item) == SUBREG)
6486 item = SUBREG_REG (item);
6487 gcc_assert (GET_CODE (item) == VALUE);
6488 val = CSELIB_VAL_PTR (item);
6489 for (l = val->locs; l; l = l->next)
6490 if (GET_CODE (l->loc) == SYMBOL_REF
6491 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6492 && SYMBOL_REF_DECL (l->loc)
6493 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6495 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6496 if (tree_fits_shwi_p (initial))
6498 item = GEN_INT (tree_to_shwi (initial));
6499 item = gen_rtx_CONCAT (indmode, mem, item);
6500 call_arguments
6501 = gen_rtx_EXPR_LIST (VOIDmode, item,
6502 call_arguments);
6504 break;
6508 targetm.calls.function_arg_advance (args_so_far, arg);
6509 t = TREE_CHAIN (t);
6513 /* Add debug arguments. */
6514 if (fndecl
6515 && TREE_CODE (fndecl) == FUNCTION_DECL
6516 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6518 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6519 if (debug_args)
6521 unsigned int ix;
6522 tree param;
6523 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6525 rtx item;
6526 tree dtemp = (**debug_args)[ix + 1];
6527 machine_mode mode = DECL_MODE (dtemp);
6528 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6529 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6530 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6531 call_arguments);
6536 /* Reverse call_arguments chain. */
6537 prev = NULL_RTX;
6538 for (cur = call_arguments; cur; cur = next)
6540 next = XEXP (cur, 1);
6541 XEXP (cur, 1) = prev;
6542 prev = cur;
6544 call_arguments = prev;
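/* E.g. (illustrative): the list is built by prepending, so arguments
   scanned in the order A, B, C produce the chain C -> B -> A; the
   pointer reversal above restores A -> B -> C.  */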
6546 x = get_call_rtx_from (insn);
6547 if (x)
6549 x = XEXP (XEXP (x, 0), 0);
6550 if (GET_CODE (x) == SYMBOL_REF)
6551 /* Don't record anything. */;
6552 else if (CONSTANT_P (x))
6554 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6555 pc_rtx, x);
6556 call_arguments
6557 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6559 else
6561 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6562 if (val && cselib_preserved_value_p (val))
6564 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6565 call_arguments
6566 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6570 if (this_arg)
6572 machine_mode mode
6573 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6574 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6575 HOST_WIDE_INT token
6576 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6577 if (token)
6578 clobbered = plus_constant (mode, clobbered,
6579 token * GET_MODE_SIZE (mode));
6580 clobbered = gen_rtx_MEM (mode, clobbered);
6581 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6582 call_arguments
6583 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6587 /* Callback for cselib_record_sets_hook, that records uses and
6588 stores in an insn as micro operations, after cselib_record_sets
6589 has analyzed the sets in the insn but before it modifies the
6590 stored values in its internal tables. When we're not doing
6591 cselib in the first place, this is called directly instead, and
6592 sets and n_sets will be 0. */
6594 static void
6595 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6597 basic_block bb = BLOCK_FOR_INSN (insn);
6598 int n1, n2;
6599 struct count_use_info cui;
6600 micro_operation *mos;
6602 cselib_hook_called = true;
6604 cui.insn = insn;
6605 cui.bb = bb;
6606 cui.sets = sets;
6607 cui.n_sets = n_sets;
6609 n1 = VTI (bb)->mos.length ();
6610 cui.store_p = false;
6611 note_uses (&PATTERN (insn), add_uses_1, &cui);
6612 n2 = VTI (bb)->mos.length () - 1;
6613 mos = VTI (bb)->mos.address ();
6615 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6616 MO_VAL_LOC last. */
6617 while (n1 < n2)
6619 while (n1 < n2 && mos[n1].type == MO_USE)
6620 n1++;
6621 while (n1 < n2 && mos[n2].type != MO_USE)
6622 n2--;
6623 if (n1 < n2)
6624 std::swap (mos[n1], mos[n2]);
6627 n2 = VTI (bb)->mos.length () - 1;
6628 while (n1 < n2)
6630 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6631 n1++;
6632 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6633 n2--;
6634 if (n1 < n2)
6635 std::swap (mos[n1], mos[n2]);
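/* Illustrative example of the two partition passes above: micro ops
   logged for an insn as

     MO_USE_NO_VAR, MO_USE, MO_VAL_LOC, MO_USE

   end up reordered in place as

     MO_USE, MO_USE, MO_USE_NO_VAR, MO_VAL_LOC.  */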
6638 if (CALL_P (insn))
6640 micro_operation mo;
6642 mo.type = MO_CALL;
6643 mo.insn = insn;
6644 mo.u.loc = call_arguments;
6645 call_arguments = NULL_RTX;
6647 if (dump_file && (dump_flags & TDF_DETAILS))
6648 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6649 VTI (bb)->mos.safe_push (mo);
6652 n1 = VTI (bb)->mos.length ();
6653 /* This will record NEXT_INSN (insn), such that we can
6654 insert notes before it without worrying about any
6655 notes that MO_USEs might emit after the insn. */
6656 cui.store_p = true;
6657 note_stores (insn, add_stores, &cui);
6658 n2 = VTI (bb)->mos.length () - 1;
6659 mos = VTI (bb)->mos.address ();
6661 /* Order the MO_VAL_USEs first (note_stores does nothing
6662 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6663 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6664 while (n1 < n2)
6666 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6667 n1++;
6668 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6669 n2--;
6670 if (n1 < n2)
6671 std::swap (mos[n1], mos[n2]);
6674 n2 = VTI (bb)->mos.length () - 1;
6675 while (n1 < n2)
6677 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6678 n1++;
6679 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6680 n2--;
6681 if (n1 < n2)
6682 std::swap (mos[n1], mos[n2]);
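/* Return the recorded initialization status of the variable part at
   SRC (a REG or MEM) in dataflow set IN; when -fvar-tracking-uninit
   is disabled, everything is treated as VAR_INIT_STATUS_INITIALIZED.  */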
6686 static enum var_init_status
6687 find_src_status (dataflow_set *in, rtx src)
6689 tree decl = NULL_TREE;
6690 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6692 if (! flag_var_tracking_uninit)
6693 status = VAR_INIT_STATUS_INITIALIZED;
6695 if (src && REG_P (src))
6696 decl = var_debug_decl (REG_EXPR (src));
6697 else if (src && MEM_P (src))
6698 decl = var_debug_decl (MEM_EXPR (src));
6700 if (src && decl)
6701 status = get_init_value (in, src, dv_from_decl (decl));
6703 return status;
6706 /* SRC is the source of an assignment. Use SET to try to find what
6707 was ultimately assigned to SRC. Return that value if known,
6708 otherwise return SRC itself. */
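/* E.g. (illustrative): after a copy "d = s" was recorded in SET with
   set_src S, looking up D's location finds the chain node whose
   set_src is S, so the copy can be attributed back to S.  */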
6710 static rtx
6711 find_src_set_src (dataflow_set *set, rtx src)
6713 tree decl = NULL_TREE; /* The variable being copied around. */
6714 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6715 variable *var;
6716 location_chain *nextp;
6717 int i;
6718 bool found;
6720 if (src && REG_P (src))
6721 decl = var_debug_decl (REG_EXPR (src));
6722 else if (src && MEM_P (src))
6723 decl = var_debug_decl (MEM_EXPR (src));
6725 if (src && decl)
6727 decl_or_value dv = dv_from_decl (decl);
6729 var = shared_hash_find (set->vars, dv);
6730 if (var)
6732 found = false;
6733 for (i = 0; i < var->n_var_parts && !found; i++)
6734 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6735 nextp = nextp->next)
6736 if (rtx_equal_p (nextp->loc, src))
6738 set_src = nextp->set_src;
6739 found = true;
6745 return set_src;
6748 /* Compute the changes of variable locations in the basic block BB. */
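/* In dataflow terms (a sketch, not code from this pass):

     OUT(bb) = transfer (IN(bb))

   where the transfer function below replays BB's micro operations in
   order on a copy of the IN set; the return value says whether the
   resulting OUT set differs from the previous one.  */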
6750 static bool
6751 compute_bb_dataflow (basic_block bb)
6753 unsigned int i;
6754 micro_operation *mo;
6755 bool changed;
6756 dataflow_set old_out;
6757 dataflow_set *in = &VTI (bb)->in;
6758 dataflow_set *out = &VTI (bb)->out;
6760 dataflow_set_init (&old_out);
6761 dataflow_set_copy (&old_out, out);
6762 dataflow_set_copy (out, in);
6764 if (MAY_HAVE_DEBUG_BIND_INSNS)
6765 local_get_addr_cache = new hash_map<rtx, rtx>;
6767 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6769 rtx_insn *insn = mo->insn;
6771 switch (mo->type)
6773 case MO_CALL:
6774 dataflow_set_clear_at_call (out, insn);
6775 break;
6777 case MO_USE:
6779 rtx loc = mo->u.loc;
6781 if (REG_P (loc))
6782 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6783 else if (MEM_P (loc))
6784 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6786 break;
6788 case MO_VAL_LOC:
6790 rtx loc = mo->u.loc;
6791 rtx val, vloc;
6792 tree var;
6794 if (GET_CODE (loc) == CONCAT)
6796 val = XEXP (loc, 0);
6797 vloc = XEXP (loc, 1);
6799 else
6801 val = NULL_RTX;
6802 vloc = loc;
6805 var = PAT_VAR_LOCATION_DECL (vloc);
6807 clobber_variable_part (out, NULL_RTX,
6808 dv_from_decl (var), 0, NULL_RTX);
6809 if (val)
6811 if (VAL_NEEDS_RESOLUTION (loc))
6812 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6813 set_variable_part (out, val, dv_from_decl (var), 0,
6814 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6815 INSERT);
6817 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6818 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6819 dv_from_decl (var), 0,
6820 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6821 INSERT);
6823 break;
6825 case MO_VAL_USE:
6827 rtx loc = mo->u.loc;
6828 rtx val, vloc, uloc;
6830 vloc = uloc = XEXP (loc, 1);
6831 val = XEXP (loc, 0);
6833 if (GET_CODE (val) == CONCAT)
6835 uloc = XEXP (val, 1);
6836 val = XEXP (val, 0);
6839 if (VAL_NEEDS_RESOLUTION (loc))
6840 val_resolve (out, val, vloc, insn);
6841 else
6842 val_store (out, val, uloc, insn, false);
6844 if (VAL_HOLDS_TRACK_EXPR (loc))
6846 if (GET_CODE (uloc) == REG)
6847 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6848 NULL);
6849 else if (GET_CODE (uloc) == MEM)
6850 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6851 NULL);
6854 break;
6856 case MO_VAL_SET:
6858 rtx loc = mo->u.loc;
6859 rtx val, vloc, uloc;
6860 rtx dstv, srcv;
6862 vloc = loc;
6863 uloc = XEXP (vloc, 1);
6864 val = XEXP (vloc, 0);
6865 vloc = uloc;
6867 if (GET_CODE (uloc) == SET)
6869 dstv = SET_DEST (uloc);
6870 srcv = SET_SRC (uloc);
6872 else
6874 dstv = uloc;
6875 srcv = NULL;
6878 if (GET_CODE (val) == CONCAT)
6880 dstv = vloc = XEXP (val, 1);
6881 val = XEXP (val, 0);
6884 if (GET_CODE (vloc) == SET)
6886 srcv = SET_SRC (vloc);
6888 gcc_assert (val != srcv);
6889 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6891 dstv = vloc = SET_DEST (vloc);
6893 if (VAL_NEEDS_RESOLUTION (loc))
6894 val_resolve (out, val, srcv, insn);
6896 else if (VAL_NEEDS_RESOLUTION (loc))
6898 gcc_assert (GET_CODE (uloc) == SET
6899 && GET_CODE (SET_SRC (uloc)) == REG);
6900 val_resolve (out, val, SET_SRC (uloc), insn);
6903 if (VAL_HOLDS_TRACK_EXPR (loc))
6905 if (VAL_EXPR_IS_CLOBBERED (loc))
6907 if (REG_P (uloc))
6908 var_reg_delete (out, uloc, true);
6909 else if (MEM_P (uloc))
6911 gcc_assert (MEM_P (dstv));
6912 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6913 var_mem_delete (out, dstv, true);
6916 else
6918 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6919 rtx src = NULL, dst = uloc;
6920 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6922 if (GET_CODE (uloc) == SET)
6924 src = SET_SRC (uloc);
6925 dst = SET_DEST (uloc);
6928 if (copied_p)
6930 if (flag_var_tracking_uninit)
6932 status = find_src_status (in, src);
6934 if (status == VAR_INIT_STATUS_UNKNOWN)
6935 status = find_src_status (out, src);
6938 src = find_src_set_src (in, src);
6941 if (REG_P (dst))
6942 var_reg_delete_and_set (out, dst, !copied_p,
6943 status, srcv);
6944 else if (MEM_P (dst))
6946 gcc_assert (MEM_P (dstv));
6947 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6948 var_mem_delete_and_set (out, dstv, !copied_p,
6949 status, srcv);
6953 else if (REG_P (uloc))
6954 var_regno_delete (out, REGNO (uloc));
6955 else if (MEM_P (uloc))
6957 gcc_checking_assert (GET_CODE (vloc) == MEM);
6958 gcc_checking_assert (dstv == vloc);
6959 if (dstv != vloc)
6960 clobber_overlapping_mems (out, vloc);
6963 val_store (out, val, dstv, insn, true);
6965 break;
6967 case MO_SET:
6969 rtx loc = mo->u.loc;
6970 rtx set_src = NULL;
6972 if (GET_CODE (loc) == SET)
6974 set_src = SET_SRC (loc);
6975 loc = SET_DEST (loc);
6978 if (REG_P (loc))
6979 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6980 set_src);
6981 else if (MEM_P (loc))
6982 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6983 set_src);
6985 break;
6987 case MO_COPY:
6989 rtx loc = mo->u.loc;
6990 enum var_init_status src_status;
6991 rtx set_src = NULL;
6993 if (GET_CODE (loc) == SET)
6995 set_src = SET_SRC (loc);
6996 loc = SET_DEST (loc);
6999 if (! flag_var_tracking_uninit)
7000 src_status = VAR_INIT_STATUS_INITIALIZED;
7001 else
7003 src_status = find_src_status (in, set_src);
7005 if (src_status == VAR_INIT_STATUS_UNKNOWN)
7006 src_status = find_src_status (out, set_src);
7009 set_src = find_src_set_src (in, set_src);
7011 if (REG_P (loc))
7012 var_reg_delete_and_set (out, loc, false, src_status, set_src);
7013 else if (MEM_P (loc))
7014 var_mem_delete_and_set (out, loc, false, src_status, set_src);
7016 break;
7018 case MO_USE_NO_VAR:
7020 rtx loc = mo->u.loc;
7022 if (REG_P (loc))
7023 var_reg_delete (out, loc, false);
7024 else if (MEM_P (loc))
7025 var_mem_delete (out, loc, false);
7027 break;
7029 case MO_CLOBBER:
7031 rtx loc = mo->u.loc;
7033 if (REG_P (loc))
7034 var_reg_delete (out, loc, true);
7035 else if (MEM_P (loc))
7036 var_mem_delete (out, loc, true);
7038 break;
7040 case MO_ADJUST:
7041 out->stack_adjust += mo->u.adjust;
7042 break;
7046 if (MAY_HAVE_DEBUG_BIND_INSNS)
7048 delete local_get_addr_cache;
7049 local_get_addr_cache = NULL;
7051 dataflow_set_equiv_regs (out);
7052 shared_hash_htab (out->vars)
7053 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
7054 shared_hash_htab (out->vars)
7055 ->traverse <dataflow_set *, canonicalize_values_star> (out);
7056 if (flag_checking)
7057 shared_hash_htab (out->vars)
7058 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
7060 changed = dataflow_set_different (&old_out, out);
7061 dataflow_set_destroy (&old_out);
7062 return changed;
7065 /* Find the locations of variables in the whole function. */
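/* A rough sketch of the iteration below (illustrative pseudo-code):

     pending = all blocks, keyed by reverse completion order;
     while (pending is not empty)
       {
	 swap (worklist, pending);
	 while (worklist is not empty)
	   {
	     bb = extract_min (worklist);
	     recompute IN (bb) from predecessor OUT sets;
	     if (compute_bb_dataflow (bb) changed OUT (bb))
	       reschedule bb's successors on worklist or pending;
	   }
       }

   i.e. the usual iterative dataflow solver, run to a fixed point.  */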
7067 static bool
7068 vt_find_locations (void)
7070 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
7071 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
7072 sbitmap in_worklist, in_pending;
7073 basic_block bb;
7074 edge e;
7075 int *bb_order;
7076 int *rc_order;
7077 int i;
7078 int htabsz = 0;
7079 int htabmax = param_max_vartrack_size;
7080 bool success = true;
7082 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7083 /* Compute reverse completion order of depth first search of the CFG
7084 so that the data-flow runs faster. */
7085 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7086 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7087 pre_and_rev_post_order_compute (NULL, rc_order, false);
7088 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7089 bb_order[rc_order[i]] = i;
7090 free (rc_order);
7092 auto_sbitmap visited (last_basic_block_for_fn (cfun));
7093 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7094 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7095 bitmap_clear (in_worklist);
7097 FOR_EACH_BB_FN (bb, cfun)
7098 pending->insert (bb_order[bb->index], bb);
7099 bitmap_ones (in_pending);
7101 while (success && !pending->empty ())
7103 std::swap (worklist, pending);
7104 std::swap (in_worklist, in_pending);
7106 bitmap_clear (visited);
7108 while (!worklist->empty ())
7110 bb = worklist->extract_min ();
7111 bitmap_clear_bit (in_worklist, bb->index);
7112 gcc_assert (!bitmap_bit_p (visited, bb->index));
7113 if (!bitmap_bit_p (visited, bb->index))
7115 bool changed;
7116 edge_iterator ei;
7117 int oldinsz, oldoutsz;
7119 bitmap_set_bit (visited, bb->index);
7121 if (VTI (bb)->in.vars)
7123 htabsz
7124 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7125 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7126 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7127 oldoutsz
7128 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7130 else
7131 oldinsz = oldoutsz = 0;
7133 if (MAY_HAVE_DEBUG_BIND_INSNS)
7135 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7136 bool first = true, adjust = false;
7138 /* Calculate the IN set as the intersection of
7139 predecessor OUT sets. */
7141 dataflow_set_clear (in);
7142 dst_can_be_shared = true;
7144 FOR_EACH_EDGE (e, ei, bb->preds)
7145 if (!VTI (e->src)->flooded)
7146 gcc_assert (bb_order[bb->index]
7147 <= bb_order[e->src->index]);
7148 else if (first)
7150 dataflow_set_copy (in, &VTI (e->src)->out);
7151 first_out = &VTI (e->src)->out;
7152 first = false;
7154 else
7156 dataflow_set_merge (in, &VTI (e->src)->out);
7157 adjust = true;
7160 if (adjust)
7162 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7164 if (flag_checking)
7165 /* Merge and merge_adjust should keep entries in
7166 canonical order. */
7167 shared_hash_htab (in->vars)
7168 ->traverse <dataflow_set *,
7169 canonicalize_loc_order_check> (in);
7171 if (dst_can_be_shared)
7173 shared_hash_destroy (in->vars);
7174 in->vars = shared_hash_copy (first_out->vars);
7178 VTI (bb)->flooded = true;
7180 else
7182 /* Calculate the IN set as union of predecessor OUT sets. */
7183 dataflow_set_clear (&VTI (bb)->in);
7184 FOR_EACH_EDGE (e, ei, bb->preds)
7185 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7188 changed = compute_bb_dataflow (bb);
7189 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7190 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7192 if (htabmax && htabsz > htabmax)
7194 if (MAY_HAVE_DEBUG_BIND_INSNS)
7195 inform (DECL_SOURCE_LOCATION (cfun->decl),
7196 "variable tracking size limit exceeded with "
7197 "%<-fvar-tracking-assignments%>, retrying without");
7198 else
7199 inform (DECL_SOURCE_LOCATION (cfun->decl),
7200 "variable tracking size limit exceeded");
7201 success = false;
7202 break;
7205 if (changed)
7207 FOR_EACH_EDGE (e, ei, bb->succs)
7209 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7210 continue;
7212 if (bitmap_bit_p (visited, e->dest->index))
7214 if (!bitmap_bit_p (in_pending, e->dest->index))
7216 /* Send E->DEST to next round. */
7217 bitmap_set_bit (in_pending, e->dest->index);
7218 pending->insert (bb_order[e->dest->index],
7219 e->dest);
7222 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7224 /* Add E->DEST to current round. */
7225 bitmap_set_bit (in_worklist, e->dest->index);
7226 worklist->insert (bb_order[e->dest->index],
7227 e->dest);
7232 if (dump_file)
7233 fprintf (dump_file,
7234 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7235 bb->index,
7236 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7237 oldinsz,
7238 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7239 oldoutsz,
7240 (int)worklist->nodes (), (int)pending->nodes (),
7241 htabsz);
7243 if (dump_file && (dump_flags & TDF_DETAILS))
7245 fprintf (dump_file, "BB %i IN:\n", bb->index);
7246 dump_dataflow_set (&VTI (bb)->in);
7247 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7248 dump_dataflow_set (&VTI (bb)->out);
7254 if (success && MAY_HAVE_DEBUG_BIND_INSNS)
7255 FOR_EACH_BB_FN (bb, cfun)
7256 gcc_assert (VTI (bb)->flooded);
7258 free (bb_order);
7259 delete worklist;
7260 delete pending;
7261 sbitmap_free (in_worklist);
7262 sbitmap_free (in_pending);
7264 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7265 return success;
7268 /* Print the content of the LIST to dump file. */
7270 static void
7271 dump_attrs_list (attrs *list)
7273 for (; list; list = list->next)
7275 if (dv_is_decl_p (list->dv))
7276 print_mem_expr (dump_file, dv_as_decl (list->dv));
7277 else
7278 print_rtl_single (dump_file, dv_as_value (list->dv));
7279 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7281 fprintf (dump_file, "\n");
7284 /* Print the information about variable *SLOT to dump file. */
7286 static int
7287 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED)
7289 variable *var = *slot;
7291 dump_var (var);
7293 /* Continue traversing the hash table. */
7294 return 1;
7297 /* Print the information about variable VAR to dump file. */
7299 static void
7300 dump_var (variable *var)
7302 int i;
7303 location_chain *node;
7305 if (dv_is_decl_p (var->dv))
7307 const_tree decl = dv_as_decl (var->dv);
7309 if (DECL_NAME (decl))
7311 fprintf (dump_file, " name: %s",
7312 IDENTIFIER_POINTER (DECL_NAME (decl)));
7313 if (dump_flags & TDF_UID)
7314 fprintf (dump_file, "D.%u", DECL_UID (decl));
7316 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7317 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7318 else
7319 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7320 fprintf (dump_file, "\n");
7322 else
7324 fputc (' ', dump_file);
7325 print_rtl_single (dump_file, dv_as_value (var->dv));
7328 for (i = 0; i < var->n_var_parts; i++)
7330 fprintf (dump_file, " offset %ld\n",
7331 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7332 for (node = var->var_part[i].loc_chain; node; node = node->next)
7334 fprintf (dump_file, " ");
7335 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7336 fprintf (dump_file, "[uninit]");
7337 print_rtl_single (dump_file, node->loc);
7342 /* Print the information about variables from hash table VARS to dump file. */
7344 static void
7345 dump_vars (variable_table_type *vars)
7347 if (!vars->is_empty ())
7349 fprintf (dump_file, "Variables:\n");
7350 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7354 /* Print the dataflow set SET to dump file. */
7356 static void
7357 dump_dataflow_set (dataflow_set *set)
7359 int i;
7361 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7362 set->stack_adjust);
7363 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7365 if (set->regs[i])
7367 fprintf (dump_file, "Reg %d:", i);
7368 dump_attrs_list (set->regs[i]);
7371 dump_vars (shared_hash_htab (set->vars));
7372 fprintf (dump_file, "\n");
7375 /* Print the IN and OUT sets for each basic block to dump file. */
7377 static void
7378 dump_dataflow_sets (void)
7380 basic_block bb;
7382 FOR_EACH_BB_FN (bb, cfun)
7384 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7385 fprintf (dump_file, "IN:\n");
7386 dump_dataflow_set (&VTI (bb)->in);
7387 fprintf (dump_file, "OUT:\n");
7388 dump_dataflow_set (&VTI (bb)->out);
7392 /* Return the variable for DV in dropped_values, inserting one if
7393 requested with INSERT. */
7395 static inline variable *
7396 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7398 variable **slot;
7399 variable *empty_var;
7400 onepart_enum onepart;
7402 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7404 if (!slot)
7405 return NULL;
7407 if (*slot)
7408 return *slot;
7410 gcc_checking_assert (insert == INSERT);
7412 onepart = dv_onepart_p (dv);
7414 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7416 empty_var = onepart_pool_allocate (onepart);
7417 empty_var->dv = dv;
7418 empty_var->refcount = 1;
7419 empty_var->n_var_parts = 0;
7420 empty_var->onepart = onepart;
7421 empty_var->in_changed_variables = false;
7422 empty_var->var_part[0].loc_chain = NULL;
7423 empty_var->var_part[0].cur_loc = NULL;
7424 VAR_LOC_1PAUX (empty_var) = NULL;
7425 set_dv_changed (dv, true);
7427 *slot = empty_var;
7429 return empty_var;
7432 /* Recover the one-part aux from dropped_values. */
7434 static struct onepart_aux *
7435 recover_dropped_1paux (variable *var)
7437 variable *dvar;
7439 gcc_checking_assert (var->onepart);
7441 if (VAR_LOC_1PAUX (var))
7442 return VAR_LOC_1PAUX (var);
7444 if (var->onepart == ONEPART_VDECL)
7445 return NULL;
7447 dvar = variable_from_dropped (var->dv, NO_INSERT);
7449 if (!dvar)
7450 return NULL;
7452 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7453 VAR_LOC_1PAUX (dvar) = NULL;
7455 return VAR_LOC_1PAUX (var);
7458 /* Add variable VAR to the hash table of changed variables and,
7459 if it has no locations, delete it from SET's hash table. */
7461 static void
7462 variable_was_changed (variable *var, dataflow_set *set)
7464 hashval_t hash = dv_htab_hash (var->dv);
7466 if (emit_notes)
7468 variable **slot;
7470 /* Remember this decl or VALUE has been added to changed_variables. */
7471 set_dv_changed (var->dv, true);
7473 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7475 if (*slot)
7477 variable *old_var = *slot;
7478 gcc_assert (old_var->in_changed_variables);
7479 old_var->in_changed_variables = false;
7480 if (var != old_var && var->onepart)
7482 /* Restore the auxiliary info from an empty variable
7483 previously created for changed_variables, so it is
7484 not lost. */
7485 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7486 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7487 VAR_LOC_1PAUX (old_var) = NULL;
7489 variable_htab_free (*slot);
7492 if (set && var->n_var_parts == 0)
7494 onepart_enum onepart = var->onepart;
7495 variable *empty_var = NULL;
7496 variable **dslot = NULL;
7498 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7500 dslot = dropped_values->find_slot_with_hash (var->dv,
7501 dv_htab_hash (var->dv),
7502 INSERT);
7503 empty_var = *dslot;
7505 if (empty_var)
7507 gcc_checking_assert (!empty_var->in_changed_variables);
7508 if (!VAR_LOC_1PAUX (var))
7510 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7511 VAR_LOC_1PAUX (empty_var) = NULL;
7513 else
7514 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7518 if (!empty_var)
7520 empty_var = onepart_pool_allocate (onepart);
7521 empty_var->dv = var->dv;
7522 empty_var->refcount = 1;
7523 empty_var->n_var_parts = 0;
7524 empty_var->onepart = onepart;
7525 if (dslot)
7527 empty_var->refcount++;
7528 *dslot = empty_var;
7531 else
7532 empty_var->refcount++;
7533 empty_var->in_changed_variables = true;
7534 *slot = empty_var;
7535 if (onepart)
7537 empty_var->var_part[0].loc_chain = NULL;
7538 empty_var->var_part[0].cur_loc = NULL;
7539 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7540 VAR_LOC_1PAUX (var) = NULL;
7542 goto drop_var;
7544 else
7546 if (var->onepart && !VAR_LOC_1PAUX (var))
7547 recover_dropped_1paux (var);
7548 var->refcount++;
7549 var->in_changed_variables = true;
7550 *slot = var;
7553 else
7555 gcc_assert (set);
7556 if (var->n_var_parts == 0)
7558 variable **slot;
7560 drop_var:
7561 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7562 if (slot)
7564 if (shared_hash_shared (set->vars))
7565 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7566 NO_INSERT);
7567 shared_hash_htab (set->vars)->clear_slot (slot);
7573 /* Look for the index in VAR->var_part corresponding to OFFSET.
7574 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7575 referenced int will be set to the index that the part has or should
7576 have, if it should be inserted. */
7578 static inline int
7579 find_variable_location_part (variable *var, HOST_WIDE_INT offset,
7580 int *insertion_point)
7582 int pos, low, high;
7584 if (var->onepart)
7586 if (offset != 0)
7587 return -1;
7589 if (insertion_point)
7590 *insertion_point = 0;
7592 return var->n_var_parts - 1;
7595 /* Find the location part. */
7596 low = 0;
7597 high = var->n_var_parts;
7598 while (low != high)
7600 pos = (low + high) / 2;
7601 if (VAR_PART_OFFSET (var, pos) < offset)
7602 low = pos + 1;
7603 else
7604 high = pos;
7606 pos = low;
7608 if (insertion_point)
7609 *insertion_point = pos;
7611 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7612 return pos;
7614 return -1;
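/* Set the location part of the variable whose SET->vars slot is SLOT:
   the part at OFFSET gets location LOC with status INITIALIZED,
   remembering SET_SRC as the assignment's source.  Worker for
   set_variable_part below.  */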
7617 static variable **
7618 set_slot_part (dataflow_set *set, rtx loc, variable **slot,
7619 decl_or_value dv, HOST_WIDE_INT offset,
7620 enum var_init_status initialized, rtx set_src)
7622 int pos;
7623 location_chain *node, *next;
7624 location_chain **nextp;
7625 variable *var;
7626 onepart_enum onepart;
7628 var = *slot;
7630 if (var)
7631 onepart = var->onepart;
7632 else
7633 onepart = dv_onepart_p (dv);
7635 gcc_checking_assert (offset == 0 || !onepart);
7636 gcc_checking_assert (loc != dv_as_opaque (dv));
7638 if (! flag_var_tracking_uninit)
7639 initialized = VAR_INIT_STATUS_INITIALIZED;
7641 if (!var)
7643 /* Create new variable information. */
7644 var = onepart_pool_allocate (onepart);
7645 var->dv = dv;
7646 var->refcount = 1;
7647 var->n_var_parts = 1;
7648 var->onepart = onepart;
7649 var->in_changed_variables = false;
7650 if (var->onepart)
7651 VAR_LOC_1PAUX (var) = NULL;
7652 else
7653 VAR_PART_OFFSET (var, 0) = offset;
7654 var->var_part[0].loc_chain = NULL;
7655 var->var_part[0].cur_loc = NULL;
7656 *slot = var;
7657 pos = 0;
7658 nextp = &var->var_part[0].loc_chain;
7660 else if (onepart)
7662 int r = -1, c = 0;
7664 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7666 pos = 0;
7668 if (GET_CODE (loc) == VALUE)
7670 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7671 nextp = &node->next)
7672 if (GET_CODE (node->loc) == VALUE)
7674 if (node->loc == loc)
7676 r = 0;
7677 break;
7679 if (canon_value_cmp (node->loc, loc))
7680 c++;
7681 else
7683 r = 1;
7684 break;
7687 else if (REG_P (node->loc) || MEM_P (node->loc))
7688 c++;
7689 else
7691 r = 1;
7692 break;
7695 else if (REG_P (loc))
7697 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7698 nextp = &node->next)
7699 if (REG_P (node->loc))
7701 if (REGNO (node->loc) < REGNO (loc))
7702 c++;
7703 else
7705 if (REGNO (node->loc) == REGNO (loc))
7706 r = 0;
7707 else
7708 r = 1;
7709 break;
7712 else
7714 r = 1;
7715 break;
7718 else if (MEM_P (loc))
7720 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7721 nextp = &node->next)
7722 if (REG_P (node->loc))
7723 c++;
7724 else if (MEM_P (node->loc))
7726 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7727 break;
7728 else
7729 c++;
7731 else
7733 r = 1;
7734 break;
7737 else
7738 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7739 nextp = &node->next)
7740 if ((r = loc_cmp (node->loc, loc)) >= 0)
7741 break;
7742 else
7743 c++;
7745 if (r == 0)
7746 return slot;
7748 if (shared_var_p (var, set->vars))
7750 slot = unshare_variable (set, slot, var, initialized);
7751 var = *slot;
7752 for (nextp = &var->var_part[0].loc_chain; c;
7753 nextp = &(*nextp)->next)
7754 c--;
7755 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7758 else
7760 int inspos = 0;
7762 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7764 pos = find_variable_location_part (var, offset, &inspos);
7766 if (pos >= 0)
7768 node = var->var_part[pos].loc_chain;
7770 if (node
7771 && ((REG_P (node->loc) && REG_P (loc)
7772 && REGNO (node->loc) == REGNO (loc))
7773 || rtx_equal_p (node->loc, loc)))
7775 /* LOC is at the beginning of the chain, so we have nothing
7776 to do. */
7777 if (node->init < initialized)
7778 node->init = initialized;
7779 if (set_src != NULL)
7780 node->set_src = set_src;
7782 return slot;
7784 else
7786 /* We have to make a copy of a shared variable. */
7787 if (shared_var_p (var, set->vars))
7789 slot = unshare_variable (set, slot, var, initialized);
7790 var = *slot;
7794 else
7796 /* We have not found the location part; a new one will be created. */
7798 /* We have to make a copy of the shared variable. */
7799 if (shared_var_p (var, set->vars))
7801 slot = unshare_variable (set, slot, var, initialized);
7802 var = *slot;
7805 /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
7806 thus there are at most MAX_VAR_PARTS different offsets. */
7807 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7808 && (!var->n_var_parts || !onepart));
7810 /* We have to move the elements of the array starting at index
7811 inspos to the next position. */
7812 for (pos = var->n_var_parts; pos > inspos; pos--)
7813 var->var_part[pos] = var->var_part[pos - 1];
7815 var->n_var_parts++;
7816 gcc_checking_assert (!onepart);
7817 VAR_PART_OFFSET (var, pos) = offset;
7818 var->var_part[pos].loc_chain = NULL;
7819 var->var_part[pos].cur_loc = NULL;
7822 /* Delete the location from the list. */
7823 nextp = &var->var_part[pos].loc_chain;
7824 for (node = var->var_part[pos].loc_chain; node; node = next)
7826 next = node->next;
7827 if ((REG_P (node->loc) && REG_P (loc)
7828 && REGNO (node->loc) == REGNO (loc))
7829 || rtx_equal_p (node->loc, loc))
7831 /* Save these values, to assign to the new node, before
7832 deleting this one. */
7833 if (node->init > initialized)
7834 initialized = node->init;
7835 if (node->set_src != NULL && set_src == NULL)
7836 set_src = node->set_src;
7837 if (var->var_part[pos].cur_loc == node->loc)
7838 var->var_part[pos].cur_loc = NULL;
7839 delete node;
7840 *nextp = next;
7841 break;
7843 else
7844 nextp = &node->next;
7847 nextp = &var->var_part[pos].loc_chain;
7850 /* Add the location to the beginning. */
7851 node = new location_chain;
7852 node->loc = loc;
7853 node->init = initialized;
7854 node->set_src = set_src;
7855 node->next = *nextp;
7856 *nextp = node;
7858 /* If no location was emitted do so. */
7859 if (var->var_part[pos].cur_loc == NULL)
7860 variable_was_changed (var, set);
7862 return slot;
7865 /* Set the part of variable's location in the dataflow set SET. The
7866 variable part is specified by variable's declaration in DV and
7867 offset OFFSET and the part's location by LOC. IOPT should be
7868 NO_INSERT if the variable is known to be in SET already and the
7869 variable hash table must not be resized, and INSERT otherwise. */
7871 static void
7872 set_variable_part (dataflow_set *set, rtx loc,
7873 decl_or_value dv, HOST_WIDE_INT offset,
7874 enum var_init_status initialized, rtx set_src,
7875 enum insert_option iopt)
7877 variable **slot;
7879 if (iopt == NO_INSERT)
7880 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7881 else
7883 slot = shared_hash_find_slot (set->vars, dv);
7884 if (!slot)
7885 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7887 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7890 /* Remove all recorded register locations for the given variable part
7891 from dataflow set SET, except for those that are identical to LOC.
7892 The variable part is specified by its SET->vars slot SLOT and
7893 offset OFFSET. */
7895 static variable **
7896 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot,
7897 HOST_WIDE_INT offset, rtx set_src)
7899 variable *var = *slot;
7900 int pos = find_variable_location_part (var, offset, NULL);
7902 if (pos >= 0)
7904 location_chain *node, *next;
7906 /* Remove the register locations from the dataflow set. */
7907 next = var->var_part[pos].loc_chain;
7908 for (node = next; node; node = next)
7910 next = node->next;
7911 if (node->loc != loc
7912 && (!flag_var_tracking_uninit
7913 || !set_src
7914 || MEM_P (set_src)
7915 || !rtx_equal_p (set_src, node->set_src)))
7917 if (REG_P (node->loc))
7919 attrs *anode, *anext;
7920 attrs **anextp;
7922 /* Remove the variable part from the register's
7923 list, but preserve any other variable parts
7924 that might be regarded as live in that same
7925 register. */
7926 anextp = &set->regs[REGNO (node->loc)];
7927 for (anode = *anextp; anode; anode = anext)
7929 anext = anode->next;
7930 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7931 && anode->offset == offset)
7933 delete anode;
7934 *anextp = anext;
7936 else
7937 anextp = &anode->next;
7941 slot = delete_slot_part (set, node->loc, slot, offset);
7946 return slot;
7949 /* Remove all recorded register locations for the given variable part
7950 from dataflow set SET, except for those that are identical to loc.
7951 The variable part is specified by variable's declaration or value
7952 DV and offset OFFSET. */
7954 static void
7955 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7956 HOST_WIDE_INT offset, rtx set_src)
7958 variable **slot;
7960 if (!dv_as_opaque (dv)
7961 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7962 return;
7964 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7965 if (!slot)
7966 return;
7968 clobber_slot_part (set, loc, slot, offset, set_src);
7971 /* Delete the part of variable's location from dataflow set SET. The
7972 variable part is specified by its SET->vars slot SLOT and offset
7973 OFFSET and the part's location by LOC. */
7975 static variable **
7976 delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
7977 HOST_WIDE_INT offset)
7979 variable *var = *slot;
7980 int pos = find_variable_location_part (var, offset, NULL);
7982 if (pos >= 0)
7984 location_chain *node, *next;
7985 location_chain **nextp;
7986 bool changed;
7987 rtx cur_loc;
7989 if (shared_var_p (var, set->vars))
7991 /* If the variable contains the location part we have to
7992 make a copy of the variable. */
7993 for (node = var->var_part[pos].loc_chain; node;
7994 node = node->next)
7996 if ((REG_P (node->loc) && REG_P (loc)
7997 && REGNO (node->loc) == REGNO (loc))
7998 || rtx_equal_p (node->loc, loc))
8000 slot = unshare_variable (set, slot, var,
8001 VAR_INIT_STATUS_UNKNOWN);
8002 var = *slot;
8003 break;
8008 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8009 cur_loc = VAR_LOC_FROM (var);
8010 else
8011 cur_loc = var->var_part[pos].cur_loc;
8013 /* Delete the location part. */
8014 changed = false;
8015 nextp = &var->var_part[pos].loc_chain;
8016 for (node = *nextp; node; node = next)
8018 next = node->next;
8019 if ((REG_P (node->loc) && REG_P (loc)
8020 && REGNO (node->loc) == REGNO (loc))
8021 || rtx_equal_p (node->loc, loc))
8023 /* If we have deleted the location which was last emitted
8024 we have to emit new location so add the variable to set
8025 of changed variables. */
8026 if (cur_loc == node->loc)
8028 changed = true;
8029 var->var_part[pos].cur_loc = NULL;
8030 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
8031 VAR_LOC_FROM (var) = NULL;
8033 delete node;
8034 *nextp = next;
8035 break;
8037 else
8038 nextp = &node->next;
8041 if (var->var_part[pos].loc_chain == NULL)
8043 changed = true;
8044 var->n_var_parts--;
8045 while (pos < var->n_var_parts)
8047 var->var_part[pos] = var->var_part[pos + 1];
8048 pos++;
8051 if (changed)
8052 variable_was_changed (var, set);
8055 return slot;
8058 /* Delete the part of variable's location from dataflow set SET. The
8059 variable part is specified by variable's declaration or value DV
8060 and offset OFFSET and the part's location by LOC. */
8062 static void
8063 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
8064 HOST_WIDE_INT offset)
8066 variable **slot = shared_hash_find_slot_noinsert (set->vars, dv);
8067 if (!slot)
8068 return;
8070 delete_slot_part (set, loc, slot, offset);
8074 /* Structure for passing some other parameters to function
8075 vt_expand_loc_callback. */
8076 class expand_loc_callback_data
8078 public:
8079 /* The variables and values active at this point. */
8080 variable_table_type *vars;
8082 /* Stack of values and debug_exprs under expansion, and their
8083 children. */
8084 auto_vec<rtx, 4> expanding;
8086 /* Stack of values and debug_exprs whose expansion hit recursion
8087 cycles. They will have VALUE_RECURSED_INTO marked when added to
8088 this list. This flag will be cleared if any of its dependencies
8089 resolves to a valid location. So, if the flag remains set at the
8090 end of the search, we know no valid location for this one can
8091 possibly exist. */
8092 auto_vec<rtx, 4> pending;
8094 /* The maximum depth among the sub-expressions under expansion.
8095 Zero indicates no expansion so far. */
8096 expand_depth depth;
8099 /* Allocate the one-part auxiliary data structure for VAR, with enough
8100 room for COUNT dependencies. */
8102 static void
8103 loc_exp_dep_alloc (variable *var, int count)
8105 size_t allocsize;
8107 gcc_checking_assert (var->onepart);
8109 /* We can be called with COUNT == 0 to allocate the data structure
8110 without any dependencies, e.g. for the backlinks only. However,
8111 if we are specifying a COUNT, then the dependency list must have
8112 been emptied before. It would be possible to adjust pointers or
8113 force it empty here, but this is better done at an earlier point
8114 in the algorithm, so we instead leave an assertion to catch
8115 errors. */
8116 gcc_checking_assert (!count
8117 || VAR_LOC_DEP_VEC (var) == NULL
8118 || VAR_LOC_DEP_VEC (var)->is_empty ());
8120 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8121 return;
8123 allocsize = offsetof (struct onepart_aux, deps)
8124 + deps_vec::embedded_size (count);
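/* I.e. (illustrative) the aux block is over-allocated so that the
   trailing deps vector has embedded room for COUNT entries, avoiding
   a separate allocation for the dependency list.  */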
8126 if (VAR_LOC_1PAUX (var))
8128 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8129 VAR_LOC_1PAUX (var), allocsize);
8130 /* If the reallocation moves the onepaux structure, the
8131 back-pointer to BACKLINKS in the first list member will still
8132 point to its old location. Adjust it. */
8133 if (VAR_LOC_DEP_LST (var))
8134 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8136 else
8138 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8139 *VAR_LOC_DEP_LSTP (var) = NULL;
8140 VAR_LOC_FROM (var) = NULL;
8141 VAR_LOC_DEPTH (var).complexity = 0;
8142 VAR_LOC_DEPTH (var).entryvals = 0;
8144 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8147 /* Remove all entries from the vector of active dependencies of VAR,
8148 removing them from the back-links lists too. */
8150 static void
8151 loc_exp_dep_clear (variable *var)
8153 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8155 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8156 if (led->next)
8157 led->next->pprev = led->pprev;
8158 if (led->pprev)
8159 *led->pprev = led->next;
8160 VAR_LOC_DEP_VEC (var)->pop ();
8164 /* Insert an active dependency from VAR on X to the vector of
8165 dependencies, and add the corresponding back-link to X's list of
8166 back-links in VARS. */
8168 static void
8169 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
8171 decl_or_value dv;
8172 variable *xvar;
8173 loc_exp_dep *led;
8175 dv = dv_from_rtx (x);
8177 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8178 an additional look up? */
8179 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8181 if (!xvar)
8183 xvar = variable_from_dropped (dv, NO_INSERT);
8184 gcc_checking_assert (xvar);
8187 /* No point in adding the same backlink more than once. This may
8188 arise if, say, the same value appears in two complex expressions
8189 in the same loc_list, or even more than once in a single
8190 expression. */
8191 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8192 return;
8194 if (var->onepart == NOT_ONEPART)
8195 led = new loc_exp_dep;
8196 else
8198 loc_exp_dep empty;
8199 memset (&empty, 0, sizeof (empty));
8200 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8201 led = &VAR_LOC_DEP_VEC (var)->last ();
8203 led->dv = var->dv;
8204 led->value = x;
8206 loc_exp_dep_alloc (xvar, 0);
8207 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8208 led->next = *led->pprev;
8209 if (led->next)
8210 led->next->pprev = &led->next;
8211 *led->pprev = led;
8214 /* Create active dependencies of VAR on COUNT values starting at
8215 VALUE, and corresponding back-links to the entries in VARS. Return
8216 true if we found any pending-recursion results. */
8218 static bool
8219 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count,
8220 variable_table_type *vars)
8222 bool pending_recursion = false;
8224 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8225 || VAR_LOC_DEP_VEC (var)->is_empty ());
8227 /* Set up all dependencies from last_child (as set up at the end of
8228 the loop above) to the end. */
8229 loc_exp_dep_alloc (var, count);
8231 while (count--)
8233 rtx x = *value++;
8235 if (!pending_recursion)
8236 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8238 loc_exp_insert_dep (var, x, vars);
8241 return pending_recursion;
8244 /* Notify the back-links of IVAR that are pending recursion that we
8245 have found a non-NIL value for it, so they are cleared for another
8246 attempt to compute a current location. */
8248 static void
8249 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
8251 loc_exp_dep *led, *next;
8253 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8255 decl_or_value dv = led->dv;
8256 variable *var;
8258 next = led->next;
8260 if (dv_is_value_p (dv))
8262 rtx value = dv_as_value (dv);
8264 /* If we have already resolved it, leave it alone. */
8265 if (!VALUE_RECURSED_INTO (value))
8266 continue;
8268 /* Check that VALUE_RECURSED_INTO, true from the test above,
8269 implies NO_LOC_P. */
8270 gcc_checking_assert (NO_LOC_P (value));
8272 /* We won't notify variables that are being expanded,
8273 because their dependency list is cleared before
8274 recursing. */
8275 NO_LOC_P (value) = false;
8276 VALUE_RECURSED_INTO (value) = false;
8278 gcc_checking_assert (dv_changed_p (dv));
8280 else
8282 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8283 if (!dv_changed_p (dv))
8284 continue;
8287 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8289 if (!var)
8290 var = variable_from_dropped (dv, NO_INSERT);
8292 if (var)
8293 notify_dependents_of_resolved_value (var, vars);
8295 if (next)
8296 next->pprev = led->pprev;
8297 if (led->pprev)
8298 *led->pprev = next;
8299 led->next = NULL;
8300 led->pprev = NULL;
8304 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8305 int max_depth, void *data);
8307 /* Return the combined depth, when one sub-expression evaluated to
8308 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8310 static inline expand_depth
8311 update_depth (expand_depth saved_depth, expand_depth best_depth)
8313 /* If we didn't find anything, stick with what we had. */
8314 if (!best_depth.complexity)
8315 return saved_depth;
8317 /* If we hadn't found anything, use the depth of the current
8318 expression. Do NOT add one extra level, we want to compute the
8319 maximum depth among sub-expressions. We'll increment it later,
8320 if appropriate. */
8321 if (!saved_depth.complexity)
8322 return best_depth;
8324 /* Combine the entryval count so that regardless of which one we
8325 return, the entryval count is accurate. */
8326 best_depth.entryvals = saved_depth.entryvals
8327 = best_depth.entryvals + saved_depth.entryvals;
8329 if (saved_depth.complexity < best_depth.complexity)
8330 return best_depth;
8331 else
8332 return saved_depth;
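/* E.g. (illustrative): combining a saved depth of {complexity 2,
   entryvals 1} with a best depth of {complexity 3, entryvals 0}
   yields {complexity 3, entryvals 1}: the larger complexity wins
   while the entryval counts accumulate.  */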
8335 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8336 DATA for cselib expand callback. If PENDRECP is given, indicate in
8337 it whether any sub-expression couldn't be fully evaluated because
8338 it is pending recursion resolution. */
8340 static inline rtx
8341 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
8342 bool *pendrecp)
8344 class expand_loc_callback_data *elcd
8345 = (class expand_loc_callback_data *) data;
8346 location_chain *loc, *next;
8347 rtx result = NULL;
8348 int first_child, result_first_child, last_child;
8349 bool pending_recursion;
8350 rtx loc_from = NULL;
8351 struct elt_loc_list *cloc = NULL;
8352 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8353 int wanted_entryvals, found_entryvals = 0;
8355 /* Clear all backlinks pointing at this, so that we're not notified
8356 while we're active. */
8357 loc_exp_dep_clear (var);
8359 retry:
8360 if (var->onepart == ONEPART_VALUE)
8362 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8364 gcc_checking_assert (cselib_preserved_value_p (val));
8366 cloc = val->locs;
8369 first_child = result_first_child = last_child
8370 = elcd->expanding.length ();
8372 wanted_entryvals = found_entryvals;
8374 /* Attempt to expand each available location in turn. */
8375 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8376 loc || cloc; loc = next)
8378 result_first_child = last_child;
8380 if (!loc)
8382 loc_from = cloc->loc;
8383 next = loc;
8384 cloc = cloc->next;
8385 if (unsuitable_loc (loc_from))
8386 continue;
8388 else
8390 loc_from = loc->loc;
8391 next = loc->next;
8394 gcc_checking_assert (!unsuitable_loc (loc_from));
8396 elcd->depth.complexity = elcd->depth.entryvals = 0;
8397 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8398 vt_expand_loc_callback, data);
8399 last_child = elcd->expanding.length ();
8401 if (result)
8403 depth = elcd->depth;
8405 gcc_checking_assert (depth.complexity
8406 || result_first_child == last_child);
8408 if (last_child - result_first_child != 1)
8410 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8411 depth.entryvals++;
8412 depth.complexity++;
8415 if (depth.complexity <= EXPR_USE_DEPTH)
8417 if (depth.entryvals <= wanted_entryvals)
8418 break;
8419 else if (!found_entryvals || depth.entryvals < found_entryvals)
8420 found_entryvals = depth.entryvals;
8423 result = NULL;
8426 /* Set it up in case we leave the loop. */
8427 depth.complexity = depth.entryvals = 0;
8428 loc_from = NULL;
8429 result_first_child = first_child;
8432 if (!loc_from && wanted_entryvals < found_entryvals)
8434 /* We found entries with ENTRY_VALUEs and skipped them. Since
8435 we could not find any expansions without ENTRY_VALUEs, but we
8436 found at least one with them, go back and get an entry with
8437 the minimum ENTRY_VALUE count that we found. We could
8438 avoid looping, but since each sub-loc is already resolved,
8439 the re-expansion should be trivial. ??? Should we record all
8440 attempted locs as dependencies, so that we retry the
8441 expansion should any of them change, in the hope it can give
8442 us a new entry without an ENTRY_VALUE? */
8443 elcd->expanding.truncate (first_child);
8444 goto retry;
8447 /* Register all encountered dependencies as active. */
8448 pending_recursion = loc_exp_dep_set
8449 (var, result, elcd->expanding.address () + result_first_child,
8450 last_child - result_first_child, elcd->vars);
8452 elcd->expanding.truncate (first_child);
8454 /* Record where the expansion came from. */
8455 gcc_checking_assert (!result || !pending_recursion);
8456 VAR_LOC_FROM (var) = loc_from;
8457 VAR_LOC_DEPTH (var) = depth;
8459 gcc_checking_assert (!depth.complexity == !result);
8461 elcd->depth = update_depth (saved_depth, depth);
8463 /* Indicate whether any of the dependencies are pending recursion
8464 resolution. */
8465 if (pendrecp)
8466 *pendrecp = pending_recursion;
8468 if (!pendrecp || !pending_recursion)
8469 var->var_part[0].cur_loc = result;
8471 return result;
8474 /* Callback for cselib_expand_value, that looks for expressions
8475 holding the value in the var-tracking hash tables. Return X for
8476 standard processing, anything else is to be used as-is. */
8478 static rtx
8479 vt_expand_loc_callback (rtx x, bitmap regs,
8480 int max_depth ATTRIBUTE_UNUSED,
8481 void *data)
8483 class expand_loc_callback_data *elcd
8484 = (class expand_loc_callback_data *) data;
8485 decl_or_value dv;
8486 variable *var;
8487 rtx result, subreg;
8488 bool pending_recursion = false;
8489 bool from_empty = false;
8491 switch (GET_CODE (x))
8493 case SUBREG:
8494 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8495 EXPR_DEPTH,
8496 vt_expand_loc_callback, data);
8498 if (!subreg)
8499 return NULL;
8501 result = simplify_gen_subreg (GET_MODE (x), subreg,
8502 GET_MODE (SUBREG_REG (x)),
8503 SUBREG_BYTE (x));
8505 /* Invalid SUBREGs are ok in debug info. ??? We could try
8506 alternate expansions for the VALUE as well. */
8507 if (!result && GET_MODE (subreg) != VOIDmode)
8508 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8510 return result;
8512 case DEBUG_EXPR:
8513 case VALUE:
8514 dv = dv_from_rtx (x);
8515 break;
8517 default:
8518 return x;
8521 elcd->expanding.safe_push (x);
8523 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8524 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8526 if (NO_LOC_P (x))
8528 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8529 return NULL;
8532 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8534 if (!var)
8536 from_empty = true;
8537 var = variable_from_dropped (dv, INSERT);
8540 gcc_checking_assert (var);
8542 if (!dv_changed_p (dv))
8544 gcc_checking_assert (!NO_LOC_P (x));
8545 gcc_checking_assert (var->var_part[0].cur_loc);
8546 gcc_checking_assert (VAR_LOC_1PAUX (var));
8547 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8549 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8551 return var->var_part[0].cur_loc;
8554 VALUE_RECURSED_INTO (x) = true;
8555 /* This is tentative, but it makes some tests simpler. */
8556 NO_LOC_P (x) = true;
8558 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8560 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8562 if (pending_recursion)
8564 gcc_checking_assert (!result);
8565 elcd->pending.safe_push (x);
8567 else
8569 NO_LOC_P (x) = !result;
8570 VALUE_RECURSED_INTO (x) = false;
8571 set_dv_changed (dv, false);
8573 if (result)
8574 notify_dependents_of_resolved_value (var, elcd->vars);
8577 return result;
8580 /* While expanding variables, we may encounter recursion cycles
8581 because of mutual (possibly indirect) dependencies between two
8582 particular variables (or values), say A and B. If we're trying to
8583 expand A when we get to B, which in turn attempts to expand A, if
8584 we can't find any other expansion for B, we'll add B to this
8585 pending-recursion stack, and tentatively return NULL for its
8586 location. This tentative value will be used for any other
8587 occurrences of B, unless A gets some other location, in which case
8588 it will notify B that it is worth another try at computing a
8589 location for it, and it will use the location computed for A then.
8590 At the end of the expansion, the tentative NULL locations become
8591 final for all members of PENDING that didn't get a notification.
8592 This function performs this finalization of NULL locations. */
8594 static void
8595 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8597 while (!pending->is_empty ())
8599 rtx x = pending->pop ();
8600 decl_or_value dv;
8602 if (!VALUE_RECURSED_INTO (x))
8603 continue;
8605 gcc_checking_assert (NO_LOC_P (x));
8606 VALUE_RECURSED_INTO (x) = false;
8607 dv = dv_from_rtx (x);
8608 gcc_checking_assert (dv_changed_p (dv));
8609 set_dv_changed (dv, false);
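/* An illustrative (hypothetical) cycle: suppose optimization leaves
   two values expressible only in terms of each other,

       v1 = (plus:SI v2 (const_int -4))
       v2 = (plus:SI v1 (const_int 4))

   Expanding v1 recurses into v2, which recurses back into v1; since
   v1 is marked VALUE_RECURSED_INTO, the inner lookup tentatively
   yields NULL and v2 is pushed onto the pending stack.  If v1 later
   acquires a location, say (reg:SI 7), v2 is notified and re-expands
   to (plus:SI (reg:SI 7) (const_int 4)); otherwise the finalization
   above leaves both without a location.  */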
8613 /* Initialize expand_loc_callback_data D with variable hash table V.
8614 It must be a macro because of alloca (vec stack). */
8615 #define INIT_ELCD(d, v) \
8616 do \
8618 (d).vars = (v); \
8619 (d).depth.complexity = (d).depth.entryvals = 0; \
8621 while (0)
8622 /* Finalize expand_loc_callback_data D, resolved to location L. */
8623 #define FINI_ELCD(d, l) \
8624 do \
8626 resolve_expansions_pending_recursion (&(d).pending); \
8627 (d).pending.release (); \
8628 (d).expanding.release (); \
8630 if ((l) && MEM_P (l)) \
8631 (l) = targetm.delegitimize_address (l); \
8633 while (0)
8635 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8636 equivalences in VARS, updating their CUR_LOCs in the process. */
8638 static rtx
8639 vt_expand_loc (rtx loc, variable_table_type *vars)
8641 class expand_loc_callback_data data;
8642 rtx result;
8644 if (!MAY_HAVE_DEBUG_BIND_INSNS)
8645 return loc;
8647 INIT_ELCD (data, vars);
8649 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8650 vt_expand_loc_callback, &data);
8652 FINI_ELCD (data, result);
8654 return result;
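/* For example (hypothetical RTL): given an equivalence in VARS that
   binds (value:DI 23) to (plus:DI (reg:DI sp) (const_int 16)), a
   location such as (mem:SI (value:DI 23)) expands to
   (mem:SI (plus:DI (reg:DI sp) (const_int 16))), which is something
   the later DWARF emission can actually describe.  */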
8657 /* Expand the one-part VARiable to a location, using the equivalences
8658 in VARS, updating their CUR_LOCs in the process. */
8660 static rtx
8661 vt_expand_1pvar (variable *var, variable_table_type *vars)
8663 class expand_loc_callback_data data;
8664 rtx loc;
8666 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8668 if (!dv_changed_p (var->dv))
8669 return var->var_part[0].cur_loc;
8671 INIT_ELCD (data, vars);
8673 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8675 gcc_checking_assert (data.expanding.is_empty ());
8677 FINI_ELCD (data, loc);
8679 return loc;
8682 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8683 additional parameters: WHERE specifies whether the note shall be emitted
8684 before or after instruction INSN. */
8686 static int
8687 emit_note_insn_var_location (variable **varp, emit_note_data *data)
8689 variable *var = *varp;
8690 rtx_insn *insn = data->insn;
8691 enum emit_note_where where = data->where;
8692 variable_table_type *vars = data->vars;
8693 rtx_note *note;
8694 rtx note_vl;
8695 int i, j, n_var_parts;
8696 bool complete;
8697 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8698 HOST_WIDE_INT last_limit;
8699 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8700 rtx loc[MAX_VAR_PARTS];
8701 tree decl;
8702 location_chain *lc;
8704 gcc_checking_assert (var->onepart == NOT_ONEPART
8705 || var->onepart == ONEPART_VDECL);
8707 decl = dv_as_decl (var->dv);
8709 complete = true;
8710 last_limit = 0;
8711 n_var_parts = 0;
8712 if (!var->onepart)
8713 for (i = 0; i < var->n_var_parts; i++)
8714 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8715 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8716 for (i = 0; i < var->n_var_parts; i++)
8718 machine_mode mode, wider_mode;
8719 rtx loc2;
8720 HOST_WIDE_INT offset, size, wider_size;
8722 if (i == 0 && var->onepart)
8724 gcc_checking_assert (var->n_var_parts == 1);
8725 offset = 0;
8726 initialized = VAR_INIT_STATUS_INITIALIZED;
8727 loc2 = vt_expand_1pvar (var, vars);
8729 else
8731 if (last_limit < VAR_PART_OFFSET (var, i))
8733 complete = false;
8734 break;
8736 else if (last_limit > VAR_PART_OFFSET (var, i))
8737 continue;
8738 offset = VAR_PART_OFFSET (var, i);
8739 loc2 = var->var_part[i].cur_loc;
8740 if (loc2 && GET_CODE (loc2) == MEM
8741 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8743 rtx depval = XEXP (loc2, 0);
8745 loc2 = vt_expand_loc (loc2, vars);
8747 if (loc2)
8748 loc_exp_insert_dep (var, depval, vars);
8750 if (!loc2)
8752 complete = false;
8753 continue;
8755 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8756 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8757 if (var->var_part[i].cur_loc == lc->loc)
8759 initialized = lc->init;
8760 break;
8762 gcc_assert (lc);
8765 offsets[n_var_parts] = offset;
8766 if (!loc2)
8768 complete = false;
8769 continue;
8771 loc[n_var_parts] = loc2;
8772 mode = GET_MODE (var->var_part[i].cur_loc);
8773 if (mode == VOIDmode && var->onepart)
8774 mode = DECL_MODE (decl);
8775 /* We only track subparts of constant-sized objects, since at present
8776 there's no representation for polynomial pieces. */
8777 if (!GET_MODE_SIZE (mode).is_constant (&size))
8779 complete = false;
8780 continue;
8782 last_limit = offsets[n_var_parts] + size;
8784 /* Attempt to merge adjacent registers or memory. */
8785 for (j = i + 1; j < var->n_var_parts; j++)
8786 if (last_limit <= VAR_PART_OFFSET (var, j))
8787 break;
8788 if (j < var->n_var_parts
8789 && GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
8790 && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
8791 && var->var_part[j].cur_loc
8792 && mode == GET_MODE (var->var_part[j].cur_loc)
8793 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8794 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8795 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8796 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8798 rtx new_loc = NULL;
8799 poly_int64 offset2;
8801 if (REG_P (loc[n_var_parts])
8802 && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
8803 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode)
8804 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8805 == REGNO (loc2))
8807 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8808 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8809 mode, 0);
8810 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8811 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8812 if (new_loc)
8814 if (!REG_P (new_loc)
8815 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8816 new_loc = NULL;
8817 else
8818 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8821 else if (MEM_P (loc[n_var_parts])
8822 && GET_CODE (XEXP (loc2, 0)) == PLUS
8823 && REG_P (XEXP (XEXP (loc2, 0), 0))
8824 && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
8826 poly_int64 end1 = size;
8827 rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
8828 &end1);
8829 if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
8830 && known_eq (end1, offset2))
8831 new_loc = adjust_address_nv (loc[n_var_parts],
8832 wider_mode, 0);
8835 if (new_loc)
8837 loc[n_var_parts] = new_loc;
8838 mode = wider_mode;
8839 last_limit = offsets[n_var_parts] + wider_size;
8840 i = j;
8843 ++n_var_parts;
8845 poly_uint64 type_size_unit
8846 = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
8847 if (maybe_lt (poly_uint64 (last_limit), type_size_unit))
8848 complete = false;
8850 if (! flag_var_tracking_uninit)
8851 initialized = VAR_INIT_STATUS_INITIALIZED;
8853 note_vl = NULL_RTX;
8854 if (!complete)
8855 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8856 else if (n_var_parts == 1)
8858 rtx expr_list;
8860 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8861 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8862 else
8863 expr_list = loc[0];
8865 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8867 else if (n_var_parts)
8869 rtx parallel;
8871 for (i = 0; i < n_var_parts; i++)
8872 loc[i]
8873 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8875 parallel = gen_rtx_PARALLEL (VOIDmode,
8876 gen_rtvec_v (n_var_parts, loc));
8877 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8878 parallel, initialized);
8881 if (where != EMIT_NOTE_BEFORE_INSN)
8883 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8884 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8885 NOTE_DURING_CALL_P (note) = true;
8887 else
8889 /* Make sure that the call related notes come first. */
8890 while (NEXT_INSN (insn)
8891 && NOTE_P (insn)
8892 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8893 && NOTE_DURING_CALL_P (insn))
8894 insn = NEXT_INSN (insn);
8895 if (NOTE_P (insn)
8896 && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8897 && NOTE_DURING_CALL_P (insn))
8898 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8899 else
8900 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8902 NOTE_VAR_LOCATION (note) = note_vl;
8904 set_dv_changed (var->dv, false);
8905 gcc_assert (var->in_changed_variables);
8906 var->in_changed_variables = false;
8907 changed_variables->clear_slot (varp);
8909 /* Continue traversing the hash table. */
8910 return 1;
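/* Illustrative (hypothetical) shapes of the notes built above: a
   complete single-part variable yields

       (var_location d (mem/c:SI (plus:DI (reg:DI fp) (const_int -8))))

   a variable split across two parts gets a PARALLEL of EXPR_LISTs,
   one (location, offset) pair per part:

       (var_location d (parallel [(expr_list (reg:SI 0) (const_int 0))
                                  (expr_list (mem:SI ...) (const_int 4))]))

   and an incomplete variable gets a NULL location, meaning "location
   unknown from here on".  */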
8913 /* While traversing changed_variables, push onto DATA (a stack of RTX
8914 values) entries that aren't user variables. */
8916 static int
8917 var_track_values_to_stack (variable **slot,
8918 vec<rtx, va_heap> *changed_values_stack)
8920 variable *var = *slot;
8922 if (var->onepart == ONEPART_VALUE)
8923 changed_values_stack->safe_push (dv_as_value (var->dv));
8924 else if (var->onepart == ONEPART_DEXPR)
8925 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8927 return 1;
8930 /* Remove from changed_variables the entry whose DV corresponds to
8931 value or debug_expr VAL. */
8932 static void
8933 remove_value_from_changed_variables (rtx val)
8935 decl_or_value dv = dv_from_rtx (val);
8936 variable **slot;
8937 variable *var;
8939 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8940 NO_INSERT);
8941 var = *slot;
8942 var->in_changed_variables = false;
8943 changed_variables->clear_slot (slot);
8946 /* If VAL (a value or debug_expr) has backlinks to variables actively
8947 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8948 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8949 have dependencies of their own to notify. */
8951 static void
8952 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8953 vec<rtx, va_heap> *changed_values_stack)
8955 variable **slot;
8956 variable *var;
8957 loc_exp_dep *led;
8958 decl_or_value dv = dv_from_rtx (val);
8960 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8961 NO_INSERT);
8962 if (!slot)
8963 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8964 if (!slot)
8965 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8966 NO_INSERT);
8967 var = *slot;
8969 while ((led = VAR_LOC_DEP_LST (var)))
8971 decl_or_value ldv = led->dv;
8972 variable *ivar;
8974 /* Deactivate and remove the backlink, as it was "used up".  It
8975 makes no sense to attempt to notify the same entity again:
8976 either it will be recomputed and re-register an active
8977 dependency, or it will still have the changed mark. */
8978 if (led->next)
8979 led->next->pprev = led->pprev;
8980 if (led->pprev)
8981 *led->pprev = led->next;
8982 led->next = NULL;
8983 led->pprev = NULL;
8985 if (dv_changed_p (ldv))
8986 continue;
8988 switch (dv_onepart_p (ldv))
8990 case ONEPART_VALUE:
8991 case ONEPART_DEXPR:
8992 set_dv_changed (ldv, true);
8993 changed_values_stack->safe_push (dv_as_rtx (ldv));
8994 break;
8996 case ONEPART_VDECL:
8997 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8998 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8999 variable_was_changed (ivar, NULL);
9000 break;
9002 case NOT_ONEPART:
9003 delete led;
9004 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
9005 if (ivar)
9007 int i = ivar->n_var_parts;
9008 while (i--)
9010 rtx loc = ivar->var_part[i].cur_loc;
9012 if (loc && GET_CODE (loc) == MEM
9013 && XEXP (loc, 0) == val)
9015 variable_was_changed (ivar, NULL);
9016 break;
9020 break;
9022 default:
9023 gcc_unreachable ();
9028 /* Take out of changed_variables any entries that don't refer to user
9029 variables. Back-propagate change notifications from values and
9030 debug_exprs to their active dependencies in HTAB or in
9031 CHANGED_VARIABLES. */
9033 static void
9034 process_changed_values (variable_table_type *htab)
9036 int i, n;
9037 rtx val;
9038 auto_vec<rtx, 20> changed_values_stack;
9040 /* Move values from changed_variables to changed_values_stack. */
9041 changed_variables
9042 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
9043 (&changed_values_stack);
9045 /* Back-propagate change notifications in values while popping
9046 them from the stack. */
9047 for (n = i = changed_values_stack.length ();
9048 i > 0; i = changed_values_stack.length ())
9050 val = changed_values_stack.pop ();
9051 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
9053 /* This condition will hold when visiting each of the entries
9054 originally in changed_variables. We can't remove them
9055 earlier because this could drop the backlinks before we got a
9056 chance to use them. */
9057 if (i == n)
9059 remove_value_from_changed_variables (val);
9060 n--;
9065 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9066 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9067 the notes shall be emitted before or after instruction INSN. */
9069 static void
9070 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
9071 shared_hash *vars)
9073 emit_note_data data;
9074 variable_table_type *htab = shared_hash_htab (vars);
9076 if (changed_variables->is_empty ())
9077 return;
9079 if (MAY_HAVE_DEBUG_BIND_INSNS)
9080 process_changed_values (htab);
9082 data.insn = insn;
9083 data.where = where;
9084 data.vars = htab;
9086 changed_variables
9087 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9090 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9091 same variable in hash table DATA or is not there at all. */
9093 static int
9094 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
9096 variable *old_var, *new_var;
9098 old_var = *slot;
9099 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9101 if (!new_var)
9103 /* Variable has disappeared. */
9104 variable *empty_var = NULL;
9106 if (old_var->onepart == ONEPART_VALUE
9107 || old_var->onepart == ONEPART_DEXPR)
9109 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9110 if (empty_var)
9112 gcc_checking_assert (!empty_var->in_changed_variables);
9113 if (!VAR_LOC_1PAUX (old_var))
9115 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9116 VAR_LOC_1PAUX (empty_var) = NULL;
9118 else
9119 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9123 if (!empty_var)
9125 empty_var = onepart_pool_allocate (old_var->onepart);
9126 empty_var->dv = old_var->dv;
9127 empty_var->refcount = 0;
9128 empty_var->n_var_parts = 0;
9129 empty_var->onepart = old_var->onepart;
9130 empty_var->in_changed_variables = false;
9133 if (empty_var->onepart)
9135 /* Propagate the auxiliary data to (ultimately)
9136 changed_variables. */
9137 empty_var->var_part[0].loc_chain = NULL;
9138 empty_var->var_part[0].cur_loc = NULL;
9139 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9140 VAR_LOC_1PAUX (old_var) = NULL;
9142 variable_was_changed (empty_var, NULL);
9143 /* Continue traversing the hash table. */
9144 return 1;
9146 /* Update cur_loc and one-part auxiliary data, before new_var goes
9147 through variable_was_changed. */
9148 if (old_var != new_var && new_var->onepart)
9150 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9151 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9152 VAR_LOC_1PAUX (old_var) = NULL;
9153 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9155 if (variable_different_p (old_var, new_var))
9156 variable_was_changed (new_var, NULL);
9158 /* Continue traversing the hash table. */
9159 return 1;
9162 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9163 table DATA. */
9165 static int
9166 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars)
9168 variable *old_var, *new_var;
9170 new_var = *slot;
9171 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9172 if (!old_var)
9174 int i;
9175 for (i = 0; i < new_var->n_var_parts; i++)
9176 new_var->var_part[i].cur_loc = NULL;
9177 variable_was_changed (new_var, NULL);
9180 /* Continue traversing the hash table. */
9181 return 1;
9184 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9185 NEW_SET. */
9187 static void
9188 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9189 dataflow_set *new_set)
9191 shared_hash_htab (old_set->vars)
9192 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9193 (shared_hash_htab (new_set->vars));
9194 shared_hash_htab (new_set->vars)
9195 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9196 (shared_hash_htab (old_set->vars));
9197 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9200 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9202 static rtx_insn *
9203 next_non_note_insn_var_location (rtx_insn *insn)
9205 while (insn)
9207 insn = NEXT_INSN (insn);
9208 if (insn == 0
9209 || !NOTE_P (insn)
9210 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9211 break;
9214 return insn;
9217 /* Emit the notes for changes of location parts in the basic block BB. */
9219 static void
9220 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9222 unsigned int i;
9223 micro_operation *mo;
9225 dataflow_set_clear (set);
9226 dataflow_set_copy (set, &VTI (bb)->in);
9228 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9230 rtx_insn *insn = mo->insn;
9231 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9233 switch (mo->type)
9235 case MO_CALL:
9236 dataflow_set_clear_at_call (set, insn);
9237 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9239 rtx arguments = mo->u.loc, *p = &arguments;
9240 while (*p)
9242 XEXP (XEXP (*p, 0), 1)
9243 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9244 shared_hash_htab (set->vars));
9245 /* If expansion is successful, keep it in the list. */
9246 if (XEXP (XEXP (*p, 0), 1))
9248 XEXP (XEXP (*p, 0), 1)
9249 = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
9250 p = &XEXP (*p, 1);
9252 /* Otherwise, if the following item is the data_value for it,
9253 drop it too. */
9254 else if (XEXP (*p, 1)
9255 && REG_P (XEXP (XEXP (*p, 0), 0))
9256 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9257 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9259 && REGNO (XEXP (XEXP (*p, 0), 0))
9260 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9261 0), 0)))
9262 *p = XEXP (XEXP (*p, 1), 1);
9263 /* Just drop this item. */
9264 else
9265 *p = XEXP (*p, 1);
9267 add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
9269 break;
9271 case MO_USE:
9273 rtx loc = mo->u.loc;
9275 if (REG_P (loc))
9276 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9277 else
9278 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9280 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9282 break;
9284 case MO_VAL_LOC:
9286 rtx loc = mo->u.loc;
9287 rtx val, vloc;
9288 tree var;
9290 if (GET_CODE (loc) == CONCAT)
9292 val = XEXP (loc, 0);
9293 vloc = XEXP (loc, 1);
9295 else
9297 val = NULL_RTX;
9298 vloc = loc;
9301 var = PAT_VAR_LOCATION_DECL (vloc);
9303 clobber_variable_part (set, NULL_RTX,
9304 dv_from_decl (var), 0, NULL_RTX);
9305 if (val)
9307 if (VAL_NEEDS_RESOLUTION (loc))
9308 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9309 set_variable_part (set, val, dv_from_decl (var), 0,
9310 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9311 INSERT);
9313 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9314 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9315 dv_from_decl (var), 0,
9316 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9317 INSERT);
9319 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9321 break;
9323 case MO_VAL_USE:
9325 rtx loc = mo->u.loc;
9326 rtx val, vloc, uloc;
9328 vloc = uloc = XEXP (loc, 1);
9329 val = XEXP (loc, 0);
9331 if (GET_CODE (val) == CONCAT)
9333 uloc = XEXP (val, 1);
9334 val = XEXP (val, 0);
9337 if (VAL_NEEDS_RESOLUTION (loc))
9338 val_resolve (set, val, vloc, insn);
9339 else
9340 val_store (set, val, uloc, insn, false);
9342 if (VAL_HOLDS_TRACK_EXPR (loc))
9344 if (GET_CODE (uloc) == REG)
9345 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9346 NULL);
9347 else if (GET_CODE (uloc) == MEM)
9348 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9349 NULL);
9352 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9354 break;
9356 case MO_VAL_SET:
9358 rtx loc = mo->u.loc;
9359 rtx val, vloc, uloc;
9360 rtx dstv, srcv;
9362 vloc = loc;
9363 uloc = XEXP (vloc, 1);
9364 val = XEXP (vloc, 0);
9365 vloc = uloc;
9367 if (GET_CODE (uloc) == SET)
9369 dstv = SET_DEST (uloc);
9370 srcv = SET_SRC (uloc);
9372 else
9374 dstv = uloc;
9375 srcv = NULL;
9378 if (GET_CODE (val) == CONCAT)
9380 dstv = vloc = XEXP (val, 1);
9381 val = XEXP (val, 0);
9384 if (GET_CODE (vloc) == SET)
9386 srcv = SET_SRC (vloc);
9388 gcc_assert (val != srcv);
9389 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9391 dstv = vloc = SET_DEST (vloc);
9393 if (VAL_NEEDS_RESOLUTION (loc))
9394 val_resolve (set, val, srcv, insn);
9396 else if (VAL_NEEDS_RESOLUTION (loc))
9398 gcc_assert (GET_CODE (uloc) == SET
9399 && GET_CODE (SET_SRC (uloc)) == REG);
9400 val_resolve (set, val, SET_SRC (uloc), insn);
9403 if (VAL_HOLDS_TRACK_EXPR (loc))
9405 if (VAL_EXPR_IS_CLOBBERED (loc))
9407 if (REG_P (uloc))
9408 var_reg_delete (set, uloc, true);
9409 else if (MEM_P (uloc))
9411 gcc_assert (MEM_P (dstv));
9412 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9413 var_mem_delete (set, dstv, true);
9416 else
9418 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9419 rtx src = NULL, dst = uloc;
9420 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9422 if (GET_CODE (uloc) == SET)
9424 src = SET_SRC (uloc);
9425 dst = SET_DEST (uloc);
9428 if (copied_p)
9430 status = find_src_status (set, src);
9432 src = find_src_set_src (set, src);
9435 if (REG_P (dst))
9436 var_reg_delete_and_set (set, dst, !copied_p,
9437 status, srcv);
9438 else if (MEM_P (dst))
9440 gcc_assert (MEM_P (dstv));
9441 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9442 var_mem_delete_and_set (set, dstv, !copied_p,
9443 status, srcv);
9447 else if (REG_P (uloc))
9448 var_regno_delete (set, REGNO (uloc));
9449 else if (MEM_P (uloc))
9451 gcc_checking_assert (GET_CODE (vloc) == MEM);
9452 gcc_checking_assert (vloc == dstv);
9453 if (vloc != dstv)
9454 clobber_overlapping_mems (set, vloc);
9457 val_store (set, val, dstv, insn, true);
9459 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9460 set->vars);
9462 break;
9464 case MO_SET:
9466 rtx loc = mo->u.loc;
9467 rtx set_src = NULL;
9469 if (GET_CODE (loc) == SET)
9471 set_src = SET_SRC (loc);
9472 loc = SET_DEST (loc);
9475 if (REG_P (loc))
9476 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9477 set_src);
9478 else
9479 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9480 set_src);
9482 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9483 set->vars);
9485 break;
9487 case MO_COPY:
9489 rtx loc = mo->u.loc;
9490 enum var_init_status src_status;
9491 rtx set_src = NULL;
9493 if (GET_CODE (loc) == SET)
9495 set_src = SET_SRC (loc);
9496 loc = SET_DEST (loc);
9499 src_status = find_src_status (set, set_src);
9500 set_src = find_src_set_src (set, set_src);
9502 if (REG_P (loc))
9503 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9504 else
9505 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9507 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9508 set->vars);
9510 break;
9512 case MO_USE_NO_VAR:
9514 rtx loc = mo->u.loc;
9516 if (REG_P (loc))
9517 var_reg_delete (set, loc, false);
9518 else
9519 var_mem_delete (set, loc, false);
9521 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9523 break;
9525 case MO_CLOBBER:
9527 rtx loc = mo->u.loc;
9529 if (REG_P (loc))
9530 var_reg_delete (set, loc, true);
9531 else
9532 var_mem_delete (set, loc, true);
9534 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9535 set->vars);
9537 break;
9539 case MO_ADJUST:
9540 set->stack_adjust += mo->u.adjust;
9541 break;
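/* Worked example (hypothetical): for an insn (set (reg:SI 0 [ x ])
   (mem:SI (reg:DI sp))) recorded as MO_SET, the code above rebinds
   X's location to register 0 in SET and, if that differs from X's
   previous location, emits a NOTE_INSN_VAR_LOCATION for X before the
   next instruction that is not itself such a note.  */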
9546 /* Emit notes for the whole function. */
9548 static void
9549 vt_emit_notes (void)
9551 basic_block bb;
9552 dataflow_set cur;
9554 gcc_assert (changed_variables->is_empty ());
9556 /* Free memory occupied by the out hash tables, as they aren't used
9557 anymore. */
9558 FOR_EACH_BB_FN (bb, cfun)
9559 dataflow_set_clear (&VTI (bb)->out);
9561 /* Enable emitting notes by functions (mainly by set_variable_part and
9562 delete_variable_part). */
9563 emit_notes = true;
9565 if (MAY_HAVE_DEBUG_BIND_INSNS)
9566 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9568 dataflow_set_init (&cur);
9570 FOR_EACH_BB_FN (bb, cfun)
9572 /* Emit the notes for changes of variable locations between two
9573 subsequent basic blocks. */
9574 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9576 if (MAY_HAVE_DEBUG_BIND_INSNS)
9577 local_get_addr_cache = new hash_map<rtx, rtx>;
9579 /* Emit the notes for the changes in the basic block itself. */
9580 emit_notes_in_bb (bb, &cur);
9582 if (MAY_HAVE_DEBUG_BIND_INSNS)
9583 delete local_get_addr_cache;
9584 local_get_addr_cache = NULL;
9586 /* Free memory occupied by the in hash table, as we won't need it
9587 again. */
9588 dataflow_set_clear (&VTI (bb)->in);
9591 if (flag_checking)
9592 shared_hash_htab (cur.vars)
9593 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9594 (shared_hash_htab (empty_shared_hash));
9596 dataflow_set_destroy (&cur);
9598 if (MAY_HAVE_DEBUG_BIND_INSNS)
9599 delete dropped_values;
9600 dropped_values = NULL;
9602 emit_notes = false;
9605 /* If there is a declaration and offset associated with register/memory RTL
9606 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9608 static bool
9609 vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
9611 if (REG_P (rtl))
9613 if (REG_ATTRS (rtl))
9615 *declp = REG_EXPR (rtl);
9616 *offsetp = REG_OFFSET (rtl);
9617 return true;
9620 else if (GET_CODE (rtl) == PARALLEL)
9622 tree decl = NULL_TREE;
9623 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9624 int len = XVECLEN (rtl, 0), i;
9626 for (i = 0; i < len; i++)
9628 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9629 if (!REG_P (reg) || !REG_ATTRS (reg))
9630 break;
9631 if (!decl)
9632 decl = REG_EXPR (reg);
9633 if (REG_EXPR (reg) != decl)
9634 break;
9635 HOST_WIDE_INT this_offset;
9636 if (!track_offset_p (REG_OFFSET (reg), &this_offset))
9637 break;
9638 offset = MIN (offset, this_offset);
9641 if (i == len)
9643 *declp = decl;
9644 *offsetp = offset;
9645 return true;
9648 else if (MEM_P (rtl))
9650 if (MEM_ATTRS (rtl))
9652 *declp = MEM_EXPR (rtl);
9653 *offsetp = int_mem_offset (rtl);
9654 return true;
9657 return false;
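/* For example (hypothetical): for (reg:SI 0 [ x+4 ]) whose REG_ATTRS
   record decl X at offset 4, this sets *DECLP to X and *OFFSETP to 4.
   For a PARALLEL whose pieces all share the same REG_EXPR, it returns
   that common decl together with the smallest piece offset.  */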
9660 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9661 of VAL. */
9663 static void
9664 record_entry_value (cselib_val *val, rtx rtl)
9666 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9668 ENTRY_VALUE_EXP (ev) = rtl;
9670 cselib_add_permanent_equiv (val, ev, get_insns ());
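/* For example (illustrative): if parameter P arrives in (reg:DI di),
   recording (entry_value:DI (reg:DI di)) as a permanent equivalence
   of P's VALUE lets the later DWARF emission fall back to a
   DW_OP_entry_value expression once di itself has been clobbered.  */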
9673 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9675 static void
9676 vt_add_function_parameter (tree parm)
9678 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9679 rtx incoming = DECL_INCOMING_RTL (parm);
9680 tree decl;
9681 machine_mode mode;
9682 poly_int64 offset;
9683 dataflow_set *out;
9684 decl_or_value dv;
9685 bool incoming_ok = true;
9687 if (TREE_CODE (parm) != PARM_DECL)
9688 return;
9690 if (!decl_rtl || !incoming)
9691 return;
9693 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9694 return;
9696 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9697 rewrite the incoming location of parameters passed on the stack
9698 into MEMs based on the argument pointer, so that incoming doesn't
9699 depend on a pseudo. */
9700 poly_int64 incoming_offset = 0;
9701 if (MEM_P (incoming)
9702 && (strip_offset (XEXP (incoming, 0), &incoming_offset)
9703 == crtl->args.internal_arg_pointer))
9705 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9706 incoming
9707 = replace_equiv_address_nv (incoming,
9708 plus_constant (Pmode,
9709 arg_pointer_rtx,
9710 off + incoming_offset));
9713 #ifdef HAVE_window_save
9714 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9715 If the target machine has an explicit window save instruction, the
9716 actual entry value is the corresponding OUTGOING_REGNO instead. */
9717 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9719 if (REG_P (incoming)
9720 && HARD_REGISTER_P (incoming)
9721 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9723 parm_reg p;
9724 p.incoming = incoming;
9725 incoming
9726 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9727 OUTGOING_REGNO (REGNO (incoming)), 0);
9728 p.outgoing = incoming;
9729 vec_safe_push (windowed_parm_regs, p);
9731 else if (GET_CODE (incoming) == PARALLEL)
9733 rtx outgoing
9734 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9735 int i;
9737 for (i = 0; i < XVECLEN (incoming, 0); i++)
9739 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9740 parm_reg p;
9741 p.incoming = reg;
9742 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9743 OUTGOING_REGNO (REGNO (reg)), 0);
9744 p.outgoing = reg;
9745 XVECEXP (outgoing, 0, i)
9746 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9747 XEXP (XVECEXP (incoming, 0, i), 1));
9748 vec_safe_push (windowed_parm_regs, p);
9751 incoming = outgoing;
9753 else if (MEM_P (incoming)
9754 && REG_P (XEXP (incoming, 0))
9755 && HARD_REGISTER_P (XEXP (incoming, 0)))
9757 rtx reg = XEXP (incoming, 0);
9758 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9760 parm_reg p;
9761 p.incoming = reg;
9762 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9763 p.outgoing = reg;
9764 vec_safe_push (windowed_parm_regs, p);
9765 incoming = replace_equiv_address_nv (incoming, reg);
9769 #endif
9771 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9773 incoming_ok = false;
9774 if (MEM_P (incoming))
9776 /* This means argument is passed by invisible reference. */
9777 offset = 0;
9778 decl = parm;
9780 else
9782 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9783 return;
9784 offset += byte_lowpart_offset (GET_MODE (incoming),
9785 GET_MODE (decl_rtl));
9789 if (!decl)
9790 return;
9792 if (parm != decl)
9794 /* If that DECL_RTL wasn't a pseudo that got spilled to
9795 memory, bail out. Otherwise, the spill slot sharing code
9796 will force the memory to reference spill_slot_decl (%sfp),
9797 so we don't match above. That's ok, the pseudo must have
9798 referenced the entire parameter, so just reset OFFSET. */
9799 if (decl != get_spill_slot_decl (false))
9800 return;
9801 offset = 0;
9804 HOST_WIDE_INT const_offset;
9805 if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
9806 return;
9808 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9810 dv = dv_from_decl (parm);
9812 if (target_for_debug_bind (parm)
9813 /* We can't deal with these right now, because this kind of
9814 variable is single-part. ??? We could handle parallels
9815 that describe multiple locations for the same single
9816 value, but ATM we don't. */
9817 && GET_CODE (incoming) != PARALLEL)
9819 cselib_val *val;
9820 rtx lowpart;
9822 /* ??? We shouldn't ever hit this, but it may happen because
9823 arguments passed by invisible reference aren't dealt with
9824 above: incoming-rtl will have Pmode rather than the
9825 expected mode for the type. */
9826 if (const_offset)
9827 return;
9829 lowpart = var_lowpart (mode, incoming);
9830 if (!lowpart)
9831 return;
9833 val = cselib_lookup_from_insn (lowpart, mode, true,
9834 VOIDmode, get_insns ());
9836 /* ??? Float-typed values in memory are not handled by
9837 cselib. */
9838 if (val)
9840 preserve_value (val);
9841 set_variable_part (out, val->val_rtx, dv, const_offset,
9842 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9843 dv = dv_from_value (val->val_rtx);
9846 if (MEM_P (incoming))
9848 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9849 VOIDmode, get_insns ());
9850 if (val)
9852 preserve_value (val);
9853 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9858 if (REG_P (incoming))
9860 incoming = var_lowpart (mode, incoming);
9861 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9862 attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
9863 incoming);
9864 set_variable_part (out, incoming, dv, const_offset,
9865 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9866 if (dv_is_value_p (dv))
9868 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9869 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9870 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9872 machine_mode indmode
9873 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9874 rtx mem = gen_rtx_MEM (indmode, incoming);
9875 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9876 VOIDmode,
9877 get_insns ());
9878 if (val)
9880 preserve_value (val);
9881 record_entry_value (val, mem);
9882 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9883 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9888 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9890 int i;
9892 /* The following code relies on vt_get_decl_and_offset returning true for
9893 incoming, which might not always be the case. */
9894 if (!incoming_ok)
9895 return;
9896 for (i = 0; i < XVECLEN (incoming, 0); i++)
9898 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9899 /* vt_get_decl_and_offset has already checked that the offset
9900 is a valid variable part. */
9901 const_offset = get_tracked_reg_offset (reg);
9902 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9903 attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
9904 set_variable_part (out, reg, dv, const_offset,
9905 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9908 else if (MEM_P (incoming))
9910 incoming = var_lowpart (mode, incoming);
9911 set_variable_part (out, incoming, dv, const_offset,
9912 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9916 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9918 static void
9919 vt_add_function_parameters (void)
9921 tree parm;
9923 for (parm = DECL_ARGUMENTS (current_function_decl);
9924 parm; parm = DECL_CHAIN (parm))
9925 vt_add_function_parameter (parm);
9927 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9929 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9931 if (TREE_CODE (vexpr) == INDIRECT_REF)
9932 vexpr = TREE_OPERAND (vexpr, 0);
9934 if (TREE_CODE (vexpr) == PARM_DECL
9935 && DECL_ARTIFICIAL (vexpr)
9936 && !DECL_IGNORED_P (vexpr)
9937 && DECL_NAMELESS (vexpr))
9938 vt_add_function_parameter (vexpr);
9942 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9943 ensure it isn't flushed during cselib_reset_table.
9944 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9945 has been eliminated. */
9947 static void
9948 vt_init_cfa_base (void)
9950 cselib_val *val;
9952 #ifdef FRAME_POINTER_CFA_OFFSET
9953 cfa_base_rtx = frame_pointer_rtx;
9954 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9955 #else
9956 cfa_base_rtx = arg_pointer_rtx;
9957 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9958 #endif
9959 if (cfa_base_rtx == hard_frame_pointer_rtx
9960 || !fixed_regs[REGNO (cfa_base_rtx)])
9962 cfa_base_rtx = NULL_RTX;
9963 return;
9965 if (!MAY_HAVE_DEBUG_BIND_INSNS)
9966 return;
9968 /* Tell alias analysis that cfa_base_rtx should share
9969 find_base_term value with stack pointer or hard frame pointer. */
9970 if (!frame_pointer_needed)
9971 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9972 else if (!crtl->stack_realign_tried)
9973 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9975 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9976 VOIDmode, get_insns ());
9977 preserve_value (val);
9978 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9981 /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
9983 static rtx_insn *
9984 reemit_marker_as_note (rtx_insn *insn)
9986 gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
9988 enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn);
9990 switch (kind)
9992 case NOTE_INSN_BEGIN_STMT:
9993 case NOTE_INSN_INLINE_ENTRY:
9995 rtx_insn *note = NULL;
9996 if (cfun->debug_nonbind_markers)
9998 note = emit_note_before (kind, insn);
9999 NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
10001 delete_insn (insn);
10002 return note;
10005 default:
10006 gcc_unreachable ();
10010 /* Allocate and initialize the data structures for variable tracking
10011 and parse the RTL to get the micro operations. */
10013 static bool
10014 vt_initialize (void)
10016 basic_block bb;
10017 poly_int64 fp_cfa_offset = -1;
10019 alloc_aux_for_blocks (sizeof (variable_tracking_info));
10021 empty_shared_hash = shared_hash_pool.allocate ();
10022 empty_shared_hash->refcount = 1;
10023 empty_shared_hash->htab = new variable_table_type (1);
10024 changed_variables = new variable_table_type (10);
10026 /* Init the IN and OUT sets. */
10027 FOR_ALL_BB_FN (bb, cfun)
10029 VTI (bb)->visited = false;
10030 VTI (bb)->flooded = false;
10031 dataflow_set_init (&VTI (bb)->in);
10032 dataflow_set_init (&VTI (bb)->out);
10033 VTI (bb)->permp = NULL;
10036 if (MAY_HAVE_DEBUG_BIND_INSNS)
10038 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
10039 scratch_regs = BITMAP_ALLOC (NULL);
10040 preserved_values.create (256);
10041 global_get_addr_cache = new hash_map<rtx, rtx>;
10043 else
10045 scratch_regs = NULL;
10046 global_get_addr_cache = NULL;
10049 if (MAY_HAVE_DEBUG_BIND_INSNS)
10051 rtx reg, expr;
10052 int ofst;
10053 cselib_val *val;
10055 #ifdef FRAME_POINTER_CFA_OFFSET
10056 reg = frame_pointer_rtx;
10057 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10058 #else
10059 reg = arg_pointer_rtx;
10060 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
10061 #endif
10063 ofst -= INCOMING_FRAME_SP_OFFSET;
10065 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
10066 VOIDmode, get_insns ());
10067 preserve_value (val);
10068 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
10069 cselib_preserve_cfa_base_value (val, REGNO (reg));
10070 if (ofst)
10072 cselib_val *valsp
10073 = cselib_lookup_from_insn (stack_pointer_rtx,
10074 GET_MODE (stack_pointer_rtx), 1,
10075 VOIDmode, get_insns ());
10076 preserve_value (valsp);
10077 expr = plus_constant (GET_MODE (reg), reg, ofst);
10078 /* This cselib_add_permanent_equiv call needs to be done before
10079 the other cselib_add_permanent_equiv a few lines later,
10080 because after that one is done, cselib_lookup on this expr
10081 will, due to the cselib SP_DERIVED_VALUE_P optimizations,
10082 return valsp, and so no permanent equivalence will be added.
10083 cselib_add_permanent_equiv (valsp, expr, get_insns ());
10086 expr = plus_constant (GET_MODE (stack_pointer_rtx),
10087 stack_pointer_rtx, -ofst);
10088 cselib_add_permanent_equiv (val, expr, get_insns ());
10091 /* In order to factor out the adjustments made to the stack pointer or to
10092 the hard frame pointer and thus be able to use DW_OP_fbreg operations
10093 instead of individual location lists, we're going to rewrite MEMs based
10094 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10095 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10096 resp. arg_pointer_rtx. We can do this either when there is no frame
10097 pointer in the function and stack adjustments are consistent for all
10098 basic blocks or when there is a frame pointer and no stack realignment.
10099 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10100 has been eliminated. */
10101 if (!frame_pointer_needed)
10103 rtx reg, elim;
10105 if (!vt_stack_adjustments ())
10106 return false;
10108 #ifdef FRAME_POINTER_CFA_OFFSET
10109 reg = frame_pointer_rtx;
10110 #else
10111 reg = arg_pointer_rtx;
10112 #endif
10113 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10114 if (elim != reg)
10116 if (GET_CODE (elim) == PLUS)
10117 elim = XEXP (elim, 0);
10118 if (elim == stack_pointer_rtx)
10119 vt_init_cfa_base ();
10122 else if (!crtl->stack_realign_tried)
10124 rtx reg, elim;
10126 #ifdef FRAME_POINTER_CFA_OFFSET
10127 reg = frame_pointer_rtx;
10128 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10129 #else
10130 reg = arg_pointer_rtx;
10131 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10132 #endif
10133 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10134 if (elim != reg)
10136 if (GET_CODE (elim) == PLUS)
10138 fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
10139 elim = XEXP (elim, 0);
10141 if (elim != hard_frame_pointer_rtx)
10142 fp_cfa_offset = -1;
10144 else
10145 fp_cfa_offset = -1;
10148 /* If the stack is realigned and a DRAP register is used, we're going to
10149 rewrite MEMs based on it representing incoming locations of parameters
10150 passed on the stack into MEMs based on the argument pointer. Although
10151 we aren't going to rewrite other MEMs, we still need to initialize the
10152 virtual CFA pointer in order to ensure that the argument pointer will
10153 be seen as a constant throughout the function.
10155 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10156 else if (stack_realign_drap)
10158 rtx reg, elim;
10160 #ifdef FRAME_POINTER_CFA_OFFSET
10161 reg = frame_pointer_rtx;
10162 #else
10163 reg = arg_pointer_rtx;
10164 #endif
10165 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10166 if (elim != reg)
10168 if (GET_CODE (elim) == PLUS)
10169 elim = XEXP (elim, 0);
10170 if (elim == hard_frame_pointer_rtx)
10171 vt_init_cfa_base ();
10175 hard_frame_pointer_adjustment = -1;
10177 vt_add_function_parameters ();
10179 bool record_sp_value = false;
10180 FOR_EACH_BB_FN (bb, cfun)
10182 rtx_insn *insn;
10183 basic_block first_bb, last_bb;
10185 if (MAY_HAVE_DEBUG_BIND_INSNS)
10187 cselib_record_sets_hook = add_with_sets;
10188 if (dump_file && (dump_flags & TDF_DETAILS))
10189 fprintf (dump_file, "first value: %i\n",
10190 cselib_get_next_uid ());
10193 if (MAY_HAVE_DEBUG_BIND_INSNS
10194 && cfa_base_rtx
10195 && !frame_pointer_needed
10196 && record_sp_value)
10197 cselib_record_sp_cfa_base_equiv (-cfa_base_offset
10198 - VTI (bb)->in.stack_adjust,
10199 BB_HEAD (bb));
10200 record_sp_value = true;
10202 first_bb = bb;
10203 for (;;)
10205 edge e;
10206 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10207 || ! single_pred_p (bb->next_bb))
10208 break;
10209 e = find_edge (bb, bb->next_bb);
10210 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10211 break;
10212 bb = bb->next_bb;
10214 last_bb = bb;
10216 /* Add the micro-operations to the vector. */
10217 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10219 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10220 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10222 rtx_insn *next;
10223 FOR_BB_INSNS_SAFE (bb, insn, next)
10225 if (INSN_P (insn))
10227 HOST_WIDE_INT pre = 0, post = 0;
10229 if (!frame_pointer_needed)
10231 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10232 if (pre)
10234 micro_operation mo;
10235 mo.type = MO_ADJUST;
10236 mo.u.adjust = pre;
10237 mo.insn = insn;
10238 if (dump_file && (dump_flags & TDF_DETAILS))
10239 log_op_type (PATTERN (insn), bb, insn,
10240 MO_ADJUST, dump_file);
10241 VTI (bb)->mos.safe_push (mo);
10245 cselib_hook_called = false;
10246 adjust_insn (bb, insn);
10248 if (pre)
10249 VTI (bb)->out.stack_adjust += pre;
10251 if (DEBUG_MARKER_INSN_P (insn))
10253 reemit_marker_as_note (insn);
10254 continue;
10257 if (MAY_HAVE_DEBUG_BIND_INSNS)
10259 if (CALL_P (insn))
10260 prepare_call_arguments (bb, insn);
10261 cselib_process_insn (insn);
10262 if (dump_file && (dump_flags & TDF_DETAILS))
10264 if (dump_flags & TDF_SLIM)
10265 dump_insn_slim (dump_file, insn);
10266 else
10267 print_rtl_single (dump_file, insn);
10268 dump_cselib_table (dump_file);
10271 if (!cselib_hook_called)
10272 add_with_sets (insn, 0, 0);
10273 cancel_changes (0);
10275 if (post)
10277 micro_operation mo;
10278 mo.type = MO_ADJUST;
10279 mo.u.adjust = post;
10280 mo.insn = insn;
10281 if (dump_file && (dump_flags & TDF_DETAILS))
10282 log_op_type (PATTERN (insn), bb, insn,
10283 MO_ADJUST, dump_file);
10284 VTI (bb)->mos.safe_push (mo);
10285 VTI (bb)->out.stack_adjust += post;
10288 if (maybe_ne (fp_cfa_offset, -1)
10289 && known_eq (hard_frame_pointer_adjustment, -1)
10290 && fp_setter_insn (insn))
10292 vt_init_cfa_base ();
10293 hard_frame_pointer_adjustment = fp_cfa_offset;
10294 /* Disassociate sp from fp now. */
10295 if (MAY_HAVE_DEBUG_BIND_INSNS)
10297 cselib_val *v;
10298 cselib_invalidate_rtx (stack_pointer_rtx);
10299 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10300 VOIDmode);
10301 if (v && !cselib_preserved_value_p (v))
10303 cselib_set_value_sp_based (v);
10304 preserve_value (v);
10310 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10313 bb = last_bb;
10315 if (MAY_HAVE_DEBUG_BIND_INSNS)
10317 cselib_preserve_only_values ();
10318 cselib_reset_table (cselib_get_next_uid ());
10319 cselib_record_sets_hook = NULL;
10323 hard_frame_pointer_adjustment = -1;
10324 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10325 cfa_base_rtx = NULL_RTX;
10326 return true;
10329 /* This is *not* reset after each function. It gives each
10330 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10331 a unique label number. */
10333 static int debug_label_num = 1;
10335 /* Remove from the insn stream a single debug insn used for
10336 variable tracking at assignments. */
10338 static inline void
10339 delete_vta_debug_insn (rtx_insn *insn)
10341 if (DEBUG_MARKER_INSN_P (insn))
10343 reemit_marker_as_note (insn);
10344 return;
10347 tree decl = INSN_VAR_LOCATION_DECL (insn);
10348 if (TREE_CODE (decl) == LABEL_DECL
10349 && DECL_NAME (decl)
10350 && !DECL_RTL_SET_P (decl))
10352 PUT_CODE (insn, NOTE);
10353 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10354 NOTE_DELETED_LABEL_NAME (insn)
10355 = IDENTIFIER_POINTER (DECL_NAME (decl));
10356 SET_DECL_RTL (decl, insn);
10357 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10359 else
10360 delete_insn (insn);
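/* For instance (illustrative): a debug bind insn for a named label
   whose code label was optimized away is turned into a
   NOTE_INSN_DELETED_DEBUG_LABEL carrying the label's name, so debug
   info can still emit the symbol even though no real label remains;
   any other debug bind is simply deleted.  */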
10363 /* Remove from the insn stream all debug insns used for variable
10364 tracking at assignments. USE_CFG should be false if the cfg is no
10365 longer usable. */
10367 void
10368 delete_vta_debug_insns (bool use_cfg)
10370 basic_block bb;
10371 rtx_insn *insn, *next;
10373 if (!MAY_HAVE_DEBUG_INSNS)
10374 return;
10376 if (use_cfg)
10377 FOR_EACH_BB_FN (bb, cfun)
10379 FOR_BB_INSNS_SAFE (bb, insn, next)
10380 if (DEBUG_INSN_P (insn))
10381 delete_vta_debug_insn (insn);
10383 else
10384 for (insn = get_insns (); insn; insn = next)
10386 next = NEXT_INSN (insn);
10387 if (DEBUG_INSN_P (insn))
10388 delete_vta_debug_insn (insn);
10392 /* Run a fast, BB-local only version of var tracking, to take care of
10393 information that we don't do global analysis on, so that not all
10394 information is lost. If SKIPPED holds, we're skipping the global
10395 pass entirely, so we should try to use information it would have
10396 handled as well. */
10398 static void
10399 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10401 /* ??? Just skip it all for now. */
10402 delete_vta_debug_insns (true);
10405 /* Free the data structures needed for variable tracking. */
10407 static void
10408 vt_finalize (void)
10410 basic_block bb;
10412 FOR_EACH_BB_FN (bb, cfun)
10414 VTI (bb)->mos.release ();
10417 FOR_ALL_BB_FN (bb, cfun)
10419 dataflow_set_destroy (&VTI (bb)->in);
10420 dataflow_set_destroy (&VTI (bb)->out);
10421 if (VTI (bb)->permp)
10423 dataflow_set_destroy (VTI (bb)->permp);
10424 XDELETE (VTI (bb)->permp);
10427 free_aux_for_blocks ();
10428 delete empty_shared_hash->htab;
10429 empty_shared_hash->htab = NULL;
10430 delete changed_variables;
10431 changed_variables = NULL;
10432 attrs_pool.release ();
10433 var_pool.release ();
10434 location_chain_pool.release ();
10435 shared_hash_pool.release ();
10437 if (MAY_HAVE_DEBUG_BIND_INSNS)
10439 if (global_get_addr_cache)
10440 delete global_get_addr_cache;
10441 global_get_addr_cache = NULL;
10442 loc_exp_dep_pool.release ();
10443 valvar_pool.release ();
10444 preserved_values.release ();
10445 cselib_finish ();
10446 BITMAP_FREE (scratch_regs);
10447 scratch_regs = NULL;
10450 #ifdef HAVE_window_save
10451 vec_free (windowed_parm_regs);
10452 #endif
10454 if (vui_vec)
10455 XDELETEVEC (vui_vec);
10456 vui_vec = NULL;
10457 vui_allocated = 0;
10460 /* The entry point to variable tracking pass. */
10462 static inline unsigned int
10463 variable_tracking_main_1 (void)
10465 bool success;
10467 /* We won't be called as a separate pass if flag_var_tracking is not
10468 set, but final may call us to turn debug markers into notes. */
10469 if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS)
10470 || flag_var_tracking_assignments < 0
10471 /* Var-tracking right now assumes the IR doesn't contain
10472 any pseudos at this point. */
10473 || targetm.no_register_allocation)
10475 delete_vta_debug_insns (true);
10476 return 0;
10479 if (!flag_var_tracking)
10480 return 0;
10482 if (n_basic_blocks_for_fn (cfun) > 500
10483 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10485 vt_debug_insns_local (true);
10486 return 0;
10489 mark_dfs_back_edges ();
10490 if (!vt_initialize ())
10492 vt_finalize ();
10493 vt_debug_insns_local (true);
10494 return 0;
10497 success = vt_find_locations ();
10499 if (!success && flag_var_tracking_assignments > 0)
10501 vt_finalize ();
10503 delete_vta_debug_insns (true);
10505 /* This is later restored by our caller. */
10506 flag_var_tracking_assignments = 0;
10508 success = vt_initialize ();
10509 gcc_assert (success);
10511 success = vt_find_locations ();
10514 if (!success)
10516 vt_finalize ();
10517 vt_debug_insns_local (false);
10518 return 0;
10521 if (dump_file && (dump_flags & TDF_DETAILS))
10523 dump_dataflow_sets ();
10524 dump_reg_info (dump_file);
10525 dump_flow_info (dump_file, dump_flags);
10528 timevar_push (TV_VAR_TRACKING_EMIT);
10529 vt_emit_notes ();
10530 timevar_pop (TV_VAR_TRACKING_EMIT);
10532 vt_finalize ();
10533 vt_debug_insns_local (false);
10534 return 0;
10537 unsigned int
10538 variable_tracking_main (void)
10540 unsigned int ret;
10541 int save = flag_var_tracking_assignments;
10543 ret = variable_tracking_main_1 ();
10545 flag_var_tracking_assignments = save;
10547 return ret;
10550 namespace {
10552 const pass_data pass_data_variable_tracking =
10554 RTL_PASS, /* type */
10555 "vartrack", /* name */
10556 OPTGROUP_NONE, /* optinfo_flags */
10557 TV_VAR_TRACKING, /* tv_id */
10558 0, /* properties_required */
10559 0, /* properties_provided */
10560 0, /* properties_destroyed */
10561 0, /* todo_flags_start */
10562 0, /* todo_flags_finish */
10565 class pass_variable_tracking : public rtl_opt_pass
10567 public:
10568 pass_variable_tracking (gcc::context *ctxt)
10569 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10572 /* opt_pass methods: */
10573 virtual bool gate (function *)
10575 return (flag_var_tracking && !targetm.delay_vartrack);
10578 virtual unsigned int execute (function *)
10580 return variable_tracking_main ();
10583 }; // class pass_variable_tracking
10585 } // anon namespace
10587 rtl_opt_pass *
10588 make_pass_variable_tracking (gcc::context *ctxt)
10590 return new pass_variable_tracking (ctxt);