1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22      in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
24 these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
33 operations.
34 The micro operations of one instruction are ordered so that
35      pre-modifying stack adjustment < use < use with no var < call insn
36      < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41      The IN set for basic block BB is computed as the union of the OUT sets of BB's
42      predecessors; the OUT set for BB is copied from the IN set for BB and then
43      changed according to the micro operations in BB.
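
        As a sketch (hypothetical names only, not the real data structures),
        the iteration has this shape:

          do
            changed = false;
            for each basic block BB:
              IN[BB]  = union of OUT[P] over all predecessors P of BB;
              OUT[BB] = result of applying BB's micro operations to IN[BB];
              if OUT[BB] differs from its previous value, changed = true;
          while (changed);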
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48      and a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52      efficiently deleting the appropriate variable parts when we set or clobber the
53 register.
55 There may be more than one variable part in a register. The linked lists
56      should be pretty short, so a linked list is a good data structure here.
57      For example, in the following code the register allocator may assign the same
58 register to variables A and B, and both of them are stored in the same
59 register in CODE:
61 if (cond)
62 set A;
63 else
64 set B;
65 CODE;
66 if (cond)
67 use A;
68 else
69 use B;
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72      are emitted at appropriate positions in the RTL code.  Each such note describes
73      the location of one variable at the point in the instruction stream where the
74      note is.  There is no need to emit a note for each variable before each
75      instruction; we only emit these notes where the location of a variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83      or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
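
        For instance (an illustrative simplification of the RTL involved),
        a variable D living in a register produces a note location like

          (var_location D (reg:SI 3))

        whereas a long long split between two registers produces a parallel
        listing each part with its offset:

          (var_location D (parallel [(expr_list (reg:SI 3) (const_int 0))
                                     (expr_list (reg:SI 4) (const_int 4))]))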
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "varasm.h"
95 #include "stor-layout.h"
96 #include "gimple.h"
97 #include "tm_p.h"
98 #include "hard-reg-set.h"
99 #include "basic-block.h"
100 #include "flags.h"
101 #include "insn-config.h"
102 #include "reload.h"
103 #include "sbitmap.h"
104 #include "alloc-pool.h"
105 #include "fibheap.h"
106 #include "hash-table.h"
107 #include "regs.h"
108 #include "expr.h"
109 #include "tree-pass.h"
110 #include "bitmap.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
113 #include "cselib.h"
114 #include "target.h"
115 #include "params.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "pointer-set.h"
119 #include "recog.h"
120 #include "tm_p.h"
121 #include "alias.h"
123     /* var-tracking.c assumes that the tree code with the same value as the VALUE rtx
124        code never appears in REG_EXPR/MEM_EXPRs and is not a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
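/* An illustrative sketch of the assertion idiom used just above: a
   negative array size is a compile-time error, so the extern declaration
   type-checks only when the condition holds, and since it is only a
   declaration no storage is allocated.  The macro name is hypothetical.  */
#if 0
#define STATIC_ASSERT_DECL(name, cond) extern char name[(cond) ? 1 : -1]

STATIC_ASSERT_DECL (check_int_width, sizeof (int) >= 2);  /* Accepted.  */
STATIC_ASSERT_DECL (check_bogus, sizeof (char) > 1);      /* Rejected.  */
#endif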
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE, /* Use location (REG or MEM). */
134       MO_USE_NO_VAR, /* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE, /* Use location which is associated with a value. */
137 MO_VAL_LOC, /* Use location which appears in a debug insn. */
138 MO_VAL_SET, /* Set location associated with a value. */
139 MO_SET, /* Set location. */
140 MO_COPY, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER, /* Clobber location. */
143 MO_CALL, /* Call insn. */
144 MO_ADJUST /* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name[] = {
150 "MO_USE",
151 "MO_USE_NO_VAR",
152 "MO_VAL_USE",
153 "MO_VAL_LOC",
154 "MO_VAL_SET",
155 "MO_SET",
156 "MO_COPY",
157 "MO_CLOBBER",
158 "MO_CALL",
159 "MO_ADJUST"
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
165 enum emit_note_where
167 EMIT_NOTE_BEFORE_INSN,
168 EMIT_NOTE_AFTER_INSN,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
182 and MO_CLOBBER. */
183 rtx insn;
185 union {
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
192 rtx loc;
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust;
196 } u;
197 } micro_operation;
200 /* A declaration of a variable, or an RTL value being handled like a
201 declaration. */
202 typedef void *decl_or_value;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
205 static inline bool
206 dv_is_decl_p (decl_or_value dv)
208 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
211 /* Return true if a decl_or_value is a VALUE rtl. */
212 static inline bool
213 dv_is_value_p (decl_or_value dv)
215 return dv && !dv_is_decl_p (dv);
218 /* Return the decl in the decl_or_value. */
219 static inline tree
220 dv_as_decl (decl_or_value dv)
222 gcc_checking_assert (dv_is_decl_p (dv));
223 return (tree) dv;
226 /* Return the value in the decl_or_value. */
227 static inline rtx
228 dv_as_value (decl_or_value dv)
230 gcc_checking_assert (dv_is_value_p (dv));
231 return (rtx)dv;
234 /* Return the opaque pointer in the decl_or_value. */
235 static inline void *
236 dv_as_opaque (decl_or_value dv)
238 return dv;
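/* An illustrative standalone sketch (hypothetical types, not GCC's) of
   the decl_or_value idiom above: both node layouts begin with a code
   field at the same offset, so the code can be read through either view
   to discriminate, just as dv_is_decl_p compares TREE_CODE against
   VALUE.  This relies on the identical leading layout of the two
   structures, as the real tree and rtx nodes do.  */
#if 0
#include <assert.h>

enum node_code { NC_DECL, NC_VALUE };

struct fake_tree { enum node_code code; const char *name; };
struct fake_rtx  { enum node_code code; int uid; };

typedef void *dov;

static int
fake_dv_is_decl_p (dov dv)
{
  /* Either view works, because both layouts start with the code.  */
  return ((struct fake_tree *) dv)->code != NC_VALUE;
}

int
main (void)
{
  struct fake_tree t = { NC_DECL, "x" };
  struct fake_rtx v = { NC_VALUE, 42 };
  assert (fake_dv_is_decl_p (&t) && !fake_dv_is_decl_p (&v));
  return 0;
}
#endif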
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245        a chain is the best data structure.  */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def *next;
251 /* The rtx of register. */
252 rtx loc;
254 /* The declaration corresponding to LOC. */
255 decl_or_value dv;
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset;
259 } *attrs;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def *next;
267 /* The location (REG, MEM or VALUE). */
268 rtx loc;
270 /* The "value" stored in this location. */
271 rtx set_src;
273 /* Initialized? */
274 enum var_init_status init;
275 } *location_chain;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279        location of DV.  Each entry is also part of VALUE's linked list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
284 decl_or_value dv;
285 /* The dependency VALUE or DECL_DEBUG. */
286 rtx value;
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s *next;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s **pprev;
292 } loc_exp_dep;
295 /* This data structure holds information about the depth of a variable
296 expansion. */
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
301 operand. */
302 int complexity;
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
305 int entryvals;
306 } expand_depth;
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
310 struct onepart_aux
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep *backlinks;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
324 rtx from;
325 /* The depth of the cur_loc expression. */
326 expand_depth depth;
327       /* Dependencies actively used when expanding FROM into cur_loc.  */
328 vec<loc_exp_dep, va_heap, vl_embed> deps;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain;
337 /* Location which was last emitted to location list. */
338 rtx cur_loc;
340 union variable_aux
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux *onepaux;
347 } aux;
348 } variable_part;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
354 variables. */
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
358 NOT_ONEPART = 0,
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
360 ONEPART_VDECL = 1,
361 /* A DEBUG_EXPR_DECL. */
362 ONEPART_DEXPR = 2,
363 /* A VALUE. */
364 ONEPART_VALUE = 3
365 } onepart_enum_t;
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
372 decl_or_value dv;
374 /* Reference count. */
375 int refcount;
377 /* Number of variable parts. */
378 char n_var_parts;
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables;
387 /* The variable parts. */
388 variable_part var_part[1];
389 } *variable;
390 typedef const struct variable_def *const_variable;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395     /* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
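/* An illustrative sketch of the "evaluates MEM twice" caveat above: a
   macro argument with side effects runs once per textual occurrence, so
   INT_MEM_OFFSET must only be given side-effect-free arguments.  The
   macro and names here are hypothetical.  */
#if 0
#include <assert.h>

#define TWICE(x) ((x) ? (x) : 0)

int
main (void)
{
  int n = 0;
  int r = TWICE (++n);	/* ++n is evaluated in the test and again in
			   the result, leaving n == 2.  */
  assert (n == 2 && r == 2);
  return 0;
}
#endif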
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
401 variable. */
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
414 #else
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
417 #endif
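/* An illustrative standalone sketch (hypothetical struct, GNU C only)
   of the checked-accessor pattern above: a statement expression
   evaluates its operand once, runs the assertion, and yields a pointer
   that the surrounding * dereferences, so the macro stays an
   assignable lvalue.  */
#if 0
#include <assert.h>

struct part { int onepart; int offset; };

#define PART_OFFSET(p) __extension__			\
  (*({ struct part *const p_ = (p);			\
       assert (!p_->onepart);				\
       &p_->offset; }))

int
main (void)
{
  struct part pt = { 0, 0 };
  PART_OFFSET (&pt) = 8;	/* Usable on the left-hand side.  */
  assert (PART_OFFSET (&pt) == 8);
  return 0;
}
#endif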
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
421 allocated. */
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
424 : NULL)
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
427 : NULL)
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
432 : NULL)
436 typedef unsigned int dvuid;
438 /* Return the uid of DV. */
440 static inline dvuid
441 dv_uid (decl_or_value dv)
443 if (dv_is_value_p (dv))
444 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
445 else
446 return DECL_UID (dv_as_decl (dv));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid)
454 return uid;
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv)
462 return dv_uid2hash (dv_uid (dv));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type;
472 typedef void compare_type;
473 static inline hashval_t hash (const value_type *);
474 static inline bool equal (const value_type *, const compare_type *);
475 static inline void remove (value_type *);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
481 inline hashval_t
482 variable_hasher::hash (const value_type *v)
484 return dv_htab_hash (v->dv);
487 /* Compare the declaration of variable X with declaration Y. */
489 inline bool
490 variable_hasher::equal (const value_type *v, const compare_type *y)
492 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
494 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
499 inline void
500 variable_hasher::remove (value_type *var)
502 variable_htab_free (var);
505 typedef hash_table <variable_hasher> variable_table_type;
506 typedef variable_table_type::iterator variable_iterator_type;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
513 rtx insn;
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where;
518 /* The variables and values active at this point. */
519 variable_table_type vars;
520 } emit_note_data;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
527 int refcount;
529 /* Actual hash table. */
530 variable_table_type htab;
531 } *shared_hash;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs[FIRST_PSEUDO_REGISTER];
542 /* Variable locations. */
543 shared_hash vars;
545       /* Vars that are being traversed.  */
546 shared_hash traversed_vars;
547 } dataflow_set;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec<micro_operation> mos;
556 /* The IN and OUT set for dataflow analysis. */
557 dataflow_set in;
558 dataflow_set out;
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
564 dataflow_set *permp;
566 /* Has the block been visited in DFS? */
567 bool visited;
569 /* Has the block been flooded in VTA? */
570 bool flooded;
572 } *variable_tracking_info;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool;
592     /* Changed variables for which notes will be emitted.  */
593 static variable_table_type changed_variables;
595 /* Shall notes be emitted? */
596 static bool emit_notes;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type dropped_values;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs = NULL;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg {
611 rtx outgoing;
612 rtx incoming;
613 } parm_reg_t;
616 /* Vector of windowed parameter registers, if any. */
617 static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
618 #endif
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
625 HOST_WIDE_INT *);
626 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
627 HOST_WIDE_INT *);
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs *);
631 static void attrs_list_clear (attrs *);
632 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
633 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
634 static void attrs_list_copy (attrs *, attrs);
635 static void attrs_list_union (attrs *, attrs);
637 static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
638 variable var, enum var_init_status);
639 static void vars_copy (variable_table_type, variable_table_type);
640 static tree var_debug_decl (tree);
641 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
642 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
643 enum var_init_status, rtx);
644 static void var_reg_delete (dataflow_set *, rtx, bool);
645 static void var_regno_delete (dataflow_set *, int);
646 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
647 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
648 enum var_init_status, rtx);
649 static void var_mem_delete (dataflow_set *, rtx, bool);
651 static void dataflow_set_init (dataflow_set *);
652 static void dataflow_set_clear (dataflow_set *);
653 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set *, dataflow_set *);
656 static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type);
657 static bool canon_value_cmp (rtx, rtx);
658 static int loc_cmp (rtx, rtx);
659 static bool variable_part_different_p (variable_part *, variable_part *);
660 static bool onepart_variable_different_p (variable, variable);
661 static bool variable_different_p (variable, variable);
662 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
663 static void dataflow_set_destroy (dataflow_set *);
665 static bool contains_symbol_ref (rtx);
666 static bool track_expr_p (tree, bool);
667 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
668 static int add_uses (rtx *, void *);
669 static void add_uses_1 (rtx *, void *);
670 static void add_stores (rtx, const_rtx, void *);
671 static bool compute_bb_dataflow (basic_block);
672 static bool vt_find_locations (void);
674 static void dump_attrs_list (attrs);
675 static void dump_var (variable);
676 static void dump_vars (variable_table_type);
677 static void dump_dataflow_set (dataflow_set *);
678 static void dump_dataflow_sets (void);
680 static void set_dv_changed (decl_or_value, bool);
681 static void variable_was_changed (variable, dataflow_set *);
682 static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
683 decl_or_value, HOST_WIDE_INT,
684 enum var_init_status, rtx);
685 static void set_variable_part (dataflow_set *, rtx,
686 decl_or_value, HOST_WIDE_INT,
687 enum var_init_status, rtx, enum insert_option);
688 static variable_def **clobber_slot_part (dataflow_set *, rtx,
689 variable_def **, HOST_WIDE_INT, rtx);
690 static void clobber_variable_part (dataflow_set *, rtx,
691 decl_or_value, HOST_WIDE_INT, rtx);
692 static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
693 HOST_WIDE_INT);
694 static void delete_variable_part (dataflow_set *, rtx,
695 decl_or_value, HOST_WIDE_INT);
696 static void emit_notes_in_bb (basic_block, dataflow_set *);
697 static void vt_emit_notes (void);
699 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
700 static void vt_add_function_parameters (void);
701 static bool vt_initialize (void);
702 static void vt_finalize (void);
704     /* Given a SET, calculate the amounts of stack adjustment it contains,
705        PRE- and POST-modifying the stack pointer.
706 This function is similar to stack_adjust_offset. */
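/* For example (illustrative), a 4-byte push written as
   (set (mem:SI (pre_dec (reg sp))) (reg:SI 0)) adds 4 to *PRE, while an
   explicit (set (reg sp) (plus (reg sp) (const_int -16))) adds 16 to
   *POST: downward stack growth is counted as a positive adjustment.  */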
708 static void
709 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
710 HOST_WIDE_INT *post)
712 rtx src = SET_SRC (pattern);
713 rtx dest = SET_DEST (pattern);
714 enum rtx_code code;
716 if (dest == stack_pointer_rtx)
718 /* (set (reg sp) (plus (reg sp) (const_int))) */
719 code = GET_CODE (src);
720 if (! (code == PLUS || code == MINUS)
721 || XEXP (src, 0) != stack_pointer_rtx
722 || !CONST_INT_P (XEXP (src, 1)))
723 return;
725 if (code == MINUS)
726 *post += INTVAL (XEXP (src, 1));
727 else
728 *post -= INTVAL (XEXP (src, 1));
730 else if (MEM_P (dest))
732 /* (set (mem (pre_dec (reg sp))) (foo)) */
733 src = XEXP (dest, 0);
734 code = GET_CODE (src);
736 switch (code)
738 case PRE_MODIFY:
739 case POST_MODIFY:
740 if (XEXP (src, 0) == stack_pointer_rtx)
742 rtx val = XEXP (XEXP (src, 1), 1);
743 /* We handle only adjustments by constant amount. */
744 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
745 CONST_INT_P (val));
747 if (code == PRE_MODIFY)
748 *pre -= INTVAL (val);
749 else
750 *post -= INTVAL (val);
751 break;
753 return;
755 case PRE_DEC:
756 if (XEXP (src, 0) == stack_pointer_rtx)
758 *pre += GET_MODE_SIZE (GET_MODE (dest));
759 break;
761 return;
763 case POST_DEC:
764 if (XEXP (src, 0) == stack_pointer_rtx)
766 *post += GET_MODE_SIZE (GET_MODE (dest));
767 break;
769 return;
771 case PRE_INC:
772 if (XEXP (src, 0) == stack_pointer_rtx)
774 *pre -= GET_MODE_SIZE (GET_MODE (dest));
775 break;
777 return;
779 case POST_INC:
780 if (XEXP (src, 0) == stack_pointer_rtx)
782 *post -= GET_MODE_SIZE (GET_MODE (dest));
783 break;
785 return;
787 default:
788 return;
793     /* Given an INSN, calculate the amounts of stack adjustment it contains,
794        PRE- and POST-modifying the stack pointer.  */
796 static void
797 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
798 HOST_WIDE_INT *post)
800 rtx pattern;
802 *pre = 0;
803 *post = 0;
805 pattern = PATTERN (insn);
806 if (RTX_FRAME_RELATED_P (insn))
808 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
809 if (expr)
810 pattern = XEXP (expr, 0);
813 if (GET_CODE (pattern) == SET)
814 stack_adjust_offset_pre_post (pattern, pre, post);
815 else if (GET_CODE (pattern) == PARALLEL
816 || GET_CODE (pattern) == SEQUENCE)
818 int i;
820 /* There may be stack adjustments inside compound insns. Search
821 for them. */
822 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
823 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
824 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
828 /* Compute stack adjustments for all blocks by traversing DFS tree.
829 Return true when the adjustments on all incoming edges are consistent.
830 Heavily borrowed from pre_and_rev_post_order_compute. */
832 static bool
833 vt_stack_adjustments (void)
835 edge_iterator *stack;
836 int sp;
838 /* Initialize entry block. */
839 VTI (ENTRY_BLOCK_PTR)->visited = true;
840 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
841 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
843 /* Allocate stack for back-tracking up CFG. */
844 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
845 sp = 0;
847 /* Push the first edge on to the stack. */
848 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
850 while (sp)
852 edge_iterator ei;
853 basic_block src;
854 basic_block dest;
856 /* Look at the edge on the top of the stack. */
857 ei = stack[sp - 1];
858 src = ei_edge (ei)->src;
859 dest = ei_edge (ei)->dest;
861 /* Check if the edge destination has been visited yet. */
862 if (!VTI (dest)->visited)
864 rtx insn;
865 HOST_WIDE_INT pre, post, offset;
866 VTI (dest)->visited = true;
867 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
869 if (dest != EXIT_BLOCK_PTR)
870 for (insn = BB_HEAD (dest);
871 insn != NEXT_INSN (BB_END (dest));
872 insn = NEXT_INSN (insn))
873 if (INSN_P (insn))
875 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
876 offset += pre + post;
879 VTI (dest)->out.stack_adjust = offset;
881 if (EDGE_COUNT (dest->succs) > 0)
882 /* Since the DEST node has been visited for the first
883 time, check its successors. */
884 stack[sp++] = ei_start (dest->succs);
886 else
888 /* Check whether the adjustments on the edges are the same. */
889 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
891 free (stack);
892 return false;
895 if (! ei_one_before_end_p (ei))
896 /* Go to the next edge. */
897 ei_next (&stack[sp - 1]);
898 else
899 /* Return to previous level if there are no more edges. */
900 sp--;
904 free (stack);
905 return true;
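/* An illustrative standalone sketch (hypothetical graph type) of the
   explicit-stack DFS used above: one successor index per level replaces
   recursion; advance the top index when the destination was already
   visited, pop when a level's edges are exhausted.  */
#if 0
#include <stdbool.h>

#define MAX_NODES 16

struct node { int n_succs; int succ[2]; bool visited; };

static void
dfs (struct node *g, int entry)
{
  int stk[MAX_NODES + 1];	/* Node being expanded at each level...  */
  int pos[MAX_NODES + 1];	/* ...and its next successor index.  */
  int sp = 0;

  g[entry].visited = true;
  stk[sp] = entry;
  pos[sp++] = 0;

  while (sp)
    {
      int src = stk[sp - 1];
      if (pos[sp - 1] == g[src].n_succs)
	{
	  sp--;				/* Level exhausted: backtrack.  */
	  continue;
	}
      int dest = g[src].succ[pos[sp - 1]++];
      if (!g[dest].visited)
	{
	  g[dest].visited = true;	/* First visit: push DEST.  */
	  stk[sp] = dest;
	  pos[sp++] = 0;
	}
    }
}
#endif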
908     /* The arg_pointer_rtx or frame_pointer_rtx that stack_pointer_rtx or
909        hard_frame_pointer_rtx is being mapped to, and the offset to apply.  */
910 static rtx cfa_base_rtx;
911 static HOST_WIDE_INT cfa_base_offset;
913 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
914 or hard_frame_pointer_rtx. */
916 static inline rtx
917 compute_cfa_pointer (HOST_WIDE_INT adjustment)
919 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
922 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
923 or -1 if the replacement shouldn't be done. */
924 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
926 /* Data for adjust_mems callback. */
928 struct adjust_mem_data
930 bool store;
931 enum machine_mode mem_mode;
932 HOST_WIDE_INT stack_adjust;
933 rtx side_effects;
936 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
937        transformation of wider-mode arithmetic to a narrower mode,
938 -1 if it is suitable and subexpressions shouldn't be
939 traversed and 0 if it is suitable and subexpressions should
940 be traversed. Called through for_each_rtx. */
942 static int
943 use_narrower_mode_test (rtx *loc, void *data)
945 rtx subreg = (rtx) data;
947 if (CONSTANT_P (*loc))
948 return -1;
949 switch (GET_CODE (*loc))
951 case REG:
952 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
953 return 1;
954 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
955 *loc, subreg_lowpart_offset (GET_MODE (subreg),
956 GET_MODE (*loc))))
957 return 1;
958 return -1;
959 case PLUS:
960 case MINUS:
961 case MULT:
962 return 0;
963 case ASHIFT:
964 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
965 return 1;
966 else
967 return -1;
968 default:
969 return 1;
973 /* Transform X into narrower mode MODE from wider mode WMODE. */
975 static rtx
976 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
978 rtx op0, op1;
979 if (CONSTANT_P (x))
980 return lowpart_subreg (mode, x, wmode);
981 switch (GET_CODE (x))
983 case REG:
984 return lowpart_subreg (mode, x, wmode);
985 case PLUS:
986 case MINUS:
987 case MULT:
988 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
989 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
990 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
991 case ASHIFT:
992 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
993 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
994 default:
995 gcc_unreachable ();
999 /* Helper function for adjusting used MEMs. */
1001 static rtx
1002 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
1004 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
1005 rtx mem, addr = loc, tem;
1006 enum machine_mode mem_mode_save;
1007 bool store_save;
1008 switch (GET_CODE (loc))
1010 case REG:
1011 /* Don't do any sp or fp replacements outside of MEM addresses
1012 on the LHS. */
1013 if (amd->mem_mode == VOIDmode && amd->store)
1014 return loc;
1015 if (loc == stack_pointer_rtx
1016 && !frame_pointer_needed
1017 && cfa_base_rtx)
1018 return compute_cfa_pointer (amd->stack_adjust);
1019 else if (loc == hard_frame_pointer_rtx
1020 && frame_pointer_needed
1021 && hard_frame_pointer_adjustment != -1
1022 && cfa_base_rtx)
1023 return compute_cfa_pointer (hard_frame_pointer_adjustment);
1024 gcc_checking_assert (loc != virtual_incoming_args_rtx);
1025 return loc;
1026 case MEM:
1027 mem = loc;
1028 if (!amd->store)
1030 mem = targetm.delegitimize_address (mem);
1031 if (mem != loc && !MEM_P (mem))
1032 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
1035 addr = XEXP (mem, 0);
1036 mem_mode_save = amd->mem_mode;
1037 amd->mem_mode = GET_MODE (mem);
1038 store_save = amd->store;
1039 amd->store = false;
1040 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1041 amd->store = store_save;
1042 amd->mem_mode = mem_mode_save;
1043 if (mem == loc)
1044 addr = targetm.delegitimize_address (addr);
1045 if (addr != XEXP (mem, 0))
1046 mem = replace_equiv_address_nv (mem, addr);
1047 if (!amd->store)
1048 mem = avoid_constant_pool_reference (mem);
1049 return mem;
1050 case PRE_INC:
1051 case PRE_DEC:
1052 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1053 gen_int_mode (GET_CODE (loc) == PRE_INC
1054 ? GET_MODE_SIZE (amd->mem_mode)
1055 : -GET_MODE_SIZE (amd->mem_mode),
1056 GET_MODE (loc)));
1057 case POST_INC:
1058 case POST_DEC:
1059 if (addr == loc)
1060 addr = XEXP (loc, 0);
1061 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
1062 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1063 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
1064 gen_int_mode ((GET_CODE (loc) == PRE_INC
1065 || GET_CODE (loc) == POST_INC)
1066 ? GET_MODE_SIZE (amd->mem_mode)
1067 : -GET_MODE_SIZE (amd->mem_mode),
1068 GET_MODE (loc)));
1069 amd->side_effects = alloc_EXPR_LIST (0,
1070 gen_rtx_SET (VOIDmode,
1071 XEXP (loc, 0),
1072 tem),
1073 amd->side_effects);
1074 return addr;
1075 case PRE_MODIFY:
1076 addr = XEXP (loc, 1);
1077 case POST_MODIFY:
1078 if (addr == loc)
1079 addr = XEXP (loc, 0);
1080 gcc_assert (amd->mem_mode != VOIDmode);
1081 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1082 amd->side_effects = alloc_EXPR_LIST (0,
1083 gen_rtx_SET (VOIDmode,
1084 XEXP (loc, 0),
1085 XEXP (loc, 1)),
1086 amd->side_effects);
1087 return addr;
1088 case SUBREG:
1089 /* First try without delegitimization of whole MEMs and
1090 avoid_constant_pool_reference, which is more likely to succeed. */
1091 store_save = amd->store;
1092 amd->store = true;
1093 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
1094 data);
1095 amd->store = store_save;
1096 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
1097 if (mem == SUBREG_REG (loc))
1099 tem = loc;
1100 goto finish_subreg;
1102 tem = simplify_gen_subreg (GET_MODE (loc), mem,
1103 GET_MODE (SUBREG_REG (loc)),
1104 SUBREG_BYTE (loc));
1105 if (tem)
1106 goto finish_subreg;
1107 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1108 GET_MODE (SUBREG_REG (loc)),
1109 SUBREG_BYTE (loc));
1110 if (tem == NULL_RTX)
1111 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1112 finish_subreg:
1113 if (MAY_HAVE_DEBUG_INSNS
1114 && GET_CODE (tem) == SUBREG
1115 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1116 || GET_CODE (SUBREG_REG (tem)) == MINUS
1117 || GET_CODE (SUBREG_REG (tem)) == MULT
1118 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1119 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1120 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1121 && GET_MODE_SIZE (GET_MODE (tem))
1122 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1123 && subreg_lowpart_p (tem)
1124 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1125 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1126 GET_MODE (SUBREG_REG (tem)));
1127 return tem;
1128 case ASM_OPERANDS:
1129 /* Don't do any replacements in second and following
1130 ASM_OPERANDS of inline-asm with multiple sets.
1131 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1132 and ASM_OPERANDS_LABEL_VEC need to be equal between
1133 all the ASM_OPERANDs in the insn and adjust_insn will
1134 fix this up. */
1135 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1136 return loc;
1137 break;
1138 default:
1139 break;
1141 return NULL_RTX;
1144 /* Helper function for replacement of uses. */
1146 static void
1147 adjust_mem_uses (rtx *x, void *data)
1149 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1150 if (new_x != *x)
1151 validate_change (NULL_RTX, x, new_x, true);
1154 /* Helper function for replacement of stores. */
1156 static void
1157 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1159 if (MEM_P (loc))
1161 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1162 adjust_mems, data);
1163 if (new_dest != SET_DEST (expr))
1165 rtx xexpr = CONST_CAST_RTX (expr);
1166 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1171 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1172 replace them with their value in the insn and add the side-effects
1173 as other sets to the insn. */
1175 static void
1176 adjust_insn (basic_block bb, rtx insn)
1178 struct adjust_mem_data amd;
1179 rtx set;
1181 #ifdef HAVE_window_save
1182 /* If the target machine has an explicit window save instruction, the
1183 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1184 if (RTX_FRAME_RELATED_P (insn)
1185 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1187 unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
1188 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1189 parm_reg_t *p;
1191 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
1193 XVECEXP (rtl, 0, i * 2)
1194 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1195 /* Do not clobber the attached DECL, but only the REG. */
1196 XVECEXP (rtl, 0, i * 2 + 1)
1197 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1198 gen_raw_REG (GET_MODE (p->outgoing),
1199 REGNO (p->outgoing)));
1202 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1203 return;
1205 #endif
1207 amd.mem_mode = VOIDmode;
1208 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1209 amd.side_effects = NULL_RTX;
1211 amd.store = true;
1212 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1214 amd.store = false;
1215 if (GET_CODE (PATTERN (insn)) == PARALLEL
1216 && asm_noperands (PATTERN (insn)) > 0
1217 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1219 rtx body, set0;
1220 int i;
1222          /* inline-asm with multiple sets is a tiny bit more complicated,
1223 because the 3 vectors in ASM_OPERANDS need to be shared between
1224 all ASM_OPERANDS in the instruction. adjust_mems will
1225 not touch ASM_OPERANDS other than the first one, asm_noperands
1226 test above needs to be called before that (otherwise it would fail)
1227 and afterwards this code fixes it up. */
1228 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1229 body = PATTERN (insn);
1230 set0 = XVECEXP (body, 0, 0);
1231 gcc_checking_assert (GET_CODE (set0) == SET
1232 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1233 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1234 for (i = 1; i < XVECLEN (body, 0); i++)
1235 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1236 break;
1237 else
1239 set = XVECEXP (body, 0, i);
1240 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1241 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1242 == i);
1243 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1244 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1245 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1246 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1247 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1248 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1250 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1251 ASM_OPERANDS_INPUT_VEC (newsrc)
1252 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1253 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1254 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1255 ASM_OPERANDS_LABEL_VEC (newsrc)
1256 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1257 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1261 else
1262 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1264 /* For read-only MEMs containing some constant, prefer those
1265 constants. */
1266 set = single_set (insn);
1267 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1269 rtx note = find_reg_equal_equiv_note (insn);
1271 if (note && CONSTANT_P (XEXP (note, 0)))
1272 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1275 if (amd.side_effects)
1277 rtx *pat, new_pat, s;
1278 int i, oldn, newn;
1280 pat = &PATTERN (insn);
1281 if (GET_CODE (*pat) == COND_EXEC)
1282 pat = &COND_EXEC_CODE (*pat);
1283 if (GET_CODE (*pat) == PARALLEL)
1284 oldn = XVECLEN (*pat, 0);
1285 else
1286 oldn = 1;
1287 for (s = amd.side_effects, newn = 0; s; newn++)
1288 s = XEXP (s, 1);
1289 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1290 if (GET_CODE (*pat) == PARALLEL)
1291 for (i = 0; i < oldn; i++)
1292 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1293 else
1294 XVECEXP (new_pat, 0, 0) = *pat;
1295 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1296 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1297 free_EXPR_LIST_list (&amd.side_effects);
1298 validate_change (NULL_RTX, pat, new_pat, true);
1302 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1303 static inline rtx
1304 dv_as_rtx (decl_or_value dv)
1306 tree decl;
1308 if (dv_is_value_p (dv))
1309 return dv_as_value (dv);
1311 decl = dv_as_decl (dv);
1313 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1314 return DECL_RTL_KNOWN_SET (decl);
1317 /* Return nonzero if a decl_or_value must not have more than one
1318 variable part. The returned value discriminates among various
1319       kinds of one-part DVs according to enum onepart_enum.  */
1320 static inline onepart_enum_t
1321 dv_onepart_p (decl_or_value dv)
1323 tree decl;
1325 if (!MAY_HAVE_DEBUG_INSNS)
1326 return NOT_ONEPART;
1328 if (dv_is_value_p (dv))
1329 return ONEPART_VALUE;
1331 decl = dv_as_decl (dv);
1333 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1334 return ONEPART_DEXPR;
1336 if (target_for_debug_bind (decl) != NULL_TREE)
1337 return ONEPART_VDECL;
1339 return NOT_ONEPART;
1342 /* Return the variable pool to be used for a dv of type ONEPART. */
1343 static inline alloc_pool
1344 onepart_pool (onepart_enum_t onepart)
1346 return onepart ? valvar_pool : var_pool;
1349 /* Build a decl_or_value out of a decl. */
1350 static inline decl_or_value
1351 dv_from_decl (tree decl)
1353 decl_or_value dv;
1354 dv = decl;
1355 gcc_checking_assert (dv_is_decl_p (dv));
1356 return dv;
1359 /* Build a decl_or_value out of a value. */
1360 static inline decl_or_value
1361 dv_from_value (rtx value)
1363 decl_or_value dv;
1364 dv = value;
1365 gcc_checking_assert (dv_is_value_p (dv));
1366 return dv;
1369 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1370 static inline decl_or_value
1371 dv_from_rtx (rtx x)
1373 decl_or_value dv;
1375 switch (GET_CODE (x))
1377 case DEBUG_EXPR:
1378 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
1379 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1380 break;
1382 case VALUE:
1383 dv = dv_from_value (x);
1384 break;
1386 default:
1387 gcc_unreachable ();
1390 return dv;
1393 extern void debug_dv (decl_or_value dv);
1395 DEBUG_FUNCTION void
1396 debug_dv (decl_or_value dv)
1398 if (dv_is_value_p (dv))
1399 debug_rtx (dv_as_value (dv));
1400 else
1401 debug_generic_stmt (dv_as_decl (dv));
1404 static void loc_exp_dep_clear (variable var);
1406 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1408 static void
1409 variable_htab_free (void *elem)
1411 int i;
1412 variable var = (variable) elem;
1413 location_chain node, next;
1415 gcc_checking_assert (var->refcount > 0);
1417 var->refcount--;
1418 if (var->refcount > 0)
1419 return;
1421 for (i = 0; i < var->n_var_parts; i++)
1423 for (node = var->var_part[i].loc_chain; node; node = next)
1425 next = node->next;
1426 pool_free (loc_chain_pool, node);
1428 var->var_part[i].loc_chain = NULL;
1430 if (var->onepart && VAR_LOC_1PAUX (var))
1432 loc_exp_dep_clear (var);
1433 if (VAR_LOC_DEP_LST (var))
1434 VAR_LOC_DEP_LST (var)->pprev = NULL;
1435 XDELETE (VAR_LOC_1PAUX (var));
1436 /* These may be reused across functions, so reset
1437 e.g. NO_LOC_P. */
1438 if (var->onepart == ONEPART_DEXPR)
1439 set_dv_changed (var->dv, true);
1441 pool_free (onepart_pool (var->onepart), var);
1444 /* Initialize the set (array) SET of attrs to empty lists. */
1446 static void
1447 init_attrs_list_set (attrs *set)
1449 int i;
1451 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1452 set[i] = NULL;
1455 /* Make the list *LISTP empty. */
1457 static void
1458 attrs_list_clear (attrs *listp)
1460 attrs list, next;
1462 for (list = *listp; list; list = next)
1464 next = list->next;
1465 pool_free (attrs_pool, list);
1467 *listp = NULL;
1470    /* Return the member of LIST matching DV and OFFSET, or NULL if none.  */
1472 static attrs
1473 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1475 for (; list; list = list->next)
1476 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1477 return list;
1478 return NULL;
1481 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1483 static void
1484 attrs_list_insert (attrs *listp, decl_or_value dv,
1485 HOST_WIDE_INT offset, rtx loc)
1487 attrs list;
1489 list = (attrs) pool_alloc (attrs_pool);
1490 list->loc = loc;
1491 list->dv = dv;
1492 list->offset = offset;
1493 list->next = *listp;
1494 *listp = list;
1497 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1499 static void
1500 attrs_list_copy (attrs *dstp, attrs src)
1502 attrs n;
1504 attrs_list_clear (dstp);
1505 for (; src; src = src->next)
1507 n = (attrs) pool_alloc (attrs_pool);
1508 n->loc = src->loc;
1509 n->dv = src->dv;
1510 n->offset = src->offset;
1511 n->next = *dstp;
1512 *dstp = n;
1516 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1518 static void
1519 attrs_list_union (attrs *dstp, attrs src)
1521 for (; src; src = src->next)
1523 if (!attrs_list_member (*dstp, src->dv, src->offset))
1524 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1528 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1529 *DSTP. */
1531 static void
1532 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1534 gcc_assert (!*dstp);
1535 for (; src; src = src->next)
1537 if (!dv_onepart_p (src->dv))
1538 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1540 for (src = src2; src; src = src->next)
1542 if (!dv_onepart_p (src->dv)
1543 && !attrs_list_member (*dstp, src->dv, src->offset))
1544 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1548 /* Shared hashtable support. */
1550 /* Return true if VARS is shared. */
1552 static inline bool
1553 shared_hash_shared (shared_hash vars)
1555 return vars->refcount > 1;
1558 /* Return the hash table for VARS. */
1560 static inline variable_table_type
1561 shared_hash_htab (shared_hash vars)
1563 return vars->htab;
1566    /* Return true if VAR is shared, possibly because VARS is shared.  */
1568 static inline bool
1569 shared_var_p (variable var, shared_hash vars)
1571 /* Don't count an entry in the changed_variables table as a duplicate. */
1572 return ((var->refcount > 1 + (int) var->in_changed_variables)
1573 || shared_hash_shared (vars));
1576 /* Copy variables into a new hash table. */
1578 static shared_hash
1579 shared_hash_unshare (shared_hash vars)
1581 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1582 gcc_assert (vars->refcount > 1);
1583 new_vars->refcount = 1;
1584 new_vars->htab.create (vars->htab.elements () + 3);
1585 vars_copy (new_vars->htab, vars->htab);
1586 vars->refcount--;
1587 return new_vars;
1590 /* Increment reference counter on VARS and return it. */
1592 static inline shared_hash
1593 shared_hash_copy (shared_hash vars)
1595 vars->refcount++;
1596 return vars;
1599 /* Decrement reference counter and destroy hash table if not shared
1600 anymore. */
1602 static void
1603 shared_hash_destroy (shared_hash vars)
1605 gcc_checking_assert (vars->refcount > 0);
1606 if (--vars->refcount == 0)
1608 vars->htab.dispose ();
1609 pool_free (shared_hash_pool, vars);
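/* An illustrative standalone sketch (hypothetical table type) of the
   copy-on-write discipline around shared_hash above: copies only bump
   a reference count, and a writer unshares first when the count
   exceeds one, so the many identical IN/OUT sets of the dataflow
   problem can share one table.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct shared_tab
{
  int refcount;
  int vals[8];			/* Stand-in for the real hash table.  */
};

static struct shared_tab *
tab_copy (struct shared_tab *t)
{
  t->refcount++;		/* O(1): no data is duplicated yet.  */
  return t;
}

static struct shared_tab *
tab_unshare (struct shared_tab *t)
{
  if (t->refcount == 1)
    return t;			/* Sole owner may write in place.  */
  struct shared_tab *n = malloc (sizeof *n);
  memcpy (n->vals, t->vals, sizeof n->vals);
  n->refcount = 1;
  t->refcount--;
  return n;
}

static void
tab_set (struct shared_tab **pt, int i, int v)
{
  *pt = tab_unshare (*pt);	/* Writers must unshare first.  */
  (*pt)->vals[i] = v;
}

static void
tab_destroy (struct shared_tab *t)
{
  if (--t->refcount == 0)
    free (t);
}
#endif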
1613 /* Unshare *PVARS if shared and return slot for DV. If INS is
1614 INSERT, insert it if not already present. */
1616 static inline variable_def **
1617 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1618 hashval_t dvhash, enum insert_option ins)
1620 if (shared_hash_shared (*pvars))
1621 *pvars = shared_hash_unshare (*pvars);
1622 return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins);
1625 static inline variable_def **
1626 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1627 enum insert_option ins)
1629 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1632 /* Return slot for DV, if it is already present in the hash table.
1633       If it is not present, insert it only if VARS is not shared, otherwise
1634 return NULL. */
1636 static inline variable_def **
1637 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1639 return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash,
1640 shared_hash_shared (vars)
1641 ? NO_INSERT : INSERT);
1644 static inline variable_def **
1645 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1647 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1650 /* Return slot for DV only if it is already present in the hash table. */
1652 static inline variable_def **
1653 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1654 hashval_t dvhash)
1656 return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT);
1659 static inline variable_def **
1660 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1662 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1665 /* Return variable for DV or NULL if not already present in the hash
1666 table. */
1668 static inline variable
1669 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1671 return shared_hash_htab (vars).find_with_hash (dv, dvhash);
1674 static inline variable
1675 shared_hash_find (shared_hash vars, decl_or_value dv)
1677 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1680    /* Return true if TVAL is better than CVAL as a canonical value.  We
1681 choose lowest-numbered VALUEs, using the RTX address as a
1682 tie-breaker. The idea is to arrange them into a star topology,
1683 such that all of them are at most one step away from the canonical
1684 value, and the canonical value has backlinks to all of them, in
1685 addition to all the actual locations. We don't enforce this
1686 topology throughout the entire dataflow analysis, though.
1689 static inline bool
1690 canon_value_cmp (rtx tval, rtx cval)
1692 return !cval
1693 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
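/* For example (illustrative), if VALUEs with uids 1, 3 and 7 are found
   equivalent, uid 1 becomes the canonical one: 3 and 7 each record one
   link to 1, and 1 records backlinks to 3 and 7, so any two members of
   the class are connected through at most two steps.  */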
1696 static bool dst_can_be_shared;
1698    /* Return a copy of a variable VAR and insert it into dataflow set SET.  */
1700 static variable_def **
1701 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1702 enum var_init_status initialized)
1704 variable new_var;
1705 int i;
1707 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1708 new_var->dv = var->dv;
1709 new_var->refcount = 1;
1710 var->refcount--;
1711 new_var->n_var_parts = var->n_var_parts;
1712 new_var->onepart = var->onepart;
1713 new_var->in_changed_variables = false;
1715 if (! flag_var_tracking_uninit)
1716 initialized = VAR_INIT_STATUS_INITIALIZED;
1718 for (i = 0; i < var->n_var_parts; i++)
1720 location_chain node;
1721 location_chain *nextp;
1723 if (i == 0 && var->onepart)
1725 /* One-part auxiliary data is only used while emitting
1726 notes, so propagate it to the new variable in the active
1727 dataflow set. If we're not emitting notes, this will be
1728 a no-op. */
1729 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1730 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1731 VAR_LOC_1PAUX (var) = NULL;
1733 else
1734 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1735 nextp = &new_var->var_part[i].loc_chain;
1736 for (node = var->var_part[i].loc_chain; node; node = node->next)
1738 location_chain new_lc;
1740 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1741 new_lc->next = NULL;
1742 if (node->init > initialized)
1743 new_lc->init = node->init;
1744 else
1745 new_lc->init = initialized;
1746 if (node->set_src && !(MEM_P (node->set_src)))
1747 new_lc->set_src = node->set_src;
1748 else
1749 new_lc->set_src = NULL;
1750 new_lc->loc = node->loc;
1752 *nextp = new_lc;
1753 nextp = &new_lc->next;
1756 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1759 dst_can_be_shared = false;
1760 if (shared_hash_shared (set->vars))
1761 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1762 else if (set->traversed_vars && set->vars != set->traversed_vars)
1763 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1764 *slot = new_var;
1765 if (var->in_changed_variables)
1767 variable_def **cslot
1768 = changed_variables.find_slot_with_hash (var->dv,
1769 dv_htab_hash (var->dv), NO_INSERT);
1770 gcc_assert (*cslot == (void *) var);
1771 var->in_changed_variables = false;
1772 variable_htab_free (var);
1773 *cslot = new_var;
1774 new_var->in_changed_variables = true;
1776 return slot;
1779 /* Copy all variables from hash table SRC to hash table DST. */
1781 static void
1782 vars_copy (variable_table_type dst, variable_table_type src)
1784 variable_iterator_type hi;
1785 variable var;
1787 FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi)
1789 variable_def **dstp;
1790 var->refcount++;
1791 dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv), INSERT);
1792 *dstp = var;
1796 /* Map a decl to its main debug decl. */
1798 static inline tree
1799 var_debug_decl (tree decl)
1801 if (decl && TREE_CODE (decl) == VAR_DECL
1802 && DECL_HAS_DEBUG_EXPR_P (decl))
1804 tree debugdecl = DECL_DEBUG_EXPR (decl);
1805 if (DECL_P (debugdecl))
1806 decl = debugdecl;
1809 return decl;
1812 /* Set the register LOC to contain DV, OFFSET. */
1814 static void
1815 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1816 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1817 enum insert_option iopt)
1819 attrs node;
1820 bool decl_p = dv_is_decl_p (dv);
1822 if (decl_p)
1823 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1825 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1826 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1827 && node->offset == offset)
1828 break;
1829 if (!node)
1830 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1831 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1834 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1836 static void
1837 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1838 rtx set_src)
1840 tree decl = REG_EXPR (loc);
1841 HOST_WIDE_INT offset = REG_OFFSET (loc);
1843 var_reg_decl_set (set, loc, initialized,
1844 dv_from_decl (decl), offset, set_src, INSERT);
1847 static enum var_init_status
1848 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1850 variable var;
1851 int i;
1852 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1854 if (! flag_var_tracking_uninit)
1855 return VAR_INIT_STATUS_INITIALIZED;
1857 var = shared_hash_find (set->vars, dv);
1858 if (var)
1860 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1862 location_chain nextp;
1863 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1864 if (rtx_equal_p (nextp->loc, loc))
1866 ret_val = nextp->init;
1867 break;
1872 return ret_val;
1875 /* Delete current content of register LOC in dataflow set SET and set
1876 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1877 MODIFY is true, any other live copies of the same variable part are
1878 also deleted from the dataflow set, otherwise the variable part is
1879 assumed to be copied from another location holding the same
1880 part. */
1882 static void
1883 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1884 enum var_init_status initialized, rtx set_src)
1886 tree decl = REG_EXPR (loc);
1887 HOST_WIDE_INT offset = REG_OFFSET (loc);
1888 attrs node, next;
1889 attrs *nextp;
1891 decl = var_debug_decl (decl);
1893 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1894 initialized = get_init_value (set, loc, dv_from_decl (decl));
1896 nextp = &set->regs[REGNO (loc)];
1897 for (node = *nextp; node; node = next)
1899 next = node->next;
1900 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1902 delete_variable_part (set, node->loc, node->dv, node->offset);
1903 pool_free (attrs_pool, node);
1904 *nextp = next;
1906 else
1908 node->loc = loc;
1909 nextp = &node->next;
1912 if (modify)
1913 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1914 var_reg_set (set, loc, initialized, set_src);
1917 /* Delete the association of register LOC in dataflow set SET with any
1918 variables that aren't onepart. If CLOBBER is true, also delete any
1919 other live copies of the same variable part, and delete the
1920 association with onepart dvs too. */
1922 static void
1923 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1925 attrs *nextp = &set->regs[REGNO (loc)];
1926 attrs node, next;
1928 if (clobber)
1930 tree decl = REG_EXPR (loc);
1931 HOST_WIDE_INT offset = REG_OFFSET (loc);
1933 decl = var_debug_decl (decl);
1935 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1938 for (node = *nextp; node; node = next)
1940 next = node->next;
1941 if (clobber || !dv_onepart_p (node->dv))
1943 delete_variable_part (set, node->loc, node->dv, node->offset);
1944 pool_free (attrs_pool, node);
1945 *nextp = next;
1947 else
1948 nextp = &node->next;
1952 /* Delete content of register with number REGNO in dataflow set SET. */
1954 static void
1955 var_regno_delete (dataflow_set *set, int regno)
1957 attrs *reg = &set->regs[regno];
1958 attrs node, next;
1960 for (node = *reg; node; node = next)
1962 next = node->next;
1963 delete_variable_part (set, node->loc, node->dv, node->offset);
1964 pool_free (attrs_pool, node);
1966 *reg = NULL;
1969 /* Return true if I is the negated value of a power of two. */
1970 static bool
1971 negative_power_of_two_p (HOST_WIDE_INT i)
1973 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1974 return x == (x & -x);
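/* Worked example (a sketch): for I == -8, X == 8 and X & -X == 8, so
   the predicate holds; for I == -12, X == 12 but X & -X == 4, so it
   does not.  X & -X isolates the lowest set bit of X, and a power of
   two is its own lowest set bit.  Note that I == 0 also passes the
   test, but the alignment masks this is applied to, such as the -16
   in (and:SI (reg:SI sp) (const_int -16)), are never zero.  */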
1977 /* Strip constant offsets and alignments off of LOC. Return the base
1978 expression. */
1980 static rtx
1981 vt_get_canonicalize_base (rtx loc)
1983 while ((GET_CODE (loc) == PLUS
1984 || GET_CODE (loc) == AND)
1985 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1986 && (GET_CODE (loc) != AND
1987 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
1988 loc = XEXP (loc, 0);
1990 return loc;
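/* For instance (a sketch, with a hypothetical value 42:42), the
   stack-style address

     (plus (and (plus (value 42:42) (const_int -32)) (const_int -16))
           (const_int 8))

   strips down to (value 42:42): PLUSes of a CONST_INT are peeled
   unconditionally, ANDs only when the mask is a negated power of two,
   i.e. an alignment operation.  */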
1993 /* This caches canonicalized addresses for VALUEs, computed using
1994 information in the global cselib table. */
1995 static struct pointer_map_t *global_get_addr_cache;
1997 /* This caches canonicalized addresses for VALUEs, computed using
1998 information from the global cache and information pertaining to a
1999 basic block being analyzed. */
2000 static struct pointer_map_t *local_get_addr_cache;
2002 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2004 /* Return the canonical address for LOC, that must be a VALUE, using a
2005 cached global equivalence or computing it and storing it in the
2006 global cache. */
2008 static rtx
2009 get_addr_from_global_cache (rtx const loc)
2011 rtx x;
2012 void **slot;
2014 gcc_checking_assert (GET_CODE (loc) == VALUE);
2016 slot = pointer_map_insert (global_get_addr_cache, loc);
2017 if (*slot)
2018 return (rtx)*slot;
2020 x = canon_rtx (get_addr (loc));
2022 /* Tentative, avoiding infinite recursion. */
2023 *slot = x;
2025 if (x != loc)
2027 rtx nx = vt_canonicalize_addr (NULL, x);
2028 if (nx != x)
2030 /* The table may have moved during recursion, recompute
2031 SLOT. */
2032 slot = pointer_map_contains (global_get_addr_cache, loc);
2033 *slot = x = nx;
2037 return x;
2040 /* Return the canonical address for LOC, that must be a VALUE, using a
2041 cached local equivalence or computing it and storing it in the
2042 local cache. */
2044 static rtx
2045 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2047 rtx x;
2048 void **slot;
2049 decl_or_value dv;
2050 variable var;
2051 location_chain l;
2053 gcc_checking_assert (GET_CODE (loc) == VALUE);
2055 slot = pointer_map_insert (local_get_addr_cache, loc);
2056 if (*slot)
2057 return (rtx)*slot;
2059 x = get_addr_from_global_cache (loc);
2061 /* Tentative, avoiding infinite recursion. */
2062 *slot = x;
2064 /* Recurse to cache the local expansion of X, or to search for
2065 a VALUE in the expansion. */
2066 if (x != loc)
2068 rtx nx = vt_canonicalize_addr (set, x);
2069 if (nx != x)
2071 slot = pointer_map_contains (local_get_addr_cache, loc);
2072 *slot = x = nx;
2074 return x;
2077 dv = dv_from_rtx (x);
2078 var = shared_hash_find (set->vars, dv);
2079 if (!var)
2080 return x;
2082 /* Look for an improved equivalent expression. */
2083 for (l = var->var_part[0].loc_chain; l; l = l->next)
2085 rtx base = vt_get_canonicalize_base (l->loc);
2086 if (GET_CODE (base) == VALUE
2087 && canon_value_cmp (base, loc))
2089 rtx nx = vt_canonicalize_addr (set, l->loc);
2090 if (x != nx)
2092 slot = pointer_map_contains (local_get_addr_cache, loc);
2093 *slot = x = nx;
2095 break;
2099 return x;
2102 /* Canonicalize LOC using equivalences from SET in addition to those
2103 in the cselib static table. It expects a VALUE-based expression,
2104 and it will only substitute VALUEs with other VALUEs or
2105 function-global equivalences, so that, if two addresses have base
2106 VALUEs that are locally or globally related in ways that
2107 memrefs_conflict_p cares about, they will both canonicalize to
2108 expressions that have the same base VALUE.
2110 The use of VALUEs as canonical base addresses enables the canonical
2111 RTXs to remain unchanged globally, if they resolve to a constant,
2112 or throughout a basic block otherwise, so that they can be cached
2113 and the cache need not be invalidated when REGs, MEMs or such
2114 change. */
2116 static rtx
2117 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2119 HOST_WIDE_INT ofst = 0;
2120 enum machine_mode mode = GET_MODE (oloc);
2121 rtx loc = oloc;
2122 rtx x;
2123 bool retry = true;
2125 while (retry)
2127 while (GET_CODE (loc) == PLUS
2128 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2130 ofst += INTVAL (XEXP (loc, 1));
2131 loc = XEXP (loc, 0);
2134 /* Alignment operations can't normally be combined, so just
2135 canonicalize the base and we're done. We'll normally have
2136 only one stack alignment anyway. */
2137 if (GET_CODE (loc) == AND
2138 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2139 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2141 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2142 if (x != XEXP (loc, 0))
2143 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2144 retry = false;
2147 if (GET_CODE (loc) == VALUE)
2149 if (set)
2150 loc = get_addr_from_local_cache (set, loc);
2151 else
2152 loc = get_addr_from_global_cache (loc);
2154 /* Consolidate plus_constants. */
2155 while (ofst && GET_CODE (loc) == PLUS
2156 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2158 ofst += INTVAL (XEXP (loc, 1));
2159 loc = XEXP (loc, 0);
2162 retry = false;
2164 else
2166 x = canon_rtx (loc);
2167 if (retry)
2168 retry = (x != loc);
2169 loc = x;
2173 /* Add OFST back in. */
2174 if (ofst)
2176 /* Don't build new RTL if we can help it. */
2177 if (GET_CODE (oloc) == PLUS
2178 && XEXP (oloc, 0) == loc
2179 && INTVAL (XEXP (oloc, 1)) == ofst)
2180 return oloc;
2182 loc = plus_constant (mode, loc, ofst);
2185 return loc;
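/* A sketch of the loop above on a hypothetical frame address:
   starting from (plus (value 7:7) (const_int 12)), the constant is
   folded into OFST, the VALUE is then looked up in the local (or
   global) cache, yielding, say, (plus (value 5:5) (const_int -4)),
   whose constant folds into OFST as well; the result is
   plus_constant (mode, (value 5:5), 8).  Both addresses thus share
   the base VALUE 5:5 for memrefs_conflict_p purposes.  */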
2188 /* Return true iff there's a true dependence between MLOC and LOC.
2189 MADDR must be a canonicalized version of MLOC's address. */
2191 static inline bool
2192 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2194 if (GET_CODE (loc) != MEM)
2195 return false;
2197 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2198 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2199 return false;
2201 return true;
2204 /* Hold parameters for the hashtab traversal function
2205 drop_overlapping_mem_locs, see below. */
2207 struct overlapping_mems
2209 dataflow_set *set;
2210 rtx loc, addr;
2213 /* Remove all MEMs that overlap with COMS->LOC from the location list
2214 of a hash table entry for a value. COMS->ADDR must be a
2215 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2216 canonicalized itself. */
2218 static int
2219 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2221 dataflow_set *set = coms->set;
2222 rtx mloc = coms->loc, addr = coms->addr;
2223 variable var = *slot;
2225 if (var->onepart == ONEPART_VALUE)
2227 location_chain loc, *locp;
2228 bool changed = false;
2229 rtx cur_loc;
2231 gcc_assert (var->n_var_parts == 1);
2233 if (shared_var_p (var, set->vars))
2235 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2236 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2237 break;
2239 if (!loc)
2240 return 1;
2242 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2243 var = *slot;
2244 gcc_assert (var->n_var_parts == 1);
2247 if (VAR_LOC_1PAUX (var))
2248 cur_loc = VAR_LOC_FROM (var);
2249 else
2250 cur_loc = var->var_part[0].cur_loc;
2252 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2253 loc; loc = *locp)
2255 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2257 locp = &loc->next;
2258 continue;
2261 *locp = loc->next;
2262 /* If we have deleted the location which was last emitted,
2263 we have to emit a new location, so add the variable to the
2264 set of changed variables. */
2265 if (cur_loc == loc->loc)
2267 changed = true;
2268 var->var_part[0].cur_loc = NULL;
2269 if (VAR_LOC_1PAUX (var))
2270 VAR_LOC_FROM (var) = NULL;
2272 pool_free (loc_chain_pool, loc);
2275 if (!var->var_part[0].loc_chain)
2277 var->n_var_parts--;
2278 changed = true;
2280 if (changed)
2281 variable_was_changed (var, set);
2284 return 1;
2287 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2289 static void
2290 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2292 struct overlapping_mems coms;
2294 gcc_checking_assert (GET_CODE (loc) == MEM);
2296 coms.set = set;
2297 coms.loc = canon_rtx (loc);
2298 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2300 set->traversed_vars = set->vars;
2301 shared_hash_htab (set->vars)
2302 .traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2303 set->traversed_vars = NULL;
2306 /* Set the location of DV, OFFSET as the MEM LOC. */
2308 static void
2309 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2310 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2311 enum insert_option iopt)
2313 if (dv_is_decl_p (dv))
2314 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2316 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2319 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2320 SET to LOC.
2321 Adjust the address first if it is stack pointer based. */
2323 static void
2324 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2325 rtx set_src)
2327 tree decl = MEM_EXPR (loc);
2328 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2330 var_mem_decl_set (set, loc, initialized,
2331 dv_from_decl (decl), offset, set_src, INSERT);
2334 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2335 dataflow set SET to LOC. If MODIFY is true, any other live copies
2336 of the same variable part are also deleted from the dataflow set,
2337 otherwise the variable part is assumed to be copied from another
2338 location holding the same part.
2339 Adjust the address first if it is stack pointer based. */
2341 static void
2342 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2343 enum var_init_status initialized, rtx set_src)
2345 tree decl = MEM_EXPR (loc);
2346 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2348 clobber_overlapping_mems (set, loc);
2349 decl = var_debug_decl (decl);
2351 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2352 initialized = get_init_value (set, loc, dv_from_decl (decl));
2354 if (modify)
2355 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2356 var_mem_set (set, loc, initialized, set_src);
2359 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2360 true, also delete any other live copies of the same variable part.
2361 Adjust the address first if it is stack pointer based. */
2363 static void
2364 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2366 tree decl = MEM_EXPR (loc);
2367 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2369 clobber_overlapping_mems (set, loc);
2370 decl = var_debug_decl (decl);
2371 if (clobber)
2372 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2373 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2376 /* Return true if LOC should not be expanded for location expressions,
2377 or used in them. */
2379 static inline bool
2380 unsuitable_loc (rtx loc)
2382 switch (GET_CODE (loc))
2384 case PC:
2385 case SCRATCH:
2386 case CC0:
2387 case ASM_INPUT:
2388 case ASM_OPERANDS:
2389 return true;
2391 default:
2392 return false;
2396 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2397 bound to it. */
2399 static inline void
2400 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2402 if (REG_P (loc))
2404 if (modified)
2405 var_regno_delete (set, REGNO (loc));
2406 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2407 dv_from_value (val), 0, NULL_RTX, INSERT);
2409 else if (MEM_P (loc))
2411 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2413 if (modified)
2414 clobber_overlapping_mems (set, loc);
2416 if (l && GET_CODE (l->loc) == VALUE)
2417 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2419 /* If this MEM is a global constant, we don't need it in the
2420 dynamic tables. ??? We should test this before emitting the
2421 micro-op in the first place. */
2422 while (l)
2423 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2424 break;
2425 else
2426 l = l->next;
2428 if (!l)
2429 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2430 dv_from_value (val), 0, NULL_RTX, INSERT);
2432 else
2434 /* Other kinds of equivalences are necessarily static, at least
2435 so long as we do not perform substitutions while merging
2436 expressions. */
2437 gcc_unreachable ();
2438 set_variable_part (set, loc, dv_from_value (val), 0,
2439 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2443 /* Bind a value to a location it was just stored in. If MODIFIED
2444 holds, assume the location was modified, detaching it from any
2445 values bound to it. */
2447 static void
2448 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2450 cselib_val *v = CSELIB_VAL_PTR (val);
2452 gcc_assert (cselib_preserved_value_p (v));
2454 if (dump_file)
2456 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2457 print_inline_rtx (dump_file, loc, 0);
2458 fprintf (dump_file, " evaluates to ");
2459 print_inline_rtx (dump_file, val, 0);
2460 if (v->locs)
2462 struct elt_loc_list *l;
2463 for (l = v->locs; l; l = l->next)
2465 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2466 print_inline_rtx (dump_file, l->loc, 0);
2469 fprintf (dump_file, "\n");
2472 gcc_checking_assert (!unsuitable_loc (loc));
2474 val_bind (set, val, loc, modified);
2477 /* Clear (canonical address) slots that reference X. */
2479 static bool
2480 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED,
2481 void **slot, void *x)
2483 if (vt_get_canonicalize_base ((rtx)*slot) == x)
2484 *slot = NULL;
2485 return true;
2488 /* Reset this node, detaching all its equivalences and redirecting
2489 them to the canonical value that remains, if any. */
2491 static void
2492 val_reset (dataflow_set *set, decl_or_value dv)
2494 variable var = shared_hash_find (set->vars, dv);
2495 location_chain node;
2496 rtx cval;
2498 if (!var || !var->n_var_parts)
2499 return;
2501 gcc_assert (var->n_var_parts == 1);
2503 if (var->onepart == ONEPART_VALUE)
2505 rtx x = dv_as_value (dv);
2506 void **slot;
2508 /* Relationships in the global cache don't change, so reset the
2509 local cache entry only. */
2510 slot = pointer_map_contains (local_get_addr_cache, x);
2511 if (slot)
2513 /* If the value resolved back to itself, odds are that other
2514 values may have cached it too. These entries now refer
2515 to the old X, so detach them too. Entries that used the
2516 old X but resolved to something else remain ok as long as
2517 that something else isn't also reset. */
2518 if (*slot == x)
2519 pointer_map_traverse (local_get_addr_cache,
2520 local_get_addr_clear_given_value, x);
2521 *slot = NULL;
2525 cval = NULL;
2526 for (node = var->var_part[0].loc_chain; node; node = node->next)
2527 if (GET_CODE (node->loc) == VALUE
2528 && canon_value_cmp (node->loc, cval))
2529 cval = node->loc;
2531 for (node = var->var_part[0].loc_chain; node; node = node->next)
2532 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2534 /* Redirect the equivalence link to the new canonical
2535 value, or simply remove it if it would point at
2536 itself. */
2537 if (cval)
2538 set_variable_part (set, cval, dv_from_value (node->loc),
2539 0, node->init, node->set_src, NO_INSERT);
2540 delete_variable_part (set, dv_as_value (dv),
2541 dv_from_value (node->loc), 0);
2544 if (cval)
2546 decl_or_value cdv = dv_from_value (cval);
2548 /* Keep the remaining values connected, accumulating links
2549 in the canonical value. */
2550 for (node = var->var_part[0].loc_chain; node; node = node->next)
2552 if (node->loc == cval)
2553 continue;
2554 else if (GET_CODE (node->loc) == REG)
2555 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2556 node->set_src, NO_INSERT);
2557 else if (GET_CODE (node->loc) == MEM)
2558 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2559 node->set_src, NO_INSERT);
2560 else
2561 set_variable_part (set, node->loc, cdv, 0,
2562 node->init, node->set_src, NO_INSERT);
2566 /* We remove this last, to make sure that the canonical value is not
2567 removed to the point of requiring reinsertion. */
2568 if (cval)
2569 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2571 clobber_variable_part (set, NULL, dv, 0, NULL);
2574 /* Find the values in a given location, and map VAL to another
2575 value if it is unique, or else add the location as one holding
2576 the value. */
2578 static void
2579 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2581 decl_or_value dv = dv_from_value (val);
2583 if (dump_file && (dump_flags & TDF_DETAILS))
2585 if (insn)
2586 fprintf (dump_file, "%i: ", INSN_UID (insn));
2587 else
2588 fprintf (dump_file, "head: ");
2589 print_inline_rtx (dump_file, val, 0);
2590 fputs (" is at ", dump_file);
2591 print_inline_rtx (dump_file, loc, 0);
2592 fputc ('\n', dump_file);
2595 val_reset (set, dv);
2597 gcc_checking_assert (!unsuitable_loc (loc));
2599 if (REG_P (loc))
2601 attrs node, found = NULL;
2603 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2604 if (dv_is_value_p (node->dv)
2605 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2607 found = node;
2609 /* Map incoming equivalences. ??? Wouldn't it be nice if
2610 we just started sharing the location lists? Maybe a
2611 circular list ending at the value itself or some
2612 such. */
2613 set_variable_part (set, dv_as_value (node->dv),
2614 dv_from_value (val), node->offset,
2615 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2616 set_variable_part (set, val, node->dv, node->offset,
2617 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2620 /* If we didn't find any equivalence, we need to remember that
2621 this value is held in the named register. */
2622 if (found)
2623 return;
2625 /* ??? Attempt to find and merge equivalent MEMs or other
2626 expressions too. */
2628 val_bind (set, val, loc, false);
2631 /* Initialize dataflow set SET to be empty. */
2634 static void
2635 dataflow_set_init (dataflow_set *set)
2637 init_attrs_list_set (set->regs);
2638 set->vars = shared_hash_copy (empty_shared_hash);
2639 set->stack_adjust = 0;
2640 set->traversed_vars = NULL;
2643 /* Delete the contents of dataflow set SET. */
2645 static void
2646 dataflow_set_clear (dataflow_set *set)
2648 int i;
2650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2651 attrs_list_clear (&set->regs[i]);
2653 shared_hash_destroy (set->vars);
2654 set->vars = shared_hash_copy (empty_shared_hash);
2657 /* Copy the contents of dataflow set SRC to DST. */
2659 static void
2660 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2662 int i;
2664 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2665 attrs_list_copy (&dst->regs[i], src->regs[i]);
2667 shared_hash_destroy (dst->vars);
2668 dst->vars = shared_hash_copy (src->vars);
2669 dst->stack_adjust = src->stack_adjust;
2672 /* Information for merging lists of locations for a given offset of a variable. */
2674 struct variable_union_info
2676 /* Node of the location chain. */
2677 location_chain lc;
2679 /* The sum of positions in the input chains. */
2680 int pos;
2682 /* The position in the chain of DST dataflow set. */
2683 int pos_dst;
2686 /* Buffer for location list sorting and its allocated size. */
2687 static struct variable_union_info *vui_vec;
2688 static int vui_allocated;
2690 /* Compare function for qsort, order the structures by POS element. */
2692 static int
2693 variable_union_info_cmp_pos (const void *n1, const void *n2)
2695 const struct variable_union_info *const i1 =
2696 (const struct variable_union_info *) n1;
2697 const struct variable_union_info *const i2 =
2698 (const struct variable_union_info *) n2;
2700 if (i1->pos != i2->pos)
2701 return i1->pos - i2->pos;
2703 return (i1->pos_dst - i2->pos_dst);
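/* For example (a sketch): entries with (pos, pos_dst) of (5,2), (3,0)
   and (5,1) sort as (3,0), (5,1), (5,2).  The primary key is POS, the
   sum of the positions in the two input chains; ties are broken by
   POS_DST, keeping the result stable with respect to the DST chain.  */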
2706 /* Compute union of location parts of variable SRC and the same variable
2707 from dataflow set SET. Compute "sorted" union of the location chains
2708 for common offsets, i.e. the locations of a variable part are sorted by
2709 a priority where the priority is the sum of the positions in the 2 chains
2710 (if a location is only in one list the position in the second list is
2711 defined to be larger than the length of the chains).
2712 When we are updating the location parts the newest location is in the
2713 beginning of the chain, so when we do the described "sorted" union
2714 we keep the newest locations in the beginning. */
2716 static int
2717 variable_union (variable src, dataflow_set *set)
2719 variable dst;
2720 variable_def **dstp;
2721 int i, j, k;
2723 dstp = shared_hash_find_slot (set->vars, src->dv);
2724 if (!dstp || !*dstp)
2726 src->refcount++;
2728 dst_can_be_shared = false;
2729 if (!dstp)
2730 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2732 *dstp = src;
2734 /* Continue traversing the hash table. */
2735 return 1;
2737 else
2738 dst = *dstp;
2740 gcc_assert (src->n_var_parts);
2741 gcc_checking_assert (src->onepart == dst->onepart);
2743 /* We can combine one-part variables very efficiently, because their
2744 entries are in canonical order. */
2745 if (src->onepart)
2747 location_chain *nodep, dnode, snode;
2749 gcc_assert (src->n_var_parts == 1
2750 && dst->n_var_parts == 1);
2752 snode = src->var_part[0].loc_chain;
2753 gcc_assert (snode);
2755 restart_onepart_unshared:
2756 nodep = &dst->var_part[0].loc_chain;
2757 dnode = *nodep;
2758 gcc_assert (dnode);
2760 while (snode)
2762 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2764 if (r > 0)
2766 location_chain nnode;
2768 if (shared_var_p (dst, set->vars))
2770 dstp = unshare_variable (set, dstp, dst,
2771 VAR_INIT_STATUS_INITIALIZED);
2772 dst = *dstp;
2773 goto restart_onepart_unshared;
2776 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2777 nnode->loc = snode->loc;
2778 nnode->init = snode->init;
2779 if (!snode->set_src || MEM_P (snode->set_src))
2780 nnode->set_src = NULL;
2781 else
2782 nnode->set_src = snode->set_src;
2783 nnode->next = dnode;
2784 dnode = nnode;
2786 else if (r == 0)
2787 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2789 if (r >= 0)
2790 snode = snode->next;
2792 nodep = &dnode->next;
2793 dnode = *nodep;
2796 return 1;
2799 gcc_checking_assert (!src->onepart);
2801 /* Count the number of location parts, result is K. */
2802 for (i = 0, j = 0, k = 0;
2803 i < src->n_var_parts && j < dst->n_var_parts; k++)
2805 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2807 i++;
2808 j++;
2810 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2811 i++;
2812 else
2813 j++;
2815 k += src->n_var_parts - i;
2816 k += dst->n_var_parts - j;
2818 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2819 thus there are at most MAX_VAR_PARTS different offsets. */
2820 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2822 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2824 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2825 dst = *dstp;
2828 i = src->n_var_parts - 1;
2829 j = dst->n_var_parts - 1;
2830 dst->n_var_parts = k;
2832 for (k--; k >= 0; k--)
2834 location_chain node, node2;
2836 if (i >= 0 && j >= 0
2837 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2839 /* Compute the "sorted" union of the chains, i.e. the locations which
2840 are in both chains go first, sorted by the sum of their
2841 positions in the chains. */
2842 int dst_l, src_l;
2843 int ii, jj, n;
2844 struct variable_union_info *vui;
2846 /* If DST is shared, compare the location chains.
2847 If they differ, we will most likely modify the chain in DST,
2848 so make a copy of DST first. */
2849 if (shared_var_p (dst, set->vars))
2851 for (node = src->var_part[i].loc_chain,
2852 node2 = dst->var_part[j].loc_chain; node && node2;
2853 node = node->next, node2 = node2->next)
2855 if (!((REG_P (node2->loc)
2856 && REG_P (node->loc)
2857 && REGNO (node2->loc) == REGNO (node->loc))
2858 || rtx_equal_p (node2->loc, node->loc)))
2860 if (node2->init < node->init)
2861 node2->init = node->init;
2862 break;
2865 if (node || node2)
2867 dstp = unshare_variable (set, dstp, dst,
2868 VAR_INIT_STATUS_UNKNOWN);
2869 dst = (variable)*dstp;
2873 src_l = 0;
2874 for (node = src->var_part[i].loc_chain; node; node = node->next)
2875 src_l++;
2876 dst_l = 0;
2877 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2878 dst_l++;
2880 if (dst_l == 1)
2882 /* The most common case, much simpler, no qsort is needed. */
2883 location_chain dstnode = dst->var_part[j].loc_chain;
2884 dst->var_part[k].loc_chain = dstnode;
2885 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2886 node2 = dstnode;
2887 for (node = src->var_part[i].loc_chain; node; node = node->next)
2888 if (!((REG_P (dstnode->loc)
2889 && REG_P (node->loc)
2890 && REGNO (dstnode->loc) == REGNO (node->loc))
2891 || rtx_equal_p (dstnode->loc, node->loc)))
2893 location_chain new_node;
2895 /* Copy the location from SRC. */
2896 new_node = (location_chain) pool_alloc (loc_chain_pool);
2897 new_node->loc = node->loc;
2898 new_node->init = node->init;
2899 if (!node->set_src || MEM_P (node->set_src))
2900 new_node->set_src = NULL;
2901 else
2902 new_node->set_src = node->set_src;
2903 node2->next = new_node;
2904 node2 = new_node;
2906 node2->next = NULL;
2908 else
2910 if (src_l + dst_l > vui_allocated)
2912 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2913 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2914 vui_allocated);
2916 vui = vui_vec;
2918 /* Fill in the locations from DST. */
2919 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2920 node = node->next, jj++)
2922 vui[jj].lc = node;
2923 vui[jj].pos_dst = jj;
2925 /* Give POS a value larger than any sum of two valid positions. */
2926 vui[jj].pos = jj + src_l + dst_l;
2929 /* Fill in the locations from SRC. */
2930 n = dst_l;
2931 for (node = src->var_part[i].loc_chain, ii = 0; node;
2932 node = node->next, ii++)
2934 /* Look for NODE's location among the entries from DST. */
2935 for (jj = 0; jj < dst_l; jj++)
2937 if ((REG_P (vui[jj].lc->loc)
2938 && REG_P (node->loc)
2939 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2940 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2942 vui[jj].pos = jj + ii;
2943 break;
2946 if (jj >= dst_l) /* The location has not been found. */
2948 location_chain new_node;
2950 /* Copy the location from SRC. */
2951 new_node = (location_chain) pool_alloc (loc_chain_pool);
2952 new_node->loc = node->loc;
2953 new_node->init = node->init;
2954 if (!node->set_src || MEM_P (node->set_src))
2955 new_node->set_src = NULL;
2956 else
2957 new_node->set_src = node->set_src;
2958 vui[n].lc = new_node;
2959 vui[n].pos_dst = src_l + dst_l;
2960 vui[n].pos = ii + src_l + dst_l;
2961 n++;
2965 if (dst_l == 2)
2967 /* Special-case a still very common case: for dst_l == 2 all
2968 entries dst_l ... n-1 are sorted, with vui[i].pos
2969 == i + src_l + dst_l for i >= dst_l. */
2970 if (vui[0].pos > vui[1].pos)
2972 /* Order should be 1, 0, 2... */
2973 dst->var_part[k].loc_chain = vui[1].lc;
2974 vui[1].lc->next = vui[0].lc;
2975 if (n >= 3)
2977 vui[0].lc->next = vui[2].lc;
2978 vui[n - 1].lc->next = NULL;
2980 else
2981 vui[0].lc->next = NULL;
2982 ii = 3;
2984 else
2986 dst->var_part[k].loc_chain = vui[0].lc;
2987 if (n >= 3 && vui[2].pos < vui[1].pos)
2989 /* Order should be 0, 2, 1, 3... */
2990 vui[0].lc->next = vui[2].lc;
2991 vui[2].lc->next = vui[1].lc;
2992 if (n >= 4)
2994 vui[1].lc->next = vui[3].lc;
2995 vui[n - 1].lc->next = NULL;
2997 else
2998 vui[1].lc->next = NULL;
2999 ii = 4;
3001 else
3003 /* Order should be 0, 1, 2... */
3004 ii = 1;
3005 vui[n - 1].lc->next = NULL;
3008 for (; ii < n; ii++)
3009 vui[ii - 1].lc->next = vui[ii].lc;
3011 else
3013 qsort (vui, n, sizeof (struct variable_union_info),
3014 variable_union_info_cmp_pos);
3016 /* Reconnect the nodes in sorted order. */
3017 for (ii = 1; ii < n; ii++)
3018 vui[ii - 1].lc->next = vui[ii].lc;
3019 vui[n - 1].lc->next = NULL;
3020 dst->var_part[k].loc_chain = vui[0].lc;
3023 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3025 i--;
3026 j--;
3028 else if ((i >= 0 && j >= 0
3029 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3030 || i < 0)
3032 dst->var_part[k] = dst->var_part[j];
3033 j--;
3035 else if ((i >= 0 && j >= 0
3036 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3037 || j < 0)
3039 location_chain *nextp;
3041 /* Copy the chain from SRC. */
3042 nextp = &dst->var_part[k].loc_chain;
3043 for (node = src->var_part[i].loc_chain; node; node = node->next)
3045 location_chain new_lc;
3047 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3048 new_lc->next = NULL;
3049 new_lc->init = node->init;
3050 if (!node->set_src || MEM_P (node->set_src))
3051 new_lc->set_src = NULL;
3052 else
3053 new_lc->set_src = node->set_src;
3054 new_lc->loc = node->loc;
3056 *nextp = new_lc;
3057 nextp = &new_lc->next;
3060 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3061 i--;
3063 dst->var_part[k].cur_loc = NULL;
3066 if (flag_var_tracking_uninit)
3067 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3069 location_chain node, node2;
3070 for (node = src->var_part[i].loc_chain; node; node = node->next)
3071 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3072 if (rtx_equal_p (node->loc, node2->loc))
3074 if (node->init > node2->init)
3075 node2->init = node->init;
3079 /* Continue traversing the hash table. */
3080 return 1;
3083 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3085 static void
3086 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3088 int i;
3090 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3091 attrs_list_union (&dst->regs[i], src->regs[i]);
3093 if (dst->vars == empty_shared_hash)
3095 shared_hash_destroy (dst->vars);
3096 dst->vars = shared_hash_copy (src->vars);
3098 else
3100 variable_iterator_type hi;
3101 variable var;
3103 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars),
3104 var, variable, hi)
3105 variable_union (var, dst);
3109 /* Whether the value is currently being expanded. */
3110 #define VALUE_RECURSED_INTO(x) \
3111 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3113 /* Whether no expansion was found, saving useless lookups.
3114 It must only be set when VALUE_CHANGED is clear. */
3115 #define NO_LOC_P(x) \
3116 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3118 /* Whether cur_loc in the value needs to be (re)computed. */
3119 #define VALUE_CHANGED(x) \
3120 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3121 /* Whether cur_loc in the decl needs to be (re)computed. */
3122 #define DECL_CHANGED(x) TREE_VISITED (x)
3124 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3125 user DECLs, this means they're in changed_variables. Values and
3126 debug exprs may be left with this flag set if no user variable
3127 requires them to be evaluated. */
3129 static inline void
3130 set_dv_changed (decl_or_value dv, bool newv)
3132 switch (dv_onepart_p (dv))
3134 case ONEPART_VALUE:
3135 if (newv)
3136 NO_LOC_P (dv_as_value (dv)) = false;
3137 VALUE_CHANGED (dv_as_value (dv)) = newv;
3138 break;
3140 case ONEPART_DEXPR:
3141 if (newv)
3142 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3143 /* Fall through... */
3145 default:
3146 DECL_CHANGED (dv_as_decl (dv)) = newv;
3147 break;
3151 /* Return true if DV needs to have its cur_loc recomputed. */
3153 static inline bool
3154 dv_changed_p (decl_or_value dv)
3156 return (dv_is_value_p (dv)
3157 ? VALUE_CHANGED (dv_as_value (dv))
3158 : DECL_CHANGED (dv_as_decl (dv)));
3161 /* Return a location list node whose loc is rtx_equal to LOC, in the
3162 location list of a one-part variable or value VAR, or in that of
3163 any values recursively mentioned in the location lists. VARS must
3164 be in star-canonical form. */
3166 static location_chain
3167 find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
3169 location_chain node;
3170 enum rtx_code loc_code;
3172 if (!var)
3173 return NULL;
3175 gcc_checking_assert (var->onepart);
3177 if (!var->n_var_parts)
3178 return NULL;
3180 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3182 loc_code = GET_CODE (loc);
3183 for (node = var->var_part[0].loc_chain; node; node = node->next)
3185 decl_or_value dv;
3186 variable rvar;
3188 if (GET_CODE (node->loc) != loc_code)
3190 if (GET_CODE (node->loc) != VALUE)
3191 continue;
3193 else if (loc == node->loc)
3194 return node;
3195 else if (loc_code != VALUE)
3197 if (rtx_equal_p (loc, node->loc))
3198 return node;
3199 continue;
3202 /* Since we're in star-canonical form, we don't need to visit
3203 non-canonical nodes: one-part variables and non-canonical
3204 values would only point back to the canonical node. */
3205 if (dv_is_value_p (var->dv)
3206 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3208 /* Skip all subsequent VALUEs. */
3209 while (node->next && GET_CODE (node->next->loc) == VALUE)
3211 node = node->next;
3212 gcc_checking_assert (!canon_value_cmp (node->loc,
3213 dv_as_value (var->dv)));
3214 if (loc == node->loc)
3215 return node;
3217 continue;
3220 gcc_checking_assert (node == var->var_part[0].loc_chain);
3221 gcc_checking_assert (!node->next);
3223 dv = dv_from_value (node->loc);
3224 rvar = vars.find_with_hash (dv, dv_htab_hash (dv));
3225 return find_loc_in_1pdv (loc, rvar, vars);
3228 /* ??? Gotta look in cselib_val locations too. */
3230 return NULL;
3233 /* Hash table iteration argument passed to variable_merge. */
3234 struct dfset_merge
3236 /* The set in which the merge is to be inserted. */
3237 dataflow_set *dst;
3238 /* The set that we're iterating in. */
3239 dataflow_set *cur;
3240 /* The set that may contain the other dv we are to merge with. */
3241 dataflow_set *src;
3242 /* Number of onepart dvs in src. */
3243 int src_onepart_cnt;
3246 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3247 loc_cmp order, and it is maintained as such. */
3249 static void
3250 insert_into_intersection (location_chain *nodep, rtx loc,
3251 enum var_init_status status)
3253 location_chain node;
3254 int r;
3256 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3257 if ((r = loc_cmp (node->loc, loc)) == 0)
3259 node->init = MIN (node->init, status);
3260 return;
3262 else if (r > 0)
3263 break;
3265 node = (location_chain) pool_alloc (loc_chain_pool);
3267 node->loc = loc;
3268 node->set_src = NULL;
3269 node->init = status;
3270 node->next = *nodep;
3271 *nodep = node;
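/* For example (a sketch): inserting (reg:SI 3) into a chain holding
   (reg:SI 1) followed by (mem:SI ...) walks past the smaller REG,
   stops at the MEM, which loc_cmp orders after any REG, and links the
   new node there, keeping the chain in loc_cmp order.  Inserting a
   location already present only lowers its status to the MIN of the
   two init statuses.  */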
3274 /* Insert in DEST the intersection of the locations present in both
3275 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3276 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3277 DSM->dst. */
3279 static void
3280 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3281 location_chain s1node, variable s2var)
3283 dataflow_set *s1set = dsm->cur;
3284 dataflow_set *s2set = dsm->src;
3285 location_chain found;
3287 if (s2var)
3289 location_chain s2node;
3291 gcc_checking_assert (s2var->onepart);
3293 if (s2var->n_var_parts)
3295 s2node = s2var->var_part[0].loc_chain;
3297 for (; s1node && s2node;
3298 s1node = s1node->next, s2node = s2node->next)
3299 if (s1node->loc != s2node->loc)
3300 break;
3301 else if (s1node->loc == val)
3302 continue;
3303 else
3304 insert_into_intersection (dest, s1node->loc,
3305 MIN (s1node->init, s2node->init));
3309 for (; s1node; s1node = s1node->next)
3311 if (s1node->loc == val)
3312 continue;
3314 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3315 shared_hash_htab (s2set->vars))))
3317 insert_into_intersection (dest, s1node->loc,
3318 MIN (s1node->init, found->init));
3319 continue;
3322 if (GET_CODE (s1node->loc) == VALUE
3323 && !VALUE_RECURSED_INTO (s1node->loc))
3325 decl_or_value dv = dv_from_value (s1node->loc);
3326 variable svar = shared_hash_find (s1set->vars, dv);
3327 if (svar)
3329 if (svar->n_var_parts == 1)
3331 VALUE_RECURSED_INTO (s1node->loc) = true;
3332 intersect_loc_chains (val, dest, dsm,
3333 svar->var_part[0].loc_chain,
3334 s2var);
3335 VALUE_RECURSED_INTO (s1node->loc) = false;
3340 /* ??? gotta look in cselib_val locations too. */
3342 /* ??? if the location is equivalent to any location in src,
3343 searched recursively
3345 add to dst the values needed to represent the equivalence
3347 telling whether location S is equivalent to another dv's
3348 location list:
3350 for each location D in the list
3352 if S and D satisfy rtx_equal_p, then it is present
3354 else if D is a value, recurse without cycles
3356 else if S and D have the same CODE and MODE
3358 for each operand oS and the corresponding oD
3360 if oS and oD are not equivalent, then S and D are not equivalent
3362 else if they are RTX vectors
3364 if any vector oS element is not equivalent to its respective oD,
3365 then S and D are not equivalent
3373 /* Return -1 if X should be before Y in a location list for a 1-part
3374 variable, 1 if Y should be before X, and 0 if they're equivalent
3375 and should not appear in the list. */
3377 static int
3378 loc_cmp (rtx x, rtx y)
3380 int i, j, r;
3381 RTX_CODE code = GET_CODE (x);
3382 const char *fmt;
3384 if (x == y)
3385 return 0;
3387 if (REG_P (x))
3389 if (!REG_P (y))
3390 return -1;
3391 gcc_assert (GET_MODE (x) == GET_MODE (y));
3392 if (REGNO (x) == REGNO (y))
3393 return 0;
3394 else if (REGNO (x) < REGNO (y))
3395 return -1;
3396 else
3397 return 1;
3400 if (REG_P (y))
3401 return 1;
3403 if (MEM_P (x))
3405 if (!MEM_P (y))
3406 return -1;
3407 gcc_assert (GET_MODE (x) == GET_MODE (y));
3408 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3411 if (MEM_P (y))
3412 return 1;
3414 if (GET_CODE (x) == VALUE)
3416 if (GET_CODE (y) != VALUE)
3417 return -1;
3418 /* Don't assert the modes are the same, that is true only
3419 when not recursing. (subreg:QI (value:SI 1:1) 0)
3420 and (subreg:QI (value:DI 2:2) 0) can be compared,
3421 even when the modes are different. */
3422 if (canon_value_cmp (x, y))
3423 return -1;
3424 else
3425 return 1;
3428 if (GET_CODE (y) == VALUE)
3429 return 1;
3431 /* Entry value is the least preferable kind of expression. */
3432 if (GET_CODE (x) == ENTRY_VALUE)
3434 if (GET_CODE (y) != ENTRY_VALUE)
3435 return 1;
3436 gcc_assert (GET_MODE (x) == GET_MODE (y));
3437 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3440 if (GET_CODE (y) == ENTRY_VALUE)
3441 return -1;
3443 if (GET_CODE (x) == GET_CODE (y))
3444 /* Compare operands below. */;
3445 else if (GET_CODE (x) < GET_CODE (y))
3446 return -1;
3447 else
3448 return 1;
3450 gcc_assert (GET_MODE (x) == GET_MODE (y));
3452 if (GET_CODE (x) == DEBUG_EXPR)
3454 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3455 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3456 return -1;
3457 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3458 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3459 return 1;
3462 fmt = GET_RTX_FORMAT (code);
3463 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3464 switch (fmt[i])
3466 case 'w':
3467 if (XWINT (x, i) == XWINT (y, i))
3468 break;
3469 else if (XWINT (x, i) < XWINT (y, i))
3470 return -1;
3471 else
3472 return 1;
3474 case 'n':
3475 case 'i':
3476 if (XINT (x, i) == XINT (y, i))
3477 break;
3478 else if (XINT (x, i) < XINT (y, i))
3479 return -1;
3480 else
3481 return 1;
3483 case 'V':
3484 case 'E':
3485 /* Compare the vector length first. */
3486 if (XVECLEN (x, i) == XVECLEN (y, i))
3487 /* Compare the vectors' elements. */;
3488 else if (XVECLEN (x, i) < XVECLEN (y, i))
3489 return -1;
3490 else
3491 return 1;
3493 for (j = 0; j < XVECLEN (x, i); j++)
3494 if ((r = loc_cmp (XVECEXP (x, i, j),
3495 XVECEXP (y, i, j))))
3496 return r;
3497 break;
3499 case 'e':
3500 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3501 return r;
3502 break;
3504 case 'S':
3505 case 's':
3506 if (XSTR (x, i) == XSTR (y, i))
3507 break;
3508 if (!XSTR (x, i))
3509 return -1;
3510 if (!XSTR (y, i))
3511 return 1;
3512 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3513 break;
3514 else if (r < 0)
3515 return -1;
3516 else
3517 return 1;
3519 case 'u':
3520 /* These are just backpointers, so they don't matter. */
3521 break;
3523 case '0':
3524 case 't':
3525 break;
3527 /* It is believed that rtx's at this level will never
3528 contain anything but integers and other rtx's,
3529 except within LABEL_REFs and SYMBOL_REFs. */
3530 default:
3531 gcc_unreachable ();
3534 return 0;
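/* The resulting total order is, roughly: REGs first (by REGNO), then
   MEMs (recursively, by address), then VALUEs (more canonical ones
   first), then the remaining rtxen by code and then operand by
   operand, with ENTRY_VALUEs sorting after everything else.  E.g.,
   with hypothetical values,

     (reg:SI 1) < (reg:SI 4) < (mem:SI (reg:SI 6)) < (value:SI 2:2)
     < (entry_value:SI (reg:SI 0)).  */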
3537 #if ENABLE_CHECKING
3538 /* Check the order of entries in one-part variables. */
3540 static int
3541 canonicalize_loc_order_check (variable_def **slot,
3542 dataflow_set *data ATTRIBUTE_UNUSED)
3544 variable var = *slot;
3545 location_chain node, next;
3547 #ifdef ENABLE_RTL_CHECKING
3548 int i;
3549 for (i = 0; i < var->n_var_parts; i++)
3550 gcc_assert (var->var_part[i].cur_loc == NULL);
3551 gcc_assert (!var->in_changed_variables);
3552 #endif
3554 if (!var->onepart)
3555 return 1;
3557 gcc_assert (var->n_var_parts == 1);
3558 node = var->var_part[0].loc_chain;
3559 gcc_assert (node);
3561 while ((next = node->next))
3563 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3564 node = next;
3567 return 1;
3569 #endif
3571 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3572 more likely to be chosen as canonical for an equivalence set.
3573 Ensure less likely values can reach more likely neighbors, making
3574 the connections bidirectional. */
3576 static int
3577 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3579 variable var = *slot;
3580 decl_or_value dv = var->dv;
3581 rtx val;
3582 location_chain node;
3584 if (!dv_is_value_p (dv))
3585 return 1;
3587 gcc_checking_assert (var->n_var_parts == 1);
3589 val = dv_as_value (dv);
3591 for (node = var->var_part[0].loc_chain; node; node = node->next)
3592 if (GET_CODE (node->loc) == VALUE)
3594 if (canon_value_cmp (node->loc, val))
3595 VALUE_RECURSED_INTO (val) = true;
3596 else
3598 decl_or_value odv = dv_from_value (node->loc);
3599 variable_def **oslot;
3600 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3602 set_slot_part (set, val, oslot, odv, 0,
3603 node->init, NULL_RTX);
3605 VALUE_RECURSED_INTO (node->loc) = true;
3609 return 1;
3612 /* Remove redundant entries from equivalence lists in onepart
3613 variables, canonicalizing equivalence sets into star shapes. */
3615 static int
3616 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3618 variable var = *slot;
3619 decl_or_value dv = var->dv;
3620 location_chain node;
3621 decl_or_value cdv;
3622 rtx val, cval;
3623 variable_def **cslot;
3624 bool has_value;
3625 bool has_marks;
3627 if (!var->onepart)
3628 return 1;
3630 gcc_checking_assert (var->n_var_parts == 1);
3632 if (dv_is_value_p (dv))
3634 cval = dv_as_value (dv);
3635 if (!VALUE_RECURSED_INTO (cval))
3636 return 1;
3637 VALUE_RECURSED_INTO (cval) = false;
3639 else
3640 cval = NULL_RTX;
3642 restart:
3643 val = cval;
3644 has_value = false;
3645 has_marks = false;
3647 gcc_assert (var->n_var_parts == 1);
3649 for (node = var->var_part[0].loc_chain; node; node = node->next)
3650 if (GET_CODE (node->loc) == VALUE)
3652 has_value = true;
3653 if (VALUE_RECURSED_INTO (node->loc))
3654 has_marks = true;
3655 if (canon_value_cmp (node->loc, cval))
3656 cval = node->loc;
3659 if (!has_value)
3660 return 1;
3662 if (cval == val)
3664 if (!has_marks || dv_is_decl_p (dv))
3665 return 1;
3667 /* Keep it marked so that we revisit it, either after visiting a
3668 child node, or after visiting a new parent that might be
3669 discovered later. */
3670 VALUE_RECURSED_INTO (val) = true;
3672 for (node = var->var_part[0].loc_chain; node; node = node->next)
3673 if (GET_CODE (node->loc) == VALUE
3674 && VALUE_RECURSED_INTO (node->loc))
3676 cval = node->loc;
3677 restart_with_cval:
3678 VALUE_RECURSED_INTO (cval) = false;
3679 dv = dv_from_value (cval);
3680 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3681 if (!slot)
3683 gcc_assert (dv_is_decl_p (var->dv));
3684 /* The canonical value was reset and dropped.
3685 Remove it. */
3686 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3687 return 1;
3689 var = *slot;
3690 gcc_assert (dv_is_value_p (var->dv));
3691 if (var->n_var_parts == 0)
3692 return 1;
3693 gcc_assert (var->n_var_parts == 1);
3694 goto restart;
3697 VALUE_RECURSED_INTO (val) = false;
3699 return 1;
3702 /* Push values to the canonical one. */
3703 cdv = dv_from_value (cval);
3704 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3706 for (node = var->var_part[0].loc_chain; node; node = node->next)
3707 if (node->loc != cval)
3709 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3710 node->init, NULL_RTX);
3711 if (GET_CODE (node->loc) == VALUE)
3713 decl_or_value ndv = dv_from_value (node->loc);
3715 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3716 NO_INSERT);
3718 if (canon_value_cmp (node->loc, val))
3720 /* If it could have been a local minimum, it's not any more,
3721 since it's now a neighbor to cval, so it may have to push
3722 to it. Conversely, if it wouldn't have prevailed over
3723 val, then whatever mark it has is fine: if it was to
3724 push, it will now push to a more canonical node, but if
3725 it wasn't, then it has already pushed any values it might
3726 have to. */
3727 VALUE_RECURSED_INTO (node->loc) = true;
3728 /* Make sure we visit node->loc by ensuring that cval is
3729 visited too. */
3730 VALUE_RECURSED_INTO (cval) = true;
3732 else if (!VALUE_RECURSED_INTO (node->loc))
3733 /* If we have no need to "recurse" into this node, it's
3734 already "canonicalized", so drop the link to the old
3735 parent. */
3736 clobber_variable_part (set, cval, ndv, 0, NULL);
3738 else if (GET_CODE (node->loc) == REG)
3740 attrs list = set->regs[REGNO (node->loc)], *listp;
3742 /* Change an existing attribute referring to dv so that it
3743 refers to cdv, removing any duplicate this might
3744 introduce, and checking that no previous duplicates
3745 existed, all in a single pass. */
3747 while (list)
3749 if (list->offset == 0
3750 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3751 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3752 break;
3754 list = list->next;
3757 gcc_assert (list);
3758 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3760 list->dv = cdv;
3761 for (listp = &list->next; (list = *listp); listp = &list->next)
3763 if (list->offset)
3764 continue;
3766 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3768 *listp = list->next;
3769 pool_free (attrs_pool, list);
3770 list = *listp;
3771 break;
3774 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3777 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3779 for (listp = &list->next; (list = *listp); listp = &list->next)
3781 if (list->offset)
3782 continue;
3784 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3786 *listp = list->next;
3787 pool_free (attrs_pool, list);
3788 list = *listp;
3789 break;
3792 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3795 else
3796 gcc_unreachable ();
3798 #if ENABLE_CHECKING
3799 while (list)
3801 if (list->offset == 0
3802 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3803 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3804 gcc_unreachable ();
3806 list = list->next;
3808 #endif
3812 if (val)
3813 set_slot_part (set, val, cslot, cdv, 0,
3814 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3816 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3818 /* Variable may have been unshared. */
3819 var = *slot;
3820 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3821 && var->var_part[0].loc_chain->next == NULL);
3823 if (VALUE_RECURSED_INTO (cval))
3824 goto restart_with_cval;
3826 return 1;
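/* A sketch of the star shape this produces (values hypothetical):
   given an equivalence set {v1, v2, v3} whose canon_value_cmp winner
   is v1, canonicalization leaves v1's location list holding v2, v3
   and any REGs or MEMs, while v2 and v3 each hold only the single
   link v1, so every member is at most one hop away from the
   canonical value.  */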
3829 /* Bind one-part variables to the canonical value in an equivalence
3830 set. Not doing this causes dataflow convergence failure in rare
3831 circumstances, see PR42873. Unfortunately we can't do this
3832 efficiently as part of canonicalize_values_star, since we may not
3833 have determined or even seen the canonical value of a set when we
3834 get to a variable that references another member of the set. */
3836 static int
3837 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3839 variable var = *slot;
3840 decl_or_value dv = var->dv;
3841 location_chain node;
3842 rtx cval;
3843 decl_or_value cdv;
3844 variable_def **cslot;
3845 variable cvar;
3846 location_chain cnode;
3848 if (!var->onepart || var->onepart == ONEPART_VALUE)
3849 return 1;
3851 gcc_assert (var->n_var_parts == 1);
3853 node = var->var_part[0].loc_chain;
3855 if (GET_CODE (node->loc) != VALUE)
3856 return 1;
3858 gcc_assert (!node->next);
3859 cval = node->loc;
3861 /* Push values to the canonical one. */
3862 cdv = dv_from_value (cval);
3863 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3864 if (!cslot)
3865 return 1;
3866 cvar = *cslot;
3867 gcc_assert (cvar->n_var_parts == 1);
3869 cnode = cvar->var_part[0].loc_chain;
3871 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3872 that are not "more canonical" than it. */
3873 if (GET_CODE (cnode->loc) != VALUE
3874 || !canon_value_cmp (cnode->loc, cval))
3875 return 1;
3877 /* CVAL was found to be non-canonical. Change the variable to point
3878 to the canonical VALUE. */
3879 gcc_assert (!cnode->next);
3880 cval = cnode->loc;
3882 slot = set_slot_part (set, cval, slot, dv, 0,
3883 node->init, node->set_src);
3884 clobber_slot_part (set, cval, slot, 0, node->set_src);
3886 return 1;
3889 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3890 corresponding entry in DSM->src. Multi-part variables are combined
3891 with variable_union, whereas onepart dvs are combined with
3892 intersection. */
3894 static int
3895 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3897 dataflow_set *dst = dsm->dst;
3898 variable_def **dstslot;
3899 variable s2var, dvar = NULL;
3900 decl_or_value dv = s1var->dv;
3901 onepart_enum_t onepart = s1var->onepart;
3902 rtx val;
3903 hashval_t dvhash;
3904 location_chain node, *nodep;
3906 /* If the incoming onepart variable has an empty location list, then
3907 the intersection will be just as empty. For other variables,
3908 it's always union. */
3909 gcc_checking_assert (s1var->n_var_parts
3910 && s1var->var_part[0].loc_chain);
3912 if (!onepart)
3913 return variable_union (s1var, dst);
3915 gcc_checking_assert (s1var->n_var_parts == 1);
3917 dvhash = dv_htab_hash (dv);
3918 if (dv_is_value_p (dv))
3919 val = dv_as_value (dv);
3920 else
3921 val = NULL;
3923 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3924 if (!s2var)
3926 dst_can_be_shared = false;
3927 return 1;
3930 dsm->src_onepart_cnt--;
3931 gcc_assert (s2var->var_part[0].loc_chain
3932 && s2var->onepart == onepart
3933 && s2var->n_var_parts == 1);
3935 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3936 if (dstslot)
3938 dvar = *dstslot;
3939 gcc_assert (dvar->refcount == 1
3940 && dvar->onepart == onepart
3941 && dvar->n_var_parts == 1);
3942 nodep = &dvar->var_part[0].loc_chain;
3944 else
3946 nodep = &node;
3947 node = NULL;
3950 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3952 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3953 dvhash, INSERT);
3954 *dstslot = dvar = s2var;
3955 dvar->refcount++;
3957 else
3959 dst_can_be_shared = false;
3961 intersect_loc_chains (val, nodep, dsm,
3962 s1var->var_part[0].loc_chain, s2var);
3964 if (!dstslot)
3966 if (node)
3968 dvar = (variable) pool_alloc (onepart_pool (onepart));
3969 dvar->dv = dv;
3970 dvar->refcount = 1;
3971 dvar->n_var_parts = 1;
3972 dvar->onepart = onepart;
3973 dvar->in_changed_variables = false;
3974 dvar->var_part[0].loc_chain = node;
3975 dvar->var_part[0].cur_loc = NULL;
3976 if (onepart)
3977 VAR_LOC_1PAUX (dvar) = NULL;
3978 else
3979 VAR_PART_OFFSET (dvar, 0) = 0;
3981 dstslot
3982 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3983 INSERT);
3984 gcc_assert (!*dstslot);
3985 *dstslot = dvar;
3987 else
3988 return 1;
3992 nodep = &dvar->var_part[0].loc_chain;
3993 while ((node = *nodep))
3995 location_chain *nextp = &node->next;
3997 if (GET_CODE (node->loc) == REG)
3999 attrs list;
4001 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4002 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4003 && dv_is_value_p (list->dv))
4004 break;
4006 if (!list)
4007 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4008 dv, 0, node->loc);
4009 /* If this value became canonical for another value that had
4010 this register, we want to leave it alone. */
4011 else if (dv_as_value (list->dv) != val)
4013 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4014 dstslot, dv, 0,
4015 node->init, NULL_RTX);
4016 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4018 /* Since nextp points into the removed node, we can't
4019 use it. The pointer to the next node moved to nodep.
4020 However, if the variable we're walking is unshared
4021 during our walk, we'll keep walking the location list
4022 of the previously-shared variable, in which case the
4023 node won't have been removed, and we'll want to skip
4024 it. That's why we test *nodep here. */
4025 if (*nodep != node)
4026 nextp = nodep;
4029 else
4030 /* Canonicalization puts registers first, so we don't have to
4031 walk it all. */
4032 break;
4033 nodep = nextp;
4036 if (dvar != *dstslot)
4037 dvar = *dstslot;
4038 nodep = &dvar->var_part[0].loc_chain;
4040 if (val)
4042 /* Mark all referenced nodes for canonicalization, and make sure
4043 we have mutual equivalence links. */
4044 VALUE_RECURSED_INTO (val) = true;
4045 for (node = *nodep; node; node = node->next)
4046 if (GET_CODE (node->loc) == VALUE)
4048 VALUE_RECURSED_INTO (node->loc) = true;
4049 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4050 node->init, NULL, INSERT);
4053 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4054 gcc_assert (*dstslot == dvar);
4055 canonicalize_values_star (dstslot, dst);
4056 gcc_checking_assert (dstslot
4057 == shared_hash_find_slot_noinsert_1 (dst->vars,
4058 dv, dvhash));
4059 dvar = *dstslot;
4061 else
4063 bool has_value = false, has_other = false;
4065 /* If we have one value and anything else, we're going to
4066 canonicalize this, so make sure all values have an entry in
4067 the table and are marked for canonicalization. */
4068 for (node = *nodep; node; node = node->next)
4070 if (GET_CODE (node->loc) == VALUE)
4072 /* If this was marked during register canonicalization,
4073 we know we have to canonicalize values. */
4074 if (has_value)
4075 has_other = true;
4076 has_value = true;
4077 if (has_other)
4078 break;
4080 else
4082 has_other = true;
4083 if (has_value)
4084 break;
4088 if (has_value && has_other)
4090 for (node = *nodep; node; node = node->next)
4092 if (GET_CODE (node->loc) == VALUE)
4094 decl_or_value dv = dv_from_value (node->loc);
4095 variable_def **slot = NULL;
4097 if (shared_hash_shared (dst->vars))
4098 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4099 if (!slot)
4100 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4101 INSERT);
4102 if (!*slot)
4104 variable var = (variable) pool_alloc (onepart_pool
4105 (ONEPART_VALUE));
4106 var->dv = dv;
4107 var->refcount = 1;
4108 var->n_var_parts = 1;
4109 var->onepart = ONEPART_VALUE;
4110 var->in_changed_variables = false;
4111 var->var_part[0].loc_chain = NULL;
4112 var->var_part[0].cur_loc = NULL;
4113 VAR_LOC_1PAUX (var) = NULL;
4114 *slot = var;
4117 VALUE_RECURSED_INTO (node->loc) = true;
4121 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4122 gcc_assert (*dstslot == dvar);
4123 canonicalize_values_star (dstslot, dst);
4124 gcc_checking_assert (dstslot
4125 == shared_hash_find_slot_noinsert_1 (dst->vars,
4126 dv, dvhash));
4127 dvar = *dstslot;
4131 if (!onepart_variable_different_p (dvar, s2var))
4133 variable_htab_free (dvar);
4134 *dstslot = dvar = s2var;
4135 dvar->refcount++;
4137 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4139 variable_htab_free (dvar);
4140 *dstslot = dvar = s1var;
4141 dvar->refcount++;
4142 dst_can_be_shared = false;
4144 else
4145 dst_can_be_shared = false;
4147 return 1;
4150 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4151 multi-part variable. Unions of multi-part variables and
4152 intersections of one-part ones will be handled in
4153 variable_merge_over_cur(). */
4155 static int
4156 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4158 dataflow_set *dst = dsm->dst;
4159 decl_or_value dv = s2var->dv;
4161 if (!s2var->onepart)
4163 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4164 *dstp = s2var;
4165 s2var->refcount++;
4166 return 1;
4169 dsm->src_onepart_cnt++;
4170 return 1;
4173 /* Combine dataflow set information from SRC2 into DST; DST is
4174 also the first source, and is consumed in the process. */
4176 static void
4177 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4179 dataflow_set cur = *dst;
4180 dataflow_set *src1 = &cur;
4181 struct dfset_merge dsm;
4182 int i;
4183 size_t src1_elems, src2_elems;
4184 variable_iterator_type hi;
4185 variable var;
4187 src1_elems = shared_hash_htab (src1->vars).elements ();
4188 src2_elems = shared_hash_htab (src2->vars).elements ();
4189 dataflow_set_init (dst);
4190 dst->stack_adjust = cur.stack_adjust;
4191 shared_hash_destroy (dst->vars);
4192 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4193 dst->vars->refcount = 1;
4194 dst->vars->htab.create (MAX (src1_elems, src2_elems));
4196 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4197 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4199 dsm.dst = dst;
4200 dsm.src = src2;
4201 dsm.cur = src1;
4202 dsm.src_onepart_cnt = 0;
4204 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars),
4205 var, variable, hi)
4206 variable_merge_over_src (var, &dsm);
4207 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars),
4208 var, variable, hi)
4209 variable_merge_over_cur (var, &dsm);
4211 if (dsm.src_onepart_cnt)
4212 dst_can_be_shared = false;
4214 dataflow_set_destroy (src1);
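/* A hedged sketch of how this merge is typically driven: the
   dataflow iteration computes a block's IN set by folding the OUT
   sets of all predecessors into it, one call per edge (hypothetical
   caller; the real driver is the vt_find_locations fixed-point
   loop):

     dataflow_set_copy (&in, &VTI (first_pred)->out);
     FOR_EACH_EDGE (e, ei, bb->preds)
       if (e->src != first_pred)
         dataflow_set_merge (&in, &VTI (e->src)->out);

   Each call consumes the previous IN as its first source and
   unshares hash tables only where the merged contents differ.  */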
4217 /* Mark register equivalences. */
4219 static void
4220 dataflow_set_equiv_regs (dataflow_set *set)
4222 int i;
4223 attrs list, *listp;
4225 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4227 rtx canon[NUM_MACHINE_MODES];
4229 /* If the list is empty or has only one entry, there is nothing
4230 to canonicalize. */
4231 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4232 continue;
4234 memset (canon, 0, sizeof (canon));
4236 for (list = set->regs[i]; list; list = list->next)
4237 if (list->offset == 0 && dv_is_value_p (list->dv))
4239 rtx val = dv_as_value (list->dv);
4240 rtx *cvalp = &canon[(int)GET_MODE (val)];
4241 rtx cval = *cvalp;
4243 if (canon_value_cmp (val, cval))
4244 *cvalp = val;
4247 for (list = set->regs[i]; list; list = list->next)
4248 if (list->offset == 0 && dv_onepart_p (list->dv))
4250 rtx cval = canon[(int)GET_MODE (list->loc)];
4252 if (!cval)
4253 continue;
4255 if (dv_is_value_p (list->dv))
4257 rtx val = dv_as_value (list->dv);
4259 if (val == cval)
4260 continue;
4262 VALUE_RECURSED_INTO (val) = true;
4263 set_variable_part (set, val, dv_from_value (cval), 0,
4264 VAR_INIT_STATUS_INITIALIZED,
4265 NULL, NO_INSERT);
4268 VALUE_RECURSED_INTO (cval) = true;
4269 set_variable_part (set, cval, list->dv, 0,
4270 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4273 for (listp = &set->regs[i]; (list = *listp);
4274 listp = list ? &list->next : listp)
4275 if (list->offset == 0 && dv_onepart_p (list->dv))
4277 rtx cval = canon[(int)GET_MODE (list->loc)];
4278 variable_def **slot;
4280 if (!cval)
4281 continue;
4283 if (dv_is_value_p (list->dv))
4285 rtx val = dv_as_value (list->dv);
4286 if (!VALUE_RECURSED_INTO (val))
4287 continue;
4290 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4291 canonicalize_values_star (slot, set);
4292 if (*listp != list)
4293 list = NULL;
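/* Illustration with hypothetical numbers: suppose hard register 1
   holds VALUEs V7 and V9 in SImode and canon_value_cmp prefers V7.
   The first walk above records V7 in canon[SImode]; the second adds
   the mutual equivalences V9 <-> V7 with set_variable_part, marking
   both VALUE_RECURSED_INTO; the third walk then runs
   canonicalize_values_star so location lists that mentioned V9 end
   up pointing at the canonical V7.  */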
4298 /* Remove any redundant values in the location list of VAR, which must
4299 be unshared and 1-part. */
4301 static void
4302 remove_duplicate_values (variable var)
4304 location_chain node, *nodep;
4306 gcc_assert (var->onepart);
4307 gcc_assert (var->n_var_parts == 1);
4308 gcc_assert (var->refcount == 1);
4310 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4312 if (GET_CODE (node->loc) == VALUE)
4314 if (VALUE_RECURSED_INTO (node->loc))
4316 /* Remove duplicate value node. */
4317 *nodep = node->next;
4318 pool_free (loc_chain_pool, node);
4319 continue;
4321 else
4322 VALUE_RECURSED_INTO (node->loc) = true;
4324 nodep = &node->next;
4327 for (node = var->var_part[0].loc_chain; node; node = node->next)
4328 if (GET_CODE (node->loc) == VALUE)
4330 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4331 VALUE_RECURSED_INTO (node->loc) = false;
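/* VALUE_RECURSED_INTO doubles as a "seen" bit here: the first walk
   marks each VALUE it keeps and drops any node whose VALUE is
   already marked, so a (hypothetical) chain V1, R3, V1 shrinks to
   V1, R3; the second walk only clears the marks again.  */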
4336 /* Hash table iteration argument passed to variable_post_merge. */
4337 struct dfset_post_merge
4339 /* The new input set for the current block. */
4340 dataflow_set *set;
4341 /* Pointer to the permanent input set for the current block, or
4342 NULL. */
4343 dataflow_set **permp;
4346 /* Create values for incoming expressions associated with one-part
4347 variables that don't have value numbers for them. */
4349 static int
4350 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4352 dataflow_set *set = dfpm->set;
4353 variable var = *slot;
4354 location_chain node;
4356 if (!var->onepart || !var->n_var_parts)
4357 return 1;
4359 gcc_assert (var->n_var_parts == 1);
4361 if (dv_is_decl_p (var->dv))
4363 bool check_dupes = false;
4365 restart:
4366 for (node = var->var_part[0].loc_chain; node; node = node->next)
4368 if (GET_CODE (node->loc) == VALUE)
4369 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4370 else if (GET_CODE (node->loc) == REG)
4372 attrs att, *attp, *curp = NULL;
4374 if (var->refcount != 1)
4376 slot = unshare_variable (set, slot, var,
4377 VAR_INIT_STATUS_INITIALIZED);
4378 var = *slot;
4379 goto restart;
4382 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4383 attp = &att->next)
4384 if (att->offset == 0
4385 && GET_MODE (att->loc) == GET_MODE (node->loc))
4387 if (dv_is_value_p (att->dv))
4389 rtx cval = dv_as_value (att->dv);
4390 node->loc = cval;
4391 check_dupes = true;
4392 break;
4394 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4395 curp = attp;
4398 if (!curp)
4400 curp = attp;
4401 while (*curp)
4402 if ((*curp)->offset == 0
4403 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4404 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4405 break;
4406 else
4407 curp = &(*curp)->next;
4408 gcc_assert (*curp);
4411 if (!att)
4413 decl_or_value cdv;
4414 rtx cval;
4416 if (!*dfpm->permp)
4418 *dfpm->permp = XNEW (dataflow_set);
4419 dataflow_set_init (*dfpm->permp);
4422 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4423 att; att = att->next)
4424 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4426 gcc_assert (att->offset == 0
4427 && dv_is_value_p (att->dv));
4428 val_reset (set, att->dv);
4429 break;
4432 if (att)
4434 cdv = att->dv;
4435 cval = dv_as_value (cdv);
4437 else
4439 /* Create a unique value to hold this register,
4440 that ought to be found and reused in
4441 subsequent rounds. */
4442 cselib_val *v;
4443 gcc_assert (!cselib_lookup (node->loc,
4444 GET_MODE (node->loc), 0,
4445 VOIDmode));
4446 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4447 VOIDmode);
4448 cselib_preserve_value (v);
4449 cselib_invalidate_rtx (node->loc);
4450 cval = v->val_rtx;
4451 cdv = dv_from_value (cval);
4452 if (dump_file)
4453 fprintf (dump_file,
4454 "Created new value %u:%u for reg %i\n",
4455 v->uid, v->hash, REGNO (node->loc));
4458 var_reg_decl_set (*dfpm->permp, node->loc,
4459 VAR_INIT_STATUS_INITIALIZED,
4460 cdv, 0, NULL, INSERT);
4462 node->loc = cval;
4463 check_dupes = true;
4466 /* Remove attribute referring to the decl, which now
4467 uses the value for the register, already existing or
4468 to be added when we bring perm in. */
4469 att = *curp;
4470 *curp = att->next;
4471 pool_free (attrs_pool, att);
4475 if (check_dupes)
4476 remove_duplicate_values (var);
4479 return 1;
4482 /* Reset values in the permanent set that are not associated with the
4483 chosen expression. */
4485 static int
4486 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4488 dataflow_set *set = dfpm->set;
4489 variable pvar = *pslot, var;
4490 location_chain pnode;
4491 decl_or_value dv;
4492 attrs att;
4494 gcc_assert (dv_is_value_p (pvar->dv)
4495 && pvar->n_var_parts == 1);
4496 pnode = pvar->var_part[0].loc_chain;
4497 gcc_assert (pnode
4498 && !pnode->next
4499 && REG_P (pnode->loc));
4501 dv = pvar->dv;
4503 var = shared_hash_find (set->vars, dv);
4504 if (var)
4506 /* Although variable_post_merge_new_vals may have made decls
4507 non-star-canonical, values that pre-existed in canonical form
4508 remain canonical, and newly-created values reference a single
4509 REG, so they are canonical as well. Since VAR has the
4510 location list for a VALUE, using find_loc_in_1pdv for it is
4511 fine, since VALUEs don't map back to DECLs. */
4512 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4513 return 1;
4514 val_reset (set, dv);
4517 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4518 if (att->offset == 0
4519 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4520 && dv_is_value_p (att->dv))
4521 break;
4523 /* If there is a value associated with this register already, create
4524 an equivalence. */
4525 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4527 rtx cval = dv_as_value (att->dv);
4528 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4529 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4530 NULL, INSERT);
4532 else if (!att)
4534 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4535 dv, 0, pnode->loc);
4536 variable_union (pvar, set);
4539 return 1;
4542 /* After a merge, create values for one-part variables that lack
4543 them, reset stale permanent-set values, and re-canonicalize. */
4545 static void
4546 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4548 struct dfset_post_merge dfpm;
4550 dfpm.set = set;
4551 dfpm.permp = permp;
4553 shared_hash_htab (set->vars)
4554 .traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4555 if (*permp)
4556 shared_hash_htab ((*permp)->vars)
4557 .traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4558 shared_hash_htab (set->vars)
4559 .traverse <dataflow_set *, canonicalize_values_star> (set);
4560 shared_hash_htab (set->vars)
4561 .traverse <dataflow_set *, canonicalize_vars_star> (set);
4564 /* Return a node whose loc is a MEM that refers to EXPR in the
4565 location list of the one-part value VAL, or in that of
4566 any values recursively mentioned in the location lists. */
4568 static location_chain
4569 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
4571 location_chain node;
4572 decl_or_value dv;
4573 variable var;
4574 location_chain where = NULL;
4576 if (!val)
4577 return NULL;
4579 gcc_assert (GET_CODE (val) == VALUE
4580 && !VALUE_RECURSED_INTO (val));
4582 dv = dv_from_value (val);
4583 var = vars.find_with_hash (dv, dv_htab_hash (dv));
4585 if (!var)
4586 return NULL;
4588 gcc_assert (var->onepart);
4590 if (!var->n_var_parts)
4591 return NULL;
4593 VALUE_RECURSED_INTO (val) = true;
4595 for (node = var->var_part[0].loc_chain; node; node = node->next)
4596 if (MEM_P (node->loc)
4597 && MEM_EXPR (node->loc) == expr
4598 && INT_MEM_OFFSET (node->loc) == 0)
4600 where = node;
4601 break;
4603 else if (GET_CODE (node->loc) == VALUE
4604 && !VALUE_RECURSED_INTO (node->loc)
4605 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4606 break;
4608 VALUE_RECURSED_INTO (val) = false;
4610 return where;
4613 /* Return TRUE if the value of MEM may vary across a call. */
4615 static bool
4616 mem_dies_at_call (rtx mem)
4618 tree expr = MEM_EXPR (mem);
4619 tree decl;
4621 if (!expr)
4622 return true;
4624 decl = get_base_address (expr);
4626 if (!decl)
4627 return true;
4629 if (!DECL_P (decl))
4630 return true;
4632 return (may_be_aliased (decl)
4633 || (!TREE_READONLY (decl) && is_global_var (decl)));
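/* Hypothetical decls illustrating the test above:

     int g;                     global and writable -> dies
     static const int c = 42;   read-only, not aliased -> survives
     int local;                 local, address never taken -> survives
     int *p = &local;           "local" becomes may_be_aliased -> dies

   A MEM with no MEM_EXPR or with a non-DECL base is conservatively
   assumed to die.  */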
4636 /* Remove all MEMs from the location list of a hash table entry for a
4637 one-part variable, except those whose MEM attributes map back to
4638 the variable itself, directly or within a VALUE. */
4640 static int
4641 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4643 variable var = *slot;
4645 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4647 tree decl = dv_as_decl (var->dv);
4648 location_chain loc, *locp;
4649 bool changed = false;
4651 if (!var->n_var_parts)
4652 return 1;
4654 gcc_assert (var->n_var_parts == 1);
4656 if (shared_var_p (var, set->vars))
4658 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4660 /* We want to remove dying MEMs that don't refer to DECL. */
4661 if (GET_CODE (loc->loc) == MEM
4662 && (MEM_EXPR (loc->loc) != decl
4663 || INT_MEM_OFFSET (loc->loc) != 0)
4664 && !mem_dies_at_call (loc->loc))
4665 break;
4666 /* We want to move MEMs that do refer to DECL here. */
4667 else if (GET_CODE (loc->loc) == VALUE
4668 && find_mem_expr_in_1pdv (decl, loc->loc,
4669 shared_hash_htab (set->vars)))
4670 break;
4673 if (!loc)
4674 return 1;
4676 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4677 var = *slot;
4678 gcc_assert (var->n_var_parts == 1);
4681 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4682 loc; loc = *locp)
4684 rtx old_loc = loc->loc;
4685 if (GET_CODE (old_loc) == VALUE)
4687 location_chain mem_node
4688 = find_mem_expr_in_1pdv (decl, loc->loc,
4689 shared_hash_htab (set->vars));
4691 /* ??? This picks up only one out of multiple MEMs that
4692 refer to the same variable. Do we ever need to be
4693 concerned about dealing with more than one, or, given
4694 that they should all map to the same variable
4695 location, their addresses will have been merged and
4696 they will be regarded as equivalent? */
4697 if (mem_node)
4699 loc->loc = mem_node->loc;
4700 loc->set_src = mem_node->set_src;
4701 loc->init = MIN (loc->init, mem_node->init);
4705 if (GET_CODE (loc->loc) != MEM
4706 || (MEM_EXPR (loc->loc) == decl
4707 && INT_MEM_OFFSET (loc->loc) == 0)
4708 || !mem_dies_at_call (loc->loc))
4710 if (old_loc != loc->loc && emit_notes)
4712 if (old_loc == var->var_part[0].cur_loc)
4714 changed = true;
4715 var->var_part[0].cur_loc = NULL;
4718 locp = &loc->next;
4719 continue;
4722 if (emit_notes)
4724 if (old_loc == var->var_part[0].cur_loc)
4726 changed = true;
4727 var->var_part[0].cur_loc = NULL;
4730 *locp = loc->next;
4731 pool_free (loc_chain_pool, loc);
4734 if (!var->var_part[0].loc_chain)
4736 var->n_var_parts--;
4737 changed = true;
4739 if (changed)
4740 variable_was_changed (var, set);
4743 return 1;
4746 /* Remove all MEMs from the location list of a hash table entry for a
4747 value. */
4749 static int
4750 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4752 variable var = *slot;
4754 if (var->onepart == ONEPART_VALUE)
4756 location_chain loc, *locp;
4757 bool changed = false;
4758 rtx cur_loc;
4760 gcc_assert (var->n_var_parts == 1);
4762 if (shared_var_p (var, set->vars))
4764 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4765 if (GET_CODE (loc->loc) == MEM
4766 && mem_dies_at_call (loc->loc))
4767 break;
4769 if (!loc)
4770 return 1;
4772 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4773 var = *slot;
4774 gcc_assert (var->n_var_parts == 1);
4777 if (VAR_LOC_1PAUX (var))
4778 cur_loc = VAR_LOC_FROM (var);
4779 else
4780 cur_loc = var->var_part[0].cur_loc;
4782 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4783 loc; loc = *locp)
4785 if (GET_CODE (loc->loc) != MEM
4786 || !mem_dies_at_call (loc->loc))
4788 locp = &loc->next;
4789 continue;
4792 *locp = loc->next;
4793 /* If we have deleted the location that was last emitted,
4794 we have to emit a new location, so add the variable to the
4795 set of changed variables. */
4796 if (cur_loc == loc->loc)
4798 changed = true;
4799 var->var_part[0].cur_loc = NULL;
4800 if (VAR_LOC_1PAUX (var))
4801 VAR_LOC_FROM (var) = NULL;
4803 pool_free (loc_chain_pool, loc);
4806 if (!var->var_part[0].loc_chain)
4808 var->n_var_parts--;
4809 changed = true;
4811 if (changed)
4812 variable_was_changed (var, set);
4815 return 1;
4818 /* Remove all variable-location information about call-clobbered
4819 registers, as well as associations between MEMs and VALUEs. */
4821 static void
4822 dataflow_set_clear_at_call (dataflow_set *set)
4824 unsigned int r;
4825 hard_reg_set_iterator hrsi;
4827 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4828 var_regno_delete (set, r);
4830 if (MAY_HAVE_DEBUG_INSNS)
4832 set->traversed_vars = set->vars;
4833 shared_hash_htab (set->vars)
4834 .traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4835 set->traversed_vars = set->vars;
4836 shared_hash_htab (set->vars)
4837 .traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4838 set->traversed_vars = NULL;
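/* Return true if some location in VP1's chain has no counterpart in
   VP2's chain.  Note this containment check is one-directional, so
   variable_different_p below calls it with the arguments in both
   orders.  */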
4842 static bool
4843 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4845 location_chain lc1, lc2;
4847 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4849 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4851 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4853 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4854 break;
4856 if (rtx_equal_p (lc1->loc, lc2->loc))
4857 break;
4859 if (!lc2)
4860 return true;
4862 return false;
4865 /* Return true if one-part variables VAR1 and VAR2 are different.
4866 They must be in canonical order. */
4868 static bool
4869 onepart_variable_different_p (variable var1, variable var2)
4871 location_chain lc1, lc2;
4873 if (var1 == var2)
4874 return false;
4876 gcc_assert (var1->n_var_parts == 1
4877 && var2->n_var_parts == 1);
4879 lc1 = var1->var_part[0].loc_chain;
4880 lc2 = var2->var_part[0].loc_chain;
4882 gcc_assert (lc1 && lc2);
4884 while (lc1 && lc2)
4886 if (loc_cmp (lc1->loc, lc2->loc))
4887 return true;
4888 lc1 = lc1->next;
4889 lc2 = lc2->next;
4892 return lc1 != lc2;
4895 /* Return true if variables VAR1 and VAR2 are different. */
4897 static bool
4898 variable_different_p (variable var1, variable var2)
4900 int i;
4902 if (var1 == var2)
4903 return false;
4905 if (var1->onepart != var2->onepart)
4906 return true;
4908 if (var1->n_var_parts != var2->n_var_parts)
4909 return true;
4911 if (var1->onepart && var1->n_var_parts)
4913 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4914 && var1->n_var_parts == 1);
4915 /* One-part values have locations in a canonical order. */
4916 return onepart_variable_different_p (var1, var2);
4919 for (i = 0; i < var1->n_var_parts; i++)
4921 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4922 return true;
4923 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4924 return true;
4925 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4926 return true;
4928 return false;
4931 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4933 static bool
4934 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4936 variable_iterator_type hi;
4937 variable var1;
4939 if (old_set->vars == new_set->vars)
4940 return false;
4942 if (shared_hash_htab (old_set->vars).elements ()
4943 != shared_hash_htab (new_set->vars).elements ())
4944 return true;
4946 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars),
4947 var1, variable, hi)
4949 variable_table_type htab = shared_hash_htab (new_set->vars);
4950 variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4951 if (!var2)
4953 if (dump_file && (dump_flags & TDF_DETAILS))
4955 fprintf (dump_file, "dataflow difference found: removal of:\n");
4956 dump_var (var1);
4958 return true;
4961 if (variable_different_p (var1, var2))
4963 if (dump_file && (dump_flags & TDF_DETAILS))
4965 fprintf (dump_file, "dataflow difference found: "
4966 "old and new follow:\n");
4967 dump_var (var1);
4968 dump_var (var2);
4970 return true;
4974 /* No need to traverse the second hashtab, if both have the same number
4975 of elements and the second one had all entries found in the first one,
4976 then it can't have any extra entries. */
4977 return false;
4980 /* Free the contents of dataflow set SET. */
4982 static void
4983 dataflow_set_destroy (dataflow_set *set)
4985 int i;
4987 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4988 attrs_list_clear (&set->regs[i]);
4990 shared_hash_destroy (set->vars);
4991 set->vars = NULL;
4994 /* Return true if RTL X contains a SYMBOL_REF. */
4996 static bool
4997 contains_symbol_ref (rtx x)
4999 const char *fmt;
5000 RTX_CODE code;
5001 int i;
5003 if (!x)
5004 return false;
5006 code = GET_CODE (x);
5007 if (code == SYMBOL_REF)
5008 return true;
5010 fmt = GET_RTX_FORMAT (code);
5011 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5013 if (fmt[i] == 'e')
5015 if (contains_symbol_ref (XEXP (x, i)))
5016 return true;
5018 else if (fmt[i] == 'E')
5020 int j;
5021 for (j = 0; j < XVECLEN (x, i); j++)
5022 if (contains_symbol_ref (XVECEXP (x, i, j)))
5023 return true;
5027 return false;
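/* E.g. for the alias case shown in track_expr_p below, DECL_RTL is
   a (mem (symbol_ref "_dl_argv")) and this returns true, whereas a
   plain (reg:SI 1) or (const_int 0) yields false.  */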
5030 /* Shall EXPR be tracked? */
5032 static bool
5033 track_expr_p (tree expr, bool need_rtl)
5035 rtx decl_rtl;
5036 tree realdecl;
5038 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5039 return DECL_RTL_SET_P (expr);
5041 /* If EXPR is not a parameter or a variable, do not track it. */
5042 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5043 return 0;
5045 /* It also must have a name... */
5046 if (!DECL_NAME (expr) && need_rtl)
5047 return 0;
5049 /* ... and a RTL assigned to it. */
5050 decl_rtl = DECL_RTL_IF_SET (expr);
5051 if (!decl_rtl && need_rtl)
5052 return 0;
5054 /* If this expression is really a debug alias of some other declaration, we
5055 don't need to track this expression if the ultimate declaration is
5056 ignored. */
5057 realdecl = expr;
5058 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5060 realdecl = DECL_DEBUG_EXPR (realdecl);
5061 if (!DECL_P (realdecl))
5063 if (handled_component_p (realdecl)
5064 || (TREE_CODE (realdecl) == MEM_REF
5065 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5067 HOST_WIDE_INT bitsize, bitpos, maxsize;
5068 tree innerdecl
5069 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5070 &maxsize);
5071 if (!DECL_P (innerdecl)
5072 || DECL_IGNORED_P (innerdecl)
5073 || TREE_STATIC (innerdecl)
5074 || bitsize <= 0
5075 || bitpos + bitsize > 256
5076 || bitsize != maxsize)
5077 return 0;
5078 else
5079 realdecl = expr;
5081 else
5082 return 0;
5086 /* Do not track EXPR if its REALDECL should be ignored for debugging
5087 purposes. */
5088 if (DECL_IGNORED_P (realdecl))
5089 return 0;
5091 /* Do not track global variables until we are able to emit correct location
5092 list for them. */
5093 if (TREE_STATIC (realdecl))
5094 return 0;
5096 /* When EXPR is a DECL for an alias of some variable (see the example
5097 below), the TREE_STATIC flag is not used. Disable tracking of all
5098 DECLs whose DECL_RTL contains a SYMBOL_REF.
5100 Example:
5101 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5102 char **_dl_argv;
5104 if (decl_rtl && MEM_P (decl_rtl)
5105 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5106 return 0;
5108 /* If the RTX is a memory, it should not be very large (because that
5109 would be an array or a struct). */
5110 if (decl_rtl && MEM_P (decl_rtl))
5112 /* Do not track structures and arrays. */
5113 if (GET_MODE (decl_rtl) == BLKmode
5114 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5115 return 0;
5116 if (MEM_SIZE_KNOWN_P (decl_rtl)
5117 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5118 return 0;
5121 DECL_CHANGED (expr) = 0;
5122 DECL_CHANGED (realdecl) = 0;
5123 return 1;
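/* Hypothetical decls illustrating the rules above: a named local
   "int i" whose DECL_RTL is a REG or a small scalar MEM is tracked;
   "static int s" (TREE_STATIC), a DECL_IGNORED_P compiler temporary,
   and "char buf[4096]" (aggregate/BLKmode, or MEM_SIZE above
   MAX_VAR_PARTS) are not.  */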
5126 /* Determine whether a given LOC refers to the same variable part as
5127 EXPR+OFFSET. */
5129 static bool
5130 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5132 tree expr2;
5133 HOST_WIDE_INT offset2;
5135 if (! DECL_P (expr))
5136 return false;
5138 if (REG_P (loc))
5140 expr2 = REG_EXPR (loc);
5141 offset2 = REG_OFFSET (loc);
5143 else if (MEM_P (loc))
5145 expr2 = MEM_EXPR (loc);
5146 offset2 = INT_MEM_OFFSET (loc);
5148 else
5149 return false;
5151 if (! expr2 || ! DECL_P (expr2))
5152 return false;
5154 expr = var_debug_decl (expr);
5155 expr2 = var_debug_decl (expr2);
5157 return (expr == expr2 && offset == offset2);
5160 /* LOC is a REG or MEM that we would like to track if possible.
5161 If EXPR is null, we don't know what expression LOC refers to,
5162 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5163 LOC is an lvalue register.
5165 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5166 is something we can track. When returning true, store the mode of
5167 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5168 from EXPR in *OFFSET_OUT (if nonnull). */
5170 static bool
5171 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5172 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5174 enum machine_mode mode;
5176 if (expr == NULL || !track_expr_p (expr, true))
5177 return false;
5179 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5180 whole subreg, but only the old inner part is really relevant. */
5181 mode = GET_MODE (loc);
5182 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5184 enum machine_mode pseudo_mode;
5186 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5187 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5189 offset += byte_lowpart_offset (pseudo_mode, mode);
5190 mode = pseudo_mode;
5194 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5195 Do the same if we are storing to a register and EXPR occupies
5196 the whole of register LOC; in that case, the whole of EXPR is
5197 being changed. We exclude complex modes from the second case
5198 because the real and imaginary parts are represented as separate
5199 pseudo registers, even if the whole complex value fits into one
5200 hard register. */
5201 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5202 || (store_reg_p
5203 && !COMPLEX_MODE_P (DECL_MODE (expr))
5204 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5205 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5207 mode = DECL_MODE (expr);
5208 offset = 0;
5211 if (offset < 0 || offset >= MAX_VAR_PARTS)
5212 return false;
5214 if (mode_out)
5215 *mode_out = mode;
5216 if (offset_out)
5217 *offset_out = offset;
5218 return true;
5221 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5222 want to track. When returning nonnull, make sure that the attributes
5223 on the returned value are updated. */
5225 static rtx
5226 var_lowpart (enum machine_mode mode, rtx loc)
5228 unsigned int offset, reg_offset, regno;
5230 if (GET_MODE (loc) == mode)
5231 return loc;
5233 if (!REG_P (loc) && !MEM_P (loc))
5234 return NULL;
5236 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5238 if (MEM_P (loc))
5239 return adjust_address_nv (loc, mode, offset);
5241 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5242 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5243 reg_offset, mode);
5244 return gen_rtx_REG_offset (loc, mode, regno, offset);
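/* For instance (hypothetical), var_lowpart (QImode, (reg:SI 1))
   yields a QImode REG whose REG_ATTRS offset grew by
   byte_lowpart_offset (QImode, SImode) -- 0 on little-endian
   targets, 3 on big-endian ones -- while for a MEM the same
   adjustment is performed by adjust_address_nv.  */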
5247 /* Carry information about uses and stores while walking rtx. */
5249 struct count_use_info
5251 /* The insn where the RTX is. */
5252 rtx insn;
5254 /* The basic block where insn is. */
5255 basic_block bb;
5257 /* The array of n_sets sets in the insn, as determined by cselib. */
5258 struct cselib_set *sets;
5259 int n_sets;
5261 /* True if we're counting stores, false otherwise. */
5262 bool store_p;
5265 /* Find a VALUE corresponding to X. */
5267 static inline cselib_val *
5268 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5270 int i;
5272 if (cui->sets)
5274 /* This is called after uses are set up and before stores are
5275 processed by cselib, so it's safe to look up srcs, but not
5276 dsts. So we look up expressions that appear in srcs or in
5277 dest expressions, but we search the sets array for dests of
5278 stores. */
5279 if (cui->store_p)
5281 /* Some targets represent memset and memcpy patterns
5282 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5283 (set (mem:BLK ...) (const_int ...)) or
5284 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5285 in that case, otherwise we end up with mode mismatches. */
5286 if (mode == BLKmode && MEM_P (x))
5287 return NULL;
5288 for (i = 0; i < cui->n_sets; i++)
5289 if (cui->sets[i].dest == x)
5290 return cui->sets[i].src_elt;
5292 else
5293 return cselib_lookup (x, mode, 0, VOIDmode);
5296 return NULL;
5299 /* Replace all registers and addresses in an expression with VALUE
5300 expressions that map back to them, unless the expression is a
5301 register. If no mapping can be performed, return NULL. */
5303 static rtx
5304 replace_expr_with_values (rtx loc)
5306 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5307 return NULL;
5308 else if (MEM_P (loc))
5310 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5311 get_address_mode (loc), 0,
5312 GET_MODE (loc));
5313 if (addr)
5314 return replace_equiv_address_nv (loc, addr->val_rtx);
5315 else
5316 return NULL;
5318 else
5319 return cselib_subst_to_values (loc, VOIDmode);
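/* Sketch with hypothetical value numbers: a stack slot
   (mem:SI (plus:P (reg:P sp) (const_int 4))) maps to
   (mem:SI (value:P V12)) once cselib knows V12 for the address,
   whereas a bare (reg:SI 1) or an ENTRY_VALUE deliberately maps to
   NULL -- registers are tracked directly rather than via values.  */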
5322 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5323 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5324 RTX. */
5326 static int
5327 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5329 rtx loc = *x;
5331 return GET_CODE (loc) == DEBUG_EXPR;
5334 /* Determine what kind of micro operation to choose for a USE. Return
5335 MO_CLOBBER if no micro operation is to be generated. */
5337 static enum micro_operation_type
5338 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5340 tree expr;
5342 if (cui && cui->sets)
5344 if (GET_CODE (loc) == VAR_LOCATION)
5346 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5348 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5349 if (! VAR_LOC_UNKNOWN_P (ploc))
5351 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5352 VOIDmode);
5354 /* ??? flag_float_store and volatile mems are never
5355 given values, but we could in theory use them for
5356 locations. */
5357 gcc_assert (val || 1);
5359 return MO_VAL_LOC;
5361 else
5362 return MO_CLOBBER;
5365 if (REG_P (loc) || MEM_P (loc))
5367 if (modep)
5368 *modep = GET_MODE (loc);
5369 if (cui->store_p)
5371 if (REG_P (loc)
5372 || (find_use_val (loc, GET_MODE (loc), cui)
5373 && cselib_lookup (XEXP (loc, 0),
5374 get_address_mode (loc), 0,
5375 GET_MODE (loc))))
5376 return MO_VAL_SET;
5378 else
5380 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5382 if (val && !cselib_preserved_value_p (val))
5383 return MO_VAL_USE;
5388 if (REG_P (loc))
5390 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5392 if (loc == cfa_base_rtx)
5393 return MO_CLOBBER;
5394 expr = REG_EXPR (loc);
5396 if (!expr)
5397 return MO_USE_NO_VAR;
5398 else if (target_for_debug_bind (var_debug_decl (expr)))
5399 return MO_CLOBBER;
5400 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5401 false, modep, NULL))
5402 return MO_USE;
5403 else
5404 return MO_USE_NO_VAR;
5406 else if (MEM_P (loc))
5408 expr = MEM_EXPR (loc);
5410 if (!expr)
5411 return MO_CLOBBER;
5412 else if (target_for_debug_bind (var_debug_decl (expr)))
5413 return MO_CLOBBER;
5414 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5415 false, modep, NULL)
5416 /* Multi-part variables shouldn't refer to one-part
5417 variable names such as VALUEs (never happens) or
5418 DEBUG_EXPRs (only happens in the presence of debug
5419 insns). */
5420 && (!MAY_HAVE_DEBUG_INSNS
5421 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5422 return MO_USE;
5423 else
5424 return MO_CLOBBER;
5427 return MO_CLOBBER;
5430 /* Log to OUT information about micro-operation MOPT involving X in
5431 INSN of BB. */
5433 static inline void
5434 log_op_type (rtx x, basic_block bb, rtx insn,
5435 enum micro_operation_type mopt, FILE *out)
5437 fprintf (out, "bb %i op %i insn %i %s ",
5438 bb->index, VTI (bb)->mos.length (),
5439 INSN_UID (insn), micro_operation_type_name[mopt]);
5440 print_inline_rtx (out, x, 2);
5441 fputc ('\n', out);
5444 /* Tell whether the CONCAT used to hold a VALUE and its location
5445 needs value resolution, i.e., an attempt at mapping the location
5446 back to other incoming values. */
5447 #define VAL_NEEDS_RESOLUTION(x) \
5448 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5449 /* Whether the location in the CONCAT is a tracked expression, that
5450 should also be handled like a MO_USE. */
5451 #define VAL_HOLDS_TRACK_EXPR(x) \
5452 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5453 /* Whether the location in the CONCAT should be handled like a MO_COPY
5454 as well. */
5455 #define VAL_EXPR_IS_COPIED(x) \
5456 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5457 /* Whether the location in the CONCAT should be handled like a
5458 MO_CLOBBER as well. */
5459 #define VAL_EXPR_IS_CLOBBERED(x) \
5460 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5462 /* All preserved VALUEs. */
5463 static vec<rtx> preserved_values;
5465 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5467 static void
5468 preserve_value (cselib_val *val)
5470 cselib_preserve_value (val);
5471 preserved_values.safe_push (val->val_rtx);
5474 /* Helper function for MO_VAL_LOC handling. Return nonzero if
5475 any rtx unsuitable for CONST use that has not been replaced
5476 by a VALUE is discovered. */
5478 static int
5479 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5481 if (*x == NULL_RTX)
5482 return 0;
5484 switch (GET_CODE (*x))
5486 case REG:
5487 case DEBUG_EXPR:
5488 case PC:
5489 case SCRATCH:
5490 case CC0:
5491 case ASM_INPUT:
5492 case ASM_OPERANDS:
5493 return 1;
5494 case MEM:
5495 return !MEM_READONLY_P (*x);
5496 default:
5497 return 0;
5501 /* Add uses (register and memory references) LOC which will be tracked
5502 to VTI (bb)->mos. INSN is the instruction that LOC is part of. */
5504 static int
5505 add_uses (rtx *ploc, void *data)
5507 rtx loc = *ploc;
5508 enum machine_mode mode = VOIDmode;
5509 struct count_use_info *cui = (struct count_use_info *)data;
5510 enum micro_operation_type type = use_type (loc, cui, &mode);
5512 if (type != MO_CLOBBER)
5514 basic_block bb = cui->bb;
5515 micro_operation mo;
5517 mo.type = type;
5518 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5519 mo.insn = cui->insn;
5521 if (type == MO_VAL_LOC)
5523 rtx oloc = loc;
5524 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5525 cselib_val *val;
5527 gcc_assert (cui->sets);
5529 if (MEM_P (vloc)
5530 && !REG_P (XEXP (vloc, 0))
5531 && !MEM_P (XEXP (vloc, 0)))
5533 rtx mloc = vloc;
5534 enum machine_mode address_mode = get_address_mode (mloc);
5535 cselib_val *val
5536 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5537 GET_MODE (mloc));
5539 if (val && !cselib_preserved_value_p (val))
5540 preserve_value (val);
5543 if (CONSTANT_P (vloc)
5544 && (GET_CODE (vloc) != CONST
5545 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5546 /* For constants don't look up any value. */;
5547 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5548 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5550 enum machine_mode mode2;
5551 enum micro_operation_type type2;
5552 rtx nloc = NULL;
5553 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5555 if (resolvable)
5556 nloc = replace_expr_with_values (vloc);
5558 if (nloc)
5560 oloc = shallow_copy_rtx (oloc);
5561 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5564 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5566 type2 = use_type (vloc, 0, &mode2);
5568 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5569 || type2 == MO_CLOBBER);
5571 if (type2 == MO_CLOBBER
5572 && !cselib_preserved_value_p (val))
5574 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5575 preserve_value (val);
5578 else if (!VAR_LOC_UNKNOWN_P (vloc))
5580 oloc = shallow_copy_rtx (oloc);
5581 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5584 mo.u.loc = oloc;
5586 else if (type == MO_VAL_USE)
5588 enum machine_mode mode2 = VOIDmode;
5589 enum micro_operation_type type2;
5590 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5591 rtx vloc, oloc = loc, nloc;
5593 gcc_assert (cui->sets);
5595 if (MEM_P (oloc)
5596 && !REG_P (XEXP (oloc, 0))
5597 && !MEM_P (XEXP (oloc, 0)))
5599 rtx mloc = oloc;
5600 enum machine_mode address_mode = get_address_mode (mloc);
5601 cselib_val *val
5602 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5603 GET_MODE (mloc));
5605 if (val && !cselib_preserved_value_p (val))
5606 preserve_value (val);
5609 type2 = use_type (loc, 0, &mode2);
5611 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5612 || type2 == MO_CLOBBER);
5614 if (type2 == MO_USE)
5615 vloc = var_lowpart (mode2, loc);
5616 else
5617 vloc = oloc;
5619 /* The loc of a MO_VAL_USE may have two forms:
5621 (concat val src): val is at src, a value-based
5622 representation.
5624 (concat (concat val use) src): same as above, with use as
5625 the MO_USE tracked value, if it differs from src.
5629 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5630 nloc = replace_expr_with_values (loc);
5631 if (!nloc)
5632 nloc = oloc;
5634 if (vloc != nloc)
5635 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5636 else
5637 oloc = val->val_rtx;
5639 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5641 if (type2 == MO_USE)
5642 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5643 if (!cselib_preserved_value_p (val))
5645 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5646 preserve_value (val);
5649 else
5650 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5652 if (dump_file && (dump_flags & TDF_DETAILS))
5653 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5654 VTI (bb)->mos.safe_push (mo);
5657 return 0;
5660 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5662 static void
5663 add_uses_1 (rtx *x, void *cui)
5665 for_each_rtx (x, add_uses, cui);
5668 /* This is the depth limit used during expansion of locations. We
5669 want it to be unbounded, so that variables expanded deep in a
5670 recursion nest are fully evaluated and their values are cached
5671 correctly. We avoid recursion cycles through other means, and we
5672 don't unshare RTL, so excess complexity is not a problem. */
5673 #define EXPR_DEPTH (INT_MAX)
5674 /* We use this to keep too-complex expressions from being emitted as
5675 location notes, and from there into debug information. Users can
5676 trade compile time for ridiculously complex expressions, although
5677 they're seldom useful, and they may often have to be discarded as
5678 not representable anyway. */
5679 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5681 /* Attempt to reverse the EXPR operation in the debug info and record
5682 it in the cselib table. E.g. for reg1 = reg2 + 6, even when reg2 is
5683 no longer live, we can express its value as VAL - 6 (VAL being reg1's value). */
5685 static void
5686 reverse_op (rtx val, const_rtx expr, rtx insn)
5688 rtx src, arg, ret;
5689 cselib_val *v;
5690 struct elt_loc_list *l;
5691 enum rtx_code code;
5692 int count;
5694 if (GET_CODE (expr) != SET)
5695 return;
5697 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5698 return;
5700 src = SET_SRC (expr);
5701 switch (GET_CODE (src))
5703 case PLUS:
5704 case MINUS:
5705 case XOR:
5706 case NOT:
5707 case NEG:
5708 if (!REG_P (XEXP (src, 0)))
5709 return;
5710 break;
5711 case SIGN_EXTEND:
5712 case ZERO_EXTEND:
5713 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5714 return;
5715 break;
5716 default:
5717 return;
5720 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5721 return;
5723 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5724 if (!v || !cselib_preserved_value_p (v))
5725 return;
5727 /* Use canonical V to avoid creating multiple redundant expressions
5728 for different VALUES equivalent to V. */
5729 v = canonical_cselib_val (v);
5731 /* Adding a reverse op isn't useful if V already has an always valid
5732 location. Ignore ENTRY_VALUE: while it is always constant, we
5733 should prefer non-ENTRY_VALUE locations whenever possible. */
5734 for (l = v->locs, count = 0; l; l = l->next, count++)
5735 if (CONSTANT_P (l->loc)
5736 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5737 return;
5738 /* Avoid creating too large locs lists. */
5739 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5740 return;
5742 switch (GET_CODE (src))
5744 case NOT:
5745 case NEG:
5746 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5747 return;
5748 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5749 break;
5750 case SIGN_EXTEND:
5751 case ZERO_EXTEND:
5752 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5753 break;
5754 case XOR:
5755 code = XOR;
5756 goto binary;
5757 case PLUS:
5758 code = MINUS;
5759 goto binary;
5760 case MINUS:
5761 code = PLUS;
5762 goto binary;
5763 binary:
5764 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5765 return;
5766 arg = XEXP (src, 1);
5767 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5769 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5770 if (arg == NULL_RTX)
5771 return;
5772 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5773 return;
5775 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5776 if (ret == val)
5777 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5778 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5779 breaks a lot of routines during var-tracking. */
5780 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5781 break;
5782 default:
5783 gcc_unreachable ();
5786 cselib_add_permanent_equiv (v, ret, insn);
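/* Worked example with hypothetical register and value numbers: for
   (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 6))), where
   (reg:SI 2) maps to a preserved value V2 and VAL is the value of
   the destination, the switch above picks MINUS and records the
   permanent equivalence V2 == (minus:SI VAL (const_int 6)), keeping
   V2 expressible after reg 2 is overwritten.  */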
5789 /* Add stores (register and memory references) LOC which will be tracked
5790 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5791 CUIP->insn is the instruction that LOC is part of. */
5793 static void
5794 add_stores (rtx loc, const_rtx expr, void *cuip)
5796 enum machine_mode mode = VOIDmode, mode2;
5797 struct count_use_info *cui = (struct count_use_info *)cuip;
5798 basic_block bb = cui->bb;
5799 micro_operation mo;
5800 rtx oloc = loc, nloc, src = NULL;
5801 enum micro_operation_type type = use_type (loc, cui, &mode);
5802 bool track_p = false;
5803 cselib_val *v;
5804 bool resolve, preserve;
5806 if (type == MO_CLOBBER)
5807 return;
5809 mode2 = mode;
5811 if (REG_P (loc))
5813 gcc_assert (loc != cfa_base_rtx);
5814 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5815 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5816 || GET_CODE (expr) == CLOBBER)
5818 mo.type = MO_CLOBBER;
5819 mo.u.loc = loc;
5820 if (GET_CODE (expr) == SET
5821 && SET_DEST (expr) == loc
5822 && !unsuitable_loc (SET_SRC (expr))
5823 && find_use_val (loc, mode, cui))
5825 gcc_checking_assert (type == MO_VAL_SET);
5826 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5829 else
5831 if (GET_CODE (expr) == SET
5832 && SET_DEST (expr) == loc
5833 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5834 src = var_lowpart (mode2, SET_SRC (expr));
5835 loc = var_lowpart (mode2, loc);
5837 if (src == NULL)
5839 mo.type = MO_SET;
5840 mo.u.loc = loc;
5842 else
5844 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5845 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5847 /* If this is an instruction copying (part of) a parameter
5848 passed by invisible reference to its register location,
5849 pretend it's a SET so that the initial memory location
5850 is discarded, as the parameter register can be reused
5851 for other purposes and we do not track locations based
5852 on generic registers. */
5853 if (MEM_P (src)
5854 && REG_EXPR (loc)
5855 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5856 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5857 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5858 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5859 != arg_pointer_rtx)
5860 mo.type = MO_SET;
5861 else
5862 mo.type = MO_COPY;
5864 else
5865 mo.type = MO_SET;
5866 mo.u.loc = xexpr;
5869 mo.insn = cui->insn;
5871 else if (MEM_P (loc)
5872 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5873 || cui->sets))
5875 if (MEM_P (loc) && type == MO_VAL_SET
5876 && !REG_P (XEXP (loc, 0))
5877 && !MEM_P (XEXP (loc, 0)))
5879 rtx mloc = loc;
5880 enum machine_mode address_mode = get_address_mode (mloc);
5881 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5882 address_mode, 0,
5883 GET_MODE (mloc));
5885 if (val && !cselib_preserved_value_p (val))
5886 preserve_value (val);
5889 if (GET_CODE (expr) == CLOBBER || !track_p)
5891 mo.type = MO_CLOBBER;
5892 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5894 else
5896 if (GET_CODE (expr) == SET
5897 && SET_DEST (expr) == loc
5898 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5899 src = var_lowpart (mode2, SET_SRC (expr));
5900 loc = var_lowpart (mode2, loc);
5902 if (src == NULL)
5904 mo.type = MO_SET;
5905 mo.u.loc = loc;
5907 else
5909 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5910 if (same_variable_part_p (SET_SRC (xexpr),
5911 MEM_EXPR (loc),
5912 INT_MEM_OFFSET (loc)))
5913 mo.type = MO_COPY;
5914 else
5915 mo.type = MO_SET;
5916 mo.u.loc = xexpr;
5919 mo.insn = cui->insn;
5921 else
5922 return;
5924 if (type != MO_VAL_SET)
5925 goto log_and_return;
5927 v = find_use_val (oloc, mode, cui);
5929 if (!v)
5930 goto log_and_return;
5932 resolve = preserve = !cselib_preserved_value_p (v);
5934 if (loc == stack_pointer_rtx
5935 && hard_frame_pointer_adjustment != -1
5936 && preserve)
5937 cselib_set_value_sp_based (v);
5939 nloc = replace_expr_with_values (oloc);
5940 if (nloc)
5941 oloc = nloc;
5943 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5945 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5947 gcc_assert (oval != v);
5948 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5950 if (oval && !cselib_preserved_value_p (oval))
5952 micro_operation moa;
5954 preserve_value (oval);
5956 moa.type = MO_VAL_USE;
5957 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5958 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5959 moa.insn = cui->insn;
5961 if (dump_file && (dump_flags & TDF_DETAILS))
5962 log_op_type (moa.u.loc, cui->bb, cui->insn,
5963 moa.type, dump_file);
5964 VTI (bb)->mos.safe_push (moa);
5967 resolve = false;
5969 else if (resolve && GET_CODE (mo.u.loc) == SET)
5971 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5972 nloc = replace_expr_with_values (SET_SRC (expr));
5973 else
5974 nloc = NULL_RTX;
5976 /* Avoid a mode mismatch between OLOC and EXPR. */
5977 if (!nloc && mode != mode2)
5979 nloc = SET_SRC (expr);
5980 gcc_assert (oloc == SET_DEST (expr));
5983 if (nloc && nloc != SET_SRC (mo.u.loc))
5984 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5985 else
5987 if (oloc == SET_DEST (mo.u.loc))
5988 /* No point in duplicating. */
5989 oloc = mo.u.loc;
5990 if (!REG_P (SET_SRC (mo.u.loc)))
5991 resolve = false;
5994 else if (!resolve)
5996 if (GET_CODE (mo.u.loc) == SET
5997 && oloc == SET_DEST (mo.u.loc))
5998 /* No point in duplicating. */
5999 oloc = mo.u.loc;
6001 else
6002 resolve = false;
6004 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6006 if (mo.u.loc != oloc)
6007 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6009 /* The loc of a MO_VAL_SET may have various forms:
6011 (concat val dst): dst now holds val
6013 (concat val (set dst src)): dst now holds val, copied from src
6015 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6016 after replacing mems and non-top-level regs with values.
6018 (concat (concat val dstv) (set dst src)): dst now holds val,
6019 copied from src. dstv is a value-based representation of dst, if
6020 it differs from dst. If resolution is needed, src is a REG, and
6021 its mode is the same as that of val.
6023 (concat (concat val (set dstv srcv)) (set dst src)): src
6024 copied to dst, holding val. dstv and srcv are value-based
6025 representations of dst and src, respectively.
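/* A concrete (hypothetical) instance of the second form: the copy
   (set (reg:SI 1) (reg:SI 2)), whose destination is given value V,
   produces mo.u.loc == (concat V (set (reg:SI 1) (reg:SI 2))), with
   VAL_NEEDS_RESOLUTION asking the emitter to map the REG source
   back to its own incoming value.  */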
6029 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6030 reverse_op (v->val_rtx, expr, cui->insn);
6032 mo.u.loc = loc;
6034 if (track_p)
6035 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6036 if (preserve)
6038 VAL_NEEDS_RESOLUTION (loc) = resolve;
6039 preserve_value (v);
6041 if (mo.type == MO_CLOBBER)
6042 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6043 if (mo.type == MO_COPY)
6044 VAL_EXPR_IS_COPIED (loc) = 1;
6046 mo.type = MO_VAL_SET;
6048 log_and_return:
6049 if (dump_file && (dump_flags & TDF_DETAILS))
6050 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6051 VTI (bb)->mos.safe_push (mo);
6054 /* Arguments to the call. */
6055 static rtx call_arguments;
6057 /* Compute call_arguments. */
6059 static void
6060 prepare_call_arguments (basic_block bb, rtx insn)
6062 rtx link, x, call;
6063 rtx prev, cur, next;
6064 rtx this_arg = NULL_RTX;
6065 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6066 tree obj_type_ref = NULL_TREE;
6067 CUMULATIVE_ARGS args_so_far_v;
6068 cumulative_args_t args_so_far;
6070 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6071 args_so_far = pack_cumulative_args (&args_so_far_v);
6072 call = get_call_rtx_from (insn);
6073 if (call)
6075 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6077 rtx symbol = XEXP (XEXP (call, 0), 0);
6078 if (SYMBOL_REF_DECL (symbol))
6079 fndecl = SYMBOL_REF_DECL (symbol);
6081 if (fndecl == NULL_TREE)
6082 fndecl = MEM_EXPR (XEXP (call, 0));
6083 if (fndecl
6084 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6085 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6086 fndecl = NULL_TREE;
6087 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6088 type = TREE_TYPE (fndecl);
6089 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6091 if (TREE_CODE (fndecl) == INDIRECT_REF
6092 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6093 obj_type_ref = TREE_OPERAND (fndecl, 0);
6094 fndecl = NULL_TREE;
6096 if (type)
6098 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6099 t = TREE_CHAIN (t))
6100 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6101 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6102 break;
6103 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6104 type = NULL;
6105 else
6107 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6108 link = CALL_INSN_FUNCTION_USAGE (insn);
6109 #ifndef PCC_STATIC_STRUCT_RETURN
6110 if (aggregate_value_p (TREE_TYPE (type), type)
6111 && targetm.calls.struct_value_rtx (type, 0) == 0)
6113 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6114 enum machine_mode mode = TYPE_MODE (struct_addr);
6115 rtx reg;
6116 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6117 nargs + 1);
6118 reg = targetm.calls.function_arg (args_so_far, mode,
6119 struct_addr, true);
6120 targetm.calls.function_arg_advance (args_so_far, mode,
6121 struct_addr, true);
6122 if (reg == NULL_RTX)
6124 for (; link; link = XEXP (link, 1))
6125 if (GET_CODE (XEXP (link, 0)) == USE
6126 && MEM_P (XEXP (XEXP (link, 0), 0)))
6128 link = XEXP (link, 1);
6129 break;
6133 else
6134 #endif
6135 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6136 nargs);
6137 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6139 enum machine_mode mode;
6140 t = TYPE_ARG_TYPES (type);
6141 mode = TYPE_MODE (TREE_VALUE (t));
6142 this_arg = targetm.calls.function_arg (args_so_far, mode,
6143 TREE_VALUE (t), true);
6144 if (this_arg && !REG_P (this_arg))
6145 this_arg = NULL_RTX;
6146 else if (this_arg == NULL_RTX)
6148 for (; link; link = XEXP (link, 1))
6149 if (GET_CODE (XEXP (link, 0)) == USE
6150 && MEM_P (XEXP (XEXP (link, 0), 0)))
6152 this_arg = XEXP (XEXP (link, 0), 0);
6153 break;
6160 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6162 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6163 if (GET_CODE (XEXP (link, 0)) == USE)
6165 rtx item = NULL_RTX;
6166 x = XEXP (XEXP (link, 0), 0);
6167 if (GET_MODE (link) == VOIDmode
6168 || GET_MODE (link) == BLKmode
6169 || (GET_MODE (link) != GET_MODE (x)
6170 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6171 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6172 /* Can't do anything for these if the original type mode
6173 isn't known or can't be converted. */;
6174 else if (REG_P (x))
6176 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6177 if (val && cselib_preserved_value_p (val))
6178 item = val->val_rtx;
6179 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6181 enum machine_mode mode = GET_MODE (x);
6183 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6184 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6186 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6188 if (reg == NULL_RTX || !REG_P (reg))
6189 continue;
6190 val = cselib_lookup (reg, mode, 0, VOIDmode);
6191 if (val && cselib_preserved_value_p (val))
6193 item = val->val_rtx;
6194 break;
6199 else if (MEM_P (x))
6201 rtx mem = x;
6202 cselib_val *val;
6204 if (!frame_pointer_needed)
6206 struct adjust_mem_data amd;
6207 amd.mem_mode = VOIDmode;
6208 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6209 amd.side_effects = NULL_RTX;
6210 amd.store = true;
6211 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6212 &amd);
6213 gcc_assert (amd.side_effects == NULL_RTX);
6215 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6216 if (val && cselib_preserved_value_p (val))
6217 item = val->val_rtx;
6218 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6220 /* For a non-integer stack argument, also check whether it wasn't
6221 initialized with an integer. */
6222 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6223 if (imode != GET_MODE (mem) && imode != BLKmode)
6225 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6226 imode, 0, VOIDmode);
6227 if (val && cselib_preserved_value_p (val))
6228 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6229 imode);
6233 if (item)
6235 rtx x2 = x;
6236 if (GET_MODE (item) != GET_MODE (link))
6237 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6238 if (GET_MODE (x2) != GET_MODE (link))
6239 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6240 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6241 call_arguments
6242 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6244 if (t && t != void_list_node)
6246 tree argtype = TREE_VALUE (t);
6247 enum machine_mode mode = TYPE_MODE (argtype);
6248 rtx reg;
6249 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6251 argtype = build_pointer_type (argtype);
6252 mode = TYPE_MODE (argtype);
6254 reg = targetm.calls.function_arg (args_so_far, mode,
6255 argtype, true);
6256 if (TREE_CODE (argtype) == REFERENCE_TYPE
6257 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6258 && reg
6259 && REG_P (reg)
6260 && GET_MODE (reg) == mode
6261 && GET_MODE_CLASS (mode) == MODE_INT
6262 && REG_P (x)
6263 && REGNO (x) == REGNO (reg)
6264 && GET_MODE (x) == mode
6265 && item)
6267 enum machine_mode indmode
6268 = TYPE_MODE (TREE_TYPE (argtype));
6269 rtx mem = gen_rtx_MEM (indmode, x);
6270 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6271 if (val && cselib_preserved_value_p (val))
6273 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6274 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6275 call_arguments);
6277 else
6279 struct elt_loc_list *l;
6280 tree initial;
6282 /* Try harder: when passing the address of a constant-pool
6283 integer, it can easily be read back. */
6284 item = XEXP (item, 1);
6285 if (GET_CODE (item) == SUBREG)
6286 item = SUBREG_REG (item);
6287 gcc_assert (GET_CODE (item) == VALUE);
6288 val = CSELIB_VAL_PTR (item);
6289 for (l = val->locs; l; l = l->next)
6290 if (GET_CODE (l->loc) == SYMBOL_REF
6291 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6292 && SYMBOL_REF_DECL (l->loc)
6293 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6295 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6296 if (tree_fits_shwi_p (initial))
6298 item = GEN_INT (tree_to_shwi (initial));
6299 item = gen_rtx_CONCAT (indmode, mem, item);
6300 call_arguments
6301 = gen_rtx_EXPR_LIST (VOIDmode, item,
6302 call_arguments);
6304 break;
6308 targetm.calls.function_arg_advance (args_so_far, mode,
6309 argtype, true);
6310 t = TREE_CHAIN (t);
6314 /* Add debug arguments. */
6315 if (fndecl
6316 && TREE_CODE (fndecl) == FUNCTION_DECL
6317 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6319 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6320 if (debug_args)
6322 unsigned int ix;
6323 tree param;
6324 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6326 rtx item;
6327 tree dtemp = (**debug_args)[ix + 1];
6328 enum machine_mode mode = DECL_MODE (dtemp);
6329 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6330 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6331 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6332 call_arguments);
6337 /* Reverse the call_arguments chain. */
6338 prev = NULL_RTX;
6339 for (cur = call_arguments; cur; cur = next)
6341 next = XEXP (cur, 1);
6342 XEXP (cur, 1) = prev;
6343 prev = cur;
6345 call_arguments = prev;
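/* Editorial aside, not part of var-tracking.c: the loop above is the
   standard in-place reversal of a singly-linked list, applied to the
   XEXP (node, 1) "next" field of the EXPR_LIST chain.  A minimal
   self-contained C sketch of the same idiom, over a hypothetical node
   type:

     #include <stddef.h>

     struct node { int payload; struct node *next; };

     static struct node *
     reverse_list (struct node *head)
     {
       struct node *prev = NULL, *next;
       for (; head; head = next)
         {
           next = head->next;   /* remember the rest of the list  */
           head->next = prev;   /* point this node at the reversed prefix  */
           prev = head;         /* the reversed prefix now starts here  */
         }
       return prev;             /* PREV is the new head  */
     }

   The reversal is needed because the arguments were consed onto the
   head of the list, i.e. accumulated in reverse order.  */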
6347 x = get_call_rtx_from (insn);
6348 if (x)
6350 x = XEXP (XEXP (x, 0), 0);
6351 if (GET_CODE (x) == SYMBOL_REF)
6352 /* Don't record anything. */;
6353 else if (CONSTANT_P (x))
6355 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6356 pc_rtx, x);
6357 call_arguments
6358 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6360 else
6362 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6363 if (val && cselib_preserved_value_p (val))
6365 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6366 call_arguments
6367 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6371 if (this_arg)
6373 enum machine_mode mode
6374 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6375 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6376 HOST_WIDE_INT token
6377 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6378 if (token)
6379 clobbered = plus_constant (mode, clobbered,
6380 token * GET_MODE_SIZE (mode));
6381 clobbered = gen_rtx_MEM (mode, clobbered);
6382 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6383 call_arguments
6384 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6388 /* Callback for cselib_record_sets_hook. It records the uses and
6389 stores in an insn as micro operations, after cselib_record_sets has
6390 analyzed the sets in the insn but before it modifies the stored
6391 values in its internal tables. It may also be called directly,
6392 bypassing cselib_record_sets (perhaps because we're not doing cselib
6393 in the first place), in which case SETS and N_SETS will be 0. */
6395 static void
6396 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6398 basic_block bb = BLOCK_FOR_INSN (insn);
6399 int n1, n2;
6400 struct count_use_info cui;
6401 micro_operation *mos;
6403 cselib_hook_called = true;
6405 cui.insn = insn;
6406 cui.bb = bb;
6407 cui.sets = sets;
6408 cui.n_sets = n_sets;
6410 n1 = VTI (bb)->mos.length ();
6411 cui.store_p = false;
6412 note_uses (&PATTERN (insn), add_uses_1, &cui);
6413 n2 = VTI (bb)->mos.length () - 1;
6414 mos = VTI (bb)->mos.address ();
6416 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6417 MO_VAL_LOC last. */
6418 while (n1 < n2)
6420 while (n1 < n2 && mos[n1].type == MO_USE)
6421 n1++;
6422 while (n1 < n2 && mos[n2].type != MO_USE)
6423 n2--;
6424 if (n1 < n2)
6426 micro_operation sw;
6428 sw = mos[n1];
6429 mos[n1] = mos[n2];
6430 mos[n2] = sw;
6434 n2 = VTI (bb)->mos.length () - 1;
6435 while (n1 < n2)
6437 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6438 n1++;
6439 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6440 n2--;
6441 if (n1 < n2)
6443 micro_operation sw;
6445 sw = mos[n1];
6446 mos[n1] = mos[n2];
6447 mos[n2] = sw;
6451 if (CALL_P (insn))
6453 micro_operation mo;
6455 mo.type = MO_CALL;
6456 mo.insn = insn;
6457 mo.u.loc = call_arguments;
6458 call_arguments = NULL_RTX;
6460 if (dump_file && (dump_flags & TDF_DETAILS))
6461 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6462 VTI (bb)->mos.safe_push (mo);
6465 n1 = VTI (bb)->mos.length ();
6466 /* This will record NEXT_INSN (insn), such that we can
6467 insert notes before it without worrying about any
6468 notes that MO_USEs might emit after the insn. */
6469 cui.store_p = true;
6470 note_stores (PATTERN (insn), add_stores, &cui);
6471 n2 = VTI (bb)->mos.length () - 1;
6472 mos = VTI (bb)->mos.address ();
6474 /* Order the MO_VAL_USEs first (note_stores does nothing
6475 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6476 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6477 while (n1 < n2)
6479 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6480 n1++;
6481 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6482 n2--;
6483 if (n1 < n2)
6485 micro_operation sw;
6487 sw = mos[n1];
6488 mos[n1] = mos[n2];
6489 mos[n2] = sw;
6493 n2 = VTI (bb)->mos.length () - 1;
6494 while (n1 < n2)
6496 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6497 n1++;
6498 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6499 n2--;
6500 if (n1 < n2)
6502 micro_operation sw;
6504 sw = mos[n1];
6505 mos[n1] = mos[n2];
6506 mos[n2] = sw;
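/* Editorial aside, not part of var-tracking.c: the swap loops above are
   instances of a two-pointer partition, which moves every element
   satisfying a predicate in front of those that do not, without
   preserving relative order.  A hedged, self-contained C sketch with a
   hypothetical predicate:

     static void
     partition_ints (int *a, int n, int (*pred) (int))
     {
       int lo = 0, hi = n - 1;
       while (lo < hi)
         {
           while (lo < hi && pred (a[lo]))
             lo++;               /* a[lo] is already on the left side  */
           while (lo < hi && !pred (a[hi]))
             hi--;               /* a[hi] is already on the right side  */
           if (lo < hi)
             {
               int tmp = a[lo];  /* swap the misplaced pair  */
               a[lo] = a[hi];
               a[hi] = tmp;
             }
         }
     }

   add_with_sets applies this pattern repeatedly, once per
   micro-operation type that must precede the rest.  */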
6511 static enum var_init_status
6512 find_src_status (dataflow_set *in, rtx src)
6514 tree decl = NULL_TREE;
6515 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6517 if (! flag_var_tracking_uninit)
6518 status = VAR_INIT_STATUS_INITIALIZED;
6520 if (src && REG_P (src))
6521 decl = var_debug_decl (REG_EXPR (src));
6522 else if (src && MEM_P (src))
6523 decl = var_debug_decl (MEM_EXPR (src));
6525 if (src && decl)
6526 status = get_init_value (in, src, dv_from_decl (decl));
6528 return status;
6531 /* SRC is the source of an assignment. Use SET to try to find what
6532 was ultimately assigned to SRC. Return that value if known,
6533 otherwise return SRC itself. */
6535 static rtx
6536 find_src_set_src (dataflow_set *set, rtx src)
6538 tree decl = NULL_TREE; /* The variable being copied around. */
6539 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6540 variable var;
6541 location_chain nextp;
6542 int i;
6543 bool found;
6545 if (src && REG_P (src))
6546 decl = var_debug_decl (REG_EXPR (src));
6547 else if (src && MEM_P (src))
6548 decl = var_debug_decl (MEM_EXPR (src));
6550 if (src && decl)
6552 decl_or_value dv = dv_from_decl (decl);
6554 var = shared_hash_find (set->vars, dv);
6555 if (var)
6557 found = false;
6558 for (i = 0; i < var->n_var_parts && !found; i++)
6559 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6560 nextp = nextp->next)
6561 if (rtx_equal_p (nextp->loc, src))
6563 set_src = nextp->set_src;
6564 found = true;
6570 return set_src;
6573 /* Compute the changes of variable locations in the basic block BB. */
6575 static bool
6576 compute_bb_dataflow (basic_block bb)
6578 unsigned int i;
6579 micro_operation *mo;
6580 bool changed;
6581 dataflow_set old_out;
6582 dataflow_set *in = &VTI (bb)->in;
6583 dataflow_set *out = &VTI (bb)->out;
6585 dataflow_set_init (&old_out);
6586 dataflow_set_copy (&old_out, out);
6587 dataflow_set_copy (out, in);
6589 if (MAY_HAVE_DEBUG_INSNS)
6590 local_get_addr_cache = pointer_map_create ();
6592 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6594 rtx insn = mo->insn;
6596 switch (mo->type)
6598 case MO_CALL:
6599 dataflow_set_clear_at_call (out);
6600 break;
6602 case MO_USE:
6604 rtx loc = mo->u.loc;
6606 if (REG_P (loc))
6607 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6608 else if (MEM_P (loc))
6609 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6611 break;
6613 case MO_VAL_LOC:
6615 rtx loc = mo->u.loc;
6616 rtx val, vloc;
6617 tree var;
6619 if (GET_CODE (loc) == CONCAT)
6621 val = XEXP (loc, 0);
6622 vloc = XEXP (loc, 1);
6624 else
6626 val = NULL_RTX;
6627 vloc = loc;
6630 var = PAT_VAR_LOCATION_DECL (vloc);
6632 clobber_variable_part (out, NULL_RTX,
6633 dv_from_decl (var), 0, NULL_RTX);
6634 if (val)
6636 if (VAL_NEEDS_RESOLUTION (loc))
6637 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6638 set_variable_part (out, val, dv_from_decl (var), 0,
6639 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6640 INSERT);
6642 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6643 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6644 dv_from_decl (var), 0,
6645 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6646 INSERT);
6648 break;
6650 case MO_VAL_USE:
6652 rtx loc = mo->u.loc;
6653 rtx val, vloc, uloc;
6655 vloc = uloc = XEXP (loc, 1);
6656 val = XEXP (loc, 0);
6658 if (GET_CODE (val) == CONCAT)
6660 uloc = XEXP (val, 1);
6661 val = XEXP (val, 0);
6664 if (VAL_NEEDS_RESOLUTION (loc))
6665 val_resolve (out, val, vloc, insn);
6666 else
6667 val_store (out, val, uloc, insn, false);
6669 if (VAL_HOLDS_TRACK_EXPR (loc))
6671 if (GET_CODE (uloc) == REG)
6672 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6673 NULL);
6674 else if (GET_CODE (uloc) == MEM)
6675 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6676 NULL);
6679 break;
6681 case MO_VAL_SET:
6683 rtx loc = mo->u.loc;
6684 rtx val, vloc, uloc;
6685 rtx dstv, srcv;
6687 vloc = loc;
6688 uloc = XEXP (vloc, 1);
6689 val = XEXP (vloc, 0);
6690 vloc = uloc;
6692 if (GET_CODE (uloc) == SET)
6694 dstv = SET_DEST (uloc);
6695 srcv = SET_SRC (uloc);
6697 else
6699 dstv = uloc;
6700 srcv = NULL;
6703 if (GET_CODE (val) == CONCAT)
6705 dstv = vloc = XEXP (val, 1);
6706 val = XEXP (val, 0);
6709 if (GET_CODE (vloc) == SET)
6711 srcv = SET_SRC (vloc);
6713 gcc_assert (val != srcv);
6714 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6716 dstv = vloc = SET_DEST (vloc);
6718 if (VAL_NEEDS_RESOLUTION (loc))
6719 val_resolve (out, val, srcv, insn);
6721 else if (VAL_NEEDS_RESOLUTION (loc))
6723 gcc_assert (GET_CODE (uloc) == SET
6724 && GET_CODE (SET_SRC (uloc)) == REG);
6725 val_resolve (out, val, SET_SRC (uloc), insn);
6728 if (VAL_HOLDS_TRACK_EXPR (loc))
6730 if (VAL_EXPR_IS_CLOBBERED (loc))
6732 if (REG_P (uloc))
6733 var_reg_delete (out, uloc, true);
6734 else if (MEM_P (uloc))
6736 gcc_assert (MEM_P (dstv));
6737 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6738 var_mem_delete (out, dstv, true);
6741 else
6743 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6744 rtx src = NULL, dst = uloc;
6745 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6747 if (GET_CODE (uloc) == SET)
6749 src = SET_SRC (uloc);
6750 dst = SET_DEST (uloc);
6753 if (copied_p)
6755 if (flag_var_tracking_uninit)
6757 status = find_src_status (in, src);
6759 if (status == VAR_INIT_STATUS_UNKNOWN)
6760 status = find_src_status (out, src);
6763 src = find_src_set_src (in, src);
6766 if (REG_P (dst))
6767 var_reg_delete_and_set (out, dst, !copied_p,
6768 status, srcv);
6769 else if (MEM_P (dst))
6771 gcc_assert (MEM_P (dstv));
6772 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6773 var_mem_delete_and_set (out, dstv, !copied_p,
6774 status, srcv);
6778 else if (REG_P (uloc))
6779 var_regno_delete (out, REGNO (uloc));
6780 else if (MEM_P (uloc))
6782 gcc_checking_assert (GET_CODE (vloc) == MEM);
6783 gcc_checking_assert (dstv == vloc);
6784 if (dstv != vloc)
6785 clobber_overlapping_mems (out, vloc);
6788 val_store (out, val, dstv, insn, true);
6790 break;
6792 case MO_SET:
6794 rtx loc = mo->u.loc;
6795 rtx set_src = NULL;
6797 if (GET_CODE (loc) == SET)
6799 set_src = SET_SRC (loc);
6800 loc = SET_DEST (loc);
6803 if (REG_P (loc))
6804 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6805 set_src);
6806 else if (MEM_P (loc))
6807 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6808 set_src);
6810 break;
6812 case MO_COPY:
6814 rtx loc = mo->u.loc;
6815 enum var_init_status src_status;
6816 rtx set_src = NULL;
6818 if (GET_CODE (loc) == SET)
6820 set_src = SET_SRC (loc);
6821 loc = SET_DEST (loc);
6824 if (! flag_var_tracking_uninit)
6825 src_status = VAR_INIT_STATUS_INITIALIZED;
6826 else
6828 src_status = find_src_status (in, set_src);
6830 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6831 src_status = find_src_status (out, set_src);
6834 set_src = find_src_set_src (in, set_src);
6836 if (REG_P (loc))
6837 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6838 else if (MEM_P (loc))
6839 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6841 break;
6843 case MO_USE_NO_VAR:
6845 rtx loc = mo->u.loc;
6847 if (REG_P (loc))
6848 var_reg_delete (out, loc, false);
6849 else if (MEM_P (loc))
6850 var_mem_delete (out, loc, false);
6852 break;
6854 case MO_CLOBBER:
6856 rtx loc = mo->u.loc;
6858 if (REG_P (loc))
6859 var_reg_delete (out, loc, true);
6860 else if (MEM_P (loc))
6861 var_mem_delete (out, loc, true);
6863 break;
6865 case MO_ADJUST:
6866 out->stack_adjust += mo->u.adjust;
6867 break;
6871 if (MAY_HAVE_DEBUG_INSNS)
6873 pointer_map_destroy (local_get_addr_cache);
6874 local_get_addr_cache = NULL;
6876 dataflow_set_equiv_regs (out);
6877 shared_hash_htab (out->vars)
6878 .traverse <dataflow_set *, canonicalize_values_mark> (out);
6879 shared_hash_htab (out->vars)
6880 .traverse <dataflow_set *, canonicalize_values_star> (out);
6881 #if ENABLE_CHECKING
6882 shared_hash_htab (out->vars)
6883 .traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6884 #endif
6886 changed = dataflow_set_different (&old_out, out);
6887 dataflow_set_destroy (&old_out);
6888 return changed;
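/* Editorial aside, not part of var-tracking.c: compute_bb_dataflow is a
   transfer function in the classic forward-dataflow sense: OUT starts
   as a copy of IN and each micro operation updates it in order.  A
   minimal hedged sketch of the same shape over a toy gen/kill bitset
   state (the real sets here are far richer than bitsets):

     typedef unsigned int state;          /* toy set of dataflow facts  */

     struct toy_op { state gen, kill; };  /* one "micro operation"  */

     static state
     apply_ops (state in, const struct toy_op *ops, int n_ops)
     {
       state out = in;                    /* OUT is seeded from IN  */
       int i;
       for (i = 0; i < n_ops; i++)
         out = (out & ~ops[i].kill) | ops[i].gen;
       return out;                        /* the block's new OUT set  */
     }

   The boolean result of compute_bb_dataflow plays the same role as
   comparing the new OUT against the old one would in this sketch.  */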
6891 /* Find the locations of variables in the whole function. */
6893 static bool
6894 vt_find_locations (void)
6896 fibheap_t worklist, pending, fibheap_swap;
6897 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6898 basic_block bb;
6899 edge e;
6900 int *bb_order;
6901 int *rc_order;
6902 int i;
6903 int htabsz = 0;
6904 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6905 bool success = true;
6907 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6908 /* Compute the reverse completion order of a depth-first search of the
6909 CFG so that the dataflow analysis runs faster. */
6910 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
6911 bb_order = XNEWVEC (int, last_basic_block);
6912 pre_and_rev_post_order_compute (NULL, rc_order, false);
6913 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
6914 bb_order[rc_order[i]] = i;
6915 free (rc_order);
6917 worklist = fibheap_new ();
6918 pending = fibheap_new ();
6919 visited = sbitmap_alloc (last_basic_block);
6920 in_worklist = sbitmap_alloc (last_basic_block);
6921 in_pending = sbitmap_alloc (last_basic_block);
6922 bitmap_clear (in_worklist);
6924 FOR_EACH_BB (bb)
6925 fibheap_insert (pending, bb_order[bb->index], bb);
6926 bitmap_ones (in_pending);
6928 while (success && !fibheap_empty (pending))
6930 fibheap_swap = pending;
6931 pending = worklist;
6932 worklist = fibheap_swap;
6933 sbitmap_swap = in_pending;
6934 in_pending = in_worklist;
6935 in_worklist = sbitmap_swap;
6937 bitmap_clear (visited);
6939 while (!fibheap_empty (worklist))
6941 bb = (basic_block) fibheap_extract_min (worklist);
6942 bitmap_clear_bit (in_worklist, bb->index);
6943 gcc_assert (!bitmap_bit_p (visited, bb->index));
6944 if (!bitmap_bit_p (visited, bb->index))
6946 bool changed;
6947 edge_iterator ei;
6948 int oldinsz, oldoutsz;
6950 bitmap_set_bit (visited, bb->index);
6952 if (VTI (bb)->in.vars)
6954 htabsz
6955 -= shared_hash_htab (VTI (bb)->in.vars).size ()
6956 + shared_hash_htab (VTI (bb)->out.vars).size ();
6957 oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements ();
6958 oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements ();
6960 else
6961 oldinsz = oldoutsz = 0;
6963 if (MAY_HAVE_DEBUG_INSNS)
6965 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6966 bool first = true, adjust = false;
6968 /* Calculate the IN set as the intersection of
6969 predecessor OUT sets. */
6971 dataflow_set_clear (in);
6972 dst_can_be_shared = true;
6974 FOR_EACH_EDGE (e, ei, bb->preds)
6975 if (!VTI (e->src)->flooded)
6976 gcc_assert (bb_order[bb->index]
6977 <= bb_order[e->src->index]);
6978 else if (first)
6980 dataflow_set_copy (in, &VTI (e->src)->out);
6981 first_out = &VTI (e->src)->out;
6982 first = false;
6984 else
6986 dataflow_set_merge (in, &VTI (e->src)->out);
6987 adjust = true;
6990 if (adjust)
6992 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6993 #if ENABLE_CHECKING
6994 /* Merge and merge_adjust should keep entries in
6995 canonical order. */
6996 shared_hash_htab (in->vars)
6997 .traverse <dataflow_set *,
6998 canonicalize_loc_order_check> (in);
6999 #endif
7000 if (dst_can_be_shared)
7002 shared_hash_destroy (in->vars);
7003 in->vars = shared_hash_copy (first_out->vars);
7007 VTI (bb)->flooded = true;
7009 else
7011 /* Calculate the IN set as the union of the predecessor OUT sets. */
7012 dataflow_set_clear (&VTI (bb)->in);
7013 FOR_EACH_EDGE (e, ei, bb->preds)
7014 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7017 changed = compute_bb_dataflow (bb);
7018 htabsz += shared_hash_htab (VTI (bb)->in.vars).size ()
7019 + shared_hash_htab (VTI (bb)->out.vars).size ();
7021 if (htabmax && htabsz > htabmax)
7023 if (MAY_HAVE_DEBUG_INSNS)
7024 inform (DECL_SOURCE_LOCATION (cfun->decl),
7025 "variable tracking size limit exceeded with "
7026 "-fvar-tracking-assignments, retrying without");
7027 else
7028 inform (DECL_SOURCE_LOCATION (cfun->decl),
7029 "variable tracking size limit exceeded");
7030 success = false;
7031 break;
7034 if (changed)
7036 FOR_EACH_EDGE (e, ei, bb->succs)
7038 if (e->dest == EXIT_BLOCK_PTR)
7039 continue;
7041 if (bitmap_bit_p (visited, e->dest->index))
7043 if (!bitmap_bit_p (in_pending, e->dest->index))
7045 /* Send E->DEST to next round. */
7046 bitmap_set_bit (in_pending, e->dest->index);
7047 fibheap_insert (pending,
7048 bb_order[e->dest->index],
7049 e->dest);
7052 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7054 /* Add E->DEST to current round. */
7055 bitmap_set_bit (in_worklist, e->dest->index);
7056 fibheap_insert (worklist, bb_order[e->dest->index],
7057 e->dest);
7062 if (dump_file)
7063 fprintf (dump_file,
7064 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7065 bb->index,
7066 (int)shared_hash_htab (VTI (bb)->in.vars).size (),
7067 oldinsz,
7068 (int)shared_hash_htab (VTI (bb)->out.vars).size (),
7069 oldoutsz,
7070 (int)worklist->nodes, (int)pending->nodes, htabsz);
7072 if (dump_file && (dump_flags & TDF_DETAILS))
7074 fprintf (dump_file, "BB %i IN:\n", bb->index);
7075 dump_dataflow_set (&VTI (bb)->in);
7076 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7077 dump_dataflow_set (&VTI (bb)->out);
7083 if (success && MAY_HAVE_DEBUG_INSNS)
7084 FOR_EACH_BB (bb)
7085 gcc_assert (VTI (bb)->flooded);
7087 free (bb_order);
7088 fibheap_delete (worklist);
7089 fibheap_delete (pending);
7090 sbitmap_free (visited);
7091 sbitmap_free (in_worklist);
7092 sbitmap_free (in_pending);
7094 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7095 return success;
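/* Editorial aside, not part of var-tracking.c: the iteration above is a
   worklist fixed-point computation; the two fibonacci heaps implement
   rounds ordered by reverse postorder.  A hedged sketch of the simpler
   FIFO variant of the same idea, with hypothetical transfer and
   successor functions standing in for compute_bb_dataflow and the CFG:

     #define NBLOCKS 8                 /* hypothetical CFG size  */

     extern int transfer (int bb);     /* hypothetical: true if OUT changed  */
     extern int n_succs (int bb);      /* hypothetical successor count  */
     extern int succ (int bb, int i);  /* hypothetical I-th successor  */

     static void
     iterate_to_fixpoint (void)
     {
       int queue[NBLOCKS], head = 0, tail = 0;
       char queued[NBLOCKS] = { 0 };
       int bb, i;

       for (bb = 0; bb < NBLOCKS; bb++)  /* seed with every block  */
         {
           queue[tail++ % NBLOCKS] = bb;
           queued[bb] = 1;
         }
       while (head != tail)
         {
           bb = queue[head++ % NBLOCKS];
           queued[bb] = 0;
           if (transfer (bb))          /* OUT changed: revisit successors  */
             for (i = 0; i < n_succs (bb); i++)
               if (!queued[succ (bb, i)])
                 {
                   queued[succ (bb, i)] = 1;
                   queue[tail++ % NBLOCKS] = succ (bb, i);
                 }
         }
     }

   Processing blocks in reverse postorder, as vt_find_locations does,
   typically converges in far fewer visits than this FIFO version.  */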
7098 /* Print the contents of LIST to dump file. */
7100 static void
7101 dump_attrs_list (attrs list)
7103 for (; list; list = list->next)
7105 if (dv_is_decl_p (list->dv))
7106 print_mem_expr (dump_file, dv_as_decl (list->dv));
7107 else
7108 print_rtl_single (dump_file, dv_as_value (list->dv));
7109 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7111 fprintf (dump_file, "\n");
7114 /* Print the information about variable *SLOT to dump file. */
7116 static int
7117 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7119 variable var = *slot;
7121 dump_var (var);
7123 /* Continue traversing the hash table. */
7124 return 1;
7127 /* Print the information about variable VAR to dump file. */
7129 static void
7130 dump_var (variable var)
7132 int i;
7133 location_chain node;
7135 if (dv_is_decl_p (var->dv))
7137 const_tree decl = dv_as_decl (var->dv);
7139 if (DECL_NAME (decl))
7141 fprintf (dump_file, " name: %s",
7142 IDENTIFIER_POINTER (DECL_NAME (decl)));
7143 if (dump_flags & TDF_UID)
7144 fprintf (dump_file, "D.%u", DECL_UID (decl));
7146 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7147 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7148 else
7149 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7150 fprintf (dump_file, "\n");
7152 else
7154 fputc (' ', dump_file);
7155 print_rtl_single (dump_file, dv_as_value (var->dv));
7158 for (i = 0; i < var->n_var_parts; i++)
7160 fprintf (dump_file, " offset %ld\n",
7161 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7162 for (node = var->var_part[i].loc_chain; node; node = node->next)
7164 fprintf (dump_file, " ");
7165 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7166 fprintf (dump_file, "[uninit]");
7167 print_rtl_single (dump_file, node->loc);
7172 /* Print the information about variables from hash table VARS to dump file. */
7174 static void
7175 dump_vars (variable_table_type vars)
7177 if (vars.elements () > 0)
7179 fprintf (dump_file, "Variables:\n");
7180 vars.traverse <void *, dump_var_tracking_slot> (NULL);
7184 /* Print the dataflow set SET to dump file. */
7186 static void
7187 dump_dataflow_set (dataflow_set *set)
7189 int i;
7191 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7192 set->stack_adjust);
7193 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7195 if (set->regs[i])
7197 fprintf (dump_file, "Reg %d:", i);
7198 dump_attrs_list (set->regs[i]);
7201 dump_vars (shared_hash_htab (set->vars));
7202 fprintf (dump_file, "\n");
7205 /* Print the IN and OUT sets for each basic block to dump file. */
7207 static void
7208 dump_dataflow_sets (void)
7210 basic_block bb;
7212 FOR_EACH_BB (bb)
7214 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7215 fprintf (dump_file, "IN:\n");
7216 dump_dataflow_set (&VTI (bb)->in);
7217 fprintf (dump_file, "OUT:\n");
7218 dump_dataflow_set (&VTI (bb)->out);
7222 /* Return the variable for DV in dropped_values, inserting one if
7223 requested with INSERT. */
7225 static inline variable
7226 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7228 variable_def **slot;
7229 variable empty_var;
7230 onepart_enum_t onepart;
7232 slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7234 if (!slot)
7235 return NULL;
7237 if (*slot)
7238 return *slot;
7240 gcc_checking_assert (insert == INSERT);
7242 onepart = dv_onepart_p (dv);
7244 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7246 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7247 empty_var->dv = dv;
7248 empty_var->refcount = 1;
7249 empty_var->n_var_parts = 0;
7250 empty_var->onepart = onepart;
7251 empty_var->in_changed_variables = false;
7252 empty_var->var_part[0].loc_chain = NULL;
7253 empty_var->var_part[0].cur_loc = NULL;
7254 VAR_LOC_1PAUX (empty_var) = NULL;
7255 set_dv_changed (dv, true);
7257 *slot = empty_var;
7259 return empty_var;
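/* Editorial aside, not part of var-tracking.c: variable_from_dropped
   above is a find-or-create lookup; find_slot_with_hash hands back the
   address of the slot, so a single probe serves both the hit and the
   miss.  A hedged C sketch of the idiom, with hypothetical helpers:

     struct entry { int key; };

     extern struct entry **table_slot (int key);  /* hypothetical probe  */
     extern struct entry *new_entry (int key);    /* hypothetical alloc  */

     static struct entry *
     find_or_create (int key)
     {
       struct entry **slot = table_slot (key);
       if (*slot)
         return *slot;             /* already present  */
       *slot = new_entry (key);    /* fill the empty slot in place  */
       return *slot;
     }

   Passing NO_INSERT above roughly corresponds to table_slot reporting
   a missing key instead of handing out an empty slot.  */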
7262 /* Recover the one-part aux from dropped_values. */
7264 static struct onepart_aux *
7265 recover_dropped_1paux (variable var)
7267 variable dvar;
7269 gcc_checking_assert (var->onepart);
7271 if (VAR_LOC_1PAUX (var))
7272 return VAR_LOC_1PAUX (var);
7274 if (var->onepart == ONEPART_VDECL)
7275 return NULL;
7277 dvar = variable_from_dropped (var->dv, NO_INSERT);
7279 if (!dvar)
7280 return NULL;
7282 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7283 VAR_LOC_1PAUX (dvar) = NULL;
7285 return VAR_LOC_1PAUX (var);
7288 /* Add variable VAR to the hash table of changed variables and,
7289 if it has no locations, delete it from SET's hash table. */
7291 static void
7292 variable_was_changed (variable var, dataflow_set *set)
7294 hashval_t hash = dv_htab_hash (var->dv);
7296 if (emit_notes)
7298 variable_def **slot;
7300 /* Remember this decl or VALUE has been added to changed_variables. */
7301 set_dv_changed (var->dv, true);
7303 slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT);
7305 if (*slot)
7307 variable old_var = *slot;
7308 gcc_assert (old_var->in_changed_variables);
7309 old_var->in_changed_variables = false;
7310 if (var != old_var && var->onepart)
7312 /* Restore the auxiliary info from an empty variable
7313 previously created for changed_variables, so it is
7314 not lost. */
7315 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7316 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7317 VAR_LOC_1PAUX (old_var) = NULL;
7319 variable_htab_free (*slot);
7322 if (set && var->n_var_parts == 0)
7324 onepart_enum_t onepart = var->onepart;
7325 variable empty_var = NULL;
7326 variable_def **dslot = NULL;
7328 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7330 dslot = dropped_values.find_slot_with_hash (var->dv,
7331 dv_htab_hash (var->dv),
7332 INSERT);
7333 empty_var = *dslot;
7335 if (empty_var)
7337 gcc_checking_assert (!empty_var->in_changed_variables);
7338 if (!VAR_LOC_1PAUX (var))
7340 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7341 VAR_LOC_1PAUX (empty_var) = NULL;
7343 else
7344 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7348 if (!empty_var)
7350 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7351 empty_var->dv = var->dv;
7352 empty_var->refcount = 1;
7353 empty_var->n_var_parts = 0;
7354 empty_var->onepart = onepart;
7355 if (dslot)
7357 empty_var->refcount++;
7358 *dslot = empty_var;
7361 else
7362 empty_var->refcount++;
7363 empty_var->in_changed_variables = true;
7364 *slot = empty_var;
7365 if (onepart)
7367 empty_var->var_part[0].loc_chain = NULL;
7368 empty_var->var_part[0].cur_loc = NULL;
7369 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7370 VAR_LOC_1PAUX (var) = NULL;
7372 goto drop_var;
7374 else
7376 if (var->onepart && !VAR_LOC_1PAUX (var))
7377 recover_dropped_1paux (var);
7378 var->refcount++;
7379 var->in_changed_variables = true;
7380 *slot = var;
7383 else
7385 gcc_assert (set);
7386 if (var->n_var_parts == 0)
7388 variable_def **slot;
7390 drop_var:
7391 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7392 if (slot)
7394 if (shared_hash_shared (set->vars))
7395 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7396 NO_INSERT);
7397 shared_hash_htab (set->vars).clear_slot (slot);
7403 /* Look for the index in VAR->var_part corresponding to OFFSET.
7404 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7405 referenced int will be set to the index that the part has or should
7406 have, if it should be inserted. */
7408 static inline int
7409 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7410 int *insertion_point)
7412 int pos, low, high;
7414 if (var->onepart)
7416 if (offset != 0)
7417 return -1;
7419 if (insertion_point)
7420 *insertion_point = 0;
7422 return var->n_var_parts - 1;
7425 /* Find the location part. */
7426 low = 0;
7427 high = var->n_var_parts;
7428 while (low != high)
7430 pos = (low + high) / 2;
7431 if (VAR_PART_OFFSET (var, pos) < offset)
7432 low = pos + 1;
7433 else
7434 high = pos;
7436 pos = low;
7438 if (insertion_point)
7439 *insertion_point = pos;
7441 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7442 return pos;
7444 return -1;
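/* Editorial aside, not part of var-tracking.c: the loop above is a
   "lower bound" binary search; it converges on the first index whose
   offset is not less than the one sought, which doubles as the
   insertion point.  A self-contained C sketch of the same invariant
   over a sorted int array:

     static int
     lower_bound (const int *a, int n, int key)
     {
       int lo = 0, hi = n;
       while (lo != hi)
         {
           int mid = (lo + hi) / 2;
           if (a[mid] < key)
             lo = mid + 1;      /* KEY is strictly to the right  */
           else
             hi = mid;          /* a[mid] may still be the answer  */
         }
       return lo;               /* == N when every element is < KEY  */
     }

   A caller then checks a[lo] == key, just as the code above compares
   VAR_PART_OFFSET (var, pos) with OFFSET afterwards.  */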
7447 static variable_def **
7448 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7449 decl_or_value dv, HOST_WIDE_INT offset,
7450 enum var_init_status initialized, rtx set_src)
7452 int pos;
7453 location_chain node, next;
7454 location_chain *nextp;
7455 variable var;
7456 onepart_enum_t onepart;
7458 var = *slot;
7460 if (var)
7461 onepart = var->onepart;
7462 else
7463 onepart = dv_onepart_p (dv);
7465 gcc_checking_assert (offset == 0 || !onepart);
7466 gcc_checking_assert (loc != dv_as_opaque (dv));
7468 if (! flag_var_tracking_uninit)
7469 initialized = VAR_INIT_STATUS_INITIALIZED;
7471 if (!var)
7473 /* Create new variable information. */
7474 var = (variable) pool_alloc (onepart_pool (onepart));
7475 var->dv = dv;
7476 var->refcount = 1;
7477 var->n_var_parts = 1;
7478 var->onepart = onepart;
7479 var->in_changed_variables = false;
7480 if (var->onepart)
7481 VAR_LOC_1PAUX (var) = NULL;
7482 else
7483 VAR_PART_OFFSET (var, 0) = offset;
7484 var->var_part[0].loc_chain = NULL;
7485 var->var_part[0].cur_loc = NULL;
7486 *slot = var;
7487 pos = 0;
7488 nextp = &var->var_part[0].loc_chain;
7490 else if (onepart)
7492 int r = -1, c = 0;
7494 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7496 pos = 0;
7498 if (GET_CODE (loc) == VALUE)
7500 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7501 nextp = &node->next)
7502 if (GET_CODE (node->loc) == VALUE)
7504 if (node->loc == loc)
7506 r = 0;
7507 break;
7509 if (canon_value_cmp (node->loc, loc))
7510 c++;
7511 else
7513 r = 1;
7514 break;
7517 else if (REG_P (node->loc) || MEM_P (node->loc))
7518 c++;
7519 else
7521 r = 1;
7522 break;
7525 else if (REG_P (loc))
7527 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7528 nextp = &node->next)
7529 if (REG_P (node->loc))
7531 if (REGNO (node->loc) < REGNO (loc))
7532 c++;
7533 else
7535 if (REGNO (node->loc) == REGNO (loc))
7536 r = 0;
7537 else
7538 r = 1;
7539 break;
7542 else
7544 r = 1;
7545 break;
7548 else if (MEM_P (loc))
7550 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7551 nextp = &node->next)
7552 if (REG_P (node->loc))
7553 c++;
7554 else if (MEM_P (node->loc))
7556 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7557 break;
7558 else
7559 c++;
7561 else
7563 r = 1;
7564 break;
7567 else
7568 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7569 nextp = &node->next)
7570 if ((r = loc_cmp (node->loc, loc)) >= 0)
7571 break;
7572 else
7573 c++;
7575 if (r == 0)
7576 return slot;
7578 if (shared_var_p (var, set->vars))
7580 slot = unshare_variable (set, slot, var, initialized);
7581 var = *slot;
7582 for (nextp = &var->var_part[0].loc_chain; c;
7583 nextp = &(*nextp)->next)
7584 c--;
7585 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7588 else
7590 int inspos = 0;
7592 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7594 pos = find_variable_location_part (var, offset, &inspos);
7596 if (pos >= 0)
7598 node = var->var_part[pos].loc_chain;
7600 if (node
7601 && ((REG_P (node->loc) && REG_P (loc)
7602 && REGNO (node->loc) == REGNO (loc))
7603 || rtx_equal_p (node->loc, loc)))
7605 /* LOC is at the beginning of the chain, so we have nothing
7606 to do. */
7607 if (node->init < initialized)
7608 node->init = initialized;
7609 if (set_src != NULL)
7610 node->set_src = set_src;
7612 return slot;
7614 else
7616 /* We have to make a copy of a shared variable. */
7617 if (shared_var_p (var, set->vars))
7619 slot = unshare_variable (set, slot, var, initialized);
7620 var = *slot;
7624 else
7626 /* We have not found the location part, so a new one will be created. */
7628 /* We have to make a copy of the shared variable. */
7629 if (shared_var_p (var, set->vars))
7631 slot = unshare_variable (set, slot, var, initialized);
7632 var = *slot;
7635 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7636 thus there are at most MAX_VAR_PARTS different offsets. */
7637 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7638 && (!var->n_var_parts || !onepart));
7640 /* We have to move the elements of the array starting at index
7641 inspos to the next position. */
7642 for (pos = var->n_var_parts; pos > inspos; pos--)
7643 var->var_part[pos] = var->var_part[pos - 1];
7645 var->n_var_parts++;
7646 gcc_checking_assert (!onepart);
7647 VAR_PART_OFFSET (var, pos) = offset;
7648 var->var_part[pos].loc_chain = NULL;
7649 var->var_part[pos].cur_loc = NULL;
7652 /* Delete the location from the list. */
7653 nextp = &var->var_part[pos].loc_chain;
7654 for (node = var->var_part[pos].loc_chain; node; node = next)
7656 next = node->next;
7657 if ((REG_P (node->loc) && REG_P (loc)
7658 && REGNO (node->loc) == REGNO (loc))
7659 || rtx_equal_p (node->loc, loc))
7661 /* Save these values, to assign to the new node, before
7662 deleting this one. */
7663 if (node->init > initialized)
7664 initialized = node->init;
7665 if (node->set_src != NULL && set_src == NULL)
7666 set_src = node->set_src;
7667 if (var->var_part[pos].cur_loc == node->loc)
7668 var->var_part[pos].cur_loc = NULL;
7669 pool_free (loc_chain_pool, node);
7670 *nextp = next;
7671 break;
7673 else
7674 nextp = &node->next;
7677 nextp = &var->var_part[pos].loc_chain;
7680 /* Add the location to the beginning. */
7681 node = (location_chain) pool_alloc (loc_chain_pool);
7682 node->loc = loc;
7683 node->init = initialized;
7684 node->set_src = set_src;
7685 node->next = *nextp;
7686 *nextp = node;
7688 /* If no location was emitted for this variable part yet, mark it changed so one will be. */
7689 if (var->var_part[pos].cur_loc == NULL)
7690 variable_was_changed (var, set);
7692 return slot;
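/* Editorial aside, not part of var-tracking.c: the chain walks in
   set_slot_part use the pointer-to-pointer idiom, keeping the address
   of the link to rewrite so that insertion needs no special case for
   the list head.  A hedged self-contained C sketch of a sorted insert
   in that style:

     struct lnode { int key; struct lnode *next; };

     static void
     insert_sorted (struct lnode **headp, struct lnode *n)
     {
       struct lnode **nextp = headp;
       while (*nextp && (*nextp)->key < n->key)
         nextp = &(*nextp)->next;  /* advance the link to rewrite  */
       n->next = *nextp;           /* splice in front of *NEXTP  */
       *nextp = n;                 /* works for head and interior alike  */
     }
*/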
7695 /* Set the part of variable's location in the dataflow set SET. The
7696 variable part is specified by variable's declaration in DV and
7697 offset OFFSET and the part's location by LOC. IOPT should be
7698 NO_INSERT if the variable is known to be in SET already and the
7699 variable hash table must not be resized, and INSERT otherwise. */
7701 static void
7702 set_variable_part (dataflow_set *set, rtx loc,
7703 decl_or_value dv, HOST_WIDE_INT offset,
7704 enum var_init_status initialized, rtx set_src,
7705 enum insert_option iopt)
7707 variable_def **slot;
7709 if (iopt == NO_INSERT)
7710 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7711 else
7713 slot = shared_hash_find_slot (set->vars, dv);
7714 if (!slot)
7715 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7717 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7720 /* Remove all recorded register locations for the given variable part
7721 from dataflow set SET, except for those that are identical to LOC.
7722 The variable part is specified by its SET->vars slot SLOT and
7723 offset OFFSET. */
7725 static variable_def **
7726 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7727 HOST_WIDE_INT offset, rtx set_src)
7729 variable var = *slot;
7730 int pos = find_variable_location_part (var, offset, NULL);
7732 if (pos >= 0)
7734 location_chain node, next;
7736 /* Remove the register locations from the dataflow set. */
7737 next = var->var_part[pos].loc_chain;
7738 for (node = next; node; node = next)
7740 next = node->next;
7741 if (node->loc != loc
7742 && (!flag_var_tracking_uninit
7743 || !set_src
7744 || MEM_P (set_src)
7745 || !rtx_equal_p (set_src, node->set_src)))
7747 if (REG_P (node->loc))
7749 attrs anode, anext;
7750 attrs *anextp;
7752 /* Remove the variable part from the register's
7753 list, but preserve any other variable parts
7754 that might be regarded as live in that same
7755 register. */
7756 anextp = &set->regs[REGNO (node->loc)];
7757 for (anode = *anextp; anode; anode = anext)
7759 anext = anode->next;
7760 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7761 && anode->offset == offset)
7763 pool_free (attrs_pool, anode);
7764 *anextp = anext;
7766 else
7767 anextp = &anode->next;
7771 slot = delete_slot_part (set, node->loc, slot, offset);
7776 return slot;
7779 /* Remove all recorded register locations for the given variable part
7780 from dataflow set SET, except for those that are identical to LOC.
7781 The variable part is specified by the variable's declaration or
7782 value DV and offset OFFSET. */
7784 static void
7785 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7786 HOST_WIDE_INT offset, rtx set_src)
7788 variable_def **slot;
7790 if (!dv_as_opaque (dv)
7791 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7792 return;
7794 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7795 if (!slot)
7796 return;
7798 clobber_slot_part (set, loc, slot, offset, set_src);
7801 /* Delete the part of variable's location from dataflow set SET. The
7802 variable part is specified by its SET->vars slot SLOT and offset
7803 OFFSET and the part's location by LOC. */
7805 static variable_def **
7806 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7807 HOST_WIDE_INT offset)
7809 variable var = *slot;
7810 int pos = find_variable_location_part (var, offset, NULL);
7812 if (pos >= 0)
7814 location_chain node, next;
7815 location_chain *nextp;
7816 bool changed;
7817 rtx cur_loc;
7819 if (shared_var_p (var, set->vars))
7821 /* If the variable actually contains the location we are about to
7822 delete, we have to make a copy of the variable first. */
7823 for (node = var->var_part[pos].loc_chain; node;
7824 node = node->next)
7826 if ((REG_P (node->loc) && REG_P (loc)
7827 && REGNO (node->loc) == REGNO (loc))
7828 || rtx_equal_p (node->loc, loc))
7830 slot = unshare_variable (set, slot, var,
7831 VAR_INIT_STATUS_UNKNOWN);
7832 var = *slot;
7833 break;
7838 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7839 cur_loc = VAR_LOC_FROM (var);
7840 else
7841 cur_loc = var->var_part[pos].cur_loc;
7843 /* Delete the location part. */
7844 changed = false;
7845 nextp = &var->var_part[pos].loc_chain;
7846 for (node = *nextp; node; node = next)
7848 next = node->next;
7849 if ((REG_P (node->loc) && REG_P (loc)
7850 && REGNO (node->loc) == REGNO (loc))
7851 || rtx_equal_p (node->loc, loc))
7853 /* If we have deleted the location which was last emitted
7854 we have to emit a new location, so add the variable to the
7855 set of changed variables. */
7856 if (cur_loc == node->loc)
7858 changed = true;
7859 var->var_part[pos].cur_loc = NULL;
7860 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7861 VAR_LOC_FROM (var) = NULL;
7863 pool_free (loc_chain_pool, node);
7864 *nextp = next;
7865 break;
7867 else
7868 nextp = &node->next;
7871 if (var->var_part[pos].loc_chain == NULL)
7873 changed = true;
7874 var->n_var_parts--;
7875 while (pos < var->n_var_parts)
7877 var->var_part[pos] = var->var_part[pos + 1];
7878 pos++;
7881 if (changed)
7882 variable_was_changed (var, set);
7885 return slot;
7888 /* Delete the part of variable's location from dataflow set SET. The
7889 variable part is specified by variable's declaration or value DV
7890 and offset OFFSET and the part's location by LOC. */
7892 static void
7893 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7894 HOST_WIDE_INT offset)
7896 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7897 if (!slot)
7898 return;
7900 delete_slot_part (set, loc, slot, offset);
7904 /* Structure for passing some other parameters to function
7905 vt_expand_loc_callback. */
7906 struct expand_loc_callback_data
7908 /* The variables and values active at this point. */
7909 variable_table_type vars;
7911 /* Stack of values and debug_exprs under expansion, and their
7912 children. */
7913 stack_vec<rtx, 4> expanding;
7915 /* Stack of values and debug_exprs whose expansion hit recursion
7916 cycles. They will have VALUE_RECURSED_INTO marked when added to
7917 this list. This flag will be cleared if any of its dependencies
7918 resolves to a valid location. So, if the flag remains set at the
7919 end of the search, we know no valid location for this one can
7920 possibly exist. */
7921 stack_vec<rtx, 4> pending;
7923 /* The maximum depth among the sub-expressions under expansion.
7924 Zero indicates no expansion so far. */
7925 expand_depth depth;
7928 /* Allocate the one-part auxiliary data structure for VAR, with enough
7929 room for COUNT dependencies. */
7931 static void
7932 loc_exp_dep_alloc (variable var, int count)
7934 size_t allocsize;
7936 gcc_checking_assert (var->onepart);
7938 /* We can be called with COUNT == 0 to allocate the data structure
7939 without any dependencies, e.g. for the backlinks only. However,
7940 if we are specifying a COUNT, then the dependency list must have
7941 been emptied before. It would be possible to adjust pointers or
7942 force it empty here, but this is better done at an earlier point
7943 in the algorithm, so we instead leave an assertion to catch
7944 errors. */
7945 gcc_checking_assert (!count
7946 || VAR_LOC_DEP_VEC (var) == NULL
7947 || VAR_LOC_DEP_VEC (var)->is_empty ());
7949 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
7950 return;
7952 allocsize = offsetof (struct onepart_aux, deps)
7953 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
7955 if (VAR_LOC_1PAUX (var))
7957 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7958 VAR_LOC_1PAUX (var), allocsize);
7959 /* If the reallocation moves the onepaux structure, the
7960 back-pointer to BACKLINKS in the first list member will still
7961 point to its old location. Adjust it. */
7962 if (VAR_LOC_DEP_LST (var))
7963 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7965 else
7967 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7968 *VAR_LOC_DEP_LSTP (var) = NULL;
7969 VAR_LOC_FROM (var) = NULL;
7970 VAR_LOC_DEPTH (var).complexity = 0;
7971 VAR_LOC_DEPTH (var).entryvals = 0;
7973 VAR_LOC_DEP_VEC (var)->embedded_init (count);
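/* Editorial aside, not part of var-tracking.c: loc_exp_dep_alloc sizes
   the allocation with offsetof so that the onepart_aux header and the
   embedded dependency vector share a single block.  A self-contained C
   sketch of the same layout trick with a C99 flexible array member:

     #include <stdlib.h>
     #include <stddef.h>

     struct vecbuf
     {
       int len;
       int data[];                      /* elements live in-line  */
     };

     static struct vecbuf *
     vecbuf_alloc (int count)
     {
       size_t sz = offsetof (struct vecbuf, data)
                   + count * sizeof (int);
       struct vecbuf *v = (struct vecbuf *) malloc (sz);
       if (v)
         v->len = count;                /* header and payload together  */
       return v;
     }

   One allocation instead of two saves a pointer chase, at the price of
   the back-pointer fixup above whenever the block is reallocated.  */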
7976 /* Remove all entries from the vector of active dependencies of VAR,
7977 removing them from the back-links lists too. */
7979 static void
7980 loc_exp_dep_clear (variable var)
7982 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
7984 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
7985 if (led->next)
7986 led->next->pprev = led->pprev;
7987 if (led->pprev)
7988 *led->pprev = led->next;
7989 VAR_LOC_DEP_VEC (var)->pop ();
7993 /* Insert an active dependency from VAR on X to the vector of
7994 dependencies, and add the corresponding back-link to X's list of
7995 back-links in VARS. */
7997 static void
7998 loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)
8000 decl_or_value dv;
8001 variable xvar;
8002 loc_exp_dep *led;
8004 dv = dv_from_rtx (x);
8006 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8007 an additional look up? */
8008 xvar = vars.find_with_hash (dv, dv_htab_hash (dv));
8010 if (!xvar)
8012 xvar = variable_from_dropped (dv, NO_INSERT);
8013 gcc_checking_assert (xvar);
8016 /* No point in adding the same backlink more than once. This may
8017 arise if, say, the same value appears in two complex expressions in
8018 the same loc_list, or even more than once in a single
8019 expression. */
8020 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8021 return;
8023 if (var->onepart == NOT_ONEPART)
8024 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8025 else
8027 loc_exp_dep empty;
8028 memset (&empty, 0, sizeof (empty));
8029 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8030 led = &VAR_LOC_DEP_VEC (var)->last ();
8032 led->dv = var->dv;
8033 led->value = x;
8035 loc_exp_dep_alloc (xvar, 0);
8036 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8037 led->next = *led->pprev;
8038 if (led->next)
8039 led->next->pprev = &led->next;
8040 *led->pprev = led;
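/* Editorial aside, not part of var-tracking.c: the backlink lists here
   store, instead of a "prev" pointer, the address of the pointer that
   points at the node (PPREV), so unlinking needs no head special case.
   A hedged self-contained C sketch of push and unlink in this style:

     struct link { struct link *next, **pprev; };

     static void
     push (struct link **headp, struct link *n)
     {
       n->pprev = headp;            /* remember who points at us  */
       n->next = *headp;
       if (n->next)
         n->next->pprev = &n->next;
       *headp = n;
     }

     static void
     unlink_node (struct link *n)
     {
       if (n->next)
         n->next->pprev = n->pprev;
       *n->pprev = n->next;         /* head and interior nodes alike  */
     }

   loc_exp_dep_clear above is unlink_node applied to every vector
   element in turn.  */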
8043 /* Create active dependencies of VAR on COUNT values starting at
8044 VALUE, and corresponding back-links to the entries in VARS. Return
8045 true if we found any pending-recursion results. */
8047 static bool
8048 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8049 variable_table_type vars)
8051 bool pending_recursion = false;
8053 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8054 || VAR_LOC_DEP_VEC (var)->is_empty ());
8056 /* Allocate room for the dependencies, then record one for each of
8057 the COUNT values starting at VALUE. */
8058 loc_exp_dep_alloc (var, count);
8060 while (count--)
8062 rtx x = *value++;
8064 if (!pending_recursion)
8065 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8067 loc_exp_insert_dep (var, x, vars);
8070 return pending_recursion;
8073 /* Notify the back-links of IVAR that are pending recursion that we
8074 have found a non-NIL value for it, so they are cleared for another
8075 attempt to compute a current location. */
8077 static void
8078 notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
8080 loc_exp_dep *led, *next;
8082 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8084 decl_or_value dv = led->dv;
8085 variable var;
8087 next = led->next;
8089 if (dv_is_value_p (dv))
8091 rtx value = dv_as_value (dv);
8093 /* If we have already resolved it, leave it alone. */
8094 if (!VALUE_RECURSED_INTO (value))
8095 continue;
8097 /* Check that VALUE_RECURSED_INTO, true from the test above,
8098 implies NO_LOC_P. */
8099 gcc_checking_assert (NO_LOC_P (value));
8101 /* We won't notify variables that are being expanded,
8102 because their dependency list is cleared before
8103 recursing. */
8104 NO_LOC_P (value) = false;
8105 VALUE_RECURSED_INTO (value) = false;
8107 gcc_checking_assert (dv_changed_p (dv));
8109 else
8111 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8112 if (!dv_changed_p (dv))
8113 continue;
8116 var = vars.find_with_hash (dv, dv_htab_hash (dv));
8118 if (!var)
8119 var = variable_from_dropped (dv, NO_INSERT);
8121 if (var)
8122 notify_dependents_of_resolved_value (var, vars);
8124 if (next)
8125 next->pprev = led->pprev;
8126 if (led->pprev)
8127 *led->pprev = next;
8128 led->next = NULL;
8129 led->pprev = NULL;
8133 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8134 int max_depth, void *data);
8136 /* Return the combined depth, when one sub-expression evaluated to
8137 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8139 static inline expand_depth
8140 update_depth (expand_depth saved_depth, expand_depth best_depth)
8142 /* If we didn't find anything, stick with what we had. */
8143 if (!best_depth.complexity)
8144 return saved_depth;
8146 /* If we hadn't found anything before, use the depth of the current
8147 expression. Do NOT add one extra level: we want to compute the
8148 maximum depth among sub-expressions. We'll increment it later,
8149 if appropriate. */
8150 if (!saved_depth.complexity)
8151 return best_depth;
8153 /* Combine the entryval count so that regardless of which one we
8154 return, the entryval count is accurate. */
8155 best_depth.entryvals = saved_depth.entryvals
8156 = best_depth.entryvals + saved_depth.entryvals;
8158 if (saved_depth.complexity < best_depth.complexity)
8159 return best_depth;
8160 else
8161 return saved_depth;
8164 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8165 DATA for cselib expand callback. If PENDRECP is given, indicate in
8166 it whether any sub-expression couldn't be fully evaluated because
8167 it is pending recursion resolution. */
8169 static inline rtx
8170 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8172 struct expand_loc_callback_data *elcd
8173 = (struct expand_loc_callback_data *) data;
8174 location_chain loc, next;
8175 rtx result = NULL;
8176 int first_child, result_first_child, last_child;
8177 bool pending_recursion;
8178 rtx loc_from = NULL;
8179 struct elt_loc_list *cloc = NULL;
8180 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8181 int wanted_entryvals, found_entryvals = 0;
8183 /* Clear all backlinks pointing at this, so that we're not notified
8184 while we're active. */
8185 loc_exp_dep_clear (var);
8187 retry:
8188 if (var->onepart == ONEPART_VALUE)
8190 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8192 gcc_checking_assert (cselib_preserved_value_p (val));
8194 cloc = val->locs;
8197 first_child = result_first_child = last_child
8198 = elcd->expanding.length ();
8200 wanted_entryvals = found_entryvals;
8202 /* Attempt to expand each available location in turn. */
8203 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8204 loc || cloc; loc = next)
8206 result_first_child = last_child;
8208 if (!loc)
8210 loc_from = cloc->loc;
8211 next = loc;
8212 cloc = cloc->next;
8213 if (unsuitable_loc (loc_from))
8214 continue;
8216 else
8218 loc_from = loc->loc;
8219 next = loc->next;
8222 gcc_checking_assert (!unsuitable_loc (loc_from));
8224 elcd->depth.complexity = elcd->depth.entryvals = 0;
8225 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8226 vt_expand_loc_callback, data);
8227 last_child = elcd->expanding.length ();
8229 if (result)
8231 depth = elcd->depth;
8233 gcc_checking_assert (depth.complexity
8234 || result_first_child == last_child);
8236 if (last_child - result_first_child != 1)
8238 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8239 depth.entryvals++;
8240 depth.complexity++;
8243 if (depth.complexity <= EXPR_USE_DEPTH)
8245 if (depth.entryvals <= wanted_entryvals)
8246 break;
8247 else if (!found_entryvals || depth.entryvals < found_entryvals)
8248 found_entryvals = depth.entryvals;
8251 result = NULL;
8254 /* Set it up in case we leave the loop. */
8255 depth.complexity = depth.entryvals = 0;
8256 loc_from = NULL;
8257 result_first_child = first_child;
8260 if (!loc_from && wanted_entryvals < found_entryvals)
8262 /* We found entries with ENTRY_VALUEs and skipped them. Since
8263 we could not find any expansions without ENTRY_VALUEs, but we
8264 found at least one with them, go back and get an entry with
8265 the minimum ENTRY_VALUE count that we found. We could
8266 avoid looping, but since each sub-loc is already resolved,
8267 the re-expansion should be trivial. ??? Should we record all
8268 attempted locs as dependencies, so that we retry the
8269 expansion should any of them change, in the hope it can give
8270 us a new entry without an ENTRY_VALUE? */
8271 elcd->expanding.truncate (first_child);
8272 goto retry;
8275 /* Register all encountered dependencies as active. */
8276 pending_recursion = loc_exp_dep_set
8277 (var, result, elcd->expanding.address () + result_first_child,
8278 last_child - result_first_child, elcd->vars);
8280 elcd->expanding.truncate (first_child);
8282 /* Record where the expansion came from. */
8283 gcc_checking_assert (!result || !pending_recursion);
8284 VAR_LOC_FROM (var) = loc_from;
8285 VAR_LOC_DEPTH (var) = depth;
8287 gcc_checking_assert (!depth.complexity == !result);
8289 elcd->depth = update_depth (saved_depth, depth);
8291 /* Indicate whether any of the dependencies are pending recursion
8292 resolution. */
8293 if (pendrecp)
8294 *pendrecp = pending_recursion;
8296 if (!pendrecp || !pending_recursion)
8297 var->var_part[0].cur_loc = result;
8299 return result;
8302 /* Callback for cselib_expand_value, that looks for expressions
8303 holding the value in the var-tracking hash tables. Return X for
8304 standard processing, anything else is to be used as-is. */
8306 static rtx
8307 vt_expand_loc_callback (rtx x, bitmap regs,
8308 int max_depth ATTRIBUTE_UNUSED,
8309 void *data)
8311 struct expand_loc_callback_data *elcd
8312 = (struct expand_loc_callback_data *) data;
8313 decl_or_value dv;
8314 variable var;
8315 rtx result, subreg;
8316 bool pending_recursion = false;
8317 bool from_empty = false;
8319 switch (GET_CODE (x))
8321 case SUBREG:
8322 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8323 EXPR_DEPTH,
8324 vt_expand_loc_callback, data);
8326 if (!subreg)
8327 return NULL;
8329 result = simplify_gen_subreg (GET_MODE (x), subreg,
8330 GET_MODE (SUBREG_REG (x)),
8331 SUBREG_BYTE (x));
8333 /* Invalid SUBREGs are ok in debug info. ??? We could try
8334 alternate expansions for the VALUE as well. */
8335 if (!result)
8336 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8338 return result;
8340 case DEBUG_EXPR:
8341 case VALUE:
8342 dv = dv_from_rtx (x);
8343 break;
8345 default:
8346 return x;
8349 elcd->expanding.safe_push (x);
8351 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8352 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8354 if (NO_LOC_P (x))
8356 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8357 return NULL;
8360 var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv));
8362 if (!var)
8364 from_empty = true;
8365 var = variable_from_dropped (dv, INSERT);
8368 gcc_checking_assert (var);
8370 if (!dv_changed_p (dv))
8372 gcc_checking_assert (!NO_LOC_P (x));
8373 gcc_checking_assert (var->var_part[0].cur_loc);
8374 gcc_checking_assert (VAR_LOC_1PAUX (var));
8375 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8377 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8379 return var->var_part[0].cur_loc;
8382 VALUE_RECURSED_INTO (x) = true;
8383 /* This is tentative, but it makes some tests simpler. */
8384 NO_LOC_P (x) = true;
8386 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8388 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8390 if (pending_recursion)
8392 gcc_checking_assert (!result);
8393 elcd->pending.safe_push (x);
8395 else
8397 NO_LOC_P (x) = !result;
8398 VALUE_RECURSED_INTO (x) = false;
8399 set_dv_changed (dv, false);
8401 if (result)
8402 notify_dependents_of_resolved_value (var, elcd->vars);
8405 return result;
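/* Editorial aside, not part of var-tracking.c: the VALUE_RECURSED_INTO
   marking above is the classic "gray node" trick for detecting cycles
   during a depth-first expansion.  A hedged self-contained C sketch of
   the same scheme on a small adjacency-matrix graph:

     enum color { WHITE, GRAY, BLACK };  /* unvisited, in progress, done  */

     static int
     dfs_has_cycle (int u, int n, char adj[][8], enum color *col)
     {
       int v;
       col[u] = GRAY;                    /* U is on the expansion stack  */
       for (v = 0; v < n; v++)
         if (adj[u][v])
           {
             if (col[v] == GRAY)         /* back edge: a recursion cycle  */
               return 1;
             if (col[v] == WHITE && dfs_has_cycle (v, n, adj, col))
               return 1;
           }
       col[u] = BLACK;                   /* fully expanded  */
       return 0;
     }

   Unlike this sketch, the expansion above does not give up on a cycle:
   it parks the node on the PENDING stack and retries if some
   dependency later resolves to a location.  */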
8408 /* While expanding variables, we may encounter recursion cycles
8409 because of mutual (possibly indirect) dependencies between two
8410 particular variables (or values), say A and B. If we're trying to
8411 expand A when we get to B, which in turn attempts to expand A, if
8412 we can't find any other expansion for B, we'll add B to this
8413 pending-recursion stack, and tentatively return NULL for its
8414 location. This tentative value will be used for any other
8415 occurrences of B, unless A gets some other location, in which case
8416 it will notify B that it is worth another try at computing a
8417 location for it, and it will use the location computed for A then.
8418 At the end of the expansion, the tentative NULL locations become
8419 final for all members of PENDING that didn't get a notification.
8420 This function performs this finalization of NULL locations. */
8422 static void
8423 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8425 while (!pending->is_empty ())
8427 rtx x = pending->pop ();
8428 decl_or_value dv;
8430 if (!VALUE_RECURSED_INTO (x))
8431 continue;
8433 gcc_checking_assert (NO_LOC_P (x));
8434 VALUE_RECURSED_INTO (x) = false;
8435 dv = dv_from_rtx (x);
8436 gcc_checking_assert (dv_changed_p (dv));
8437 set_dv_changed (dv, false);
8441 /* Initialize expand_loc_callback_data D with variable hash table V.
8442 It must be a macro because of alloca (vec stack). */
8443 #define INIT_ELCD(d, v) \
8444 do \
8446 (d).vars = (v); \
8447 (d).depth.complexity = (d).depth.entryvals = 0; \
8449 while (0)
8450 /* Finalize expand_loc_callback_data D, resolved to location L. */
8451 #define FINI_ELCD(d, l) \
8452 do \
8454 resolve_expansions_pending_recursion (&(d).pending); \
8455 (d).pending.release (); \
8456 (d).expanding.release (); \
8458 if ((l) && MEM_P (l)) \
8459 (l) = targetm.delegitimize_address (l); \
8461 while (0)
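/* Editorial aside, not part of var-tracking.c: INIT_ELCD and FINI_ELCD
   use the do { ... } while (0) wrapper, which makes a multi-statement
   macro behave as a single statement so it composes safely with
   if/else.  A minimal example of the hazard the idiom avoids:

     #define SWAP_INT(a, b) \
       do                   \
         {                  \
           int tmp_ = (a);  \
           (a) = (b);       \
           (b) = tmp_;      \
         }                  \
       while (0)

   Without the wrapper, a use such as

     if (x < y) SWAP_INT (x, y); else x = 0;

   would either fail to parse or bind the else to the wrong branch.  */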
8463 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8464 equivalences in VARS, updating their CUR_LOCs in the process. */
8466 static rtx
8467 vt_expand_loc (rtx loc, variable_table_type vars)
8469 struct expand_loc_callback_data data;
8470 rtx result;
8472 if (!MAY_HAVE_DEBUG_INSNS)
8473 return loc;
8475 INIT_ELCD (data, vars);
8477 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8478 vt_expand_loc_callback, &data);
8480 FINI_ELCD (data, result);
8482 return result;
8485 /* Expand the one-part VARiable to a location, using the equivalences
8486 in VARS, updating their CUR_LOCs in the process. */
8488 static rtx
8489 vt_expand_1pvar (variable var, variable_table_type vars)
8491 struct expand_loc_callback_data data;
8492 rtx loc;
8494 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8496 if (!dv_changed_p (var->dv))
8497 return var->var_part[0].cur_loc;
8499 INIT_ELCD (data, vars);
8501 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8503 gcc_checking_assert (data.expanding.is_empty ());
8505 FINI_ELCD (data, loc);
8507 return loc;
8510 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8511 additional parameters: WHERE specifies whether the note shall be emitted
8512 before or after instruction INSN. */
8514 static int
8515 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8517 variable var = *varp;
8518 rtx insn = data->insn;
8519 enum emit_note_where where = data->where;
8520 variable_table_type vars = data->vars;
8521 rtx note, note_vl;
8522 int i, j, n_var_parts;
8523 bool complete;
8524 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8525 HOST_WIDE_INT last_limit;
8526 tree type_size_unit;
8527 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8528 rtx loc[MAX_VAR_PARTS];
8529 tree decl;
8530 location_chain lc;
8532 gcc_checking_assert (var->onepart == NOT_ONEPART
8533 || var->onepart == ONEPART_VDECL);
8535 decl = dv_as_decl (var->dv);
8537 complete = true;
8538 last_limit = 0;
8539 n_var_parts = 0;
8540 if (!var->onepart)
8541 for (i = 0; i < var->n_var_parts; i++)
8542 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8543 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8544 for (i = 0; i < var->n_var_parts; i++)
8546 enum machine_mode mode, wider_mode;
8547 rtx loc2;
8548 HOST_WIDE_INT offset;
8550 if (i == 0 && var->onepart)
8552 gcc_checking_assert (var->n_var_parts == 1);
8553 offset = 0;
8554 initialized = VAR_INIT_STATUS_INITIALIZED;
8555 loc2 = vt_expand_1pvar (var, vars);
8557 else
8559 if (last_limit < VAR_PART_OFFSET (var, i))
8561 complete = false;
8562 break;
8564 else if (last_limit > VAR_PART_OFFSET (var, i))
8565 continue;
8566 offset = VAR_PART_OFFSET (var, i);
8567 loc2 = var->var_part[i].cur_loc;
8568 if (loc2 && GET_CODE (loc2) == MEM
8569 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8571 rtx depval = XEXP (loc2, 0);
8573 loc2 = vt_expand_loc (loc2, vars);
8575 if (loc2)
8576 loc_exp_insert_dep (var, depval, vars);
8578 if (!loc2)
8580 complete = false;
8581 continue;
8583 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8584 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8585 if (var->var_part[i].cur_loc == lc->loc)
8587 initialized = lc->init;
8588 break;
8590 gcc_assert (lc);
8593 offsets[n_var_parts] = offset;
8594 if (!loc2)
8596 complete = false;
8597 continue;
8599 loc[n_var_parts] = loc2;
8600 mode = GET_MODE (var->var_part[i].cur_loc);
8601 if (mode == VOIDmode && var->onepart)
8602 mode = DECL_MODE (decl);
8603 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8605 /* Attempt to merge adjacent registers or memory. */
8606 wider_mode = GET_MODE_WIDER_MODE (mode);
8607 for (j = i + 1; j < var->n_var_parts; j++)
8608 if (last_limit <= VAR_PART_OFFSET (var, j))
8609 break;
8610 if (j < var->n_var_parts
8611 && wider_mode != VOIDmode
8612 && var->var_part[j].cur_loc
8613 && mode == GET_MODE (var->var_part[j].cur_loc)
8614 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8615 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8616 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8617 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8619 rtx new_loc = NULL;
8621 if (REG_P (loc[n_var_parts])
8622 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8623 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8624 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8625 == REGNO (loc2))
8627 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8628 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8629 mode, 0);
8630 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8631 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8632 if (new_loc)
8634 if (!REG_P (new_loc)
8635 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8636 new_loc = NULL;
8637 else
8638 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8641 else if (MEM_P (loc[n_var_parts])
8642 && GET_CODE (XEXP (loc2, 0)) == PLUS
8643 && REG_P (XEXP (XEXP (loc2, 0), 0))
8644 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8646 if ((REG_P (XEXP (loc[n_var_parts], 0))
8647 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8648 XEXP (XEXP (loc2, 0), 0))
8649 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8650 == GET_MODE_SIZE (mode))
8651 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8652 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8653 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8654 XEXP (XEXP (loc2, 0), 0))
8655 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8656 + GET_MODE_SIZE (mode)
8657 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8658 new_loc = adjust_address_nv (loc[n_var_parts],
8659 wider_mode, 0);
8662 if (new_loc)
8664 loc[n_var_parts] = new_loc;
8665 mode = wider_mode;
8666 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8667 i = j;
8670 ++n_var_parts;
8672 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8673 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8674 complete = false;
8676 if (! flag_var_tracking_uninit)
8677 initialized = VAR_INIT_STATUS_INITIALIZED;
8679 note_vl = NULL_RTX;
8680 if (!complete)
8681 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8682 (int) initialized);
8683 else if (n_var_parts == 1)
8685 rtx expr_list;
8687 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8688 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8689 else
8690 expr_list = loc[0];
8692 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8693 (int) initialized);
8695 else if (n_var_parts)
8697 rtx parallel;
8699 for (i = 0; i < n_var_parts; i++)
8700 loc[i]
8701 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8703 parallel = gen_rtx_PARALLEL (VOIDmode,
8704 gen_rtvec_v (n_var_parts, loc));
8705 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8706 parallel, (int) initialized);
8709 if (where != EMIT_NOTE_BEFORE_INSN)
8711 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8712 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8713 NOTE_DURING_CALL_P (note) = true;
8715 else
8717 /* Make sure that the call related notes come first. */
8718 while (NEXT_INSN (insn)
8719 && NOTE_P (insn)
8720 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8721 && NOTE_DURING_CALL_P (insn))
8722 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8723 insn = NEXT_INSN (insn);
8724 if (NOTE_P (insn)
8725 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8726 && NOTE_DURING_CALL_P (insn))
8727 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8728 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8729 else
8730 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8732 NOTE_VAR_LOCATION (note) = note_vl;
8734 set_dv_changed (var->dv, false);
8735 gcc_assert (var->in_changed_variables);
8736 var->in_changed_variables = false;
8737 changed_variables.clear_slot (varp);
8739 /* Continue traversing the hash table. */
8740 return 1;
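/* For illustration, the three NOTE_VAR_LOCATION payload shapes built
   above, sketched in RTL dump syntax (modes, registers and offsets
   are made up):

     (var_location D (nil))			no complete location known
     (var_location D (reg:SI 0))		one part at offset 0
     (var_location D (parallel
	[(expr_list (reg:SI 0) (const_int 0))
	 (expr_list (mem:SI ...) (const_int 4))]))  multiple parts

   A single part at a nonzero offset is wrapped in an expr_list with
   its offset, like one element of the parallel above.  */
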
/* While traversing changed_variables, push onto DATA (a stack of RTX
   values) entries that aren't user variables.  */

static int
var_track_values_to_stack (variable_def **slot,
			   vec<rtx, va_heap> *changed_values_stack)
{
  variable var = *slot;

  if (var->onepart == ONEPART_VALUE)
    changed_values_stack->safe_push (dv_as_value (var->dv));
  else if (var->onepart == ONEPART_DEXPR)
    changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));

  return 1;
}

/* Remove from changed_variables the entry whose DV corresponds to
   value or debug_expr VAL.  */
static void
remove_value_from_changed_variables (rtx val)
{
  decl_or_value dv = dv_from_rtx (val);
  variable_def **slot;
  variable var;

  slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
						NO_INSERT);
  var = *slot;
  var->in_changed_variables = false;
  changed_variables.clear_slot (slot);
}

/* If VAL (a value or debug_expr) has backlinks to variables actively
   dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
   changed, adding to CHANGED_VALUES_STACK any dependencies that may
   have dependencies of their own to notify.  */

static void
notify_dependents_of_changed_value (rtx val, variable_table_type htab,
				    vec<rtx, va_heap> *changed_values_stack)
{
  variable_def **slot;
  variable var;
  loc_exp_dep *led;
  decl_or_value dv = dv_from_rtx (val);

  slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
						NO_INSERT);
  if (!slot)
    slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
  if (!slot)
    slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
					       NO_INSERT);
  var = *slot;

  while ((led = VAR_LOC_DEP_LST (var)))
    {
      decl_or_value ldv = led->dv;
      variable ivar;

      /* Deactivate and remove the backlink, as it was "used up".  It
	 makes no sense to attempt to notify the same entity again:
	 either it will be recomputed and re-register an active
	 dependency, or it will still have the changed mark.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      led->next = NULL;
      led->pprev = NULL;

      if (dv_changed_p (ldv))
	continue;

      switch (dv_onepart_p (ldv))
	{
	case ONEPART_VALUE:
	case ONEPART_DEXPR:
	  set_dv_changed (ldv, true);
	  changed_values_stack->safe_push (dv_as_rtx (ldv));
	  break;

	case ONEPART_VDECL:
	  ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
	  variable_was_changed (ivar, NULL);
	  break;

	case NOT_ONEPART:
	  pool_free (loc_exp_dep_pool, led);
	  ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
	  if (ivar)
	    {
	      int i = ivar->n_var_parts;
	      while (i--)
		{
		  rtx loc = ivar->var_part[i].cur_loc;

		  if (loc && GET_CODE (loc) == MEM
		      && XEXP (loc, 0) == val)
		    {
		      variable_was_changed (ivar, NULL);
		      break;
		    }
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}

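/* The unlink above relies on the "pprev" idiom: led->pprev points at
   whatever pointer currently addresses LED (the list head or the
   previous node's next field), so removal needs no special case for
   the head of the list.  A generic sketch of the same idiom, with a
   hypothetical node type:

     struct node { struct node *next, **pprev; };

     static void
     unlink_node (struct node *n)
     {
       if (n->next)
	 n->next->pprev = n->pprev;
       if (n->pprev)
	 *n->pprev = n->next;
       n->next = NULL;
       n->pprev = NULL;
     }  */
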
/* Take out of changed_variables any entries that don't refer to user
   variables.  Back-propagate change notifications from values and
   debug_exprs to their active dependencies in HTAB or in
   CHANGED_VARIABLES.  */

static void
process_changed_values (variable_table_type htab)
{
  int i, n;
  rtx val;
  stack_vec<rtx, 20> changed_values_stack;

  /* Move values from changed_variables to changed_values_stack.  */
  changed_variables
    .traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
      (&changed_values_stack);

  /* Back-propagate change notifications in values while popping
     them from the stack.  */
  for (n = i = changed_values_stack.length ();
       i > 0; i = changed_values_stack.length ())
    {
      val = changed_values_stack.pop ();
      notify_dependents_of_changed_value (val, htab, &changed_values_stack);

      /* This condition will hold when visiting each of the entries
	 originally in changed_variables.  We can't remove them
	 earlier because this could drop the backlinks before we got a
	 chance to use them.  */
      if (i == n)
	{
	  remove_value_from_changed_variables (val);
	  n--;
	}
    }
}

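/* A worked example of the I == N bookkeeping above: if
   changed_variables held values V1 and V2 (N == 2), the first
   iteration pops V2 with I == 2 == N, so V2 is removed from
   changed_variables and N drops to 1.  If notifying V2's dependents
   pushed a new value V3, the next iteration pops V3 with I == 2 > N,
   so V3 is not removed (it was never in changed_variables).  Only
   when I falls back to N == 1 is V1 popped and removed.  */
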
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
   CHANGED_VARIABLES and delete this chain.  WHERE specifies whether
   the notes shall be emitted before or after instruction INSN.  */

static void
emit_notes_for_changes (rtx insn, enum emit_note_where where,
			shared_hash vars)
{
  emit_note_data data;
  variable_table_type htab = shared_hash_htab (vars);

  if (!changed_variables.elements ())
    return;

  if (MAY_HAVE_DEBUG_INSNS)
    process_changed_values (htab);

  data.insn = insn;
  data.where = where;
  data.vars = htab;

  changed_variables
    .traverse <emit_note_data*, emit_note_insn_var_location> (&data);
}

/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
   same variable in hash table DATA or is not there at all.  */

static int
emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
{
  variable old_var, new_var;

  old_var = *slot;
  new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  */
      variable empty_var = NULL;

      if (old_var->onepart == ONEPART_VALUE
	  || old_var->onepart == ONEPART_DEXPR)
	{
	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
	  if (empty_var)
	    {
	      gcc_checking_assert (!empty_var->in_changed_variables);
	      if (!VAR_LOC_1PAUX (old_var))
		{
		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
		  VAR_LOC_1PAUX (empty_var) = NULL;
		}
	      else
		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
	    }
	}

      if (!empty_var)
	{
	  empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
	  empty_var->dv = old_var->dv;
	  empty_var->refcount = 0;
	  empty_var->n_var_parts = 0;
	  empty_var->onepart = old_var->onepart;
	  empty_var->in_changed_variables = false;
	}

      if (empty_var->onepart)
	{
	  /* Propagate the auxiliary data to (ultimately)
	     changed_variables.  */
	  empty_var->var_part[0].loc_chain = NULL;
	  empty_var->var_part[0].cur_loc = NULL;
	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
	  VAR_LOC_1PAUX (old_var) = NULL;
	}
      variable_was_changed (empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }
  /* Update cur_loc and one-part auxiliary data, before new_var goes
     through variable_was_changed.  */
  if (old_var != new_var && new_var->onepart)
    {
      gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
      VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
      VAR_LOC_1PAUX (old_var) = NULL;
      new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
    }
  if (variable_different_p (old_var, new_var))
    variable_was_changed (new_var, NULL);

  /* Continue traversing the hash table.  */
  return 1;
}

/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
   table DATA.  */

static int
emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
{
  variable old_var, new_var;

  new_var = *slot;
  old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
  if (!old_var)
    {
      int i;
      for (i = 0; i < new_var->n_var_parts; i++)
	new_var->var_part[i].cur_loc = NULL;
      variable_was_changed (new_var, NULL);
    }

  /* Continue traversing the hash table.  */
  return 1;
}

/* Emit notes before INSN for differences between dataflow sets OLD_SET and
   NEW_SET.  */

static void
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
			    dataflow_set *new_set)
{
  shared_hash_htab (old_set->vars)
    .traverse <variable_table_type, emit_notes_for_differences_1>
      (shared_hash_htab (new_set->vars));
  shared_hash_htab (new_set->vars)
    .traverse <variable_table_type, emit_notes_for_differences_2>
      (shared_hash_htab (old_set->vars));
  emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
}

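/* The two traversals above implement a set difference in both
   directions: the first pass flags variables that disappeared from or
   changed between OLD_SET and NEW_SET, the second flags variables
   that appear only in NEW_SET.  Variables present in both tables are
   compared (and flagged when different) by the first pass alone.  */
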
/* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION.  */

static rtx
next_non_note_insn_var_location (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0
	  || !NOTE_P (insn)
	  || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
	break;
    }

  return insn;
}

/* Emit the notes for changes of location parts in the basic block BB.  */

static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
  unsigned int i;
  micro_operation *mo;

  dataflow_set_clear (set);
  dataflow_set_copy (set, &VTI (bb)->in);

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx insn = mo->insn;
      rtx next_insn = next_non_note_insn_var_location (insn);

      switch (mo->type)
	{
	case MO_CALL:
	  dataflow_set_clear_at_call (set);
	  emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
	  {
	    rtx arguments = mo->u.loc, *p = &arguments, note;
	    while (*p)
	      {
		XEXP (XEXP (*p, 0), 1)
		  = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
				   shared_hash_htab (set->vars));
		/* If expansion is successful, keep it in the list.  */
		if (XEXP (XEXP (*p, 0), 1))
		  p = &XEXP (*p, 1);
		/* Otherwise, if the following item is data_value for it,
		   drop it too.  */
		else if (XEXP (*p, 1)
			 && REG_P (XEXP (XEXP (*p, 0), 0))
			 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
			 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
					 0))
			 && REGNO (XEXP (XEXP (*p, 0), 0))
			    == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
						  0), 0)))
		  *p = XEXP (XEXP (*p, 1), 1);
		/* Just drop this item.  */
		else
		  *p = XEXP (*p, 1);
	      }
	    note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
	    NOTE_VAR_LOCATION (note) = arguments;
	  }
	  break;

	case MO_USE:
	  {
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	    else
	      var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);

	    emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	  }
	  break;

	case MO_VAL_LOC:
	  {
	    rtx loc = mo->u.loc;
	    rtx val, vloc;
	    tree var;

	    if (GET_CODE (loc) == CONCAT)
	      {
		val = XEXP (loc, 0);
		vloc = XEXP (loc, 1);
	      }
	    else
	      {
		val = NULL_RTX;
		vloc = loc;
	      }

	    var = PAT_VAR_LOCATION_DECL (vloc);

	    clobber_variable_part (set, NULL_RTX,
				   dv_from_decl (var), 0, NULL_RTX);
	    if (val)
	      {
		if (VAL_NEEDS_RESOLUTION (loc))
		  val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		set_variable_part (set, val, dv_from_decl (var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);
	      }
	    else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
	      set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
				 dv_from_decl (var), 0,
				 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				 INSERT);

	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	  }
	  break;

	case MO_VAL_USE:
	  {
	    rtx loc = mo->u.loc;
	    rtx val, vloc, uloc;

	    vloc = uloc = XEXP (loc, 1);
	    val = XEXP (loc, 0);

	    if (GET_CODE (val) == CONCAT)
	      {
		uloc = XEXP (val, 1);
		val = XEXP (val, 0);
	      }

	    if (VAL_NEEDS_RESOLUTION (loc))
	      val_resolve (set, val, vloc, insn);
	    else
	      val_store (set, val, uloc, insn, false);

	    if (VAL_HOLDS_TRACK_EXPR (loc))
	      {
		if (GET_CODE (uloc) == REG)
		  var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
			       NULL);
		else if (GET_CODE (uloc) == MEM)
		  var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
			       NULL);
	      }

	    emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	  }
	  break;

	case MO_VAL_SET:
	  {
	    rtx loc = mo->u.loc;
	    rtx val, vloc, uloc;
	    rtx dstv, srcv;

	    vloc = loc;
	    uloc = XEXP (vloc, 1);
	    val = XEXP (vloc, 0);
	    vloc = uloc;

	    if (GET_CODE (uloc) == SET)
	      {
		dstv = SET_DEST (uloc);
		srcv = SET_SRC (uloc);
	      }
	    else
	      {
		dstv = uloc;
		srcv = NULL;
	      }

	    if (GET_CODE (val) == CONCAT)
	      {
		dstv = vloc = XEXP (val, 1);
		val = XEXP (val, 0);
	      }

	    if (GET_CODE (vloc) == SET)
	      {
		srcv = SET_SRC (vloc);

		gcc_assert (val != srcv);
		gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		dstv = vloc = SET_DEST (vloc);

		if (VAL_NEEDS_RESOLUTION (loc))
		  val_resolve (set, val, srcv, insn);
	      }
	    else if (VAL_NEEDS_RESOLUTION (loc))
	      {
		gcc_assert (GET_CODE (uloc) == SET
			    && GET_CODE (SET_SRC (uloc)) == REG);
		val_resolve (set, val, SET_SRC (uloc), insn);
	      }

	    if (VAL_HOLDS_TRACK_EXPR (loc))
	      {
		if (VAL_EXPR_IS_CLOBBERED (loc))
		  {
		    if (REG_P (uloc))
		      var_reg_delete (set, uloc, true);
		    else if (MEM_P (uloc))
		      {
			gcc_assert (MEM_P (dstv));
			gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			var_mem_delete (set, dstv, true);
		      }
		  }
		else
		  {
		    bool copied_p = VAL_EXPR_IS_COPIED (loc);
		    rtx src = NULL, dst = uloc;
		    enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		    if (GET_CODE (uloc) == SET)
		      {
			src = SET_SRC (uloc);
			dst = SET_DEST (uloc);
		      }

		    if (copied_p)
		      {
			status = find_src_status (set, src);

			src = find_src_set_src (set, src);
		      }

		    if (REG_P (dst))
		      var_reg_delete_and_set (set, dst, !copied_p,
					      status, srcv);
		    else if (MEM_P (dst))
		      {
			gcc_assert (MEM_P (dstv));
			gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			var_mem_delete_and_set (set, dstv, !copied_p,
						status, srcv);
		      }
		  }
	      }
	    else if (REG_P (uloc))
	      var_regno_delete (set, REGNO (uloc));
	    else if (MEM_P (uloc))
	      {
		gcc_checking_assert (GET_CODE (vloc) == MEM);
		gcc_checking_assert (vloc == dstv);
		if (vloc != dstv)
		  clobber_overlapping_mems (set, vloc);
	      }

	    val_store (set, val, dstv, insn, true);

	    emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				    set->vars);
	  }
	  break;

	case MO_SET:
	  {
	    rtx loc = mo->u.loc;
	    rtx set_src = NULL;

	    if (GET_CODE (loc) == SET)
	      {
		set_src = SET_SRC (loc);
		loc = SET_DEST (loc);
	      }

	    if (REG_P (loc))
	      var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
				      set_src);
	    else
	      var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
				      set_src);

	    emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				    set->vars);
	  }
	  break;

	case MO_COPY:
	  {
	    rtx loc = mo->u.loc;
	    enum var_init_status src_status;
	    rtx set_src = NULL;

	    if (GET_CODE (loc) == SET)
	      {
		set_src = SET_SRC (loc);
		loc = SET_DEST (loc);
	      }

	    src_status = find_src_status (set, set_src);
	    set_src = find_src_set_src (set, set_src);

	    if (REG_P (loc))
	      var_reg_delete_and_set (set, loc, false, src_status, set_src);
	    else
	      var_mem_delete_and_set (set, loc, false, src_status, set_src);

	    emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				    set->vars);
	  }
	  break;

	case MO_USE_NO_VAR:
	  {
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_delete (set, loc, false);
	    else
	      var_mem_delete (set, loc, false);

	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	  }
	  break;

	case MO_CLOBBER:
	  {
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_delete (set, loc, true);
	    else
	      var_mem_delete (set, loc, true);

	    emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				    set->vars);
	  }
	  break;

	case MO_ADJUST:
	  set->stack_adjust += mo->u.adjust;
	  break;
	}
    }
}

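/* Note placement summary for the switch above: MO_CALL argument notes
   and MO_VAL_LOC/MO_USE_NO_VAR notes are emitted after INSN, MO_USE
   and MO_VAL_USE notes before INSN, and the remaining location
   changes (MO_VAL_SET, MO_SET, MO_COPY, MO_CLOBBER) before the next
   instruction that is not a NOTE_INSN_VAR_LOCATION, so that a store's
   new location becomes visible only past the storing instruction.  */
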
/* Emit notes for the whole function.  */

static void
vt_emit_notes (void)
{
  basic_block bb;
  dataflow_set cur;

  gcc_assert (!changed_variables.elements ());

  /* Free memory occupied by the out hash tables, as they aren't used
     anymore.  */
  FOR_EACH_BB (bb)
    dataflow_set_clear (&VTI (bb)->out);

  /* Enable emitting notes by functions (mainly by set_variable_part and
     delete_variable_part).  */
  emit_notes = true;

  if (MAY_HAVE_DEBUG_INSNS)
    {
      dropped_values.create (cselib_get_next_uid () * 2);
      loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
					    sizeof (loc_exp_dep), 64);
    }

  dataflow_set_init (&cur);

  FOR_EACH_BB (bb)
    {
      /* Emit the notes for changes of variable locations between two
	 subsequent basic blocks.  */
      emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);

      if (MAY_HAVE_DEBUG_INSNS)
	local_get_addr_cache = pointer_map_create ();

      /* Emit the notes for the changes in the basic block itself.  */
      emit_notes_in_bb (bb, &cur);

      if (MAY_HAVE_DEBUG_INSNS)
	pointer_map_destroy (local_get_addr_cache);
      local_get_addr_cache = NULL;

      /* Free memory occupied by the in hash table, we won't need it
	 again.  */
      dataflow_set_clear (&VTI (bb)->in);
    }
#ifdef ENABLE_CHECKING
  shared_hash_htab (cur.vars)
    .traverse <variable_table_type, emit_notes_for_differences_1>
      (shared_hash_htab (empty_shared_hash));
#endif
  dataflow_set_destroy (&cur);

  if (MAY_HAVE_DEBUG_INSNS)
    dropped_values.dispose ();

  emit_notes = false;
}

/* If there is a declaration and offset associated with register/memory RTL
   assign declaration to *DECLP and offset to *OFFSETP, and return true.  */

static bool
vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
{
  if (REG_P (rtl))
    {
      if (REG_ATTRS (rtl))
	{
	  *declp = REG_EXPR (rtl);
	  *offsetp = REG_OFFSET (rtl);
	  return true;
	}
    }
  else if (MEM_P (rtl))
    {
      if (MEM_ATTRS (rtl))
	{
	  *declp = MEM_EXPR (rtl);
	  *offsetp = INT_MEM_OFFSET (rtl);
	  return true;
	}
    }
  return false;
}

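/* For instance (a sketch; the mode, register and attribute string are
   only illustrative), given the incoming rtl of a parameter "x"
   passed on the stack,

     (mem/c:SI (plus:SI (reg/f:SI argp) (const_int 4)) [1 x+0 S4 A32])

   the MEM attributes yield *DECLP == the PARM_DECL for "x" and
   *OFFSETP == 0.  */
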
/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
   of VAL.  */

static void
record_entry_value (cselib_val *val, rtx rtl)
{
  rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));

  ENTRY_VALUE_EXP (ev) = rtl;

  cselib_add_permanent_equiv (val, ev, get_insns ());
}

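/* E.g. for a parameter arriving in a hard register R, this records an
   equivalence of VAL with an rtx along the lines of

     (entry_value:SI (reg:SI R))

   i.e. whatever R held on entry to the function (the mode and
   register here are only illustrative).  */
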
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */

static void
vt_add_function_parameter (tree parm)
{
  rtx decl_rtl = DECL_RTL_IF_SET (parm);
  rtx incoming = DECL_INCOMING_RTL (parm);
  tree decl;
  enum machine_mode mode;
  HOST_WIDE_INT offset;
  dataflow_set *out;
  decl_or_value dv;

  if (TREE_CODE (parm) != PARM_DECL)
    return;

  if (!decl_rtl || !incoming)
    return;

  if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
    return;

  /* If there is a DRAP register or a pseudo in internal_arg_pointer,
     rewrite the incoming location of parameters passed on the stack
     into MEMs based on the argument pointer, so that incoming doesn't
     depend on a pseudo.  */
  if (MEM_P (incoming)
      && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0)
		 == crtl->args.internal_arg_pointer
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    {
      HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
      if (GET_CODE (XEXP (incoming, 0)) == PLUS)
	off += INTVAL (XEXP (XEXP (incoming, 0), 1));
      incoming
	= replace_equiv_address_nv (incoming,
				    plus_constant (Pmode,
						   arg_pointer_rtx, off));
    }

#ifdef HAVE_window_save
  /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
     If the target machine has an explicit window save instruction, the
     actual entry value is the corresponding OUTGOING_REGNO instead.  */
  if (HAVE_window_save && !crtl->uses_only_leaf_regs)
    {
      if (REG_P (incoming)
	  && HARD_REGISTER_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	{
	  parm_reg_t p;
	  p.incoming = incoming;
	  incoming
	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				  OUTGOING_REGNO (REGNO (incoming)), 0);
	  p.outgoing = incoming;
	  vec_safe_push (windowed_parm_regs, p);
	}
      else if (MEM_P (incoming)
	       && REG_P (XEXP (incoming, 0))
	       && HARD_REGISTER_P (XEXP (incoming, 0)))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      parm_reg_t p;
	      p.incoming = reg;
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      p.outgoing = reg;
	      vec_safe_push (windowed_parm_regs, p);
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	}
    }
#endif

  if (!vt_get_decl_and_offset (incoming, &decl, &offset))
    {
      if (MEM_P (incoming))
	{
	  /* This means argument is passed by invisible reference.  */
	  offset = 0;
	  decl = parm;
	}
      else
	{
	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
	    return;
	  offset += byte_lowpart_offset (GET_MODE (incoming),
					 GET_MODE (decl_rtl));
	}
    }

  if (!decl)
    return;

  if (parm != decl)
    {
      /* If that DECL_RTL wasn't a pseudo that got spilled to
	 memory, bail out.  Otherwise, the spill slot sharing code
	 will force the memory to reference spill_slot_decl (%sfp),
	 so we don't match above.  That's ok, the pseudo must have
	 referenced the entire parameter, so just reset OFFSET.  */
      if (decl != get_spill_slot_decl (false))
	return;
      offset = 0;
    }

  if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
    return;

  out = &VTI (ENTRY_BLOCK_PTR)->out;

  dv = dv_from_decl (parm);

  if (target_for_debug_bind (parm)
      /* We can't deal with these right now, because this kind of
	 variable is single-part.  ??? We could handle parallels
	 that describe multiple locations for the same single
	 value, but ATM we don't.  */
      && GET_CODE (incoming) != PARALLEL)
    {
      cselib_val *val;
      rtx lowpart;

      /* ??? We shouldn't ever hit this, but it may happen because
	 arguments passed by invisible reference aren't dealt with
	 above: incoming-rtl will have Pmode rather than the
	 expected mode for the type.  */
      if (offset)
	return;

      lowpart = var_lowpart (mode, incoming);
      if (!lowpart)
	return;

      val = cselib_lookup_from_insn (lowpart, mode, true,
				     VOIDmode, get_insns ());

      /* ??? Float-typed values in memory are not handled by
	 cselib.  */
      if (val)
	{
	  preserve_value (val);
	  set_variable_part (out, val->val_rtx, dv, offset,
			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
	  dv = dv_from_value (val->val_rtx);
	}

      if (MEM_P (incoming))
	{
	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
					 VOIDmode, get_insns ());
	  if (val)
	    {
	      preserve_value (val);
	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
	    }
	}
    }

  if (REG_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
      attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
			 incoming);
      set_variable_part (out, incoming, dv, offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
      if (dv_is_value_p (dv))
	{
	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
	    {
	      enum machine_mode indmode
		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
	      rtx mem = gen_rtx_MEM (indmode, incoming);
	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
							 VOIDmode,
							 get_insns ());
	      if (val)
		{
		  preserve_value (val);
		  record_entry_value (val, mem);
		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
		}
	    }
	}
    }
  else if (MEM_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      set_variable_part (out, incoming, dv, offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
    }
}

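/* Summary sketch for a parameter P arriving in a hard register R: the
   code above records R in the register attribute list of the
   ENTRY_BLOCK out set, binds P (or its cselib VALUE, when debug insns
   are enabled) to R at the computed offset, and, for an integral
   reference parameter, additionally binds the pointed-to VALUE to
   (mem (reg R)) with an ENTRY_VALUE equivalence so that dereferences
   of the parameter can be tracked as well.  */
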
/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */

static void
vt_add_function_parameters (void)
{
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = DECL_CHAIN (parm))
    vt_add_function_parameter (parm);

  if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
    {
      tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));

      if (TREE_CODE (vexpr) == INDIRECT_REF)
	vexpr = TREE_OPERAND (vexpr, 0);

      if (TREE_CODE (vexpr) == PARM_DECL
	  && DECL_ARTIFICIAL (vexpr)
	  && !DECL_IGNORED_P (vexpr)
	  && DECL_NAMELESS (vexpr))
	vt_add_function_parameter (vexpr);
    }
}

/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
				 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}

/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));

  attrs_pool = create_alloc_pool ("attrs_def pool",
				  sizeof (struct attrs_def), 1024);
  var_pool = create_alloc_pool ("variable_def pool",
				sizeof (struct variable_def)
				+ (MAX_VAR_PARTS - 1)
				* sizeof (((variable)NULL)->var_part[0]), 64);
  loc_chain_pool = create_alloc_pool ("location_chain_def pool",
				      sizeof (struct location_chain_def),
				      1024);
  shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
					sizeof (struct shared_hash_def), 256);
  empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab.create (1);
  changed_variables.create (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB (bb)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      valvar_pool = create_alloc_pool ("small variable_def pool",
				       sizeof (struct variable_def), 256);
      preserved_values.create (256);
      global_get_addr_cache = pointer_map_create ();
    }
  else
    {
      scratch_regs = NULL;
      valvar_pool = NULL;
      global_get_addr_cache = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      cselib_preserve_cfa_base_value (val, REGNO (reg));
      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());

      if (ofst)
	{
	  val = cselib_lookup_from_insn (stack_pointer_rtx,
					 GET_MODE (stack_pointer_rtx), 1,
					 VOIDmode, get_insns ());
	  preserve_value (val);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  cselib_add_permanent_equiv (val, expr, get_insns ());
	}
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx.  We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated.  */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= INTVAL (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  FOR_EACH_BB (bb)
    {
      rtx insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "first value: %i\n",
		     cselib_get_next_uid ());
	}

      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      || ! single_pred_p (bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
	  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  if (!frame_pointer_needed)
		    {
		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		      if (pre)
			{
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (PATTERN (insn), bb, insn,
					 MO_ADJUST, dump_file);
			  VTI (bb)->mos.safe_push (mo);
			  VTI (bb)->out.stack_adjust += pre;
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);
		  if (MAY_HAVE_DEBUG_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  if (!cselib_hook_called)
		    add_with_sets (insn, 0, 0);
		  cancel_changes (0);

		  if (!frame_pointer_needed && post)
		    {
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (PATTERN (insn), bb, insn,
				     MO_ADJUST, dump_file);
		      VTI (bb)->mos.safe_push (mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

		  if (fp_cfa_offset != -1
		      && hard_frame_pointer_adjustment == -1
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now.  */
		      if (MAY_HAVE_DEBUG_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (v);
			    }
			}
		    }
		}
	    }
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR)->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}

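/* A sketch of the permanent equivalences installed near the top of
   vt_initialize, assuming arg_pointer_rtx as the CFA base and an
   entry-time offset ofst of, say, 8 (the constants are illustrative):

     VALUE(argp) == (plus:P (reg sp) (const_int -8))
     VALUE(sp)   == (plus:P (reg argp) (const_int 8))

   so MEMs based on either pointer can later be expressed in terms of
   the CFA.  */
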
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;

/* Get rid of all debug insns from the insn stream.  */

static void
delete_debug_insns (void)
{
  basic_block bb;
  rtx insn, next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (DEBUG_INSN_P (insn))
	  {
	    tree decl = INSN_VAR_LOCATION_DECL (insn);
	    if (TREE_CODE (decl) == LABEL_DECL
		&& DECL_NAME (decl)
		&& !DECL_RTL_SET_P (decl))
	      {
		PUT_CODE (insn, NOTE);
		NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
		NOTE_DELETED_LABEL_NAME (insn)
		  = IDENTIFIER_POINTER (DECL_NAME (decl));
		SET_DECL_RTL (decl, insn);
		CODE_LABEL_NUMBER (insn) = debug_label_num++;
	      }
	    else
	      delete_insn (insn);
	  }
    }
}

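/* Example of the transformation above (a sketch): a debug bind for a
   named label L whose rtl was never set,

     (debug_insn (var_location L (nil)))

   becomes a NOTE_INSN_DELETED_DEBUG_LABEL note carrying the name "L"
   and a fresh CODE_LABEL_NUMBER, so later stages can still refer to
   the label by name in debug output; every other debug insn is simply
   deleted.  */
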
/* Run a fast, BB-local only version of var tracking, to take care of
   information that we don't do global analysis on, such that not all
   information is lost.  If SKIPPED holds, we're skipping the global
   pass entirely, so we should try to use the information it would
   have handled as well.  */

static void
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
  /* ??? Just skip it all for now.  */
  delete_debug_insns ();
}

/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      VTI (bb)->mos.release ();
    }

  FOR_ALL_BB (bb)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();
  empty_shared_hash->htab.dispose ();
  changed_variables.dispose ();
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      if (global_get_addr_cache)
	pointer_map_destroy (global_get_addr_cache);
      global_get_addr_cache = NULL;
      if (loc_exp_dep_pool)
	free_alloc_pool (loc_exp_dep_pool);
      loc_exp_dep_pool = NULL;
      free_alloc_pool (valvar_pool);
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}

/* The entry point to variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  if (flag_var_tracking_assignments < 0)
    {
      delete_debug_insns ();
      return 0;
    }

  if (n_basic_blocks_for_fn (cfun) > 500
      && n_edges / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}

unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}

static bool
gate_handle_var_tracking (void)
{
  return (flag_var_tracking && !targetm.delay_vartrack);
}

namespace {

const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
};

class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_var_tracking (); }
  unsigned int execute () { return variable_tracking_main (); }

}; // class pass_variable_tracking

} // anon namespace

rtl_opt_pass *
make_pass_variable_tracking (gcc::context *ctxt)
{
  return new pass_variable_tracking (ctxt);
}