/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (in which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
   < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offsets of variables addressed using the stack
   pointer), the table of structures describing the locations of parts of a
   variable and, for each physical register, a linked list of attributes.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list makes it
   efficient to delete the appropriate variable parts when we set or clobber
   the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so they are a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such note describes
   the location of one variable at the point in the instruction stream where
   the note is.  There is no need to emit a note for each variable before each
   instruction; we only emit these notes where the location of a variable
   changes (this means that we also emit notes for changes between the OUT
   set of the previous block and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
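
/* As an illustrative sketch only (mode and register number invented, not
   the exact RTL the compiler prints): if variable `x' lives in register 3
   at some point, the pass emits a note of roughly the form

     (note (var_location x (reg:SI 3)) NOTE_INSN_VAR_LOCATION)

   at that point, and dwarf2out later derives the DWARF2 location list
   for `x' from runs of such notes.  */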
88 #include "config.h"
89 #include "system.h"
90 #include "coretypes.h"
91 #include "tm.h"
92 #include "rtl.h"
93 #include "tree.h"
94 #include "tm_p.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
97 #include "flags.h"
98 #include "insn-config.h"
99 #include "reload.h"
100 #include "sbitmap.h"
101 #include "alloc-pool.h"
102 #include "fibheap.h"
103 #include "hash-table.h"
104 #include "regs.h"
105 #include "expr.h"
106 #include "tree-pass.h"
107 #include "bitmap.h"
108 #include "tree-dfa.h"
109 #include "tree-ssa.h"
110 #include "cselib.h"
111 #include "target.h"
112 #include "params.h"
113 #include "diagnostic.h"
114 #include "tree-pretty-print.h"
115 #include "pointer-set.h"
116 #include "recog.h"
117 #include "tm_p.h"
118 #include "alias.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
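
/* A concrete sketch (register numbers invented): for an insn whose pattern
   is (set (reg:SI 3) (reg:SI 4)) with a tracked destination, the recorded
   micro operation would have type MO_SET (or MO_COPY, when copying an
   already-tracked part) and u.loc pointing at that SET; for an MO_ADJUST
   it is u.adjust that is meaningful instead.  */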
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus a chain is
   the best data structure here.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of a variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__		\
(*({  variable const __v = (var);			\
      gcc_checking_assert (!__v->onepart);		\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__		\
(*({  variable const __v = (var);			\
      gcc_checking_assert (__v->onepart);		\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)  (VAR_LOC_1PAUX (var)		 \
			       ? VAR_LOC_1PAUX (var)->backlinks	 \
			       : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		 \
			       ? &VAR_LOC_1PAUX (var)->backlinks \
			       : NULL)
#define VAR_LOC_FROM(var)     (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)    (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)  (VAR_LOC_1PAUX (var)		 \
			       ? &VAR_LOC_1PAUX (var)->deps	 \
			       : NULL)
typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher
{
  typedef variable_def value_type;
  typedef void compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

inline hashval_t
variable_hasher::hash (const value_type *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

inline bool
variable_hasher::equal (const value_type *v, const compare_type *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (value_type *var)
{
  variable_htab_free (var);
}

typedef hash_table <variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before it can be modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type htab;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static variable_table_type changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type, variable_table_type);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
    }
  else if (MEM_P (dest))
    {
      /* (set (mem (pre_dec (reg sp))) (foo))  */
      src = XEXP (dest, 0);
      code = GET_CODE (src);

      switch (code)
	{
	case PRE_MODIFY:
	case POST_MODIFY:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      rtx val = XEXP (XEXP (src, 1), 1);
	      /* We handle only adjustments by constant amount.  */
	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
			  && CONST_INT_P (val));

	      if (code == PRE_MODIFY)
		*pre -= INTVAL (val);
	      else
		*post -= INTVAL (val);
	      break;
	    }
	  return;

	case PRE_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case PRE_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	default:
	  return;
	}
    }
}
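
/* A sketch of the sign conventions above, with made-up numbers: on a
   target whose stack grows downward, (set (reg sp) (plus (reg sp)
   (const_int -16))) allocating 16 bytes yields *post += 16, and a push
   through (set (mem (pre_dec:SI (reg sp))) ...) yields *pre += 4; that
   is, adjustments accumulate as positive amounts of stack growth.  */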
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR)->visited = true;
  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR)
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* Check whether the adjustments on the edges are the same.  */
	  if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* The base register (arg_pointer_rtx resp. frame_pointer_rtx) which
   stack_pointer_rtx or hard_frame_pointer_rtx is being mapped to, and
   the offset used for that mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  enum machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx side_effects;
};

/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
   transformation of wider mode arithmetic to narrower mode,
   -1 if it is suitable and subexpressions shouldn't be
   traversed and 0 if it is suitable and subexpressions should
   be traversed.  Called through for_each_rtx.  */

static int
use_narrower_mode_test (rtx *loc, void *data)
{
  rtx subreg = (rtx) data;

  if (CONSTANT_P (*loc))
    return -1;
  switch (GET_CODE (*loc))
    {
    case REG:
      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	return 1;
      if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
			    *loc, subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (*loc))))
	return 1;
      return -1;
    case PLUS:
    case MINUS:
    case MULT:
      return 0;
    case ASHIFT:
      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
	return 1;
      else
	return -1;
    default:
      return 1;
    }
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
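
/* An illustrative sketch only: narrowing (plus:DI (reg:DI 1) (const_int 8))
   to SImode recurses into both operands and rebuilds
   (plus:SI (subreg:SI (reg:DI 1) 0) (const_int 8)).  Only PLUS, MINUS,
   MULT and ASHIFT can reach this function, because use_narrower_mode_test
   has already rejected everything else.  */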
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  enum machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							XEXP (loc, 1)),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	  && GET_MODE_SIZE (GET_MODE (tem))
	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
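
/* To illustrate the auto-increment handling above with a sketch (register
   number invented): inside an SImode MEM, (post_inc:SI (reg:SI 1)) is
   rewritten to just (reg:SI 1) for the use itself, while
   (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))) is queued on
   amd->side_effects so that adjust_insn can later attach it as a separate
   set of the containing insn.  */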
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL_RTX;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be done before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn;
	   i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}
/* Return the element of LIST whose DV and OFFSET match, or NULL if the
   pair of DV and OFFSET is not a member of LIST.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab.create (vars->htab.elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      vars->htab.dispose ();
      pool_free (shared_hash_pool, vars);
    }
}
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash,
						      shared_hash_shared (vars)
						      ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}
/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars).find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables.find_slot_with_hash (var->dv,
						 dv_htab_hash (var->dv),
						 NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (variable_table_type dst, variable_table_type src)
{
  variable_iterator_type hi;
  variable var;

  FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi)
    {
      variable_def **dstp;
      var->refcount++;
      dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
				      INSERT);
      *dstp = var;
    }
}
/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && TREE_CODE (decl) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}
/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}
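
/* Return the initialization status recorded in SET for location LOC of
   the variable DV, or VAR_INIT_STATUS_UNKNOWN if LOC is not found in any
   of its parts.  When uninitialized-use tracking is disabled, everything
   is considered VAR_INIT_STATUS_INITIALIZED.  */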
static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN;
	   i++)
	{
	  location_chain nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}
/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);
  attrs node, next;
  attrs *nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */

static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs *nextp = &set->regs[REGNO (loc)];
  attrs node, next;

  if (clobber)
    {
      tree decl = REG_EXPR (loc);
      HOST_WIDE_INT offset = REG_OFFSET (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}
1949 /* Delete the contents of the register with number REGNO in dataflow set SET. */
1951 static void
1952 var_regno_delete (dataflow_set *set, int regno)
1954 attrs *reg = &set->regs[regno];
1955 attrs node, next;
1957 for (node = *reg; node; node = next)
1959 next = node->next;
1960 delete_variable_part (set, node->loc, node->dv, node->offset);
1961 pool_free (attrs_pool, node);
1963 *reg = NULL;
1966 /* Return true if I is the negated value of a power of two. */
1967 static bool
1968 negative_power_of_two_p (HOST_WIDE_INT i)
1970 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
1971 return x == (x & -x);
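/* For instance, I == -8 gives X == 8, and 8 & -8 == 8, so the test
   holds; I == -6 gives X == 6, but 6 & -6 == 2, so it fails.  The
   callers below use this to recognize stack-alignment masks such as
   (const_int -16).  */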
1974 /* Strip constant offsets and alignments off of LOC. Return the base
1975 expression. */
1977 static rtx
1978 vt_get_canonicalize_base (rtx loc)
1980 while ((GET_CODE (loc) == PLUS
1981 || GET_CODE (loc) == AND)
1982 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1983 && (GET_CODE (loc) != AND
1984 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
1985 loc = XEXP (loc, 0);
1987 return loc;
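/* As an illustration, for (and (plus (value V) (const_int 8))
   (const_int -16)) the loop above peels off the alignment mask and
   then the constant offset, returning the base (value V).  */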
1990 /* This caches canonicalized addresses for VALUEs, computed using
1991 information in the global cselib table. */
1992 static struct pointer_map_t *global_get_addr_cache;
1994 /* This caches canonicalized addresses for VALUEs, computed using
1995 information from the global cache and information pertaining to a
1996 basic block being analyzed. */
1997 static struct pointer_map_t *local_get_addr_cache;
1999 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2001 /* Return the canonical address for LOC, which must be a VALUE, using a
2002 cached global equivalence or computing it and storing it in the
2003 global cache. */
2005 static rtx
2006 get_addr_from_global_cache (rtx const loc)
2008 rtx x;
2009 void **slot;
2011 gcc_checking_assert (GET_CODE (loc) == VALUE);
2013 slot = pointer_map_insert (global_get_addr_cache, loc);
2014 if (*slot)
2015 return (rtx)*slot;
2017 x = canon_rtx (get_addr (loc));
2019 /* Tentative, avoiding infinite recursion. */
2020 *slot = x;
2022 if (x != loc)
2024 rtx nx = vt_canonicalize_addr (NULL, x);
2025 if (nx != x)
2027 /* The table may have moved during recursion, recompute
2028 SLOT. */
2029 slot = pointer_map_contains (global_get_addr_cache, loc);
2030 *slot = x = nx;
2034 return x;
2037 /* Return the canonical address for LOC, which must be a VALUE, using a
2038 cached local equivalence or computing it and storing it in the
2039 local cache. */
2041 static rtx
2042 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2044 rtx x;
2045 void **slot;
2046 decl_or_value dv;
2047 variable var;
2048 location_chain l;
2050 gcc_checking_assert (GET_CODE (loc) == VALUE);
2052 slot = pointer_map_insert (local_get_addr_cache, loc);
2053 if (*slot)
2054 return (rtx)*slot;
2056 x = get_addr_from_global_cache (loc);
2058 /* Tentative, avoiding infinite recursion. */
2059 *slot = x;
2061 /* Recurse to cache the local expansion of X, or to search for a
2062 VALUE in the expansion. */
2063 if (x != loc)
2065 rtx nx = vt_canonicalize_addr (set, x);
2066 if (nx != x)
2068 slot = pointer_map_contains (local_get_addr_cache, loc);
2069 *slot = x = nx;
2071 return x;
2074 dv = dv_from_rtx (x);
2075 var = shared_hash_find (set->vars, dv);
2076 if (!var)
2077 return x;
2079 /* Look for an improved equivalent expression. */
2080 for (l = var->var_part[0].loc_chain; l; l = l->next)
2082 rtx base = vt_get_canonicalize_base (l->loc);
2083 if (GET_CODE (base) == VALUE
2084 && canon_value_cmp (base, loc))
2086 rtx nx = vt_canonicalize_addr (set, l->loc);
2087 if (x != nx)
2089 slot = pointer_map_contains (local_get_addr_cache, loc);
2090 *slot = x = nx;
2092 break;
2096 return x;
2099 /* Canonicalize LOC using equivalences from SET in addition to those
2100 in the cselib static table. It expects a VALUE-based expression,
2101 and it will only substitute VALUEs with other VALUEs or
2102 function-global equivalences, so that, if two addresses have base
2103 VALUEs that are locally or globally related in ways that
2104 memrefs_conflict_p cares about, they will both canonicalize to
2105 expressions that have the same base VALUE.
2107 The use of VALUEs as canonical base addresses enables the canonical
2108 RTXs to remain unchanged globally, if they resolve to a constant,
2109 or throughout a basic block otherwise, so that they can be cached
2110 and the cache need not be invalidated when REGs, MEMs or such
2111 change. */
2113 static rtx
2114 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2116 HOST_WIDE_INT ofst = 0;
2117 enum machine_mode mode = GET_MODE (oloc);
2118 rtx loc = oloc;
2119 rtx x;
2120 bool retry = true;
2122 while (retry)
2124 while (GET_CODE (loc) == PLUS
2125 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2127 ofst += INTVAL (XEXP (loc, 1));
2128 loc = XEXP (loc, 0);
2131 /* Alignment operations can't normally be combined, so just
2132 canonicalize the base and we're done. We'll normally have
2133 only one stack alignment anyway. */
2134 if (GET_CODE (loc) == AND
2135 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2136 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2138 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2139 if (x != XEXP (loc, 0))
2140 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2141 retry = false;
2144 if (GET_CODE (loc) == VALUE)
2146 if (set)
2147 loc = get_addr_from_local_cache (set, loc);
2148 else
2149 loc = get_addr_from_global_cache (loc);
2151 /* Consolidate plus_constants. */
2152 while (ofst && GET_CODE (loc) == PLUS
2153 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2155 ofst += INTVAL (XEXP (loc, 1));
2156 loc = XEXP (loc, 0);
2159 retry = false;
2161 else
2163 x = canon_rtx (loc);
2164 if (retry)
2165 retry = (x != loc);
2166 loc = x;
2170 /* Add OFST back in. */
2171 if (ofst)
2173 /* Don't build new RTL if we can help it. */
2174 if (GET_CODE (oloc) == PLUS
2175 && XEXP (oloc, 0) == loc
2176 && INTVAL (XEXP (oloc, 1)) == ofst)
2177 return oloc;
2179 loc = plus_constant (mode, loc, ofst);
2182 return loc;
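/* A sample walk through the loop above: for (plus (plus (value V)
   (const_int 8)) (const_int 4)), OFST accumulates 12, V is resolved
   through the local or global cache, any constant picked up from the
   cached expansion is folded into OFST as well, and the result is
   rebuilt as (plus (value V') (const_int 12)) with V' the canonical
   base.  */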
2185 /* Return true iff there's a true dependence between MLOC and LOC.
2186 MADDR must be a canonicalized version of MLOC's address. */
2188 static inline bool
2189 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2191 if (GET_CODE (loc) != MEM)
2192 return false;
2194 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2195 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2196 return false;
2198 return true;
2201 /* Hold parameters for the hashtab traversal function
2202 drop_overlapping_mem_locs, see below. */
2204 struct overlapping_mems
2206 dataflow_set *set;
2207 rtx loc, addr;
2210 /* Remove all MEMs that overlap with COMS->LOC from the location list
2211 of a hash table entry for a value. COMS->ADDR must be a
2212 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2213 canonicalized itself. */
2215 static int
2216 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2218 dataflow_set *set = coms->set;
2219 rtx mloc = coms->loc, addr = coms->addr;
2220 variable var = *slot;
2222 if (var->onepart == ONEPART_VALUE)
2224 location_chain loc, *locp;
2225 bool changed = false;
2226 rtx cur_loc;
2228 gcc_assert (var->n_var_parts == 1);
2230 if (shared_var_p (var, set->vars))
2232 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2233 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2234 break;
2236 if (!loc)
2237 return 1;
2239 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2240 var = *slot;
2241 gcc_assert (var->n_var_parts == 1);
2244 if (VAR_LOC_1PAUX (var))
2245 cur_loc = VAR_LOC_FROM (var);
2246 else
2247 cur_loc = var->var_part[0].cur_loc;
2249 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2250 loc; loc = *locp)
2252 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2254 locp = &loc->next;
2255 continue;
2258 *locp = loc->next;
2259 /* If we have deleted the location that was last emitted,
2260 we have to emit a new location, so add the variable to the
2261 set of changed variables. */
2262 if (cur_loc == loc->loc)
2264 changed = true;
2265 var->var_part[0].cur_loc = NULL;
2266 if (VAR_LOC_1PAUX (var))
2267 VAR_LOC_FROM (var) = NULL;
2269 pool_free (loc_chain_pool, loc);
2272 if (!var->var_part[0].loc_chain)
2274 var->n_var_parts--;
2275 changed = true;
2277 if (changed)
2278 variable_was_changed (var, set);
2281 return 1;
2284 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2286 static void
2287 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2289 struct overlapping_mems coms;
2291 gcc_checking_assert (GET_CODE (loc) == MEM);
2293 coms.set = set;
2294 coms.loc = canon_rtx (loc);
2295 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2297 set->traversed_vars = set->vars;
2298 shared_hash_htab (set->vars)
2299 .traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2300 set->traversed_vars = NULL;
2303 /* Set the location of DV, OFFSET as the MEM LOC. */
2305 static void
2306 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2307 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2308 enum insert_option iopt)
2310 if (dv_is_decl_p (dv))
2311 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2313 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2316 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2317 SET to LOC.
2318 Adjust the address first if it is stack pointer based. */
2320 static void
2321 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2322 rtx set_src)
2324 tree decl = MEM_EXPR (loc);
2325 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2327 var_mem_decl_set (set, loc, initialized,
2328 dv_from_decl (decl), offset, set_src, INSERT);
2331 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2332 dataflow set SET to LOC. If MODIFY is true, any other live copies
2333 of the same variable part are also deleted from the dataflow set,
2334 otherwise the variable part is assumed to be copied from another
2335 location holding the same part.
2336 Adjust the address first if it is stack pointer based. */
2338 static void
2339 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2340 enum var_init_status initialized, rtx set_src)
2342 tree decl = MEM_EXPR (loc);
2343 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2345 clobber_overlapping_mems (set, loc);
2346 decl = var_debug_decl (decl);
2348 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2349 initialized = get_init_value (set, loc, dv_from_decl (decl));
2351 if (modify)
2352 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2353 var_mem_set (set, loc, initialized, set_src);
2356 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2357 true, also delete any other live copies of the same variable part.
2358 Adjust the address first if it is stack pointer based. */
2360 static void
2361 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2363 tree decl = MEM_EXPR (loc);
2364 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2366 clobber_overlapping_mems (set, loc);
2367 decl = var_debug_decl (decl);
2368 if (clobber)
2369 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2370 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2373 /* Return true if LOC should not be expanded for location expressions,
2374 or used in them. */
2376 static inline bool
2377 unsuitable_loc (rtx loc)
2379 switch (GET_CODE (loc))
2381 case PC:
2382 case SCRATCH:
2383 case CC0:
2384 case ASM_INPUT:
2385 case ASM_OPERANDS:
2386 return true;
2388 default:
2389 return false;
2393 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2394 bound to it. */
2396 static inline void
2397 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2399 if (REG_P (loc))
2401 if (modified)
2402 var_regno_delete (set, REGNO (loc));
2403 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2404 dv_from_value (val), 0, NULL_RTX, INSERT);
2406 else if (MEM_P (loc))
2408 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2410 if (modified)
2411 clobber_overlapping_mems (set, loc);
2413 if (l && GET_CODE (l->loc) == VALUE)
2414 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2416 /* If this MEM is a global constant, we don't need it in the
2417 dynamic tables. ??? We should test this before emitting the
2418 micro-op in the first place. */
2419 while (l)
2420 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2421 break;
2422 else
2423 l = l->next;
2425 if (!l)
2426 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2427 dv_from_value (val), 0, NULL_RTX, INSERT);
2429 else
2431 /* Other kinds of equivalences are necessarily static, at least
2432 so long as we do not perform substitutions while merging
2433 expressions. */
2434 gcc_unreachable ();
2435 set_variable_part (set, loc, dv_from_value (val), 0,
2436 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2440 /* Bind a value to a location it was just stored in. If MODIFIED
2441 holds, assume the location was modified, detaching it from any
2442 values bound to it. */
2444 static void
2445 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2447 cselib_val *v = CSELIB_VAL_PTR (val);
2449 gcc_assert (cselib_preserved_value_p (v));
2451 if (dump_file)
2453 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2454 print_inline_rtx (dump_file, loc, 0);
2455 fprintf (dump_file, " evaluates to ");
2456 print_inline_rtx (dump_file, val, 0);
2457 if (v->locs)
2459 struct elt_loc_list *l;
2460 for (l = v->locs; l; l = l->next)
2462 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2463 print_inline_rtx (dump_file, l->loc, 0);
2466 fprintf (dump_file, "\n");
2469 gcc_checking_assert (!unsuitable_loc (loc));
2471 val_bind (set, val, loc, modified);
2474 /* Clear (canonical address) slots that reference X. */
2476 static bool
2477 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED,
2478 void **slot, void *x)
2480 if (vt_get_canonicalize_base ((rtx)*slot) == x)
2481 *slot = NULL;
2482 return true;
2485 /* Reset this node, detaching all its equivalences. */
2488 static void
2489 val_reset (dataflow_set *set, decl_or_value dv)
2491 variable var = shared_hash_find (set->vars, dv);
2492 location_chain node;
2493 rtx cval;
2495 if (!var || !var->n_var_parts)
2496 return;
2498 gcc_assert (var->n_var_parts == 1);
2500 if (var->onepart == ONEPART_VALUE)
2502 rtx x = dv_as_value (dv);
2503 void **slot;
2505 /* Relationships in the global cache don't change, so reset the
2506 local cache entry only. */
2507 slot = pointer_map_contains (local_get_addr_cache, x);
2508 if (slot)
2510 /* If the value resolved back to itself, odds are that other
2511 values may have cached it too. These entries now refer
2512 to the old X, so detach them too. Entries that used the
2513 old X but resolved to something else remain ok as long as
2514 that something else isn't also reset. */
2515 if (*slot == x)
2516 pointer_map_traverse (local_get_addr_cache,
2517 local_get_addr_clear_given_value, x);
2518 *slot = NULL;
2522 cval = NULL;
2523 for (node = var->var_part[0].loc_chain; node; node = node->next)
2524 if (GET_CODE (node->loc) == VALUE
2525 && canon_value_cmp (node->loc, cval))
2526 cval = node->loc;
2528 for (node = var->var_part[0].loc_chain; node; node = node->next)
2529 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2531 /* Redirect the equivalence link to the new canonical
2532 value, or simply remove it if it would point at
2533 itself. */
2534 if (cval)
2535 set_variable_part (set, cval, dv_from_value (node->loc),
2536 0, node->init, node->set_src, NO_INSERT);
2537 delete_variable_part (set, dv_as_value (dv),
2538 dv_from_value (node->loc), 0);
2541 if (cval)
2543 decl_or_value cdv = dv_from_value (cval);
2545 /* Keep the remaining values connected, accumulating links
2546 in the canonical value. */
2547 for (node = var->var_part[0].loc_chain; node; node = node->next)
2549 if (node->loc == cval)
2550 continue;
2551 else if (GET_CODE (node->loc) == REG)
2552 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2553 node->set_src, NO_INSERT);
2554 else if (GET_CODE (node->loc) == MEM)
2555 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2556 node->set_src, NO_INSERT);
2557 else
2558 set_variable_part (set, node->loc, cdv, 0,
2559 node->init, node->set_src, NO_INSERT);
2563 /* We remove this last, to make sure that the canonical value is not
2564 removed to the point of requiring reinsertion. */
2565 if (cval)
2566 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2568 clobber_variable_part (set, NULL, dv, 0, NULL);
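/* For example, when resetting V whose chain holds (V1, V2, (reg R))
   with V1 the most canonical value: V1 and V2 are cross-linked so
   that V1 becomes the hub, R is attached to V1's chain, the
   back-links from V1 and V2 to V are deleted, and V's own part is
   clobbered last.  */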
2571 /* Find the values in a given location, and map VAL to another
2572 value if it is unique, or else add the location as one holding
2573 the value. */
2575 static void
2576 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2578 decl_or_value dv = dv_from_value (val);
2580 if (dump_file && (dump_flags & TDF_DETAILS))
2582 if (insn)
2583 fprintf (dump_file, "%i: ", INSN_UID (insn));
2584 else
2585 fprintf (dump_file, "head: ");
2586 print_inline_rtx (dump_file, val, 0);
2587 fputs (" is at ", dump_file);
2588 print_inline_rtx (dump_file, loc, 0);
2589 fputc ('\n', dump_file);
2592 val_reset (set, dv);
2594 gcc_checking_assert (!unsuitable_loc (loc));
2596 if (REG_P (loc))
2598 attrs node, found = NULL;
2600 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2601 if (dv_is_value_p (node->dv)
2602 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2604 found = node;
2606 /* Map incoming equivalences. ??? Wouldn't it be nice if
2607 we just started sharing the location lists? Maybe a
2608 circular list ending at the value itself or some
2609 such. */
2610 set_variable_part (set, dv_as_value (node->dv),
2611 dv_from_value (val), node->offset,
2612 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2613 set_variable_part (set, val, node->dv, node->offset,
2614 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2617 /* If we didn't find any equivalence, we need to remember that
2618 this value is held in the named register. */
2619 if (found)
2620 return;
2622 /* ??? Attempt to find and merge equivalent MEMs or other
2623 expressions too. */
2625 val_bind (set, val, loc, false);
2628 /* Initialize dataflow set SET to be empty. */
2631 static void
2632 dataflow_set_init (dataflow_set *set)
2634 init_attrs_list_set (set->regs);
2635 set->vars = shared_hash_copy (empty_shared_hash);
2636 set->stack_adjust = 0;
2637 set->traversed_vars = NULL;
2640 /* Delete the contents of dataflow set SET. */
2642 static void
2643 dataflow_set_clear (dataflow_set *set)
2645 int i;
2647 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2648 attrs_list_clear (&set->regs[i]);
2650 shared_hash_destroy (set->vars);
2651 set->vars = shared_hash_copy (empty_shared_hash);
2654 /* Copy the contents of dataflow set SRC to DST. */
2656 static void
2657 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2659 int i;
2661 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2662 attrs_list_copy (&dst->regs[i], src->regs[i]);
2664 shared_hash_destroy (dst->vars);
2665 dst->vars = shared_hash_copy (src->vars);
2666 dst->stack_adjust = src->stack_adjust;
2669 /* Information for merging lists of locations for a given offset of a variable. */
2671 struct variable_union_info
2673 /* Node of the location chain. */
2674 location_chain lc;
2676 /* The sum of positions in the input chains. */
2677 int pos;
2679 /* The position in the chain of DST dataflow set. */
2680 int pos_dst;
2683 /* Buffer for location list sorting and its allocated size. */
2684 static struct variable_union_info *vui_vec;
2685 static int vui_allocated;
2687 /* Compare function for qsort, order the structures by POS element. */
2689 static int
2690 variable_union_info_cmp_pos (const void *n1, const void *n2)
2692 const struct variable_union_info *const i1 =
2693 (const struct variable_union_info *) n1;
2694 const struct variable_union_info *const i2 =
2695 (const struct variable_union_info *) n2;
2697 if (i1->pos != i2->pos)
2698 return i1->pos - i2->pos;
2700 return (i1->pos_dst - i2->pos_dst);
2703 /* Compute the union of the location parts of variable SRC and of
2704 the same variable from dataflow set SET. Compute the "sorted"
2705 union of the location chains for common offsets: the locations of
2706 a variable part are sorted by a priority equal to the sum of their
2707 positions in the two chains, where a location present in only one
2708 chain gets a position in the other larger than the length of
2709 either chain.
2710 When updating the location parts, the newest location is at the
2711 beginning of the chain, so this "sorted" union keeps the newest locations at the beginning. */
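/* A worked example of the priority scheme: with SRC chain (A, B) and
   DST chain (B, C), B appears in both chains and gets priority
   1 + 0 = 1, while A and C each appear in one chain only and get
   0 + 4 = 4 and 1 + 4 = 5 respectively, so the union comes out as
   (B, A, C).  */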
2713 static int
2714 variable_union (variable src, dataflow_set *set)
2716 variable dst;
2717 variable_def **dstp;
2718 int i, j, k;
2720 dstp = shared_hash_find_slot (set->vars, src->dv);
2721 if (!dstp || !*dstp)
2723 src->refcount++;
2725 dst_can_be_shared = false;
2726 if (!dstp)
2727 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2729 *dstp = src;
2731 /* Continue traversing the hash table. */
2732 return 1;
2734 else
2735 dst = *dstp;
2737 gcc_assert (src->n_var_parts);
2738 gcc_checking_assert (src->onepart == dst->onepart);
2740 /* We can combine one-part variables very efficiently, because their
2741 entries are in canonical order. */
2742 if (src->onepart)
2744 location_chain *nodep, dnode, snode;
2746 gcc_assert (src->n_var_parts == 1
2747 && dst->n_var_parts == 1);
2749 snode = src->var_part[0].loc_chain;
2750 gcc_assert (snode);
2752 restart_onepart_unshared:
2753 nodep = &dst->var_part[0].loc_chain;
2754 dnode = *nodep;
2755 gcc_assert (dnode);
2757 while (snode)
2759 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2761 if (r > 0)
2763 location_chain nnode;
2765 if (shared_var_p (dst, set->vars))
2767 dstp = unshare_variable (set, dstp, dst,
2768 VAR_INIT_STATUS_INITIALIZED);
2769 dst = *dstp;
2770 goto restart_onepart_unshared;
2773 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2774 nnode->loc = snode->loc;
2775 nnode->init = snode->init;
2776 if (!snode->set_src || MEM_P (snode->set_src))
2777 nnode->set_src = NULL;
2778 else
2779 nnode->set_src = snode->set_src;
2780 nnode->next = dnode;
2781 dnode = nnode;
2783 else if (r == 0)
2784 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2786 if (r >= 0)
2787 snode = snode->next;
2789 nodep = &dnode->next;
2790 dnode = *nodep;
2793 return 1;
2796 gcc_checking_assert (!src->onepart);
2798 /* Count the number of location parts, result is K. */
2799 for (i = 0, j = 0, k = 0;
2800 i < src->n_var_parts && j < dst->n_var_parts; k++)
2802 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2804 i++;
2805 j++;
2807 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2808 i++;
2809 else
2810 j++;
2812 k += src->n_var_parts - i;
2813 k += dst->n_var_parts - j;
2815 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2816 thus there are at most MAX_VAR_PARTS different offsets. */
2817 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2819 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2821 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2822 dst = *dstp;
2825 i = src->n_var_parts - 1;
2826 j = dst->n_var_parts - 1;
2827 dst->n_var_parts = k;
2829 for (k--; k >= 0; k--)
2831 location_chain node, node2;
2833 if (i >= 0 && j >= 0
2834 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2836 /* Compute the "sorted" union of the chains, i.e. the locations which
2837 are in both chains go first, they are sorted by the sum of
2838 positions in the chains. */
2839 int dst_l, src_l;
2840 int ii, jj, n;
2841 struct variable_union_info *vui;
2843 /* If DST is shared, compare the location chains.
2844 If they differ, we will most likely modify the chain in DST,
2845 so make a copy of DST first. */
2846 if (shared_var_p (dst, set->vars))
2848 for (node = src->var_part[i].loc_chain,
2849 node2 = dst->var_part[j].loc_chain; node && node2;
2850 node = node->next, node2 = node2->next)
2852 if (!((REG_P (node2->loc)
2853 && REG_P (node->loc)
2854 && REGNO (node2->loc) == REGNO (node->loc))
2855 || rtx_equal_p (node2->loc, node->loc)))
2857 if (node2->init < node->init)
2858 node2->init = node->init;
2859 break;
2862 if (node || node2)
2864 dstp = unshare_variable (set, dstp, dst,
2865 VAR_INIT_STATUS_UNKNOWN);
2866 dst = (variable)*dstp;
2870 src_l = 0;
2871 for (node = src->var_part[i].loc_chain; node; node = node->next)
2872 src_l++;
2873 dst_l = 0;
2874 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2875 dst_l++;
2877 if (dst_l == 1)
2879 /* The most common case, much simpler, no qsort is needed. */
2880 location_chain dstnode = dst->var_part[j].loc_chain;
2881 dst->var_part[k].loc_chain = dstnode;
2882 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2883 node2 = dstnode;
2884 for (node = src->var_part[i].loc_chain; node; node = node->next)
2885 if (!((REG_P (dstnode->loc)
2886 && REG_P (node->loc)
2887 && REGNO (dstnode->loc) == REGNO (node->loc))
2888 || rtx_equal_p (dstnode->loc, node->loc)))
2890 location_chain new_node;
2892 /* Copy the location from SRC. */
2893 new_node = (location_chain) pool_alloc (loc_chain_pool);
2894 new_node->loc = node->loc;
2895 new_node->init = node->init;
2896 if (!node->set_src || MEM_P (node->set_src))
2897 new_node->set_src = NULL;
2898 else
2899 new_node->set_src = node->set_src;
2900 node2->next = new_node;
2901 node2 = new_node;
2903 node2->next = NULL;
2905 else
2907 if (src_l + dst_l > vui_allocated)
2909 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2910 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2911 vui_allocated);
2913 vui = vui_vec;
2915 /* Fill in the locations from DST. */
2916 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2917 node = node->next, jj++)
2919 vui[jj].lc = node;
2920 vui[jj].pos_dst = jj;
2922 /* Use a position larger than any sum of two valid positions. */
2923 vui[jj].pos = jj + src_l + dst_l;
2926 /* Fill in the locations from SRC. */
2927 n = dst_l;
2928 for (node = src->var_part[i].loc_chain, ii = 0; node;
2929 node = node->next, ii++)
2931 /* Find location from NODE. */
2932 for (jj = 0; jj < dst_l; jj++)
2934 if ((REG_P (vui[jj].lc->loc)
2935 && REG_P (node->loc)
2936 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2937 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2939 vui[jj].pos = jj + ii;
2940 break;
2943 if (jj >= dst_l) /* The location has not been found. */
2945 location_chain new_node;
2947 /* Copy the location from SRC. */
2948 new_node = (location_chain) pool_alloc (loc_chain_pool);
2949 new_node->loc = node->loc;
2950 new_node->init = node->init;
2951 if (!node->set_src || MEM_P (node->set_src))
2952 new_node->set_src = NULL;
2953 else
2954 new_node->set_src = node->set_src;
2955 vui[n].lc = new_node;
2956 vui[n].pos_dst = src_l + dst_l;
2957 vui[n].pos = ii + src_l + dst_l;
2958 n++;
2962 if (dst_l == 2)
2964 /* A special case that is still very common. For dst_l == 2,
2965 the entries dst_l ... n-1 are already sorted, since
2966 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
2967 if (vui[0].pos > vui[1].pos)
2969 /* Order should be 1, 0, 2... */
2970 dst->var_part[k].loc_chain = vui[1].lc;
2971 vui[1].lc->next = vui[0].lc;
2972 if (n >= 3)
2974 vui[0].lc->next = vui[2].lc;
2975 vui[n - 1].lc->next = NULL;
2977 else
2978 vui[0].lc->next = NULL;
2979 ii = 3;
2981 else
2983 dst->var_part[k].loc_chain = vui[0].lc;
2984 if (n >= 3 && vui[2].pos < vui[1].pos)
2986 /* Order should be 0, 2, 1, 3... */
2987 vui[0].lc->next = vui[2].lc;
2988 vui[2].lc->next = vui[1].lc;
2989 if (n >= 4)
2991 vui[1].lc->next = vui[3].lc;
2992 vui[n - 1].lc->next = NULL;
2994 else
2995 vui[1].lc->next = NULL;
2996 ii = 4;
2998 else
3000 /* Order should be 0, 1, 2... */
3001 ii = 1;
3002 vui[n - 1].lc->next = NULL;
3005 for (; ii < n; ii++)
3006 vui[ii - 1].lc->next = vui[ii].lc;
3008 else
3010 qsort (vui, n, sizeof (struct variable_union_info),
3011 variable_union_info_cmp_pos);
3013 /* Reconnect the nodes in sorted order. */
3014 for (ii = 1; ii < n; ii++)
3015 vui[ii - 1].lc->next = vui[ii].lc;
3016 vui[n - 1].lc->next = NULL;
3017 dst->var_part[k].loc_chain = vui[0].lc;
3020 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3022 i--;
3023 j--;
3025 else if ((i >= 0 && j >= 0
3026 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3027 || i < 0)
3029 dst->var_part[k] = dst->var_part[j];
3030 j--;
3032 else if ((i >= 0 && j >= 0
3033 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3034 || j < 0)
3036 location_chain *nextp;
3038 /* Copy the chain from SRC. */
3039 nextp = &dst->var_part[k].loc_chain;
3040 for (node = src->var_part[i].loc_chain; node; node = node->next)
3042 location_chain new_lc;
3044 new_lc = (location_chain) pool_alloc (loc_chain_pool);
3045 new_lc->next = NULL;
3046 new_lc->init = node->init;
3047 if (!node->set_src || MEM_P (node->set_src))
3048 new_lc->set_src = NULL;
3049 else
3050 new_lc->set_src = node->set_src;
3051 new_lc->loc = node->loc;
3053 *nextp = new_lc;
3054 nextp = &new_lc->next;
3057 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3058 i--;
3060 dst->var_part[k].cur_loc = NULL;
3063 if (flag_var_tracking_uninit)
3064 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3066 location_chain node, node2;
3067 for (node = src->var_part[i].loc_chain; node; node = node->next)
3068 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3069 if (rtx_equal_p (node->loc, node2->loc))
3071 if (node->init > node2->init)
3072 node2->init = node->init;
3076 /* Continue traversing the hash table. */
3077 return 1;
3080 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3082 static void
3083 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3085 int i;
3087 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3088 attrs_list_union (&dst->regs[i], src->regs[i]);
3090 if (dst->vars == empty_shared_hash)
3092 shared_hash_destroy (dst->vars);
3093 dst->vars = shared_hash_copy (src->vars);
3095 else
3097 variable_iterator_type hi;
3098 variable var;
3100 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars),
3101 var, variable, hi)
3102 variable_union (var, dst);
3106 /* Whether the value is currently being expanded. */
3107 #define VALUE_RECURSED_INTO(x) \
3108 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3110 /* Whether no expansion was found, saving useless lookups.
3111 It must only be set when VALUE_CHANGED is clear. */
3112 #define NO_LOC_P(x) \
3113 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3115 /* Whether cur_loc in the value needs to be (re)computed. */
3116 #define VALUE_CHANGED(x) \
3117 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3118 /* Whether cur_loc in the decl needs to be (re)computed. */
3119 #define DECL_CHANGED(x) TREE_VISITED (x)
3121 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3122 user DECLs, this means they're in changed_variables. Values and
3123 debug exprs may be left with this flag set if no user variable
3124 requires them to be evaluated. */
3126 static inline void
3127 set_dv_changed (decl_or_value dv, bool newv)
3129 switch (dv_onepart_p (dv))
3131 case ONEPART_VALUE:
3132 if (newv)
3133 NO_LOC_P (dv_as_value (dv)) = false;
3134 VALUE_CHANGED (dv_as_value (dv)) = newv;
3135 break;
3137 case ONEPART_DEXPR:
3138 if (newv)
3139 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3140 /* Fall through... */
3142 default:
3143 DECL_CHANGED (dv_as_decl (dv)) = newv;
3144 break;
3148 /* Return true if DV needs to have its cur_loc recomputed. */
3150 static inline bool
3151 dv_changed_p (decl_or_value dv)
3153 return (dv_is_value_p (dv)
3154 ? VALUE_CHANGED (dv_as_value (dv))
3155 : DECL_CHANGED (dv_as_decl (dv)));
3158 /* Return a location list node whose loc is rtx_equal to LOC, in the
3159 location list of a one-part variable or value VAR, or in that of
3160 any values recursively mentioned in the location lists. VARS must
3161 be in star-canonical form. */
3163 static location_chain
3164 find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars)
3166 location_chain node;
3167 enum rtx_code loc_code;
3169 if (!var)
3170 return NULL;
3172 gcc_checking_assert (var->onepart);
3174 if (!var->n_var_parts)
3175 return NULL;
3177 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3179 loc_code = GET_CODE (loc);
3180 for (node = var->var_part[0].loc_chain; node; node = node->next)
3182 decl_or_value dv;
3183 variable rvar;
3185 if (GET_CODE (node->loc) != loc_code)
3187 if (GET_CODE (node->loc) != VALUE)
3188 continue;
3190 else if (loc == node->loc)
3191 return node;
3192 else if (loc_code != VALUE)
3194 if (rtx_equal_p (loc, node->loc))
3195 return node;
3196 continue;
3199 /* Since we're in star-canonical form, we don't need to visit
3200 non-canonical nodes: one-part variables and non-canonical
3201 values would only point back to the canonical node. */
3202 if (dv_is_value_p (var->dv)
3203 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3205 /* Skip all subsequent VALUEs. */
3206 while (node->next && GET_CODE (node->next->loc) == VALUE)
3208 node = node->next;
3209 gcc_checking_assert (!canon_value_cmp (node->loc,
3210 dv_as_value (var->dv)));
3211 if (loc == node->loc)
3212 return node;
3214 continue;
3217 gcc_checking_assert (node == var->var_part[0].loc_chain);
3218 gcc_checking_assert (!node->next);
3220 dv = dv_from_value (node->loc);
3221 rvar = vars.find_with_hash (dv, dv_htab_hash (dv));
3222 return find_loc_in_1pdv (loc, rvar, vars);
3225 /* ??? Gotta look in cselib_val locations too. */
3227 return NULL;
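/* In star-canonical form, each equivalence class has one canonical
   VALUE that lists every member in its own location chain, and every
   other member carries a single link back to that canonical VALUE.
   Under this invariant the walk above can skip runs of non-canonical
   VALUEs and recurses at most one level deep.  */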
3231 /* Hash table iteration argument passed to the variable merge functions. */
3231 struct dfset_merge
3233 /* The set in which the merge is to be inserted. */
3234 dataflow_set *dst;
3235 /* The set that we're iterating in. */
3236 dataflow_set *cur;
3237 /* The set that may contain the other dv we are to merge with. */
3238 dataflow_set *src;
3239 /* Number of onepart dvs in src. */
3240 int src_onepart_cnt;
3243 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3244 loc_cmp order, and it is maintained as such. */
3246 static void
3247 insert_into_intersection (location_chain *nodep, rtx loc,
3248 enum var_init_status status)
3250 location_chain node;
3251 int r;
3253 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3254 if ((r = loc_cmp (node->loc, loc)) == 0)
3256 node->init = MIN (node->init, status);
3257 return;
3259 else if (r > 0)
3260 break;
3262 node = (location_chain) pool_alloc (loc_chain_pool);
3264 node->loc = loc;
3265 node->set_src = NULL;
3266 node->init = status;
3267 node->next = *nodep;
3268 *nodep = node;
3271 /* Insert in DEST the intersection of the locations present in both
3272 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3273 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3274 DSM->dst. */
3276 static void
3277 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3278 location_chain s1node, variable s2var)
3280 dataflow_set *s1set = dsm->cur;
3281 dataflow_set *s2set = dsm->src;
3282 location_chain found;
3284 if (s2var)
3286 location_chain s2node;
3288 gcc_checking_assert (s2var->onepart);
3290 if (s2var->n_var_parts)
3292 s2node = s2var->var_part[0].loc_chain;
3294 for (; s1node && s2node;
3295 s1node = s1node->next, s2node = s2node->next)
3296 if (s1node->loc != s2node->loc)
3297 break;
3298 else if (s1node->loc == val)
3299 continue;
3300 else
3301 insert_into_intersection (dest, s1node->loc,
3302 MIN (s1node->init, s2node->init));
3306 for (; s1node; s1node = s1node->next)
3308 if (s1node->loc == val)
3309 continue;
3311 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3312 shared_hash_htab (s2set->vars))))
3314 insert_into_intersection (dest, s1node->loc,
3315 MIN (s1node->init, found->init));
3316 continue;
3319 if (GET_CODE (s1node->loc) == VALUE
3320 && !VALUE_RECURSED_INTO (s1node->loc))
3322 decl_or_value dv = dv_from_value (s1node->loc);
3323 variable svar = shared_hash_find (s1set->vars, dv);
3324 if (svar)
3326 if (svar->n_var_parts == 1)
3328 VALUE_RECURSED_INTO (s1node->loc) = true;
3329 intersect_loc_chains (val, dest, dsm,
3330 svar->var_part[0].loc_chain,
3331 s2var);
3332 VALUE_RECURSED_INTO (s1node->loc) = false;
3337 /* ??? Gotta look in cselib_val locations too. */
3339 /* ??? if the location is equivalent to any location in src,
3340 searched recursively
3342 add to dst the values needed to represent the equivalence
3344 telling whether location S is equivalent to another dv's
3345 location list:
3347 for each location D in the list
3349 if S and D satisfy rtx_equal_p, then it is present
3351 else if D is a value, recurse without cycles
3353 else if S and D have the same CODE and MODE
3355 for each operand oS and the corresponding oD
3357 if oS and oD are not equivalent, then S and D are not equivalent
3359 else if they are RTX vectors
3361 if any vector oS element is not equivalent to its respective oD,
3362 then S and D are not equivalent. */
3370 /* Return -1 if X should be before Y in a location list for a 1-part
3371 variable, 1 if Y should be before X, and 0 if they're equivalent
3372 and should not appear in the list. */
3374 static int
3375 loc_cmp (rtx x, rtx y)
3377 int i, j, r;
3378 RTX_CODE code = GET_CODE (x);
3379 const char *fmt;
3381 if (x == y)
3382 return 0;
3384 if (REG_P (x))
3386 if (!REG_P (y))
3387 return -1;
3388 gcc_assert (GET_MODE (x) == GET_MODE (y));
3389 if (REGNO (x) == REGNO (y))
3390 return 0;
3391 else if (REGNO (x) < REGNO (y))
3392 return -1;
3393 else
3394 return 1;
3397 if (REG_P (y))
3398 return 1;
3400 if (MEM_P (x))
3402 if (!MEM_P (y))
3403 return -1;
3404 gcc_assert (GET_MODE (x) == GET_MODE (y));
3405 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3408 if (MEM_P (y))
3409 return 1;
3411 if (GET_CODE (x) == VALUE)
3413 if (GET_CODE (y) != VALUE)
3414 return -1;
3415 /* Don't assert the modes are the same; that is true only
3416 when not recursing. (subreg:QI (value:SI 1:1) 0)
3417 and (subreg:QI (value:DI 2:2) 0) can be compared,
3418 even when the modes are different. */
3419 if (canon_value_cmp (x, y))
3420 return -1;
3421 else
3422 return 1;
3425 if (GET_CODE (y) == VALUE)
3426 return 1;
3428 /* Entry value is the least preferable kind of expression. */
3429 if (GET_CODE (x) == ENTRY_VALUE)
3431 if (GET_CODE (y) != ENTRY_VALUE)
3432 return 1;
3433 gcc_assert (GET_MODE (x) == GET_MODE (y));
3434 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3437 if (GET_CODE (y) == ENTRY_VALUE)
3438 return -1;
3440 if (GET_CODE (x) == GET_CODE (y))
3441 /* Compare operands below. */;
3442 else if (GET_CODE (x) < GET_CODE (y))
3443 return -1;
3444 else
3445 return 1;
3447 gcc_assert (GET_MODE (x) == GET_MODE (y));
3449 if (GET_CODE (x) == DEBUG_EXPR)
3451 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3452 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3453 return -1;
3454 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3455 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3456 return 1;
3459 fmt = GET_RTX_FORMAT (code);
3460 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3461 switch (fmt[i])
3463 case 'w':
3464 if (XWINT (x, i) == XWINT (y, i))
3465 break;
3466 else if (XWINT (x, i) < XWINT (y, i))
3467 return -1;
3468 else
3469 return 1;
3471 case 'n':
3472 case 'i':
3473 if (XINT (x, i) == XINT (y, i))
3474 break;
3475 else if (XINT (x, i) < XINT (y, i))
3476 return -1;
3477 else
3478 return 1;
3480 case 'V':
3481 case 'E':
3482 /* Compare the vector length first. */
3483 if (XVECLEN (x, i) == XVECLEN (y, i))
3484 /* Compare the vectors' elements. */;
3485 else if (XVECLEN (x, i) < XVECLEN (y, i))
3486 return -1;
3487 else
3488 return 1;
3490 for (j = 0; j < XVECLEN (x, i); j++)
3491 if ((r = loc_cmp (XVECEXP (x, i, j),
3492 XVECEXP (y, i, j))))
3493 return r;
3494 break;
3496 case 'e':
3497 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3498 return r;
3499 break;
3501 case 'S':
3502 case 's':
3503 if (XSTR (x, i) == XSTR (y, i))
3504 break;
3505 if (!XSTR (x, i))
3506 return -1;
3507 if (!XSTR (y, i))
3508 return 1;
3509 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3510 break;
3511 else if (r < 0)
3512 return -1;
3513 else
3514 return 1;
3516 case 'u':
3517 /* These are just backpointers, so they don't matter. */
3518 break;
3520 case '0':
3521 case 't':
3522 break;
3524 /* It is believed that rtx's at this level will never
3525 contain anything but integers and other rtx's,
3526 except for within LABEL_REFs and SYMBOL_REFs. */
3527 default:
3528 gcc_unreachable ();
3531 return 0;
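/* The resulting total order puts REGs first (sorted by REGNO), then
   MEMs (by address), then VALUEs (more canonical first); the
   remaining codes compare by code, mode and operands, with
   ENTRY_VALUEs sorting after everything else.  */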
3534 #if ENABLE_CHECKING
3535 /* Check the order of entries in one-part variables. */
3537 static int
3538 canonicalize_loc_order_check (variable_def **slot,
3539 dataflow_set *data ATTRIBUTE_UNUSED)
3541 variable var = *slot;
3542 location_chain node, next;
3544 #ifdef ENABLE_RTL_CHECKING
3545 int i;
3546 for (i = 0; i < var->n_var_parts; i++)
3547 gcc_assert (var->var_part[i].cur_loc == NULL);
3548 gcc_assert (!var->in_changed_variables);
3549 #endif
3551 if (!var->onepart)
3552 return 1;
3554 gcc_assert (var->n_var_parts == 1);
3555 node = var->var_part[0].loc_chain;
3556 gcc_assert (node);
3558 while ((next = node->next))
3560 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3561 node = next;
3564 return 1;
3566 #endif
3568 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3569 more likely to be chosen as canonical for an equivalence set.
3570 Ensure less likely values can reach more likely neighbors, making
3571 the connections bidirectional. */
3573 static int
3574 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3576 variable var = *slot;
3577 decl_or_value dv = var->dv;
3578 rtx val;
3579 location_chain node;
3581 if (!dv_is_value_p (dv))
3582 return 1;
3584 gcc_checking_assert (var->n_var_parts == 1);
3586 val = dv_as_value (dv);
3588 for (node = var->var_part[0].loc_chain; node; node = node->next)
3589 if (GET_CODE (node->loc) == VALUE)
3591 if (canon_value_cmp (node->loc, val))
3592 VALUE_RECURSED_INTO (val) = true;
3593 else
3595 decl_or_value odv = dv_from_value (node->loc);
3596 variable_def **oslot;
3597 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3599 set_slot_part (set, val, oslot, odv, 0,
3600 node->init, NULL_RTX);
3602 VALUE_RECURSED_INTO (node->loc) = true;
3606 return 1;
3609 /* Remove redundant entries from equivalence lists in onepart
3610 variables, canonicalizing equivalence sets into star shapes. */
3612 static int
3613 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3615 variable var = *slot;
3616 decl_or_value dv = var->dv;
3617 location_chain node;
3618 decl_or_value cdv;
3619 rtx val, cval;
3620 variable_def **cslot;
3621 bool has_value;
3622 bool has_marks;
3624 if (!var->onepart)
3625 return 1;
3627 gcc_checking_assert (var->n_var_parts == 1);
3629 if (dv_is_value_p (dv))
3631 cval = dv_as_value (dv);
3632 if (!VALUE_RECURSED_INTO (cval))
3633 return 1;
3634 VALUE_RECURSED_INTO (cval) = false;
3636 else
3637 cval = NULL_RTX;
3639 restart:
3640 val = cval;
3641 has_value = false;
3642 has_marks = false;
3644 gcc_assert (var->n_var_parts == 1);
3646 for (node = var->var_part[0].loc_chain; node; node = node->next)
3647 if (GET_CODE (node->loc) == VALUE)
3649 has_value = true;
3650 if (VALUE_RECURSED_INTO (node->loc))
3651 has_marks = true;
3652 if (canon_value_cmp (node->loc, cval))
3653 cval = node->loc;
3656 if (!has_value)
3657 return 1;
3659 if (cval == val)
3661 if (!has_marks || dv_is_decl_p (dv))
3662 return 1;
3664 /* Keep it marked so that we revisit it, either after visiting a
3665 child node, or after visiting a new parent that might be
3666 discovered. */
3667 VALUE_RECURSED_INTO (val) = true;
3669 for (node = var->var_part[0].loc_chain; node; node = node->next)
3670 if (GET_CODE (node->loc) == VALUE
3671 && VALUE_RECURSED_INTO (node->loc))
3673 cval = node->loc;
3674 restart_with_cval:
3675 VALUE_RECURSED_INTO (cval) = false;
3676 dv = dv_from_value (cval);
3677 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3678 if (!slot)
3680 gcc_assert (dv_is_decl_p (var->dv));
3681 /* The canonical value was reset and dropped.
3682 Remove it. */
3683 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3684 return 1;
3686 var = *slot;
3687 gcc_assert (dv_is_value_p (var->dv));
3688 if (var->n_var_parts == 0)
3689 return 1;
3690 gcc_assert (var->n_var_parts == 1);
3691 goto restart;
3694 VALUE_RECURSED_INTO (val) = false;
3696 return 1;
3699 /* Push values to the canonical one. */
3700 cdv = dv_from_value (cval);
3701 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3703 for (node = var->var_part[0].loc_chain; node; node = node->next)
3704 if (node->loc != cval)
3706 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3707 node->init, NULL_RTX);
3708 if (GET_CODE (node->loc) == VALUE)
3710 decl_or_value ndv = dv_from_value (node->loc);
3712 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3713 NO_INSERT);
3715 if (canon_value_cmp (node->loc, val))
3717 /* If it could have been a local minimum, it's not any more,
3718 since it's now neighbor to cval, so it may have to push
3719 to it. Conversely, if it wouldn't have prevailed over
3720 val, then whatever mark it has is fine: if it was to
3721 push, it will now push to a more canonical node, but if
3722 it wasn't, then it has already pushed any values it might
3723 have to. */
3724 VALUE_RECURSED_INTO (node->loc) = true;
3725 /* Make sure we visit node->loc by ensuring that cval is
3726 visited too. */
3727 VALUE_RECURSED_INTO (cval) = true;
3729 else if (!VALUE_RECURSED_INTO (node->loc))
3730 /* If we have no need to "recurse" into this node, it's
3731 already "canonicalized", so drop the link to the old
3732 parent. */
3733 clobber_variable_part (set, cval, ndv, 0, NULL);
3735 else if (GET_CODE (node->loc) == REG)
3737 attrs list = set->regs[REGNO (node->loc)], *listp;
3739 /* Change an existing attribute referring to dv so that it
3740 refers to cdv, removing any duplicate this might
3741 introduce, and checking that no previous duplicates
3742 existed, all in a single pass. */
3744 while (list)
3746 if (list->offset == 0
3747 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3748 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3749 break;
3751 list = list->next;
3754 gcc_assert (list);
3755 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3757 list->dv = cdv;
3758 for (listp = &list->next; (list = *listp); listp = &list->next)
3760 if (list->offset)
3761 continue;
3763 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3765 *listp = list->next;
3766 pool_free (attrs_pool, list);
3767 list = *listp;
3768 break;
3771 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3774 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3776 for (listp = &list->next; (list = *listp); listp = &list->next)
3778 if (list->offset)
3779 continue;
3781 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3783 *listp = list->next;
3784 pool_free (attrs_pool, list);
3785 list = *listp;
3786 break;
3789 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3792 else
3793 gcc_unreachable ();
3795 #if ENABLE_CHECKING
3796 while (list)
3798 if (list->offset == 0
3799 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3800 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3801 gcc_unreachable ();
3803 list = list->next;
3805 #endif
3809 if (val)
3810 set_slot_part (set, val, cslot, cdv, 0,
3811 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3813 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3815 /* Variable may have been unshared. */
3816 var = *slot;
3817 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3818 && var->var_part[0].loc_chain->next == NULL);
3820 if (VALUE_RECURSED_INTO (cval))
3821 goto restart_with_cval;
3823 return 1;
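/* Illustration: from chained links such as V3 -> V2 -> V1, with V1
   the most canonical value, the marking and pushing above leave V1
   listing V2, V3 and their non-VALUE locations in its chain, while
   V2 and V3 each retain only the single link to V1: the equivalence
   set ends up star-shaped, centered on V1.  */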
3826 /* Bind one-part variables to the canonical value in an equivalence
3827 set. Not doing this causes dataflow convergence failure in rare
3828 circumstances, see PR42873. Unfortunately we can't do this
3829 efficiently as part of canonicalize_values_star, since we may not
3830 have determined or even seen the canonical value of a set when we
3831 get to a variable that references another member of the set. */
3833 static int
3834 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3836 variable var = *slot;
3837 decl_or_value dv = var->dv;
3838 location_chain node;
3839 rtx cval;
3840 decl_or_value cdv;
3841 variable_def **cslot;
3842 variable cvar;
3843 location_chain cnode;
3845 if (!var->onepart || var->onepart == ONEPART_VALUE)
3846 return 1;
3848 gcc_assert (var->n_var_parts == 1);
3850 node = var->var_part[0].loc_chain;
3852 if (GET_CODE (node->loc) != VALUE)
3853 return 1;
3855 gcc_assert (!node->next);
3856 cval = node->loc;
3858 /* Push values to the canonical one. */
3859 cdv = dv_from_value (cval);
3860 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3861 if (!cslot)
3862 return 1;
3863 cvar = *cslot;
3864 gcc_assert (cvar->n_var_parts == 1);
3866 cnode = cvar->var_part[0].loc_chain;
3868 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3869 that are not "more canonical" than it. */
3870 if (GET_CODE (cnode->loc) != VALUE
3871 || !canon_value_cmp (cnode->loc, cval))
3872 return 1;
3874 /* CVAL was found to be non-canonical. Change the variable to point
3875 to the canonical VALUE. */
3876 gcc_assert (!cnode->next);
3877 cval = cnode->loc;
3879 slot = set_slot_part (set, cval, slot, dv, 0,
3880 node->init, node->set_src);
3881 clobber_slot_part (set, cval, slot, 0, node->set_src);
3883 return 1;
3886 /* Combine variable or value S1VAR (in DSM->cur) with the
3887 corresponding entry in DSM->src. Multi-part variables are combined
3888 with variable_union, whereas onepart dvs are combined with
3889 intersection. */
3891 static int
3892 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3894 dataflow_set *dst = dsm->dst;
3895 variable_def **dstslot;
3896 variable s2var, dvar = NULL;
3897 decl_or_value dv = s1var->dv;
3898 onepart_enum_t onepart = s1var->onepart;
3899 rtx val;
3900 hashval_t dvhash;
3901 location_chain node, *nodep;
3903 /* If the incoming onepart variable has an empty location list, then
3904 the intersection will be just as empty. For other variables,
3905 it's always union. */
3906 gcc_checking_assert (s1var->n_var_parts
3907 && s1var->var_part[0].loc_chain);
3909 if (!onepart)
3910 return variable_union (s1var, dst);
3912 gcc_checking_assert (s1var->n_var_parts == 1);
3914 dvhash = dv_htab_hash (dv);
3915 if (dv_is_value_p (dv))
3916 val = dv_as_value (dv);
3917 else
3918 val = NULL;
3920 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3921 if (!s2var)
3923 dst_can_be_shared = false;
3924 return 1;
3927 dsm->src_onepart_cnt--;
3928 gcc_assert (s2var->var_part[0].loc_chain
3929 && s2var->onepart == onepart
3930 && s2var->n_var_parts == 1);
3932 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3933 if (dstslot)
3935 dvar = *dstslot;
3936 gcc_assert (dvar->refcount == 1
3937 && dvar->onepart == onepart
3938 && dvar->n_var_parts == 1);
3939 nodep = &dvar->var_part[0].loc_chain;
3941 else
3943 nodep = &node;
3944 node = NULL;
3947 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3949 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3950 dvhash, INSERT);
3951 *dstslot = dvar = s2var;
3952 dvar->refcount++;
3954 else
3956 dst_can_be_shared = false;
3958 intersect_loc_chains (val, nodep, dsm,
3959 s1var->var_part[0].loc_chain, s2var);
3961 if (!dstslot)
3963 if (node)
3965 dvar = (variable) pool_alloc (onepart_pool (onepart));
3966 dvar->dv = dv;
3967 dvar->refcount = 1;
3968 dvar->n_var_parts = 1;
3969 dvar->onepart = onepart;
3970 dvar->in_changed_variables = false;
3971 dvar->var_part[0].loc_chain = node;
3972 dvar->var_part[0].cur_loc = NULL;
3973 if (onepart)
3974 VAR_LOC_1PAUX (dvar) = NULL;
3975 else
3976 VAR_PART_OFFSET (dvar, 0) = 0;
3978 dstslot
3979 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3980 INSERT);
3981 gcc_assert (!*dstslot);
3982 *dstslot = dvar;
3984 else
3985 return 1;
3989 nodep = &dvar->var_part[0].loc_chain;
3990 while ((node = *nodep))
3992 location_chain *nextp = &node->next;
3994 if (GET_CODE (node->loc) == REG)
3996 attrs list;
3998 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3999 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4000 && dv_is_value_p (list->dv))
4001 break;
4003 if (!list)
4004 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4005 dv, 0, node->loc);
4006 /* If this value became canonical for another value that had
4007 this register, we want to leave it alone. */
4008 else if (dv_as_value (list->dv) != val)
4010 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4011 dstslot, dv, 0,
4012 node->init, NULL_RTX);
4013 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4015 /* Since nextp points into the removed node, we can't
4016 use it. The pointer to the next node moved to nodep.
4017 However, if the variable we're walking is unshared
4018 during our walk, we'll keep walking the location list
4019 of the previously-shared variable, in which case the
4020 node won't have been removed, and we'll want to skip
4021 it. That's why we test *nodep here. */
4022 if (*nodep != node)
4023 nextp = nodep;
4026 else
4027 /* Canonicalization puts registers first, so we don't have to
4028 walk it all. */
4029 break;
4030 nodep = nextp;
4033 if (dvar != *dstslot)
4034 dvar = *dstslot;
4035 nodep = &dvar->var_part[0].loc_chain;
4037 if (val)
4039 /* Mark all referenced nodes for canonicalization, and make sure
4040 we have mutual equivalence links. */
4041 VALUE_RECURSED_INTO (val) = true;
4042 for (node = *nodep; node; node = node->next)
4043 if (GET_CODE (node->loc) == VALUE)
4045 VALUE_RECURSED_INTO (node->loc) = true;
4046 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4047 node->init, NULL, INSERT);
4050 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4051 gcc_assert (*dstslot == dvar);
4052 canonicalize_values_star (dstslot, dst);
4053 gcc_checking_assert (dstslot
4054 == shared_hash_find_slot_noinsert_1 (dst->vars,
4055 dv, dvhash));
4056 dvar = *dstslot;
4058 else
4060 bool has_value = false, has_other = false;
4062 /* If we have one value and anything else, we're going to
4063 canonicalize this, so make sure all values have an entry in
4064 the table and are marked for canonicalization. */
4065 for (node = *nodep; node; node = node->next)
4067 if (GET_CODE (node->loc) == VALUE)
4069 /* If this was marked during register canonicalization,
4070 we know we have to canonicalize values. */
4071 if (has_value)
4072 has_other = true;
4073 has_value = true;
4074 if (has_other)
4075 break;
4077 else
4079 has_other = true;
4080 if (has_value)
4081 break;
4085 if (has_value && has_other)
4087 for (node = *nodep; node; node = node->next)
4089 if (GET_CODE (node->loc) == VALUE)
4091 decl_or_value dv = dv_from_value (node->loc);
4092 variable_def **slot = NULL;
4094 if (shared_hash_shared (dst->vars))
4095 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4096 if (!slot)
4097 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4098 INSERT);
4099 if (!*slot)
4101 variable var = (variable) pool_alloc (onepart_pool
4102 (ONEPART_VALUE));
4103 var->dv = dv;
4104 var->refcount = 1;
4105 var->n_var_parts = 1;
4106 var->onepart = ONEPART_VALUE;
4107 var->in_changed_variables = false;
4108 var->var_part[0].loc_chain = NULL;
4109 var->var_part[0].cur_loc = NULL;
4110 VAR_LOC_1PAUX (var) = NULL;
4111 *slot = var;
4114 VALUE_RECURSED_INTO (node->loc) = true;
4118 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4119 gcc_assert (*dstslot == dvar);
4120 canonicalize_values_star (dstslot, dst);
4121 gcc_checking_assert (dstslot
4122 == shared_hash_find_slot_noinsert_1 (dst->vars,
4123 dv, dvhash));
4124 dvar = *dstslot;
4128 if (!onepart_variable_different_p (dvar, s2var))
4130 variable_htab_free (dvar);
4131 *dstslot = dvar = s2var;
4132 dvar->refcount++;
4134 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4136 variable_htab_free (dvar);
4137 *dstslot = dvar = s1var;
4138 dvar->refcount++;
4139 dst_can_be_shared = false;
4141 else
4142 dst_can_be_shared = false;
4144 return 1;
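/* variable_merge_over_cur above implements, for one-part variables, an
   intersection of the location lists coming from the two input sets
   (as the comment below also notes).  A minimal sketch of that
   intersection on a simplified sorted list type; ip_node, its int
   keys, the caller-provided pool and intersect_sorted are
   illustrative stand-ins, not the structures used in this pass.  */

struct ip_node { int key; struct ip_node *next; };

static struct ip_node *
intersect_sorted (struct ip_node *a, struct ip_node *b,
		  struct ip_node *pool, int *used)
{
  struct ip_node *head = NULL, **tail = &head;

  while (a && b)
    {
      if (a->key < b->key)
	a = a->next;
      else if (b->key < a->key)
	b = b->next;
      else
	{
	  /* Present in both inputs: keep it in the result.  */
	  struct ip_node *n = &pool[(*used)++];
	  n->key = a->key;
	  n->next = NULL;
	  *tail = n;
	  tail = &n->next;
	  a = a->next;
	  b = b->next;
	}
    }
  return head;
}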
4147 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4148 multi-part variable. Unions of multi-part variables and
4149 intersections of one-part ones will be handled in
4150 variable_merge_over_cur(). */
4152 static int
4153 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4155 dataflow_set *dst = dsm->dst;
4156 decl_or_value dv = s2var->dv;
4158 if (!s2var->onepart)
4160 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4161 *dstp = s2var;
4162 s2var->refcount++;
4163 return 1;
4166 dsm->src_onepart_cnt++;
4167 return 1;
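/* variable_merge_over_src carries multi-part variables over by
   reference: the destination points at the very same structure and
   bumps its refcount, deferring any copy until a writer needs one.
   A minimal sketch of that copy-on-write pairing; rc_var, rc_share
   and rc_unshare are illustrative stand-ins for the real shared
   variables and unshare_variable.  */

struct rc_var { int refcount; int payload; };

static struct rc_var *
rc_share (struct rc_var *v)
{
  v->refcount++;		/* Share: no copy yet.  */
  return v;
}

static struct rc_var *
rc_unshare (struct rc_var **slot, struct rc_var *pool, int *used)
{
  struct rc_var *v = *slot, *copy;

  if (v->refcount == 1)
    return v;			/* Sole owner: modify in place.  */
  copy = &pool[(*used)++];	/* Copy before the first write.  */
  copy->refcount = 1;
  copy->payload = v->payload;
  v->refcount--;
  *slot = copy;
  return copy;
}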
4170 /* Combine dataflow set information from SRC2 into DST.  */
4173 static void
4174 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4176 dataflow_set cur = *dst;
4177 dataflow_set *src1 = &cur;
4178 struct dfset_merge dsm;
4179 int i;
4180 size_t src1_elems, src2_elems;
4181 variable_iterator_type hi;
4182 variable var;
4184 src1_elems = shared_hash_htab (src1->vars).elements ();
4185 src2_elems = shared_hash_htab (src2->vars).elements ();
4186 dataflow_set_init (dst);
4187 dst->stack_adjust = cur.stack_adjust;
4188 shared_hash_destroy (dst->vars);
4189 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4190 dst->vars->refcount = 1;
4191 dst->vars->htab.create (MAX (src1_elems, src2_elems));
4193 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4194 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4196 dsm.dst = dst;
4197 dsm.src = src2;
4198 dsm.cur = src1;
4199 dsm.src_onepart_cnt = 0;
4201 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars),
4202 var, variable, hi)
4203 variable_merge_over_src (var, &dsm);
4204 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars),
4205 var, variable, hi)
4206 variable_merge_over_cur (var, &dsm);
4208 if (dsm.src_onepart_cnt)
4209 dst_can_be_shared = false;
4211 dataflow_set_destroy (src1);
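/* dataflow_set_merge above unions the per-register attribute lists of
   its two inputs.  A sketch of such a union on a simplified list,
   quadratic but adequate for the short lists kept per register;
   au_node and attrs_union_sketch are illustrative stand-ins for the
   real attrs lists and attrs_list_mpdv_union.  */

struct au_node { int id; struct au_node *next; };

static struct au_node *
attrs_union_sketch (struct au_node *a, struct au_node *b,
		    struct au_node *pool, int *used)
{
  struct au_node *head = NULL, *n, *p;

  /* Copy every entry of A.  */
  for (; a; a = a->next)
    {
      n = &pool[(*used)++];
      n->id = a->id;
      n->next = head;
      head = n;
    }
  /* Add the entries of B that A didn't already have.  */
  for (; b; b = b->next)
    {
      for (p = head; p; p = p->next)
	if (p->id == b->id)
	  break;
      if (!p)
	{
	  n = &pool[(*used)++];
	  n->id = b->id;
	  n->next = head;
	  head = n;
	}
    }
  return head;
}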
4214 /* Mark register equivalences: record mutual equivalences between the
4215 canonical value in each register and the other values it holds.  */
4216 static void
4217 dataflow_set_equiv_regs (dataflow_set *set)
4219 int i;
4220 attrs list, *listp;
4222 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4224 rtx canon[NUM_MACHINE_MODES];
4226 /* If the list is empty or has only one entry, there is
4227 nothing to canonicalize.  */
4228 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4229 continue;
4231 memset (canon, 0, sizeof (canon));
4233 for (list = set->regs[i]; list; list = list->next)
4234 if (list->offset == 0 && dv_is_value_p (list->dv))
4236 rtx val = dv_as_value (list->dv);
4237 rtx *cvalp = &canon[(int)GET_MODE (val)];
4238 rtx cval = *cvalp;
4240 if (canon_value_cmp (val, cval))
4241 *cvalp = val;
4244 for (list = set->regs[i]; list; list = list->next)
4245 if (list->offset == 0 && dv_onepart_p (list->dv))
4247 rtx cval = canon[(int)GET_MODE (list->loc)];
4249 if (!cval)
4250 continue;
4252 if (dv_is_value_p (list->dv))
4254 rtx val = dv_as_value (list->dv);
4256 if (val == cval)
4257 continue;
4259 VALUE_RECURSED_INTO (val) = true;
4260 set_variable_part (set, val, dv_from_value (cval), 0,
4261 VAR_INIT_STATUS_INITIALIZED,
4262 NULL, NO_INSERT);
4265 VALUE_RECURSED_INTO (cval) = true;
4266 set_variable_part (set, cval, list->dv, 0,
4267 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4270 for (listp = &set->regs[i]; (list = *listp);
4271 listp = list ? &list->next : listp)
4272 if (list->offset == 0 && dv_onepart_p (list->dv))
4274 rtx cval = canon[(int)GET_MODE (list->loc)];
4275 variable_def **slot;
4277 if (!cval)
4278 continue;
4280 if (dv_is_value_p (list->dv))
4282 rtx val = dv_as_value (list->dv);
4283 if (!VALUE_RECURSED_INTO (val))
4284 continue;
4287 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4288 canonicalize_values_star (slot, set);
4289 if (*listp != list)
4290 list = NULL;
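/* dataflow_set_equiv_regs picks one canonical value per machine mode
   among the values resident in a register, then records mutual
   equivalences with it.  A sketch of the selection step, using an
   integer "mode" and an ordering where the smallest id wins; cv_ent
   and pick_canon are illustrative stand-ins for the real attrs
   entries and canon_value_cmp.  */

struct cv_ent { int mode; int id; struct cv_ent *next; };

static void
pick_canon (struct cv_ent *list, struct cv_ent **canon, int nmodes)
{
  struct cv_ent *v;
  int i;

  for (i = 0; i < nmodes; i++)
    canon[i] = NULL;
  for (v = list; v; v = v->next)
    /* Keep the preferred value seen so far for this mode.  */
    if (!canon[v->mode] || v->id < canon[v->mode]->id)
      canon[v->mode] = v;
}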
4295 /* Remove any redundant values in the location list of VAR, which must
4296 be unshared and 1-part. */
4298 static void
4299 remove_duplicate_values (variable var)
4301 location_chain node, *nodep;
4303 gcc_assert (var->onepart);
4304 gcc_assert (var->n_var_parts == 1);
4305 gcc_assert (var->refcount == 1);
4307 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4309 if (GET_CODE (node->loc) == VALUE)
4311 if (VALUE_RECURSED_INTO (node->loc))
4313 /* Remove duplicate value node. */
4314 *nodep = node->next;
4315 pool_free (loc_chain_pool, node);
4316 continue;
4318 else
4319 VALUE_RECURSED_INTO (node->loc) = true;
4321 nodep = &node->next;
4324 for (node = var->var_part[0].loc_chain; node; node = node->next)
4325 if (GET_CODE (node->loc) == VALUE)
4327 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4328 VALUE_RECURSED_INTO (node->loc) = false;
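/* remove_duplicate_values is a two-pass mark-and-clear: the first
   pass unlinks nodes whose value is already marked and marks the
   rest, the second pass clears the marks so the bit can be reused.
   The same shape on a plain int list; dm_node, the caller-provided
   seen[] array and dedup_marked are illustrative stand-ins for the
   location chain and the VALUE_RECURSED_INTO flag.  */

struct dm_node { int key; struct dm_node *next; };

static void
dedup_marked (struct dm_node **listp, char *seen)
{
  struct dm_node *n, **np;

  for (np = listp; (n = *np); )
    if (seen[n->key])
      *np = n->next;		/* Duplicate: unlink it.  */
    else
      {
	seen[n->key] = 1;	/* First occurrence: mark and keep.  */
	np = &n->next;
      }
  /* Second pass: clear the marks for the next caller.  */
  for (n = *listp; n; n = n->next)
    seen[n->key] = 0;
}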
4333 /* Hash table iteration argument passed to the variable_post_merge_*
traversals below.  */
4334 struct dfset_post_merge
4336 /* The new input set for the current block. */
4337 dataflow_set *set;
4338 /* Pointer to the permanent input set for the current block, or
4339 NULL. */
4340 dataflow_set **permp;
4343 /* Create values for incoming expressions associated with one-part
4344 variables that don't have value numbers for them. */
4346 static int
4347 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4349 dataflow_set *set = dfpm->set;
4350 variable var = *slot;
4351 location_chain node;
4353 if (!var->onepart || !var->n_var_parts)
4354 return 1;
4356 gcc_assert (var->n_var_parts == 1);
4358 if (dv_is_decl_p (var->dv))
4360 bool check_dupes = false;
4362 restart:
4363 for (node = var->var_part[0].loc_chain; node; node = node->next)
4365 if (GET_CODE (node->loc) == VALUE)
4366 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4367 else if (GET_CODE (node->loc) == REG)
4369 attrs att, *attp, *curp = NULL;
4371 if (var->refcount != 1)
4373 slot = unshare_variable (set, slot, var,
4374 VAR_INIT_STATUS_INITIALIZED);
4375 var = *slot;
4376 goto restart;
4379 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4380 attp = &att->next)
4381 if (att->offset == 0
4382 && GET_MODE (att->loc) == GET_MODE (node->loc))
4384 if (dv_is_value_p (att->dv))
4386 rtx cval = dv_as_value (att->dv);
4387 node->loc = cval;
4388 check_dupes = true;
4389 break;
4391 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4392 curp = attp;
4395 if (!curp)
4397 curp = attp;
4398 while (*curp)
4399 if ((*curp)->offset == 0
4400 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4401 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4402 break;
4403 else
4404 curp = &(*curp)->next;
4405 gcc_assert (*curp);
4408 if (!att)
4410 decl_or_value cdv;
4411 rtx cval;
4413 if (!*dfpm->permp)
4415 *dfpm->permp = XNEW (dataflow_set);
4416 dataflow_set_init (*dfpm->permp);
4419 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4420 att; att = att->next)
4421 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4423 gcc_assert (att->offset == 0
4424 && dv_is_value_p (att->dv));
4425 val_reset (set, att->dv);
4426 break;
4429 if (att)
4431 cdv = att->dv;
4432 cval = dv_as_value (cdv);
4434 else
4436 /* Create a unique value to hold this register,
4437 one that ought to be found and reused in
4438 subsequent rounds. */
4439 cselib_val *v;
4440 gcc_assert (!cselib_lookup (node->loc,
4441 GET_MODE (node->loc), 0,
4442 VOIDmode));
4443 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4444 VOIDmode);
4445 cselib_preserve_value (v);
4446 cselib_invalidate_rtx (node->loc);
4447 cval = v->val_rtx;
4448 cdv = dv_from_value (cval);
4449 if (dump_file)
4450 fprintf (dump_file,
4451 "Created new value %u:%u for reg %i\n",
4452 v->uid, v->hash, REGNO (node->loc));
4455 var_reg_decl_set (*dfpm->permp, node->loc,
4456 VAR_INIT_STATUS_INITIALIZED,
4457 cdv, 0, NULL, INSERT);
4459 node->loc = cval;
4460 check_dupes = true;
4463 /* Remove the attribute referring to the decl; it now
4464 uses the value for the register, which either already exists
4465 or will be added when we bring the permanent set in.  */
4466 att = *curp;
4467 *curp = att->next;
4468 pool_free (attrs_pool, att);
4472 if (check_dupes)
4473 remove_duplicate_values (var);
4476 return 1;
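/* The attp/curp walks above use the pointer-to-pointer idiom: by
   advancing a pointer to the link that points at the current node,
   a match can be unlinked without tracking a separate "previous"
   node.  A minimal stand-alone form of the idiom; pp_node and
   remove_matching are illustrative stand-ins.  */

struct pp_node { int key; struct pp_node *next; };

static void
remove_matching (struct pp_node **listp, int key)
{
  struct pp_node **np = listp, *n;

  while ((n = *np))
    if (n->key == key)
      *np = n->next;		/* Unlink; *np is now the successor.  */
    else
      np = &n->next;		/* Keep; step to the next link.  */
}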
4479 /* Reset values in the permanent set that are not associated with the
4480 chosen expression. */
4482 static int
4483 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4485 dataflow_set *set = dfpm->set;
4486 variable pvar = *pslot, var;
4487 location_chain pnode;
4488 decl_or_value dv;
4489 attrs att;
4491 gcc_assert (dv_is_value_p (pvar->dv)
4492 && pvar->n_var_parts == 1);
4493 pnode = pvar->var_part[0].loc_chain;
4494 gcc_assert (pnode
4495 && !pnode->next
4496 && REG_P (pnode->loc));
4498 dv = pvar->dv;
4500 var = shared_hash_find (set->vars, dv);
4501 if (var)
4503 /* Although variable_post_merge_new_vals may have made decls
4504 non-star-canonical, values that pre-existed in canonical form
4505 remain canonical, and newly-created values reference a single
4506 REG, so they are canonical as well. Since VAR has the
4507 location list for a VALUE, using find_loc_in_1pdv for it is
4508 fine, since VALUEs don't map back to DECLs. */
4509 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4510 return 1;
4511 val_reset (set, dv);
4514 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4515 if (att->offset == 0
4516 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4517 && dv_is_value_p (att->dv))
4518 break;
4520 /* If there is a value associated with this register already, create
4521 an equivalence. */
4522 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4524 rtx cval = dv_as_value (att->dv);
4525 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4526 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4527 NULL, INSERT);
4529 else if (!att)
4531 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4532 dv, 0, pnode->loc);
4533 variable_union (pvar, set);
4536 return 1;
4539 /* Create values for expressions that lack them, reconcile the
4540 permanent set, and canonicalize the values and variables in SET.  */
4542 static void
4543 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4545 struct dfset_post_merge dfpm;
4547 dfpm.set = set;
4548 dfpm.permp = permp;
4550 shared_hash_htab (set->vars)
4551 .traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4552 if (*permp)
4553 shared_hash_htab ((*permp)->vars)
4554 .traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4555 shared_hash_htab (set->vars)
4556 .traverse <dataflow_set *, canonicalize_values_star> (set);
4557 shared_hash_htab (set->vars)
4558 .traverse <dataflow_set *, canonicalize_vars_star> (set);
4561 /* Return a node whose loc is a MEM that refers to EXPR in the
4562 location list of a one-part variable or value VAL, or in that of
4563 any values recursively mentioned in the location lists. */
4565 static location_chain
4566 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars)
4568 location_chain node;
4569 decl_or_value dv;
4570 variable var;
4571 location_chain where = NULL;
4573 if (!val)
4574 return NULL;
4576 gcc_assert (GET_CODE (val) == VALUE
4577 && !VALUE_RECURSED_INTO (val));
4579 dv = dv_from_value (val);
4580 var = vars.find_with_hash (dv, dv_htab_hash (dv));
4582 if (!var)
4583 return NULL;
4585 gcc_assert (var->onepart);
4587 if (!var->n_var_parts)
4588 return NULL;
4590 VALUE_RECURSED_INTO (val) = true;
4592 for (node = var->var_part[0].loc_chain; node; node = node->next)
4593 if (MEM_P (node->loc)
4594 && MEM_EXPR (node->loc) == expr
4595 && INT_MEM_OFFSET (node->loc) == 0)
4597 where = node;
4598 break;
4600 else if (GET_CODE (node->loc) == VALUE
4601 && !VALUE_RECURSED_INTO (node->loc)
4602 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4603 break;
4605 VALUE_RECURSED_INTO (val) = false;
4607 return where;
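/* find_mem_expr_in_1pdv guards its recursion with
   VALUE_RECURSED_INTO: the flag is set before descending into a
   value's location list and cleared on the way back out, so cyclic
   chains of value references terminate.  The same shape on a
   stand-in graph; vg_node and vg_find are illustrative.  */

struct vg_node { int key; int visited; int nsucc; struct vg_node **succ; };

static struct vg_node *
vg_find (struct vg_node *n, int key)
{
  struct vg_node *found = NULL;
  int i;

  if (!n || n->visited)
    return NULL;		/* Already on the current path.  */
  if (n->key == key)
    return n;
  n->visited = 1;
  for (i = 0; i < n->nsucc && !found; i++)
    found = vg_find (n->succ[i], key);
  n->visited = 0;		/* Unwind the mark, as above.  */
  return found;
}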
4610 /* Return TRUE if the value of MEM may vary across a call. */
4612 static bool
4613 mem_dies_at_call (rtx mem)
4615 tree expr = MEM_EXPR (mem);
4616 tree decl;
4618 if (!expr)
4619 return true;
4621 decl = get_base_address (expr);
4623 if (!decl)
4624 return true;
4626 if (!DECL_P (decl))
4627 return true;
4629 return (may_be_aliased (decl)
4630 || (!TREE_READONLY (decl) && is_global_var (decl)));
4633 /* Remove all MEMs from the location list of a hash table entry for a
4634 one-part variable, except those whose MEM attributes map back to
4635 the variable itself, directly or within a VALUE. */
4637 static int
4638 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4640 variable var = *slot;
4642 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4644 tree decl = dv_as_decl (var->dv);
4645 location_chain loc, *locp;
4646 bool changed = false;
4648 if (!var->n_var_parts)
4649 return 1;
4651 gcc_assert (var->n_var_parts == 1);
4653 if (shared_var_p (var, set->vars))
4655 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4657 /* We want to remove dying MEMs that don't refer to DECL.  */
4658 if (GET_CODE (loc->loc) == MEM
4659 && (MEM_EXPR (loc->loc) != decl
4660 || INT_MEM_OFFSET (loc->loc) != 0)
4661 && !mem_dies_at_call (loc->loc))
4662 break;
4663 /* We want to move the MEMs that do refer to DECL here.  */
4664 else if (GET_CODE (loc->loc) == VALUE
4665 && find_mem_expr_in_1pdv (decl, loc->loc,
4666 shared_hash_htab (set->vars)))
4667 break;
4670 if (!loc)
4671 return 1;
4673 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4674 var = *slot;
4675 gcc_assert (var->n_var_parts == 1);
4678 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4679 loc; loc = *locp)
4681 rtx old_loc = loc->loc;
4682 if (GET_CODE (old_loc) == VALUE)
4684 location_chain mem_node
4685 = find_mem_expr_in_1pdv (decl, loc->loc,
4686 shared_hash_htab (set->vars));
4688 /* ??? This picks up only one out of multiple MEMs that
4689 refer to the same variable. Do we ever need to be
4690 concerned about dealing with more than one, or, given
4691 that they should all map to the same variable
4692 location, their addresses will have been merged and
4693 they will be regarded as equivalent? */
4694 if (mem_node)
4696 loc->loc = mem_node->loc;
4697 loc->set_src = mem_node->set_src;
4698 loc->init = MIN (loc->init, mem_node->init);
4702 if (GET_CODE (loc->loc) != MEM
4703 || (MEM_EXPR (loc->loc) == decl
4704 && INT_MEM_OFFSET (loc->loc) == 0)
4705 || !mem_dies_at_call (loc->loc))
4707 if (old_loc != loc->loc && emit_notes)
4709 if (old_loc == var->var_part[0].cur_loc)
4711 changed = true;
4712 var->var_part[0].cur_loc = NULL;
4715 locp = &loc->next;
4716 continue;
4719 if (emit_notes)
4721 if (old_loc == var->var_part[0].cur_loc)
4723 changed = true;
4724 var->var_part[0].cur_loc = NULL;
4727 *locp = loc->next;
4728 pool_free (loc_chain_pool, loc);
4731 if (!var->var_part[0].loc_chain)
4733 var->n_var_parts--;
4734 changed = true;
4736 if (changed)
4737 variable_was_changed (var, set);
4740 return 1;
4743 /* Remove all MEMs from the location list of a hash table entry for a
4744 value. */
4746 static int
4747 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4749 variable var = *slot;
4751 if (var->onepart == ONEPART_VALUE)
4753 location_chain loc, *locp;
4754 bool changed = false;
4755 rtx cur_loc;
4757 gcc_assert (var->n_var_parts == 1);
4759 if (shared_var_p (var, set->vars))
4761 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4762 if (GET_CODE (loc->loc) == MEM
4763 && mem_dies_at_call (loc->loc))
4764 break;
4766 if (!loc)
4767 return 1;
4769 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4770 var = *slot;
4771 gcc_assert (var->n_var_parts == 1);
4774 if (VAR_LOC_1PAUX (var))
4775 cur_loc = VAR_LOC_FROM (var);
4776 else
4777 cur_loc = var->var_part[0].cur_loc;
4779 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4780 loc; loc = *locp)
4782 if (GET_CODE (loc->loc) != MEM
4783 || !mem_dies_at_call (loc->loc))
4785 locp = &loc->next;
4786 continue;
4789 *locp = loc->next;
4790 /* If we have deleted the location that was last emitted,
4791 we have to emit a new location, so add the variable to the
4792 set of changed variables.  */
4793 if (cur_loc == loc->loc)
4795 changed = true;
4796 var->var_part[0].cur_loc = NULL;
4797 if (VAR_LOC_1PAUX (var))
4798 VAR_LOC_FROM (var) = NULL;
4800 pool_free (loc_chain_pool, loc);
4803 if (!var->var_part[0].loc_chain)
4805 var->n_var_parts--;
4806 changed = true;
4808 if (changed)
4809 variable_was_changed (var, set);
4812 return 1;
4815 /* Remove all variable-location information about call-clobbered
4816 registers, as well as associations between MEMs and VALUEs. */
4818 static void
4819 dataflow_set_clear_at_call (dataflow_set *set)
4821 unsigned int r;
4822 hard_reg_set_iterator hrsi;
4824 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4825 var_regno_delete (set, r);
4827 if (MAY_HAVE_DEBUG_INSNS)
4829 set->traversed_vars = set->vars;
4830 shared_hash_htab (set->vars)
4831 .traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4832 set->traversed_vars = set->vars;
4833 shared_hash_htab (set->vars)
4834 .traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4835 set->traversed_vars = NULL;
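/* Return true if some location in VP1's chain has no counterpart in
   VP2's chain (registers are matched by register number, other
   locations with rtx_equal_p).  Note the test is one-directional;
   callers compare in both directions.  */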
4839 static bool
4840 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4842 location_chain lc1, lc2;
4844 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4846 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4848 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4850 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4851 break;
4853 if (rtx_equal_p (lc1->loc, lc2->loc))
4854 break;
4856 if (!lc2)
4857 return true;
4859 return false;
4862 /* Return true if one-part variables VAR1 and VAR2 are different.
4863 They must be in canonical order. */
4865 static bool
4866 onepart_variable_different_p (variable var1, variable var2)
4868 location_chain lc1, lc2;
4870 if (var1 == var2)
4871 return false;
4873 gcc_assert (var1->n_var_parts == 1
4874 && var2->n_var_parts == 1);
4876 lc1 = var1->var_part[0].loc_chain;
4877 lc2 = var2->var_part[0].loc_chain;
4879 gcc_assert (lc1 && lc2);
4881 while (lc1 && lc2)
4883 if (loc_cmp (lc1->loc, lc2->loc))
4884 return true;
4885 lc1 = lc1->next;
4886 lc2 = lc2->next;
4889 return lc1 != lc2;
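/* Because one-part location chains are kept in canonical order, the
   inequality test above is a single lockstep walk, with leftovers in
   either chain meaning "different".  The same comparison on a
   stand-in list; ls_node and lists_differ are illustrative.  */

struct ls_node { int key; struct ls_node *next; };

static int
lists_differ (struct ls_node *a, struct ls_node *b)
{
  while (a && b)
    {
      if (a->key != b->key)
	return 1;
      a = a->next;
      b = b->next;
    }
  /* Equal prefixes: they differ iff one list has nodes left.  */
  return a != b;
}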
4892 /* Return true if variables VAR1 and VAR2 are different. */
4894 static bool
4895 variable_different_p (variable var1, variable var2)
4897 int i;
4899 if (var1 == var2)
4900 return false;
4902 if (var1->onepart != var2->onepart)
4903 return true;
4905 if (var1->n_var_parts != var2->n_var_parts)
4906 return true;
4908 if (var1->onepart && var1->n_var_parts)
4910 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4911 && var1->n_var_parts == 1);
4912 /* One-part values have locations in a canonical order. */
4913 return onepart_variable_different_p (var1, var2);
4916 for (i = 0; i < var1->n_var_parts; i++)
4918 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4919 return true;
4920 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4921 return true;
4922 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4923 return true;
4925 return false;
4928 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4930 static bool
4931 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4933 variable_iterator_type hi;
4934 variable var1;
4936 if (old_set->vars == new_set->vars)
4937 return false;
4939 if (shared_hash_htab (old_set->vars).elements ()
4940 != shared_hash_htab (new_set->vars).elements ())
4941 return true;
4943 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars),
4944 var1, variable, hi)
4946 variable_table_type htab = shared_hash_htab (new_set->vars);
4947 variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv));
4948 if (!var2)
4950 if (dump_file && (dump_flags & TDF_DETAILS))
4952 fprintf (dump_file, "dataflow difference found: removal of:\n");
4953 dump_var (var1);
4955 return true;
4958 if (variable_different_p (var1, var2))
4960 if (dump_file && (dump_flags & TDF_DETAILS))
4962 fprintf (dump_file, "dataflow difference found: "
4963 "old and new follow:\n");
4964 dump_var (var1);
4965 dump_var (var2);
4967 return true;
4971 /* No need to traverse the second hashtab: if both have the same
4972 number of elements and every entry in the first was found in
4973 the second, the second can't have any extra entries.  */
4974 return false;
4977 /* Free the contents of dataflow set SET. */
4979 static void
4980 dataflow_set_destroy (dataflow_set *set)
4982 int i;
4984 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4985 attrs_list_clear (&set->regs[i]);
4987 shared_hash_destroy (set->vars);
4988 set->vars = NULL;
4991 /* Return true if RTL X contains a SYMBOL_REF. */
4993 static bool
4994 contains_symbol_ref (rtx x)
4996 const char *fmt;
4997 RTX_CODE code;
4998 int i;
5000 if (!x)
5001 return false;
5003 code = GET_CODE (x);
5004 if (code == SYMBOL_REF)
5005 return true;
5007 fmt = GET_RTX_FORMAT (code);
5008 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5010 if (fmt[i] == 'e')
5012 if (contains_symbol_ref (XEXP (x, i)))
5013 return true;
5015 else if (fmt[i] == 'E')
5017 int j;
5018 for (j = 0; j < XVECLEN (x, i); j++)
5019 if (contains_symbol_ref (XVECEXP (x, i, j)))
5020 return true;
5024 return false;
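/* contains_symbol_ref above is the usual recursive rtx walk: test
   the node itself, then recurse into 'e' (rtx) operands and 'E'
   (vector) operands.  The same shape on a stand-in n-ary tree;
   tw_node and tree_contains are illustrative.  */

struct tw_node { int code; int nkids; struct tw_node **kid; };

static int
tree_contains (struct tw_node *n, int code)
{
  int i;

  if (!n)
    return 0;
  if (n->code == code)
    return 1;			/* The node itself matches.  */
  for (i = 0; i < n->nkids; i++)
    if (tree_contains (n->kid[i], code))
      return 1;			/* A match somewhere below.  */
  return 0;
}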
5027 /* Shall EXPR be tracked? */
5029 static bool
5030 track_expr_p (tree expr, bool need_rtl)
5032 rtx decl_rtl;
5033 tree realdecl;
5035 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5036 return DECL_RTL_SET_P (expr);
5038 /* If EXPR is not a parameter or a variable, do not track it.  */
5039 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5040 return 0;
5042 /* It also must have a name... */
5043 if (!DECL_NAME (expr) && need_rtl)
5044 return 0;
5046 /* ... and an RTL assigned to it.  */
5047 decl_rtl = DECL_RTL_IF_SET (expr);
5048 if (!decl_rtl && need_rtl)
5049 return 0;
5051 /* If this expression is really a debug alias of some other declaration, we
5052 don't need to track this expression if the ultimate declaration is
5053 ignored. */
5054 realdecl = expr;
5055 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5057 realdecl = DECL_DEBUG_EXPR (realdecl);
5058 if (!DECL_P (realdecl))
5060 if (handled_component_p (realdecl)
5061 || (TREE_CODE (realdecl) == MEM_REF
5062 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5064 HOST_WIDE_INT bitsize, bitpos, maxsize;
5065 tree innerdecl
5066 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5067 &maxsize);
5068 if (!DECL_P (innerdecl)
5069 || DECL_IGNORED_P (innerdecl)
5070 || TREE_STATIC (innerdecl)
5071 || bitsize <= 0
5072 || bitpos + bitsize > 256
5073 || bitsize != maxsize)
5074 return 0;
5075 else
5076 realdecl = expr;
5078 else
5079 return 0;
5083 /* Do not track EXPR if its REALDECL should be ignored for debugging
5084 purposes.  */
5085 if (DECL_IGNORED_P (realdecl))
5086 return 0;
5088 /* Do not track global variables until we are able to emit correct location
5089 lists for them.  */
5090 if (TREE_STATIC (realdecl))
5091 return 0;
5093 /* When EXPR is a DECL for an alias of some variable (see the example
5094 below), the TREE_STATIC flag is not used.  Disable tracking of all
5095 DECLs whose DECL_RTL contains a SYMBOL_REF.
5097 Example:
5098 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5099 char **_dl_argv;
5101 if (decl_rtl && MEM_P (decl_rtl)
5102 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5103 return 0;
5105 /* If the RTX is a memory, it should not be very large (because it would be
5106 an array or a struct).  */
5107 if (decl_rtl && MEM_P (decl_rtl))
5109 /* Do not track structures and arrays. */
5110 if (GET_MODE (decl_rtl) == BLKmode
5111 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5112 return 0;
5113 if (MEM_SIZE_KNOWN_P (decl_rtl)
5114 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5115 return 0;
5118 DECL_CHANGED (expr) = 0;
5119 DECL_CHANGED (realdecl) = 0;
5120 return 1;
5123 /* Determine whether a given LOC refers to the same variable part as
5124 EXPR+OFFSET. */
5126 static bool
5127 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5129 tree expr2;
5130 HOST_WIDE_INT offset2;
5132 if (! DECL_P (expr))
5133 return false;
5135 if (REG_P (loc))
5137 expr2 = REG_EXPR (loc);
5138 offset2 = REG_OFFSET (loc);
5140 else if (MEM_P (loc))
5142 expr2 = MEM_EXPR (loc);
5143 offset2 = INT_MEM_OFFSET (loc);
5145 else
5146 return false;
5148 if (! expr2 || ! DECL_P (expr2))
5149 return false;
5151 expr = var_debug_decl (expr);
5152 expr2 = var_debug_decl (expr2);
5154 return (expr == expr2 && offset == offset2);
5157 /* LOC is a REG or MEM that we would like to track if possible.
5158 If EXPR is null, we don't know what expression LOC refers to,
5159 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5160 LOC is an lvalue register.
5162 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5163 is something we can track. When returning true, store the mode of
5164 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5165 from EXPR in *OFFSET_OUT (if nonnull). */
5167 static bool
5168 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5169 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5171 enum machine_mode mode;
5173 if (expr == NULL || !track_expr_p (expr, true))
5174 return false;
5176 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5177 whole subreg, but only the old inner part is really relevant. */
5178 mode = GET_MODE (loc);
5179 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5181 enum machine_mode pseudo_mode;
5183 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5184 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5186 offset += byte_lowpart_offset (pseudo_mode, mode);
5187 mode = pseudo_mode;
5191 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5192 Do the same if we are storing to a register and EXPR occupies
5193 the whole of register LOC; in that case, the whole of EXPR is
5194 being changed. We exclude complex modes from the second case
5195 because the real and imaginary parts are represented as separate
5196 pseudo registers, even if the whole complex value fits into one
5197 hard register. */
5198 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5199 || (store_reg_p
5200 && !COMPLEX_MODE_P (DECL_MODE (expr))
5201 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5202 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5204 mode = DECL_MODE (expr);
5205 offset = 0;
5208 if (offset < 0 || offset >= MAX_VAR_PARTS)
5209 return false;
5211 if (mode_out)
5212 *mode_out = mode;
5213 if (offset_out)
5214 *offset_out = offset;
5215 return true;
5218 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5219 want to track. When returning nonnull, make sure that the attributes
5220 on the returned value are updated. */
5222 static rtx
5223 var_lowpart (enum machine_mode mode, rtx loc)
5225 unsigned int offset, reg_offset, regno;
5227 if (GET_MODE (loc) == mode)
5228 return loc;
5230 if (!REG_P (loc) && !MEM_P (loc))
5231 return NULL;
5233 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5235 if (MEM_P (loc))
5236 return adjust_address_nv (loc, mode, offset);
5238 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5239 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5240 reg_offset, mode);
5241 return gen_rtx_REG_offset (loc, mode, regno, offset);
5244 /* Carry information about uses and stores while walking rtx. */
5246 struct count_use_info
5248 /* The insn where the RTX is. */
5249 rtx insn;
5251 /* The basic block where insn is. */
5252 basic_block bb;
5254 /* The array of n_sets sets in the insn, as determined by cselib. */
5255 struct cselib_set *sets;
5256 int n_sets;
5258 /* True if we're counting stores, false otherwise. */
5259 bool store_p;
5262 /* Find a VALUE corresponding to X. */
5264 static inline cselib_val *
5265 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5267 int i;
5269 if (cui->sets)
5271 /* This is called after uses are set up and before stores are
5272 processed by cselib, so it's safe to look up srcs, but not
5273 dsts. So we look up expressions that appear in srcs or in
5274 dest expressions, but we search the sets array for dests of
5275 stores. */
5276 if (cui->store_p)
5278 /* Some targets represent memset and memcpy patterns
5279 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5280 (set (mem:BLK ...) (const_int ...)) or
5281 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5282 in that case, otherwise we end up with mode mismatches. */
5283 if (mode == BLKmode && MEM_P (x))
5284 return NULL;
5285 for (i = 0; i < cui->n_sets; i++)
5286 if (cui->sets[i].dest == x)
5287 return cui->sets[i].src_elt;
5289 else
5290 return cselib_lookup (x, mode, 0, VOIDmode);
5293 return NULL;
5296 /* Replace all registers and addresses in an expression with VALUE
5297 expressions that map back to them, unless the expression is a
5298 register. If no mapping is or can be performed, returns NULL. */
5300 static rtx
5301 replace_expr_with_values (rtx loc)
5303 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5304 return NULL;
5305 else if (MEM_P (loc))
5307 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5308 get_address_mode (loc), 0,
5309 GET_MODE (loc));
5310 if (addr)
5311 return replace_equiv_address_nv (loc, addr->val_rtx);
5312 else
5313 return NULL;
5315 else
5316 return cselib_subst_to_values (loc, VOIDmode);
5319 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5320 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5321 RTX. */
5323 static int
5324 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5326 rtx loc = *x;
5328 return GET_CODE (loc) == DEBUG_EXPR;
5331 /* Determine what kind of micro operation to choose for a USE. Return
5332 MO_CLOBBER if no micro operation is to be generated. */
5334 static enum micro_operation_type
5335 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5337 tree expr;
5339 if (cui && cui->sets)
5341 if (GET_CODE (loc) == VAR_LOCATION)
5343 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5345 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5346 if (! VAR_LOC_UNKNOWN_P (ploc))
5348 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5349 VOIDmode);
5351 /* ??? flag_float_store and volatile mems are never
5352 given values, but we could in theory use them for
5353 locations. */
5354 gcc_assert (val || 1);
5356 return MO_VAL_LOC;
5358 else
5359 return MO_CLOBBER;
5362 if (REG_P (loc) || MEM_P (loc))
5364 if (modep)
5365 *modep = GET_MODE (loc);
5366 if (cui->store_p)
5368 if (REG_P (loc)
5369 || (find_use_val (loc, GET_MODE (loc), cui)
5370 && cselib_lookup (XEXP (loc, 0),
5371 get_address_mode (loc), 0,
5372 GET_MODE (loc))))
5373 return MO_VAL_SET;
5375 else
5377 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5379 if (val && !cselib_preserved_value_p (val))
5380 return MO_VAL_USE;
5385 if (REG_P (loc))
5387 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5389 if (loc == cfa_base_rtx)
5390 return MO_CLOBBER;
5391 expr = REG_EXPR (loc);
5393 if (!expr)
5394 return MO_USE_NO_VAR;
5395 else if (target_for_debug_bind (var_debug_decl (expr)))
5396 return MO_CLOBBER;
5397 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5398 false, modep, NULL))
5399 return MO_USE;
5400 else
5401 return MO_USE_NO_VAR;
5403 else if (MEM_P (loc))
5405 expr = MEM_EXPR (loc);
5407 if (!expr)
5408 return MO_CLOBBER;
5409 else if (target_for_debug_bind (var_debug_decl (expr)))
5410 return MO_CLOBBER;
5411 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5412 false, modep, NULL)
5413 /* Multi-part variables shouldn't refer to one-part
5414 variable names such as VALUEs (never happens) or
5415 DEBUG_EXPRs (only happens in the presence of debug
5416 insns). */
5417 && (!MAY_HAVE_DEBUG_INSNS
5418 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5419 return MO_USE;
5420 else
5421 return MO_CLOBBER;
5424 return MO_CLOBBER;
5427 /* Log to OUT information about micro-operation MOPT involving X in
5428 INSN of BB. */
5430 static inline void
5431 log_op_type (rtx x, basic_block bb, rtx insn,
5432 enum micro_operation_type mopt, FILE *out)
5434 fprintf (out, "bb %i op %i insn %i %s ",
5435 bb->index, VTI (bb)->mos.length (),
5436 INSN_UID (insn), micro_operation_type_name[mopt]);
5437 print_inline_rtx (out, x, 2);
5438 fputc ('\n', out);
5441 /* Tell whether the CONCAT used to hold a VALUE and its location
5442 needs value resolution, i.e., an attempt of mapping the location
5443 back to other incoming values. */
5444 #define VAL_NEEDS_RESOLUTION(x) \
5445 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5446 /* Whether the location in the CONCAT is a tracked expression, that
5447 should also be handled like a MO_USE. */
5448 #define VAL_HOLDS_TRACK_EXPR(x) \
5449 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5450 /* Whether the location in the CONCAT should be handled like a MO_COPY
5451 as well. */
5452 #define VAL_EXPR_IS_COPIED(x) \
5453 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5454 /* Whether the location in the CONCAT should be handled like a
5455 MO_CLOBBER as well. */
5456 #define VAL_EXPR_IS_CLOBBERED(x) \
5457 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5459 /* All preserved VALUEs. */
5460 static vec<rtx> preserved_values;
5462 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5464 static void
5465 preserve_value (cselib_val *val)
5467 cselib_preserve_value (val);
5468 preserved_values.safe_push (val->val_rtx);
5471 /* Helper function for MO_VAL_LOC handling.  Return non-zero if
5472 we discover any rtx that is not suitable for CONST use and has
5473 not been replaced by a VALUE.  */
5475 static int
5476 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5478 if (*x == NULL_RTX)
5479 return 0;
5481 switch (GET_CODE (*x))
5483 case REG:
5484 case DEBUG_EXPR:
5485 case PC:
5486 case SCRATCH:
5487 case CC0:
5488 case ASM_INPUT:
5489 case ASM_OPERANDS:
5490 return 1;
5491 case MEM:
5492 return !MEM_READONLY_P (*x);
5493 default:
5494 return 0;
5498 /* Add uses (register and memory references) in LOC that will be
5499 tracked to VTI (bb)->mos.  INSN is the instruction that LOC is part of.  */
5501 static int
5502 add_uses (rtx *ploc, void *data)
5504 rtx loc = *ploc;
5505 enum machine_mode mode = VOIDmode;
5506 struct count_use_info *cui = (struct count_use_info *)data;
5507 enum micro_operation_type type = use_type (loc, cui, &mode);
5509 if (type != MO_CLOBBER)
5511 basic_block bb = cui->bb;
5512 micro_operation mo;
5514 mo.type = type;
5515 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5516 mo.insn = cui->insn;
5518 if (type == MO_VAL_LOC)
5520 rtx oloc = loc;
5521 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5522 cselib_val *val;
5524 gcc_assert (cui->sets);
5526 if (MEM_P (vloc)
5527 && !REG_P (XEXP (vloc, 0))
5528 && !MEM_P (XEXP (vloc, 0)))
5530 rtx mloc = vloc;
5531 enum machine_mode address_mode = get_address_mode (mloc);
5532 cselib_val *val
5533 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5534 GET_MODE (mloc));
5536 if (val && !cselib_preserved_value_p (val))
5537 preserve_value (val);
5540 if (CONSTANT_P (vloc)
5541 && (GET_CODE (vloc) != CONST
5542 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5543 /* For constants don't look up any value. */;
5544 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5545 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5547 enum machine_mode mode2;
5548 enum micro_operation_type type2;
5549 rtx nloc = NULL;
5550 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5552 if (resolvable)
5553 nloc = replace_expr_with_values (vloc);
5555 if (nloc)
5557 oloc = shallow_copy_rtx (oloc);
5558 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5561 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5563 type2 = use_type (vloc, 0, &mode2);
5565 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5566 || type2 == MO_CLOBBER);
5568 if (type2 == MO_CLOBBER
5569 && !cselib_preserved_value_p (val))
5571 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5572 preserve_value (val);
5575 else if (!VAR_LOC_UNKNOWN_P (vloc))
5577 oloc = shallow_copy_rtx (oloc);
5578 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5581 mo.u.loc = oloc;
5583 else if (type == MO_VAL_USE)
5585 enum machine_mode mode2 = VOIDmode;
5586 enum micro_operation_type type2;
5587 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5588 rtx vloc, oloc = loc, nloc;
5590 gcc_assert (cui->sets);
5592 if (MEM_P (oloc)
5593 && !REG_P (XEXP (oloc, 0))
5594 && !MEM_P (XEXP (oloc, 0)))
5596 rtx mloc = oloc;
5597 enum machine_mode address_mode = get_address_mode (mloc);
5598 cselib_val *val
5599 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5600 GET_MODE (mloc));
5602 if (val && !cselib_preserved_value_p (val))
5603 preserve_value (val);
5606 type2 = use_type (loc, 0, &mode2);
5608 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5609 || type2 == MO_CLOBBER);
5611 if (type2 == MO_USE)
5612 vloc = var_lowpart (mode2, loc);
5613 else
5614 vloc = oloc;
5616 /* The loc of a MO_VAL_USE may have two forms:
5618 (concat val src): val is at src, a value-based
5619 representation.
5621 (concat (concat val use) src): same as above, with use as
5622 the MO_USE tracked value, if it differs from src.
5626 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5627 nloc = replace_expr_with_values (loc);
5628 if (!nloc)
5629 nloc = oloc;
5631 if (vloc != nloc)
5632 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5633 else
5634 oloc = val->val_rtx;
5636 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5638 if (type2 == MO_USE)
5639 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5640 if (!cselib_preserved_value_p (val))
5642 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5643 preserve_value (val);
5646 else
5647 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5649 if (dump_file && (dump_flags & TDF_DETAILS))
5650 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5651 VTI (bb)->mos.safe_push (mo);
5654 return 0;
5657 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5659 static void
5660 add_uses_1 (rtx *x, void *cui)
5662 for_each_rtx (x, add_uses, cui);
5665 /* This is the value used during expansion of locations. We want it
5666 to be unbounded, so that variables expanded deep in a recursion
5667 nest are fully evaluated, so that their values are cached
5668 correctly. We avoid recursion cycles through other means, and we
5669 don't unshare RTL, so excess complexity is not a problem. */
5670 #define EXPR_DEPTH (INT_MAX)
5671 /* We use this to keep too-complex expressions from being emitted as
5672 location notes, and from there into debug information.  Users can trade
5673 compile time for ridiculously complex expressions, although they're
5674 seldom useful, and they may often have to be discarded as not
5675 representable anyway. */
5676 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5678 /* Attempt to reverse the EXPR operation in the debug info and record
5679 it in the cselib table.  E.g. for reg1 = reg2 + 6, even when reg2 is
5680 no longer live we can express its value as VAL - 6.  */
5682 static void
5683 reverse_op (rtx val, const_rtx expr, rtx insn)
5685 rtx src, arg, ret;
5686 cselib_val *v;
5687 struct elt_loc_list *l;
5688 enum rtx_code code;
5689 int count;
5691 if (GET_CODE (expr) != SET)
5692 return;
5694 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5695 return;
5697 src = SET_SRC (expr);
5698 switch (GET_CODE (src))
5700 case PLUS:
5701 case MINUS:
5702 case XOR:
5703 case NOT:
5704 case NEG:
5705 if (!REG_P (XEXP (src, 0)))
5706 return;
5707 break;
5708 case SIGN_EXTEND:
5709 case ZERO_EXTEND:
5710 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5711 return;
5712 break;
5713 default:
5714 return;
5717 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5718 return;
5720 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5721 if (!v || !cselib_preserved_value_p (v))
5722 return;
5724 /* Use canonical V to avoid creating multiple redundant expressions
5725 for different VALUES equivalent to V. */
5726 v = canonical_cselib_val (v);
5728 /* Adding a reverse op isn't useful if V already has an always valid
5729 location.  Ignore ENTRY_VALUE: while it is always constant, we should
5730 prefer non-ENTRY_VALUE locations whenever possible.  */
5731 for (l = v->locs, count = 0; l; l = l->next, count++)
5732 if (CONSTANT_P (l->loc)
5733 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5734 return;
5735 /* Avoid creating too large locs lists. */
5736 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5737 return;
5739 switch (GET_CODE (src))
5741 case NOT:
5742 case NEG:
5743 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5744 return;
5745 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5746 break;
5747 case SIGN_EXTEND:
5748 case ZERO_EXTEND:
5749 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5750 break;
5751 case XOR:
5752 code = XOR;
5753 goto binary;
5754 case PLUS:
5755 code = MINUS;
5756 goto binary;
5757 case MINUS:
5758 code = PLUS;
5759 goto binary;
5760 binary:
5761 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5762 return;
5763 arg = XEXP (src, 1);
5764 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5766 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5767 if (arg == NULL_RTX)
5768 return;
5769 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5770 return;
5772 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5773 if (ret == val)
5774 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5775 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5776 breaks a lot of routines during var-tracking. */
5777 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5778 break;
5779 default:
5780 gcc_unreachable ();
5783 cselib_add_permanent_equiv (v, ret, insn);
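/* The reversal above records, for instance, that after
   reg1 = reg2 + 6 the old contents of reg2 remain expressible as
   reg1 - 6.  The arithmetic core of that inversion on plain
   integers; rev_code and reverse_binary are illustrative stand-ins
   for the RTL codes and the simplify_gen_binary call above.  */

enum rev_code { REV_PLUS, REV_MINUS, REV_XOR };

static int
reverse_binary (enum rev_code code, int result, int arg)
{
  switch (code)
    {
    case REV_PLUS:
      return result - arg;	/* x = y + a  =>  y = x - a.  */
    case REV_MINUS:
      return result + arg;	/* x = y - a  =>  y = x + a.  */
    case REV_XOR:
      return result ^ arg;	/* x = y ^ a  =>  y = x ^ a.  */
    }
  return result;
}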
5786 /* Add the store LOC (a register or memory reference) to be tracked
5787 to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
5788 CUIP->insn is the instruction that LOC is part of.  */
5790 static void
5791 add_stores (rtx loc, const_rtx expr, void *cuip)
5793 enum machine_mode mode = VOIDmode, mode2;
5794 struct count_use_info *cui = (struct count_use_info *)cuip;
5795 basic_block bb = cui->bb;
5796 micro_operation mo;
5797 rtx oloc = loc, nloc, src = NULL;
5798 enum micro_operation_type type = use_type (loc, cui, &mode);
5799 bool track_p = false;
5800 cselib_val *v;
5801 bool resolve, preserve;
5803 if (type == MO_CLOBBER)
5804 return;
5806 mode2 = mode;
5808 if (REG_P (loc))
5810 gcc_assert (loc != cfa_base_rtx);
5811 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5812 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5813 || GET_CODE (expr) == CLOBBER)
5815 mo.type = MO_CLOBBER;
5816 mo.u.loc = loc;
5817 if (GET_CODE (expr) == SET
5818 && SET_DEST (expr) == loc
5819 && !unsuitable_loc (SET_SRC (expr))
5820 && find_use_val (loc, mode, cui))
5822 gcc_checking_assert (type == MO_VAL_SET);
5823 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5826 else
5828 if (GET_CODE (expr) == SET
5829 && SET_DEST (expr) == loc
5830 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5831 src = var_lowpart (mode2, SET_SRC (expr));
5832 loc = var_lowpart (mode2, loc);
5834 if (src == NULL)
5836 mo.type = MO_SET;
5837 mo.u.loc = loc;
5839 else
5841 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5842 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5844 /* If this is an instruction copying (part of) a parameter
5845 passed by invisible reference to its register location,
5846 pretend it's a SET so that the initial memory location
5847 is discarded, as the parameter register can be reused
5848 for other purposes and we do not track locations based
5849 on generic registers. */
5850 if (MEM_P (src)
5851 && REG_EXPR (loc)
5852 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5853 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5854 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5855 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5856 != arg_pointer_rtx)
5857 mo.type = MO_SET;
5858 else
5859 mo.type = MO_COPY;
5861 else
5862 mo.type = MO_SET;
5863 mo.u.loc = xexpr;
5866 mo.insn = cui->insn;
5868 else if (MEM_P (loc)
5869 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5870 || cui->sets))
5872 if (MEM_P (loc) && type == MO_VAL_SET
5873 && !REG_P (XEXP (loc, 0))
5874 && !MEM_P (XEXP (loc, 0)))
5876 rtx mloc = loc;
5877 enum machine_mode address_mode = get_address_mode (mloc);
5878 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5879 address_mode, 0,
5880 GET_MODE (mloc));
5882 if (val && !cselib_preserved_value_p (val))
5883 preserve_value (val);
5886 if (GET_CODE (expr) == CLOBBER || !track_p)
5888 mo.type = MO_CLOBBER;
5889 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5891 else
5893 if (GET_CODE (expr) == SET
5894 && SET_DEST (expr) == loc
5895 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5896 src = var_lowpart (mode2, SET_SRC (expr));
5897 loc = var_lowpart (mode2, loc);
5899 if (src == NULL)
5901 mo.type = MO_SET;
5902 mo.u.loc = loc;
5904 else
5906 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5907 if (same_variable_part_p (SET_SRC (xexpr),
5908 MEM_EXPR (loc),
5909 INT_MEM_OFFSET (loc)))
5910 mo.type = MO_COPY;
5911 else
5912 mo.type = MO_SET;
5913 mo.u.loc = xexpr;
5916 mo.insn = cui->insn;
5918 else
5919 return;
5921 if (type != MO_VAL_SET)
5922 goto log_and_return;
5924 v = find_use_val (oloc, mode, cui);
5926 if (!v)
5927 goto log_and_return;
5929 resolve = preserve = !cselib_preserved_value_p (v);
5931 if (loc == stack_pointer_rtx
5932 && hard_frame_pointer_adjustment != -1
5933 && preserve)
5934 cselib_set_value_sp_based (v);
5936 nloc = replace_expr_with_values (oloc);
5937 if (nloc)
5938 oloc = nloc;
5940 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5942 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5944 gcc_assert (oval != v);
5945 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5947 if (oval && !cselib_preserved_value_p (oval))
5949 micro_operation moa;
5951 preserve_value (oval);
5953 moa.type = MO_VAL_USE;
5954 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5955 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5956 moa.insn = cui->insn;
5958 if (dump_file && (dump_flags & TDF_DETAILS))
5959 log_op_type (moa.u.loc, cui->bb, cui->insn,
5960 moa.type, dump_file);
5961 VTI (bb)->mos.safe_push (moa);
5964 resolve = false;
5966 else if (resolve && GET_CODE (mo.u.loc) == SET)
5968 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5969 nloc = replace_expr_with_values (SET_SRC (expr));
5970 else
5971 nloc = NULL_RTX;
5973 /* Avoid the mode mismatch between oexpr and expr. */
5974 if (!nloc && mode != mode2)
5976 nloc = SET_SRC (expr);
5977 gcc_assert (oloc == SET_DEST (expr));
5980 if (nloc && nloc != SET_SRC (mo.u.loc))
5981 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5982 else
5984 if (oloc == SET_DEST (mo.u.loc))
5985 /* No point in duplicating. */
5986 oloc = mo.u.loc;
5987 if (!REG_P (SET_SRC (mo.u.loc)))
5988 resolve = false;
5991 else if (!resolve)
5993 if (GET_CODE (mo.u.loc) == SET
5994 && oloc == SET_DEST (mo.u.loc))
5995 /* No point in duplicating. */
5996 oloc = mo.u.loc;
5998 else
5999 resolve = false;
6001 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6003 if (mo.u.loc != oloc)
6004 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6006 /* The loc of a MO_VAL_SET may have various forms:
6008 (concat val dst): dst now holds val
6010 (concat val (set dst src)): dst now holds val, copied from src
6012 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6013 after replacing mems and non-top-level regs with values.
6015 (concat (concat val dstv) (set dst src)): dst now holds val,
6016 copied from src. dstv is a value-based representation of dst, if
6017 it differs from dst. If resolution is needed, src is a REG, and
6018 its mode is the same as that of val.
6020 (concat (concat val (set dstv srcv)) (set dst src)): src
6021 copied to dst, holding val. dstv and srcv are value-based
6022 representations of dst and src, respectively.
6026 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6027 reverse_op (v->val_rtx, expr, cui->insn);
6029 mo.u.loc = loc;
6031 if (track_p)
6032 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6033 if (preserve)
6035 VAL_NEEDS_RESOLUTION (loc) = resolve;
6036 preserve_value (v);
6038 if (mo.type == MO_CLOBBER)
6039 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6040 if (mo.type == MO_COPY)
6041 VAL_EXPR_IS_COPIED (loc) = 1;
6043 mo.type = MO_VAL_SET;
6045 log_and_return:
6046 if (dump_file && (dump_flags & TDF_DETAILS))
6047 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6048 VTI (bb)->mos.safe_push (mo);
6051 /* Arguments to the call. */
6052 static rtx call_arguments;
6054 /* Compute call_arguments. */
6056 static void
6057 prepare_call_arguments (basic_block bb, rtx insn)
6059 rtx link, x, call;
6060 rtx prev, cur, next;
6061 rtx this_arg = NULL_RTX;
6062 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6063 tree obj_type_ref = NULL_TREE;
6064 CUMULATIVE_ARGS args_so_far_v;
6065 cumulative_args_t args_so_far;
6067 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6068 args_so_far = pack_cumulative_args (&args_so_far_v);
6069 call = get_call_rtx_from (insn);
6070 if (call)
6072 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6074 rtx symbol = XEXP (XEXP (call, 0), 0);
6075 if (SYMBOL_REF_DECL (symbol))
6076 fndecl = SYMBOL_REF_DECL (symbol);
6078 if (fndecl == NULL_TREE)
6079 fndecl = MEM_EXPR (XEXP (call, 0));
6080 if (fndecl
6081 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6082 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6083 fndecl = NULL_TREE;
6084 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6085 type = TREE_TYPE (fndecl);
6086 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6088 if (TREE_CODE (fndecl) == INDIRECT_REF
6089 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6090 obj_type_ref = TREE_OPERAND (fndecl, 0);
6091 fndecl = NULL_TREE;
6093 if (type)
6095 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6096 t = TREE_CHAIN (t))
6097 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6098 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6099 break;
6100 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6101 type = NULL;
6102 else
6104 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6105 link = CALL_INSN_FUNCTION_USAGE (insn);
6106 #ifndef PCC_STATIC_STRUCT_RETURN
6107 if (aggregate_value_p (TREE_TYPE (type), type)
6108 && targetm.calls.struct_value_rtx (type, 0) == 0)
6110 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6111 enum machine_mode mode = TYPE_MODE (struct_addr);
6112 rtx reg;
6113 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6114 nargs + 1);
6115 reg = targetm.calls.function_arg (args_so_far, mode,
6116 struct_addr, true);
6117 targetm.calls.function_arg_advance (args_so_far, mode,
6118 struct_addr, true);
6119 if (reg == NULL_RTX)
6121 for (; link; link = XEXP (link, 1))
6122 if (GET_CODE (XEXP (link, 0)) == USE
6123 && MEM_P (XEXP (XEXP (link, 0), 0)))
6125 link = XEXP (link, 1);
6126 break;
6130 else
6131 #endif
6132 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6133 nargs);
6134 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6136 enum machine_mode mode;
6137 t = TYPE_ARG_TYPES (type);
6138 mode = TYPE_MODE (TREE_VALUE (t));
6139 this_arg = targetm.calls.function_arg (args_so_far, mode,
6140 TREE_VALUE (t), true);
6141 if (this_arg && !REG_P (this_arg))
6142 this_arg = NULL_RTX;
6143 else if (this_arg == NULL_RTX)
6145 for (; link; link = XEXP (link, 1))
6146 if (GET_CODE (XEXP (link, 0)) == USE
6147 && MEM_P (XEXP (XEXP (link, 0), 0)))
6149 this_arg = XEXP (XEXP (link, 0), 0);
6150 break;
6157 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6159 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6160 if (GET_CODE (XEXP (link, 0)) == USE)
6162 rtx item = NULL_RTX;
6163 x = XEXP (XEXP (link, 0), 0);
6164 if (GET_MODE (link) == VOIDmode
6165 || GET_MODE (link) == BLKmode
6166 || (GET_MODE (link) != GET_MODE (x)
6167 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6168 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6169 /* Can't do anything for these, if the original type mode
6170 isn't known or can't be converted. */;
6171 else if (REG_P (x))
6173 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6174 if (val && cselib_preserved_value_p (val))
6175 item = val->val_rtx;
6176 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6178 enum machine_mode mode = GET_MODE (x);
6180 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6181 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6183 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6185 if (reg == NULL_RTX || !REG_P (reg))
6186 continue;
6187 val = cselib_lookup (reg, mode, 0, VOIDmode);
6188 if (val && cselib_preserved_value_p (val))
6190 item = val->val_rtx;
6191 break;
6196 else if (MEM_P (x))
6198 rtx mem = x;
6199 cselib_val *val;
6201 if (!frame_pointer_needed)
6203 struct adjust_mem_data amd;
6204 amd.mem_mode = VOIDmode;
6205 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6206 amd.side_effects = NULL_RTX;
6207 amd.store = true;
6208 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6209 &amd);
6210 gcc_assert (amd.side_effects == NULL_RTX);
6212 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6213 if (val && cselib_preserved_value_p (val))
6214 item = val->val_rtx;
6215 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6217 /* For a non-integer stack argument, also check whether it
6218 was initialized through an integer mode.  */
6219 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6220 if (imode != GET_MODE (mem) && imode != BLKmode)
6222 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6223 imode, 0, VOIDmode);
6224 if (val && cselib_preserved_value_p (val))
6225 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6226 imode);
6230 if (item)
6232 rtx x2 = x;
6233 if (GET_MODE (item) != GET_MODE (link))
6234 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6235 if (GET_MODE (x2) != GET_MODE (link))
6236 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6237 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6238 call_arguments
6239 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6241 if (t && t != void_list_node)
6243 tree argtype = TREE_VALUE (t);
6244 enum machine_mode mode = TYPE_MODE (argtype);
6245 rtx reg;
6246 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6248 argtype = build_pointer_type (argtype);
6249 mode = TYPE_MODE (argtype);
6251 reg = targetm.calls.function_arg (args_so_far, mode,
6252 argtype, true);
6253 if (TREE_CODE (argtype) == REFERENCE_TYPE
6254 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6255 && reg
6256 && REG_P (reg)
6257 && GET_MODE (reg) == mode
6258 && GET_MODE_CLASS (mode) == MODE_INT
6259 && REG_P (x)
6260 && REGNO (x) == REGNO (reg)
6261 && GET_MODE (x) == mode
6262 && item)
6264 enum machine_mode indmode
6265 = TYPE_MODE (TREE_TYPE (argtype));
6266 rtx mem = gen_rtx_MEM (indmode, x);
6267 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6268 if (val && cselib_preserved_value_p (val))
6270 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6271 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6272 call_arguments);
6274 else
6276 struct elt_loc_list *l;
6277 tree initial;
6279 /* Try harder: when passing the address of a constant-pool
6280 integer, it can easily be read back.  */
6281 item = XEXP (item, 1);
6282 if (GET_CODE (item) == SUBREG)
6283 item = SUBREG_REG (item);
6284 gcc_assert (GET_CODE (item) == VALUE);
6285 val = CSELIB_VAL_PTR (item);
6286 for (l = val->locs; l; l = l->next)
6287 if (GET_CODE (l->loc) == SYMBOL_REF
6288 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6289 && SYMBOL_REF_DECL (l->loc)
6290 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6292 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6293 if (host_integerp (initial, 0))
6295 item = GEN_INT (tree_low_cst (initial, 0));
6296 item = gen_rtx_CONCAT (indmode, mem, item);
6297 call_arguments
6298 = gen_rtx_EXPR_LIST (VOIDmode, item,
6299 call_arguments);
6301 break;
6305 targetm.calls.function_arg_advance (args_so_far, mode,
6306 argtype, true);
6307 t = TREE_CHAIN (t);
6311 /* Add debug arguments. */
6312 if (fndecl
6313 && TREE_CODE (fndecl) == FUNCTION_DECL
6314 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6316 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6317 if (debug_args)
6319 unsigned int ix;
6320 tree param;
6321 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6323 rtx item;
6324 tree dtemp = (**debug_args)[ix + 1];
6325 enum machine_mode mode = DECL_MODE (dtemp);
6326 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6327 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6328 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6329 call_arguments);
6334 /* Reverse call_arguments chain. */
6335 prev = NULL_RTX;
6336 for (cur = call_arguments; cur; cur = next)
6338 next = XEXP (cur, 1);
6339 XEXP (cur, 1) = prev;
6340 prev = cur;
6342 call_arguments = prev;
6344 x = get_call_rtx_from (insn);
6345 if (x)
6347 x = XEXP (XEXP (x, 0), 0);
6348 if (GET_CODE (x) == SYMBOL_REF)
6349 /* Don't record anything. */;
6350 else if (CONSTANT_P (x))
6352 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6353 pc_rtx, x);
6354 call_arguments
6355 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6357 else
6359 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6360 if (val && cselib_preserved_value_p (val))
6362 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6363 call_arguments
6364 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
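/* The call target itself is recorded as (concat (pc) TARGET): a
   direct SYMBOL_REF callee needs no note, while indirect targets are
   recorded as constants or as preserved cselib VALUEs.  */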
6368 if (this_arg)
6370 enum machine_mode mode
6371 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6372 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6373 HOST_WIDE_INT token
6374 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
6375 if (token)
6376 clobbered = plus_constant (mode, clobbered,
6377 token * GET_MODE_SIZE (mode));
6378 clobbered = gen_rtx_MEM (mode, clobbered);
6379 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6380 call_arguments
6381 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
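/* For an OBJ_TYPE_REF (virtual) call this records
   (concat (clobber (pc)) SLOT), where SLOT is the vtable entry
   MEM[MEM[this] + token * GET_MODE_SIZE (mode)], presumably so that
   debug-info consumers can identify the actual callee.  */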
6385 /* Callback for cselib_record_sets_hook.  Record the uses and
6386 stores in an insn as micro operations, after cselib_record_sets
6387 has analyzed the sets in the insn but before it modifies the
6388 stored values in its internal tables.  It can also be called
6389 directly, bypassing cselib_record_sets (when cselib isn't used
6390 at all), in which case SETS and N_SETS will be 0. */
6392 static void
6393 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6395 basic_block bb = BLOCK_FOR_INSN (insn);
6396 int n1, n2;
6397 struct count_use_info cui;
6398 micro_operation *mos;
6400 cselib_hook_called = true;
6402 cui.insn = insn;
6403 cui.bb = bb;
6404 cui.sets = sets;
6405 cui.n_sets = n_sets;
6407 n1 = VTI (bb)->mos.length ();
6408 cui.store_p = false;
6409 note_uses (&PATTERN (insn), add_uses_1, &cui);
6410 n2 = VTI (bb)->mos.length () - 1;
6411 mos = VTI (bb)->mos.address ();
6413 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6414 MO_VAL_LOC last. */
6415 while (n1 < n2)
6417 while (n1 < n2 && mos[n1].type == MO_USE)
6418 n1++;
6419 while (n1 < n2 && mos[n2].type != MO_USE)
6420 n2--;
6421 if (n1 < n2)
6423 micro_operation sw;
6425 sw = mos[n1];
6426 mos[n1] = mos[n2];
6427 mos[n2] = sw;
6431 n2 = VTI (bb)->mos.length () - 1;
6432 while (n1 < n2)
6434 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6435 n1++;
6436 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6437 n2--;
6438 if (n1 < n2)
6440 micro_operation sw;
6442 sw = mos[n1];
6443 mos[n1] = mos[n2];
6444 mos[n2] = sw;
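/* Illustration (not part of the original source): the two passes
   above act like single partition steps of quicksort.  A recorded
   sequence [MO_VAL_LOC, MO_USE, MO_USE_NO_VAR, MO_USE] ends up as
   [MO_USE, MO_USE, MO_USE_NO_VAR, MO_VAL_LOC]: MO_USEs first,
   MO_VAL_LOCs last, everything else in between.  */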
6448 if (CALL_P (insn))
6450 micro_operation mo;
6452 mo.type = MO_CALL;
6453 mo.insn = insn;
6454 mo.u.loc = call_arguments;
6455 call_arguments = NULL_RTX;
6457 if (dump_file && (dump_flags & TDF_DETAILS))
6458 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6459 VTI (bb)->mos.safe_push (mo);
6462 n1 = VTI (bb)->mos.length ();
6463 /* This will record NEXT_INSN (insn), such that we can
6464 insert notes before it without worrying about any
6465 notes that MO_USEs might emit after the insn. */
6466 cui.store_p = true;
6467 note_stores (PATTERN (insn), add_stores, &cui);
6468 n2 = VTI (bb)->mos.length () - 1;
6469 mos = VTI (bb)->mos.address ();
6471 /* Order the MO_VAL_USEs first (note_stores does nothing
6472 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6473 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6474 while (n1 < n2)
6476 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6477 n1++;
6478 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6479 n2--;
6480 if (n1 < n2)
6482 micro_operation sw;
6484 sw = mos[n1];
6485 mos[n1] = mos[n2];
6486 mos[n2] = sw;
6490 n2 = VTI (bb)->mos.length () - 1;
6491 while (n1 < n2)
6493 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6494 n1++;
6495 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6496 n2--;
6497 if (n1 < n2)
6499 micro_operation sw;
6501 sw = mos[n1];
6502 mos[n1] = mos[n2];
6503 mos[n2] = sw;
6508 static enum var_init_status
6509 find_src_status (dataflow_set *in, rtx src)
6511 tree decl = NULL_TREE;
6512 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6514 if (! flag_var_tracking_uninit)
6515 status = VAR_INIT_STATUS_INITIALIZED;
6517 if (src && REG_P (src))
6518 decl = var_debug_decl (REG_EXPR (src));
6519 else if (src && MEM_P (src))
6520 decl = var_debug_decl (MEM_EXPR (src));
6522 if (src && decl)
6523 status = get_init_value (in, src, dv_from_decl (decl));
6525 return status;
6528 /* SRC is the source of an assignment. Use SET to try to find what
6529 was ultimately assigned to SRC. Return that value if known,
6530 otherwise return SRC itself. */
6532 static rtx
6533 find_src_set_src (dataflow_set *set, rtx src)
6535 tree decl = NULL_TREE; /* The variable being copied around. */
6536 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6537 variable var;
6538 location_chain nextp;
6539 int i;
6540 bool found;
6542 if (src && REG_P (src))
6543 decl = var_debug_decl (REG_EXPR (src));
6544 else if (src && MEM_P (src))
6545 decl = var_debug_decl (MEM_EXPR (src));
6547 if (src && decl)
6549 decl_or_value dv = dv_from_decl (decl);
6551 var = shared_hash_find (set->vars, dv);
6552 if (var)
6554 found = false;
6555 for (i = 0; i < var->n_var_parts && !found; i++)
6556 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6557 nextp = nextp->next)
6558 if (rtx_equal_p (nextp->loc, src))
6560 set_src = nextp->set_src;
6561 found = true;
6567 return set_src;
6570 /* Compute the changes of variable locations in the basic block BB. */
6572 static bool
6573 compute_bb_dataflow (basic_block bb)
6575 unsigned int i;
6576 micro_operation *mo;
6577 bool changed;
6578 dataflow_set old_out;
6579 dataflow_set *in = &VTI (bb)->in;
6580 dataflow_set *out = &VTI (bb)->out;
6582 dataflow_set_init (&old_out);
6583 dataflow_set_copy (&old_out, out);
6584 dataflow_set_copy (out, in);
6586 if (MAY_HAVE_DEBUG_INSNS)
6587 local_get_addr_cache = pointer_map_create ();
6589 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6591 rtx insn = mo->insn;
6593 switch (mo->type)
6595 case MO_CALL:
6596 dataflow_set_clear_at_call (out);
6597 break;
6599 case MO_USE:
6601 rtx loc = mo->u.loc;
6603 if (REG_P (loc))
6604 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6605 else if (MEM_P (loc))
6606 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6608 break;
6610 case MO_VAL_LOC:
6612 rtx loc = mo->u.loc;
6613 rtx val, vloc;
6614 tree var;
6616 if (GET_CODE (loc) == CONCAT)
6618 val = XEXP (loc, 0);
6619 vloc = XEXP (loc, 1);
6621 else
6623 val = NULL_RTX;
6624 vloc = loc;
6627 var = PAT_VAR_LOCATION_DECL (vloc);
6629 clobber_variable_part (out, NULL_RTX,
6630 dv_from_decl (var), 0, NULL_RTX);
6631 if (val)
6633 if (VAL_NEEDS_RESOLUTION (loc))
6634 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6635 set_variable_part (out, val, dv_from_decl (var), 0,
6636 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6637 INSERT);
6639 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6640 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6641 dv_from_decl (var), 0,
6642 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6643 INSERT);
6645 break;
6647 case MO_VAL_USE:
6649 rtx loc = mo->u.loc;
6650 rtx val, vloc, uloc;
6652 vloc = uloc = XEXP (loc, 1);
6653 val = XEXP (loc, 0);
6655 if (GET_CODE (val) == CONCAT)
6657 uloc = XEXP (val, 1);
6658 val = XEXP (val, 0);
6661 if (VAL_NEEDS_RESOLUTION (loc))
6662 val_resolve (out, val, vloc, insn);
6663 else
6664 val_store (out, val, uloc, insn, false);
6666 if (VAL_HOLDS_TRACK_EXPR (loc))
6668 if (GET_CODE (uloc) == REG)
6669 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6670 NULL);
6671 else if (GET_CODE (uloc) == MEM)
6672 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6673 NULL);
6676 break;
6678 case MO_VAL_SET:
6680 rtx loc = mo->u.loc;
6681 rtx val, vloc, uloc;
6682 rtx dstv, srcv;
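/* MO->u.loc is either (concat VAL ULOC) or, when the location to
   track differs from the one appearing in the insn,
   (concat (concat VAL DSTV) ULOC); ULOC is the insn's SET or bare
   location.  The code below unpacks these variants.  */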
6684 vloc = loc;
6685 uloc = XEXP (vloc, 1);
6686 val = XEXP (vloc, 0);
6687 vloc = uloc;
6689 if (GET_CODE (uloc) == SET)
6691 dstv = SET_DEST (uloc);
6692 srcv = SET_SRC (uloc);
6694 else
6696 dstv = uloc;
6697 srcv = NULL;
6700 if (GET_CODE (val) == CONCAT)
6702 dstv = vloc = XEXP (val, 1);
6703 val = XEXP (val, 0);
6706 if (GET_CODE (vloc) == SET)
6708 srcv = SET_SRC (vloc);
6710 gcc_assert (val != srcv);
6711 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6713 dstv = vloc = SET_DEST (vloc);
6715 if (VAL_NEEDS_RESOLUTION (loc))
6716 val_resolve (out, val, srcv, insn);
6718 else if (VAL_NEEDS_RESOLUTION (loc))
6720 gcc_assert (GET_CODE (uloc) == SET
6721 && GET_CODE (SET_SRC (uloc)) == REG);
6722 val_resolve (out, val, SET_SRC (uloc), insn);
6725 if (VAL_HOLDS_TRACK_EXPR (loc))
6727 if (VAL_EXPR_IS_CLOBBERED (loc))
6729 if (REG_P (uloc))
6730 var_reg_delete (out, uloc, true);
6731 else if (MEM_P (uloc))
6733 gcc_assert (MEM_P (dstv));
6734 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6735 var_mem_delete (out, dstv, true);
6738 else
6740 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6741 rtx src = NULL, dst = uloc;
6742 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6744 if (GET_CODE (uloc) == SET)
6746 src = SET_SRC (uloc);
6747 dst = SET_DEST (uloc);
6750 if (copied_p)
6752 if (flag_var_tracking_uninit)
6754 status = find_src_status (in, src);
6756 if (status == VAR_INIT_STATUS_UNKNOWN)
6757 status = find_src_status (out, src);
6760 src = find_src_set_src (in, src);
6763 if (REG_P (dst))
6764 var_reg_delete_and_set (out, dst, !copied_p,
6765 status, srcv);
6766 else if (MEM_P (dst))
6768 gcc_assert (MEM_P (dstv));
6769 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6770 var_mem_delete_and_set (out, dstv, !copied_p,
6771 status, srcv);
6775 else if (REG_P (uloc))
6776 var_regno_delete (out, REGNO (uloc));
6777 else if (MEM_P (uloc))
6779 gcc_checking_assert (GET_CODE (vloc) == MEM);
6780 gcc_checking_assert (dstv == vloc);
6781 if (dstv != vloc)
6782 clobber_overlapping_mems (out, vloc);
6785 val_store (out, val, dstv, insn, true);
6787 break;
6789 case MO_SET:
6791 rtx loc = mo->u.loc;
6792 rtx set_src = NULL;
6794 if (GET_CODE (loc) == SET)
6796 set_src = SET_SRC (loc);
6797 loc = SET_DEST (loc);
6800 if (REG_P (loc))
6801 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6802 set_src);
6803 else if (MEM_P (loc))
6804 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6805 set_src);
6807 break;
6809 case MO_COPY:
6811 rtx loc = mo->u.loc;
6812 enum var_init_status src_status;
6813 rtx set_src = NULL;
6815 if (GET_CODE (loc) == SET)
6817 set_src = SET_SRC (loc);
6818 loc = SET_DEST (loc);
6821 if (! flag_var_tracking_uninit)
6822 src_status = VAR_INIT_STATUS_INITIALIZED;
6823 else
6825 src_status = find_src_status (in, set_src);
6827 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6828 src_status = find_src_status (out, set_src);
6831 set_src = find_src_set_src (in, set_src);
6833 if (REG_P (loc))
6834 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6835 else if (MEM_P (loc))
6836 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6838 break;
6840 case MO_USE_NO_VAR:
6842 rtx loc = mo->u.loc;
6844 if (REG_P (loc))
6845 var_reg_delete (out, loc, false);
6846 else if (MEM_P (loc))
6847 var_mem_delete (out, loc, false);
6849 break;
6851 case MO_CLOBBER:
6853 rtx loc = mo->u.loc;
6855 if (REG_P (loc))
6856 var_reg_delete (out, loc, true);
6857 else if (MEM_P (loc))
6858 var_mem_delete (out, loc, true);
6860 break;
6862 case MO_ADJUST:
6863 out->stack_adjust += mo->u.adjust;
6864 break;
6868 if (MAY_HAVE_DEBUG_INSNS)
6870 pointer_map_destroy (local_get_addr_cache);
6871 local_get_addr_cache = NULL;
6873 dataflow_set_equiv_regs (out);
6874 shared_hash_htab (out->vars)
6875 .traverse <dataflow_set *, canonicalize_values_mark> (out);
6876 shared_hash_htab (out->vars)
6877 .traverse <dataflow_set *, canonicalize_values_star> (out);
6878 #if ENABLE_CHECKING
6879 shared_hash_htab (out->vars)
6880 .traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6881 #endif
6883 changed = dataflow_set_different (&old_out, out);
6884 dataflow_set_destroy (&old_out);
6885 return changed;
6888 /* Find the locations of variables in the whole function. */
6890 static bool
6891 vt_find_locations (void)
6893 fibheap_t worklist, pending, fibheap_swap;
6894 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6895 basic_block bb;
6896 edge e;
6897 int *bb_order;
6898 int *rc_order;
6899 int i;
6900 int htabsz = 0;
6901 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6902 bool success = true;
6904 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6905 /* Compute reverse completion order of a depth-first search of the CFG
6906 so that the data-flow iteration converges faster. */
6907 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6908 bb_order = XNEWVEC (int, last_basic_block);
6909 pre_and_rev_post_order_compute (NULL, rc_order, false);
6910 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6911 bb_order[rc_order[i]] = i;
6912 free (rc_order);
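/* rc_order[i] is the index of the i-th basic block in reverse
   completion order; the inversion above makes bb_order[bb->index]
   the priority key used for the fibonacci heaps below, so blocks
   earlier in that order are processed first.  */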
6914 worklist = fibheap_new ();
6915 pending = fibheap_new ();
6916 visited = sbitmap_alloc (last_basic_block);
6917 in_worklist = sbitmap_alloc (last_basic_block);
6918 in_pending = sbitmap_alloc (last_basic_block);
6919 bitmap_clear (in_worklist);
6921 FOR_EACH_BB (bb)
6922 fibheap_insert (pending, bb_order[bb->index], bb);
6923 bitmap_ones (in_pending);
6925 while (success && !fibheap_empty (pending))
6927 fibheap_swap = pending;
6928 pending = worklist;
6929 worklist = fibheap_swap;
6930 sbitmap_swap = in_pending;
6931 in_pending = in_worklist;
6932 in_worklist = sbitmap_swap;
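/* The iteration uses two heaps: WORKLIST holds the blocks of the
   current round, while PENDING collects blocks that must be
   revisited in a later round because a predecessor's OUT set
   changed after they had already been visited.  Swapping the heaps
   and their membership bitmaps starts a new round without any
   reallocation.  */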
6934 bitmap_clear (visited);
6936 while (!fibheap_empty (worklist))
6938 bb = (basic_block) fibheap_extract_min (worklist);
6939 bitmap_clear_bit (in_worklist, bb->index);
6940 gcc_assert (!bitmap_bit_p (visited, bb->index));
6941 if (!bitmap_bit_p (visited, bb->index))
6943 bool changed;
6944 edge_iterator ei;
6945 int oldinsz, oldoutsz;
6947 bitmap_set_bit (visited, bb->index);
6949 if (VTI (bb)->in.vars)
6951 htabsz
6952 -= shared_hash_htab (VTI (bb)->in.vars).size ()
6953 + shared_hash_htab (VTI (bb)->out.vars).size ();
6954 oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements ();
6955 oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements ();
6957 else
6958 oldinsz = oldoutsz = 0;
6960 if (MAY_HAVE_DEBUG_INSNS)
6962 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6963 bool first = true, adjust = false;
6965 /* Calculate the IN set as the intersection of
6966 predecessor OUT sets. */
6968 dataflow_set_clear (in);
6969 dst_can_be_shared = true;
6971 FOR_EACH_EDGE (e, ei, bb->preds)
6972 if (!VTI (e->src)->flooded)
6973 gcc_assert (bb_order[bb->index]
6974 <= bb_order[e->src->index]);
6975 else if (first)
6977 dataflow_set_copy (in, &VTI (e->src)->out);
6978 first_out = &VTI (e->src)->out;
6979 first = false;
6981 else
6983 dataflow_set_merge (in, &VTI (e->src)->out);
6984 adjust = true;
6987 if (adjust)
6989 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6990 #if ENABLE_CHECKING
6991 /* Merge and merge_adjust should keep entries in
6992 canonical order. */
6993 shared_hash_htab (in->vars)
6994 .traverse <dataflow_set *,
6995 canonicalize_loc_order_check> (in);
6996 #endif
6997 if (dst_can_be_shared)
6999 shared_hash_destroy (in->vars);
7000 in->vars = shared_hash_copy (first_out->vars);
7004 VTI (bb)->flooded = true;
7006 else
7008 /* Calculate the IN set as union of predecessor OUT sets. */
7009 dataflow_set_clear (&VTI (bb)->in);
7010 FOR_EACH_EDGE (e, ei, bb->preds)
7011 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7014 changed = compute_bb_dataflow (bb);
7015 htabsz += shared_hash_htab (VTI (bb)->in.vars).size ()
7016 + shared_hash_htab (VTI (bb)->out.vars).size ();
7018 if (htabmax && htabsz > htabmax)
7020 if (MAY_HAVE_DEBUG_INSNS)
7021 inform (DECL_SOURCE_LOCATION (cfun->decl),
7022 "variable tracking size limit exceeded with "
7023 "-fvar-tracking-assignments, retrying without");
7024 else
7025 inform (DECL_SOURCE_LOCATION (cfun->decl),
7026 "variable tracking size limit exceeded");
7027 success = false;
7028 break;
7031 if (changed)
7033 FOR_EACH_EDGE (e, ei, bb->succs)
7035 if (e->dest == EXIT_BLOCK_PTR)
7036 continue;
7038 if (bitmap_bit_p (visited, e->dest->index))
7040 if (!bitmap_bit_p (in_pending, e->dest->index))
7042 /* Send E->DEST to next round. */
7043 bitmap_set_bit (in_pending, e->dest->index);
7044 fibheap_insert (pending,
7045 bb_order[e->dest->index],
7046 e->dest);
7049 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7051 /* Add E->DEST to current round. */
7052 bitmap_set_bit (in_worklist, e->dest->index);
7053 fibheap_insert (worklist, bb_order[e->dest->index],
7054 e->dest);
7059 if (dump_file)
7060 fprintf (dump_file,
7061 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7062 bb->index,
7063 (int)shared_hash_htab (VTI (bb)->in.vars).size (),
7064 oldinsz,
7065 (int)shared_hash_htab (VTI (bb)->out.vars).size (),
7066 oldoutsz,
7067 (int)worklist->nodes, (int)pending->nodes, htabsz);
7069 if (dump_file && (dump_flags & TDF_DETAILS))
7071 fprintf (dump_file, "BB %i IN:\n", bb->index);
7072 dump_dataflow_set (&VTI (bb)->in);
7073 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7074 dump_dataflow_set (&VTI (bb)->out);
7080 if (success && MAY_HAVE_DEBUG_INSNS)
7081 FOR_EACH_BB (bb)
7082 gcc_assert (VTI (bb)->flooded);
7084 free (bb_order);
7085 fibheap_delete (worklist);
7086 fibheap_delete (pending);
7087 sbitmap_free (visited);
7088 sbitmap_free (in_worklist);
7089 sbitmap_free (in_pending);
7091 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7092 return success;
7095 /* Print the contents of LIST to dump file. */
7097 static void
7098 dump_attrs_list (attrs list)
7100 for (; list; list = list->next)
7102 if (dv_is_decl_p (list->dv))
7103 print_mem_expr (dump_file, dv_as_decl (list->dv));
7104 else
7105 print_rtl_single (dump_file, dv_as_value (list->dv));
7106 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7108 fprintf (dump_file, "\n");
7111 /* Print the information about variable *SLOT to dump file. */
7113 int
7114 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7116 variable var = *slot;
7118 dump_var (var);
7120 /* Continue traversing the hash table. */
7121 return 1;
7124 /* Print the information about variable VAR to dump file. */
7126 static void
7127 dump_var (variable var)
7129 int i;
7130 location_chain node;
7132 if (dv_is_decl_p (var->dv))
7134 const_tree decl = dv_as_decl (var->dv);
7136 if (DECL_NAME (decl))
7138 fprintf (dump_file, " name: %s",
7139 IDENTIFIER_POINTER (DECL_NAME (decl)));
7140 if (dump_flags & TDF_UID)
7141 fprintf (dump_file, "D.%u", DECL_UID (decl));
7143 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7144 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7145 else
7146 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7147 fprintf (dump_file, "\n");
7149 else
7151 fputc (' ', dump_file);
7152 print_rtl_single (dump_file, dv_as_value (var->dv));
7155 for (i = 0; i < var->n_var_parts; i++)
7157 fprintf (dump_file, " offset %ld\n",
7158 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7159 for (node = var->var_part[i].loc_chain; node; node = node->next)
7161 fprintf (dump_file, " ");
7162 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7163 fprintf (dump_file, "[uninit]");
7164 print_rtl_single (dump_file, node->loc);
7169 /* Print the information about variables from hash table VARS to dump file. */
7171 static void
7172 dump_vars (variable_table_type vars)
7174 if (vars.elements () > 0)
7176 fprintf (dump_file, "Variables:\n");
7177 vars.traverse <void *, dump_var_tracking_slot> (NULL);
7181 /* Print the dataflow set SET to dump file. */
7183 static void
7184 dump_dataflow_set (dataflow_set *set)
7186 int i;
7188 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7189 set->stack_adjust);
7190 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7192 if (set->regs[i])
7194 fprintf (dump_file, "Reg %d:", i);
7195 dump_attrs_list (set->regs[i]);
7198 dump_vars (shared_hash_htab (set->vars));
7199 fprintf (dump_file, "\n");
7202 /* Print the IN and OUT sets for each basic block to dump file. */
7204 static void
7205 dump_dataflow_sets (void)
7207 basic_block bb;
7209 FOR_EACH_BB (bb)
7211 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7212 fprintf (dump_file, "IN:\n");
7213 dump_dataflow_set (&VTI (bb)->in);
7214 fprintf (dump_file, "OUT:\n");
7215 dump_dataflow_set (&VTI (bb)->out);
7219 /* Return the variable for DV in dropped_values, inserting one if
7220 requested with INSERT. */
7222 static inline variable
7223 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7225 variable_def **slot;
7226 variable empty_var;
7227 onepart_enum_t onepart;
7229 slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7231 if (!slot)
7232 return NULL;
7234 if (*slot)
7235 return *slot;
7237 gcc_checking_assert (insert == INSERT);
7239 onepart = dv_onepart_p (dv);
7241 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7243 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7244 empty_var->dv = dv;
7245 empty_var->refcount = 1;
7246 empty_var->n_var_parts = 0;
7247 empty_var->onepart = onepart;
7248 empty_var->in_changed_variables = false;
7249 empty_var->var_part[0].loc_chain = NULL;
7250 empty_var->var_part[0].cur_loc = NULL;
7251 VAR_LOC_1PAUX (empty_var) = NULL;
7252 set_dv_changed (dv, true);
7254 *slot = empty_var;
7256 return empty_var;
7259 /* Recover the one-part aux from dropped_values. */
7261 static struct onepart_aux *
7262 recover_dropped_1paux (variable var)
7264 variable dvar;
7266 gcc_checking_assert (var->onepart);
7268 if (VAR_LOC_1PAUX (var))
7269 return VAR_LOC_1PAUX (var);
7271 if (var->onepart == ONEPART_VDECL)
7272 return NULL;
7274 dvar = variable_from_dropped (var->dv, NO_INSERT);
7276 if (!dvar)
7277 return NULL;
7279 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7280 VAR_LOC_1PAUX (dvar) = NULL;
7282 return VAR_LOC_1PAUX (var);
7285 /* Add variable VAR to the hash table of changed variables and,
7286 if it has no locations, delete it from SET's hash table. */
7288 static void
7289 variable_was_changed (variable var, dataflow_set *set)
7291 hashval_t hash = dv_htab_hash (var->dv);
7293 if (emit_notes)
7295 variable_def **slot;
7297 /* Remember this decl or VALUE has been added to changed_variables. */
7298 set_dv_changed (var->dv, true);
7300 slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT);
7302 if (*slot)
7304 variable old_var = *slot;
7305 gcc_assert (old_var->in_changed_variables);
7306 old_var->in_changed_variables = false;
7307 if (var != old_var && var->onepart)
7309 /* Restore the auxiliary info from an empty variable
7310 previously created for changed_variables, so it is
7311 not lost. */
7312 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7313 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7314 VAR_LOC_1PAUX (old_var) = NULL;
7316 variable_htab_free (*slot);
7319 if (set && var->n_var_parts == 0)
7321 onepart_enum_t onepart = var->onepart;
7322 variable empty_var = NULL;
7323 variable_def **dslot = NULL;
7325 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7327 dslot = dropped_values.find_slot_with_hash (var->dv,
7328 dv_htab_hash (var->dv),
7329 INSERT);
7330 empty_var = *dslot;
7332 if (empty_var)
7334 gcc_checking_assert (!empty_var->in_changed_variables);
7335 if (!VAR_LOC_1PAUX (var))
7337 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7338 VAR_LOC_1PAUX (empty_var) = NULL;
7340 else
7341 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7345 if (!empty_var)
7347 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7348 empty_var->dv = var->dv;
7349 empty_var->refcount = 1;
7350 empty_var->n_var_parts = 0;
7351 empty_var->onepart = onepart;
7352 if (dslot)
7354 empty_var->refcount++;
7355 *dslot = empty_var;
7358 else
7359 empty_var->refcount++;
7360 empty_var->in_changed_variables = true;
7361 *slot = empty_var;
7362 if (onepart)
7364 empty_var->var_part[0].loc_chain = NULL;
7365 empty_var->var_part[0].cur_loc = NULL;
7366 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7367 VAR_LOC_1PAUX (var) = NULL;
7369 goto drop_var;
7371 else
7373 if (var->onepart && !VAR_LOC_1PAUX (var))
7374 recover_dropped_1paux (var);
7375 var->refcount++;
7376 var->in_changed_variables = true;
7377 *slot = var;
7380 else
7382 gcc_assert (set);
7383 if (var->n_var_parts == 0)
7385 variable_def **slot;
7387 drop_var:
7388 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7389 if (slot)
7391 if (shared_hash_shared (set->vars))
7392 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7393 NO_INSERT);
7394 shared_hash_htab (set->vars).clear_slot (slot);
7400 /* Look for the index in VAR->var_part corresponding to OFFSET.
7401 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7402 referenced int will be set to the index that the part has or should
7403 have, if it should be inserted. */
7405 static inline int
7406 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7407 int *insertion_point)
7409 int pos, low, high;
7411 if (var->onepart)
7413 if (offset != 0)
7414 return -1;
7416 if (insertion_point)
7417 *insertion_point = 0;
7419 return var->n_var_parts - 1;
7422 /* Find the location part. */
7423 low = 0;
7424 high = var->n_var_parts;
7425 while (low != high)
7427 pos = (low + high) / 2;
7428 if (VAR_PART_OFFSET (var, pos) < offset)
7429 low = pos + 1;
7430 else
7431 high = pos;
7433 pos = low;
7435 if (insertion_point)
7436 *insertion_point = pos;
7438 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7439 return pos;
7441 return -1;
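/* Subroutine of set_variable_part: add location LOC for offset
   OFFSET to the variable in *SLOT, creating or unsharing the
   variable first if necessary, and keeping one-part location chains
   in canonical order.  Return the possibly-relocated slot.  */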
7444 static variable_def **
7445 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7446 decl_or_value dv, HOST_WIDE_INT offset,
7447 enum var_init_status initialized, rtx set_src)
7449 int pos;
7450 location_chain node, next;
7451 location_chain *nextp;
7452 variable var;
7453 onepart_enum_t onepart;
7455 var = *slot;
7457 if (var)
7458 onepart = var->onepart;
7459 else
7460 onepart = dv_onepart_p (dv);
7462 gcc_checking_assert (offset == 0 || !onepart);
7463 gcc_checking_assert (loc != dv_as_opaque (dv));
7465 if (! flag_var_tracking_uninit)
7466 initialized = VAR_INIT_STATUS_INITIALIZED;
7468 if (!var)
7470 /* Create new variable information. */
7471 var = (variable) pool_alloc (onepart_pool (onepart));
7472 var->dv = dv;
7473 var->refcount = 1;
7474 var->n_var_parts = 1;
7475 var->onepart = onepart;
7476 var->in_changed_variables = false;
7477 if (var->onepart)
7478 VAR_LOC_1PAUX (var) = NULL;
7479 else
7480 VAR_PART_OFFSET (var, 0) = offset;
7481 var->var_part[0].loc_chain = NULL;
7482 var->var_part[0].cur_loc = NULL;
7483 *slot = var;
7484 pos = 0;
7485 nextp = &var->var_part[0].loc_chain;
7487 else if (onepart)
7489 int r = -1, c = 0;
7491 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7493 pos = 0;
7495 if (GET_CODE (loc) == VALUE)
7497 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7498 nextp = &node->next)
7499 if (GET_CODE (node->loc) == VALUE)
7501 if (node->loc == loc)
7503 r = 0;
7504 break;
7506 if (canon_value_cmp (node->loc, loc))
7507 c++;
7508 else
7510 r = 1;
7511 break;
7514 else if (REG_P (node->loc) || MEM_P (node->loc))
7515 c++;
7516 else
7518 r = 1;
7519 break;
7522 else if (REG_P (loc))
7524 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7525 nextp = &node->next)
7526 if (REG_P (node->loc))
7528 if (REGNO (node->loc) < REGNO (loc))
7529 c++;
7530 else
7532 if (REGNO (node->loc) == REGNO (loc))
7533 r = 0;
7534 else
7535 r = 1;
7536 break;
7539 else
7541 r = 1;
7542 break;
7545 else if (MEM_P (loc))
7547 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7548 nextp = &node->next)
7549 if (REG_P (node->loc))
7550 c++;
7551 else if (MEM_P (node->loc))
7553 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7554 break;
7555 else
7556 c++;
7558 else
7560 r = 1;
7561 break;
7564 else
7565 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7566 nextp = &node->next)
7567 if ((r = loc_cmp (node->loc, loc)) >= 0)
7568 break;
7569 else
7570 c++;
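/* At this point R == 0 means LOC was already on the chain and
   nothing needs to be done; otherwise C counts the nodes that
   precede LOC's insertion point in the canonical order.  */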
7572 if (r == 0)
7573 return slot;
7575 if (shared_var_p (var, set->vars))
7577 slot = unshare_variable (set, slot, var, initialized);
7578 var = *slot;
7579 for (nextp = &var->var_part[0].loc_chain; c;
7580 nextp = &(*nextp)->next)
7581 c--;
7582 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7585 else
7587 int inspos = 0;
7589 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7591 pos = find_variable_location_part (var, offset, &inspos);
7593 if (pos >= 0)
7595 node = var->var_part[pos].loc_chain;
7597 if (node
7598 && ((REG_P (node->loc) && REG_P (loc)
7599 && REGNO (node->loc) == REGNO (loc))
7600 || rtx_equal_p (node->loc, loc)))
7602 /* LOC is at the beginning of the chain so we have nothing
7603 to do. */
7604 if (node->init < initialized)
7605 node->init = initialized;
7606 if (set_src != NULL)
7607 node->set_src = set_src;
7609 return slot;
7611 else
7613 /* We have to make a copy of a shared variable. */
7614 if (shared_var_p (var, set->vars))
7616 slot = unshare_variable (set, slot, var, initialized);
7617 var = *slot;
7621 else
7623 /* We have not found the location part, so a new one will be created. */
7625 /* We have to make a copy of the shared variable. */
7626 if (shared_var_p (var, set->vars))
7628 slot = unshare_variable (set, slot, var, initialized);
7629 var = *slot;
7632 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7633 thus there are at most MAX_VAR_PARTS different offsets. */
7634 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7635 && (!var->n_var_parts || !onepart));
7637 /* We have to move the elements of the array starting at index
7638 INSPOS to the next position. */
7639 for (pos = var->n_var_parts; pos > inspos; pos--)
7640 var->var_part[pos] = var->var_part[pos - 1];
7642 var->n_var_parts++;
7643 gcc_checking_assert (!onepart);
7644 VAR_PART_OFFSET (var, pos) = offset;
7645 var->var_part[pos].loc_chain = NULL;
7646 var->var_part[pos].cur_loc = NULL;
7649 /* Delete the location from the list. */
7650 nextp = &var->var_part[pos].loc_chain;
7651 for (node = var->var_part[pos].loc_chain; node; node = next)
7653 next = node->next;
7654 if ((REG_P (node->loc) && REG_P (loc)
7655 && REGNO (node->loc) == REGNO (loc))
7656 || rtx_equal_p (node->loc, loc))
7658 /* Save these values, to assign to the new node, before
7659 deleting this one. */
7660 if (node->init > initialized)
7661 initialized = node->init;
7662 if (node->set_src != NULL && set_src == NULL)
7663 set_src = node->set_src;
7664 if (var->var_part[pos].cur_loc == node->loc)
7665 var->var_part[pos].cur_loc = NULL;
7666 pool_free (loc_chain_pool, node);
7667 *nextp = next;
7668 break;
7670 else
7671 nextp = &node->next;
7674 nextp = &var->var_part[pos].loc_chain;
7677 /* Add the location to the beginning. */
7678 node = (location_chain) pool_alloc (loc_chain_pool);
7679 node->loc = loc;
7680 node->init = initialized;
7681 node->set_src = set_src;
7682 node->next = *nextp;
7683 *nextp = node;
7685 /* If no location for this part was emitted yet, mark the change so one is. */
7686 if (var->var_part[pos].cur_loc == NULL)
7687 variable_was_changed (var, set);
7689 return slot;
7692 /* Set the part of variable's location in the dataflow set SET. The
7693 variable part is specified by variable's declaration in DV and
7694 offset OFFSET and the part's location by LOC. IOPT should be
7695 NO_INSERT if the variable is known to be in SET already and the
7696 variable hash table must not be resized, and INSERT otherwise. */
7698 static void
7699 set_variable_part (dataflow_set *set, rtx loc,
7700 decl_or_value dv, HOST_WIDE_INT offset,
7701 enum var_init_status initialized, rtx set_src,
7702 enum insert_option iopt)
7704 variable_def **slot;
7706 if (iopt == NO_INSERT)
7707 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7708 else
7710 slot = shared_hash_find_slot (set->vars, dv);
7711 if (!slot)
7712 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7714 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7717 /* Remove all recorded register locations for the given variable part
7718 from dataflow set SET, except for those that are identical to LOC.
7719 The variable part is specified by variable's declaration or value
7720 DV and offset OFFSET. */
7722 static variable_def **
7723 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7724 HOST_WIDE_INT offset, rtx set_src)
7726 variable var = *slot;
7727 int pos = find_variable_location_part (var, offset, NULL);
7729 if (pos >= 0)
7731 location_chain node, next;
7733 /* Remove the register locations from the dataflow set. */
7734 next = var->var_part[pos].loc_chain;
7735 for (node = next; node; node = next)
7737 next = node->next;
7738 if (node->loc != loc
7739 && (!flag_var_tracking_uninit
7740 || !set_src
7741 || MEM_P (set_src)
7742 || !rtx_equal_p (set_src, node->set_src)))
7744 if (REG_P (node->loc))
7746 attrs anode, anext;
7747 attrs *anextp;
7749 /* Remove the variable part from the register's
7750 list, but preserve any other variable parts
7751 that might be regarded as live in that same
7752 register. */
7753 anextp = &set->regs[REGNO (node->loc)];
7754 for (anode = *anextp; anode; anode = anext)
7756 anext = anode->next;
7757 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7758 && anode->offset == offset)
7760 pool_free (attrs_pool, anode);
7761 *anextp = anext;
7763 else
7764 anextp = &anode->next;
7768 slot = delete_slot_part (set, node->loc, slot, offset);
7773 return slot;
7776 /* Remove all recorded register locations for the given variable part
7777 from dataflow set SET, except for those that are identical to LOC.
7778 The variable part is specified by variable's declaration or value
7779 DV and offset OFFSET. */
7781 static void
7782 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7783 HOST_WIDE_INT offset, rtx set_src)
7785 variable_def **slot;
7787 if (!dv_as_opaque (dv)
7788 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7789 return;
7791 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7792 if (!slot)
7793 return;
7795 clobber_slot_part (set, loc, slot, offset, set_src);
7798 /* Delete the part of variable's location from dataflow set SET. The
7799 variable part is specified by its SET->vars slot SLOT and offset
7800 OFFSET and the part's location by LOC. */
7802 static variable_def **
7803 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7804 HOST_WIDE_INT offset)
7806 variable var = *slot;
7807 int pos = find_variable_location_part (var, offset, NULL);
7809 if (pos >= 0)
7811 location_chain node, next;
7812 location_chain *nextp;
7813 bool changed;
7814 rtx cur_loc;
7816 if (shared_var_p (var, set->vars))
7818 /* If the variable contains the location part we have to
7819 make a copy of the variable. */
7820 for (node = var->var_part[pos].loc_chain; node;
7821 node = node->next)
7823 if ((REG_P (node->loc) && REG_P (loc)
7824 && REGNO (node->loc) == REGNO (loc))
7825 || rtx_equal_p (node->loc, loc))
7827 slot = unshare_variable (set, slot, var,
7828 VAR_INIT_STATUS_UNKNOWN);
7829 var = *slot;
7830 break;
7835 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7836 cur_loc = VAR_LOC_FROM (var);
7837 else
7838 cur_loc = var->var_part[pos].cur_loc;
7840 /* Delete the location part. */
7841 changed = false;
7842 nextp = &var->var_part[pos].loc_chain;
7843 for (node = *nextp; node; node = next)
7845 next = node->next;
7846 if ((REG_P (node->loc) && REG_P (loc)
7847 && REGNO (node->loc) == REGNO (loc))
7848 || rtx_equal_p (node->loc, loc))
7850 /* If we have deleted the location which was last emitted
7851 we have to emit a new location, so add the variable to the
7852 set of changed variables. */
7853 if (cur_loc == node->loc)
7855 changed = true;
7856 var->var_part[pos].cur_loc = NULL;
7857 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7858 VAR_LOC_FROM (var) = NULL;
7860 pool_free (loc_chain_pool, node);
7861 *nextp = next;
7862 break;
7864 else
7865 nextp = &node->next;
7868 if (var->var_part[pos].loc_chain == NULL)
7870 changed = true;
7871 var->n_var_parts--;
7872 while (pos < var->n_var_parts)
7874 var->var_part[pos] = var->var_part[pos + 1];
7875 pos++;
7878 if (changed)
7879 variable_was_changed (var, set);
7882 return slot;
7885 /* Delete the part of variable's location from dataflow set SET. The
7886 variable part is specified by variable's declaration or value DV
7887 and offset OFFSET and the part's location by LOC. */
7889 static void
7890 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7891 HOST_WIDE_INT offset)
7893 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7894 if (!slot)
7895 return;
7897 delete_slot_part (set, loc, slot, offset);
7901 /* Structure for passing some other parameters to function
7902 vt_expand_loc_callback. */
7903 struct expand_loc_callback_data
7905 /* The variables and values active at this point. */
7906 variable_table_type vars;
7908 /* Stack of values and debug_exprs under expansion, and their
7909 children. */
7910 vec<rtx, va_stack> expanding;
7912 /* Stack of values and debug_exprs whose expansion hit recursion
7913 cycles. They will have VALUE_RECURSED_INTO marked when added to
7914 this list. This flag will be cleared if any of its dependencies
7915 resolves to a valid location. So, if the flag remains set at the
7916 end of the search, we know no valid location for this one can
7917 possibly exist. */
7918 vec<rtx, va_stack> pending;
7920 /* The maximum depth among the sub-expressions under expansion.
7921 Zero indicates no expansion so far. */
7922 expand_depth depth;
7925 /* Allocate the one-part auxiliary data structure for VAR, with enough
7926 room for COUNT dependencies. */
7928 static void
7929 loc_exp_dep_alloc (variable var, int count)
7931 size_t allocsize;
7933 gcc_checking_assert (var->onepart);
7935 /* We can be called with COUNT == 0 to allocate the data structure
7936 without any dependencies, e.g. for the backlinks only. However,
7937 if we are specifying a COUNT, then the dependency list must have
7938 been emptied before. It would be possible to adjust pointers or
7939 force it empty here, but this is better done at an earlier point
7940 in the algorithm, so we instead leave an assertion to catch
7941 errors. */
7942 gcc_checking_assert (!count
7943 || VAR_LOC_DEP_VEC (var) == NULL
7944 || VAR_LOC_DEP_VEC (var)->is_empty ());
7946 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
7947 return;
7949 allocsize = offsetof (struct onepart_aux, deps)
7950 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
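/* The dependency vector is embedded at the tail of struct
   onepart_aux, so a single allocation covers both the header and
   room for COUNT elements, in the style of a flexible array
   member.  */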
7952 if (VAR_LOC_1PAUX (var))
7954 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7955 VAR_LOC_1PAUX (var), allocsize);
7956 /* If the reallocation moves the onepaux structure, the
7957 back-pointer to BACKLINKS in the first list member will still
7958 point to its old location. Adjust it. */
7959 if (VAR_LOC_DEP_LST (var))
7960 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7962 else
7964 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7965 *VAR_LOC_DEP_LSTP (var) = NULL;
7966 VAR_LOC_FROM (var) = NULL;
7967 VAR_LOC_DEPTH (var).complexity = 0;
7968 VAR_LOC_DEPTH (var).entryvals = 0;
7970 VAR_LOC_DEP_VEC (var)->embedded_init (count);
7973 /* Remove all entries from the vector of active dependencies of VAR,
7974 removing them from the back-links lists too. */
7976 static void
7977 loc_exp_dep_clear (variable var)
7979 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
7981 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
7982 if (led->next)
7983 led->next->pprev = led->pprev;
7984 if (led->pprev)
7985 *led->pprev = led->next;
7986 VAR_LOC_DEP_VEC (var)->pop ();
7990 /* Insert an active dependency from VAR on X to the vector of
7991 dependencies, and add the corresponding back-link to X's list of
7992 back-links in VARS. */
7994 static void
7995 loc_exp_insert_dep (variable var, rtx x, variable_table_type vars)
7997 decl_or_value dv;
7998 variable xvar;
7999 loc_exp_dep *led;
8001 dv = dv_from_rtx (x);
8003 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8004 an additional lookup? */
8005 xvar = vars.find_with_hash (dv, dv_htab_hash (dv));
8007 if (!xvar)
8009 xvar = variable_from_dropped (dv, NO_INSERT);
8010 gcc_checking_assert (xvar);
8013 /* No point in adding the same backlink more than once. This may
8014 arise if, say, the same value appears in two complex expressions in
8015 the same loc_list, or even more than once in a single
8016 expression. */
8017 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8018 return;
8020 if (var->onepart == NOT_ONEPART)
8021 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
8022 else
8024 loc_exp_dep empty;
8025 memset (&empty, 0, sizeof (empty));
8026 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8027 led = &VAR_LOC_DEP_VEC (var)->last ();
8029 led->dv = var->dv;
8030 led->value = x;
8032 loc_exp_dep_alloc (xvar, 0);
8033 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8034 led->next = *led->pprev;
8035 if (led->next)
8036 led->next->pprev = &led->next;
8037 *led->pprev = led;
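/* The back-links form an intrusive doubly-linked list threaded
   through pprev pointer-to-pointer fields, so a node can be
   unlinked (as in notify_dependents_of_resolved_value) without
   knowing the list head.  */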
8040 /* Create active dependencies of VAR on COUNT values starting at
8041 VALUE, and corresponding back-links to the entries in VARS. Return
8042 true if we found any pending-recursion results. */
8044 static bool
8045 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8046 variable_table_type vars)
8048 bool pending_recursion = false;
8050 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8051 || VAR_LOC_DEP_VEC (var)->is_empty ());
8053 /* Set up all dependencies from last_child (as set up at the end of
8054 the loop above) to the end. */
8055 loc_exp_dep_alloc (var, count);
8057 while (count--)
8059 rtx x = *value++;
8061 if (!pending_recursion)
8062 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8064 loc_exp_insert_dep (var, x, vars);
8067 return pending_recursion;
8070 /* Notify the back-links of IVAR that are pending recursion that we
8071 have found a non-NIL value for it, so they are cleared for another
8072 attempt to compute a current location. */
8074 static void
8075 notify_dependents_of_resolved_value (variable ivar, variable_table_type vars)
8077 loc_exp_dep *led, *next;
8079 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8081 decl_or_value dv = led->dv;
8082 variable var;
8084 next = led->next;
8086 if (dv_is_value_p (dv))
8088 rtx value = dv_as_value (dv);
8090 /* If we have already resolved it, leave it alone. */
8091 if (!VALUE_RECURSED_INTO (value))
8092 continue;
8094 /* Check that VALUE_RECURSED_INTO, true from the test above,
8095 implies NO_LOC_P. */
8096 gcc_checking_assert (NO_LOC_P (value));
8098 /* We won't notify variables that are being expanded,
8099 because their dependency list is cleared before
8100 recursing. */
8101 NO_LOC_P (value) = false;
8102 VALUE_RECURSED_INTO (value) = false;
8104 gcc_checking_assert (dv_changed_p (dv));
8106 else
8108 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8109 if (!dv_changed_p (dv))
8110 continue;
8113 var = vars.find_with_hash (dv, dv_htab_hash (dv));
8115 if (!var)
8116 var = variable_from_dropped (dv, NO_INSERT);
8118 if (var)
8119 notify_dependents_of_resolved_value (var, vars);
8121 if (next)
8122 next->pprev = led->pprev;
8123 if (led->pprev)
8124 *led->pprev = next;
8125 led->next = NULL;
8126 led->pprev = NULL;
8130 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8131 int max_depth, void *data);
8133 /* Return the combined depth, when one sub-expression evaluated to
8134 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8136 static inline expand_depth
8137 update_depth (expand_depth saved_depth, expand_depth best_depth)
8139 /* If we didn't find anything, stick with what we had. */
8140 if (!best_depth.complexity)
8141 return saved_depth;
8143 /* If we hadn't found anything before, use the depth of the current
8144 expression. Do NOT add one extra level, we want to compute the
8145 maximum depth among sub-expressions. We'll increment it later,
8146 if appropriate. */
8147 if (!saved_depth.complexity)
8148 return best_depth;
8150 /* Combine the entryval count so that regardless of which one we
8151 return, the entryval count is accurate. */
8152 best_depth.entryvals = saved_depth.entryvals
8153 = best_depth.entryvals + saved_depth.entryvals;
8155 if (saved_depth.complexity < best_depth.complexity)
8156 return best_depth;
8157 else
8158 return saved_depth;
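/* For example, combining SAVED_DEPTH { complexity 2, entryvals 1 }
   with BEST_DEPTH { complexity 3, entryvals 0 } yields
   { complexity 3, entryvals 1 }: the larger complexity wins while
   the entryval counts accumulate.  */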
8161 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8162 DATA for cselib expand callback. If PENDRECP is given, indicate in
8163 it whether any sub-expression couldn't be fully evaluated because
8164 it is pending recursion resolution. */
8166 static inline rtx
8167 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8169 struct expand_loc_callback_data *elcd
8170 = (struct expand_loc_callback_data *) data;
8171 location_chain loc, next;
8172 rtx result = NULL;
8173 int first_child, result_first_child, last_child;
8174 bool pending_recursion;
8175 rtx loc_from = NULL;
8176 struct elt_loc_list *cloc = NULL;
8177 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8178 int wanted_entryvals, found_entryvals = 0;
8180 /* Clear all backlinks pointing at this, so that we're not notified
8181 while we're active. */
8182 loc_exp_dep_clear (var);
8184 retry:
8185 if (var->onepart == ONEPART_VALUE)
8187 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8189 gcc_checking_assert (cselib_preserved_value_p (val));
8191 cloc = val->locs;
8194 first_child = result_first_child = last_child
8195 = elcd->expanding.length ();
8197 wanted_entryvals = found_entryvals;
8199 /* Attempt to expand each available location in turn. */
8200 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8201 loc || cloc; loc = next)
8203 result_first_child = last_child;
8205 if (!loc)
8207 loc_from = cloc->loc;
8208 next = loc;
8209 cloc = cloc->next;
8210 if (unsuitable_loc (loc_from))
8211 continue;
8213 else
8215 loc_from = loc->loc;
8216 next = loc->next;
8219 gcc_checking_assert (!unsuitable_loc (loc_from));
8221 elcd->depth.complexity = elcd->depth.entryvals = 0;
8222 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8223 vt_expand_loc_callback, data);
8224 last_child = elcd->expanding.length ();
8226 if (result)
8228 depth = elcd->depth;
8230 gcc_checking_assert (depth.complexity
8231 || result_first_child == last_child);
8233 if (last_child - result_first_child != 1)
8235 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8236 depth.entryvals++;
8237 depth.complexity++;
8240 if (depth.complexity <= EXPR_USE_DEPTH)
8242 if (depth.entryvals <= wanted_entryvals)
8243 break;
8244 else if (!found_entryvals || depth.entryvals < found_entryvals)
8245 found_entryvals = depth.entryvals;
8248 result = NULL;
8251 /* Set it up in case we leave the loop. */
8252 depth.complexity = depth.entryvals = 0;
8253 loc_from = NULL;
8254 result_first_child = first_child;
8257 if (!loc_from && wanted_entryvals < found_entryvals)
8259 /* We found entries with ENTRY_VALUEs and skipped them. Since
8260 we could not find any expansions without ENTRY_VALUEs, but we
8261 found at least one with them, go back and get an entry with
8262 the minimum ENTRY_VALUE count that we found.  We could
8263 avoid looping, but since each sub-loc is already resolved,
8264 the re-expansion should be trivial. ??? Should we record all
8265 attempted locs as dependencies, so that we retry the
8266 expansion should any of them change, in the hope it can give
8267 us a new entry without an ENTRY_VALUE? */
8268 elcd->expanding.truncate (first_child);
8269 goto retry;
8272 /* Register all encountered dependencies as active. */
8273 pending_recursion = loc_exp_dep_set
8274 (var, result, elcd->expanding.address () + result_first_child,
8275 last_child - result_first_child, elcd->vars);
8277 elcd->expanding.truncate (first_child);
8279 /* Record where the expansion came from. */
8280 gcc_checking_assert (!result || !pending_recursion);
8281 VAR_LOC_FROM (var) = loc_from;
8282 VAR_LOC_DEPTH (var) = depth;
8284 gcc_checking_assert (!depth.complexity == !result);
8286 elcd->depth = update_depth (saved_depth, depth);
8288 /* Indicate whether any of the dependencies are pending recursion
8289 resolution. */
8290 if (pendrecp)
8291 *pendrecp = pending_recursion;
8293 if (!pendrecp || !pending_recursion)
8294 var->var_part[0].cur_loc = result;
8296 return result;
8299 /* Callback for cselib_expand_value that looks for expressions
8300 holding the value in the var-tracking hash tables. Return X for
8301 standard processing, anything else is to be used as-is. */
8303 static rtx
8304 vt_expand_loc_callback (rtx x, bitmap regs,
8305 int max_depth ATTRIBUTE_UNUSED,
8306 void *data)
8308 struct expand_loc_callback_data *elcd
8309 = (struct expand_loc_callback_data *) data;
8310 decl_or_value dv;
8311 variable var;
8312 rtx result, subreg;
8313 bool pending_recursion = false;
8314 bool from_empty = false;
8316 switch (GET_CODE (x))
8318 case SUBREG:
8319 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8320 EXPR_DEPTH,
8321 vt_expand_loc_callback, data);
8323 if (!subreg)
8324 return NULL;
8326 result = simplify_gen_subreg (GET_MODE (x), subreg,
8327 GET_MODE (SUBREG_REG (x)),
8328 SUBREG_BYTE (x));
8330 /* Invalid SUBREGs are ok in debug info. ??? We could try
8331 alternate expansions for the VALUE as well. */
8332 if (!result)
8333 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8335 return result;
8337 case DEBUG_EXPR:
8338 case VALUE:
8339 dv = dv_from_rtx (x);
8340 break;
8342 default:
8343 return x;
8346 elcd->expanding.safe_push (x);
8348 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8349 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8351 if (NO_LOC_P (x))
8353 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8354 return NULL;
8357 var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv));
8359 if (!var)
8361 from_empty = true;
8362 var = variable_from_dropped (dv, INSERT);
8365 gcc_checking_assert (var);
8367 if (!dv_changed_p (dv))
8369 gcc_checking_assert (!NO_LOC_P (x));
8370 gcc_checking_assert (var->var_part[0].cur_loc);
8371 gcc_checking_assert (VAR_LOC_1PAUX (var));
8372 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8374 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8376 return var->var_part[0].cur_loc;
8379 VALUE_RECURSED_INTO (x) = true;
8380 /* This is tentative, but it makes some tests simpler. */
8381 NO_LOC_P (x) = true;
8383 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8385 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8387 if (pending_recursion)
8389 gcc_checking_assert (!result);
8390 elcd->pending.safe_push (x);
8392 else
8394 NO_LOC_P (x) = !result;
8395 VALUE_RECURSED_INTO (x) = false;
8396 set_dv_changed (dv, false);
8398 if (result)
8399 notify_dependents_of_resolved_value (var, elcd->vars);
8402 return result;
8405 /* While expanding variables, we may encounter recursion cycles
8406 because of mutual (possibly indirect) dependencies between two
8407 particular variables (or values), say A and B. If we're trying to
8408 expand A when we get to B, which in turn attempts to expand A, if
8409 we can't find any other expansion for B, we'll add B to this
8410 pending-recursion stack, and tentatively return NULL for its
8411 location. This tentative value will be used for any other
8412 occurrences of B, unless A gets some other location, in which case
8413 it will notify B that it is worth another try at computing a
8414 location for it, and it will use the location computed for A then.
8415 At the end of the expansion, the tentative NULL locations become
8416 final for all members of PENDING that didn't get a notification.
8417 This function performs this finalization of NULL locations. */
8419 static void
8420 resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
8422 while (!pending.is_empty ())
8424 rtx x = pending.pop ();
8425 decl_or_value dv;
8427 if (!VALUE_RECURSED_INTO (x))
8428 continue;
8430 gcc_checking_assert (NO_LOC_P (x));
8431 VALUE_RECURSED_INTO (x) = false;
8432 dv = dv_from_rtx (x);
8433 gcc_checking_assert (dv_changed_p (dv));
8434 set_dv_changed (dv, false);
8438 /* Initialize expand_loc_callback_data D with variable hash table V.
8439 It must be a macro because of alloca (vec stack). */
8440 #define INIT_ELCD(d, v) \
8441 do \
8443 (d).vars = (v); \
8444 vec_stack_alloc (rtx, (d).expanding, 4); \
8445 vec_stack_alloc (rtx, (d).pending, 4); \
8446 (d).depth.complexity = (d).depth.entryvals = 0; \
8448 while (0)
8449 /* Finalize expand_loc_callback_data D, resolved to location L. */
8450 #define FINI_ELCD(d, l) \
8451 do \
8453 resolve_expansions_pending_recursion ((d).pending); \
8454 (d).pending.release (); \
8455 (d).expanding.release (); \
8457 if ((l) && MEM_P (l)) \
8458 (l) = targetm.delegitimize_address (l); \
8460 while (0)
8462 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8463 equivalences in VARS, updating their CUR_LOCs in the process. */
8465 static rtx
8466 vt_expand_loc (rtx loc, variable_table_type vars)
8468 struct expand_loc_callback_data data;
8469 rtx result;
8471 if (!MAY_HAVE_DEBUG_INSNS)
8472 return loc;
8474 INIT_ELCD (data, vars);
8476 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8477 vt_expand_loc_callback, &data);
8479 FINI_ELCD (data, result);
8481 return result;
8484 /* Expand the one-part VARiable to a location, using the equivalences
8485 in VARS, updating their CUR_LOCs in the process. */
8487 static rtx
8488 vt_expand_1pvar (variable var, variable_table_type vars)
8490 struct expand_loc_callback_data data;
8491 rtx loc;
8493 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8495 if (!dv_changed_p (var->dv))
8496 return var->var_part[0].cur_loc;
8498 INIT_ELCD (data, vars);
8500 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8502 gcc_checking_assert (data.expanding.is_empty ());
8504 FINI_ELCD (data, loc);
8506 return loc;
8509 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8510 additional parameters: WHERE specifies whether the note shall be emitted
8511 before or after instruction INSN. */
8513 static int
8514 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8516 variable var = *varp;
8517 rtx insn = data->insn;
8518 enum emit_note_where where = data->where;
8519 variable_table_type vars = data->vars;
8520 rtx note, note_vl;
8521 int i, j, n_var_parts;
8522 bool complete;
8523 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8524 HOST_WIDE_INT last_limit;
8525 tree type_size_unit;
8526 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8527 rtx loc[MAX_VAR_PARTS];
8528 tree decl;
8529 location_chain lc;
8531 gcc_checking_assert (var->onepart == NOT_ONEPART
8532 || var->onepart == ONEPART_VDECL);
8534 decl = dv_as_decl (var->dv);
8536 complete = true;
8537 last_limit = 0;
8538 n_var_parts = 0;
8539 if (!var->onepart)
8540 for (i = 0; i < var->n_var_parts; i++)
8541 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8542 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8543 for (i = 0; i < var->n_var_parts; i++)
8545 enum machine_mode mode, wider_mode;
8546 rtx loc2;
8547 HOST_WIDE_INT offset;
8549 if (i == 0 && var->onepart)
8551 gcc_checking_assert (var->n_var_parts == 1);
8552 offset = 0;
8553 initialized = VAR_INIT_STATUS_INITIALIZED;
8554 loc2 = vt_expand_1pvar (var, vars);
8556 else
8558 if (last_limit < VAR_PART_OFFSET (var, i))
8560 complete = false;
8561 break;
8563 else if (last_limit > VAR_PART_OFFSET (var, i))
8564 continue;
8565 offset = VAR_PART_OFFSET (var, i);
8566 loc2 = var->var_part[i].cur_loc;
8567 if (loc2 && GET_CODE (loc2) == MEM
8568 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8570 rtx depval = XEXP (loc2, 0);
8572 loc2 = vt_expand_loc (loc2, vars);
8574 if (loc2)
8575 loc_exp_insert_dep (var, depval, vars);
8577 if (!loc2)
8579 complete = false;
8580 continue;
8582 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8583 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8584 if (var->var_part[i].cur_loc == lc->loc)
8586 initialized = lc->init;
8587 break;
8589 gcc_assert (lc);
8592 offsets[n_var_parts] = offset;
8593 if (!loc2)
8595 complete = false;
8596 continue;
8598 loc[n_var_parts] = loc2;
8599 mode = GET_MODE (var->var_part[i].cur_loc);
8600 if (mode == VOIDmode && var->onepart)
8601 mode = DECL_MODE (decl);
8602 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8604 /* Attempt to merge adjacent registers or memory. */
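/* E.g. (hypothetical registers and offsets, subject to the endianness
   and register-layout checks below) two SImode parts at offsets 0 and
   4 held in the consecutive hard registers (reg:SI 0) and (reg:SI 1)
   can become a single (reg:DI 0), and the adjacent stack slots
   (mem:SI (plus sp 8)) and (mem:SI (plus sp 12)) can become one
   (mem:DI (plus sp 8)).  */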
8605 wider_mode = GET_MODE_WIDER_MODE (mode);
8606 for (j = i + 1; j < var->n_var_parts; j++)
8607 if (last_limit <= VAR_PART_OFFSET (var, j))
8608 break;
8609 if (j < var->n_var_parts
8610 && wider_mode != VOIDmode
8611 && var->var_part[j].cur_loc
8612 && mode == GET_MODE (var->var_part[j].cur_loc)
8613 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8614 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8615 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8616 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8618 rtx new_loc = NULL;
8620 if (REG_P (loc[n_var_parts])
8621 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8622 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8623 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8624 == REGNO (loc2))
8626 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8627 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8628 mode, 0);
8629 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8630 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8631 if (new_loc)
8633 if (!REG_P (new_loc)
8634 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8635 new_loc = NULL;
8636 else
8637 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8640 else if (MEM_P (loc[n_var_parts])
8641 && GET_CODE (XEXP (loc2, 0)) == PLUS
8642 && REG_P (XEXP (XEXP (loc2, 0), 0))
8643 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8645 if ((REG_P (XEXP (loc[n_var_parts], 0))
8646 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8647 XEXP (XEXP (loc2, 0), 0))
8648 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8649 == GET_MODE_SIZE (mode))
8650 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8651 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8652 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8653 XEXP (XEXP (loc2, 0), 0))
8654 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8655 + GET_MODE_SIZE (mode)
8656 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8657 new_loc = adjust_address_nv (loc[n_var_parts],
8658 wider_mode, 0);
8661 if (new_loc)
8663 loc[n_var_parts] = new_loc;
8664 mode = wider_mode;
8665 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8666 i = j;
8669 ++n_var_parts;
8671 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8672 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8673 complete = false;
8675 if (! flag_var_tracking_uninit)
8676 initialized = VAR_INIT_STATUS_INITIALIZED;
8678 note_vl = NULL_RTX;
8679 if (!complete)
8680 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8681 (int) initialized);
8682 else if (n_var_parts == 1)
8684 rtx expr_list;
8686 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8687 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8688 else
8689 expr_list = loc[0];
8691 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8692 (int) initialized);
8694 else if (n_var_parts)
8696 rtx parallel;
8698 for (i = 0; i < n_var_parts; i++)
8699 loc[i]
8700 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8702 parallel = gen_rtx_PARALLEL (VOIDmode,
8703 gen_rtvec_v (n_var_parts, loc));
8704 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8705 parallel, (int) initialized);
8708 if (where != EMIT_NOTE_BEFORE_INSN)
8710 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8711 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8712 NOTE_DURING_CALL_P (note) = true;
8714 else
8716 /* Make sure that the call-related notes come first. */
8717 while (NEXT_INSN (insn)
8718 && NOTE_P (insn)
8719 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8720 && NOTE_DURING_CALL_P (insn))
8721 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8722 insn = NEXT_INSN (insn);
8723 if (NOTE_P (insn)
8724 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8725 && NOTE_DURING_CALL_P (insn))
8726 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8727 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8728 else
8729 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8731 NOTE_VAR_LOCATION (note) = note_vl;
8733 set_dv_changed (var->dv, false);
8734 gcc_assert (var->in_changed_variables);
8735 var->in_changed_variables = false;
8736 changed_variables.clear_slot (varp);
8738 /* Continue traversing the hash table. */
8739 return 1;
8742 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8743 (a stack of RTX values) entries that aren't user variables. */
8745 static int
8746 var_track_values_to_stack (variable_def **slot,
8747 vec<rtx, va_stack> *changed_values_stack)
8749 variable var = *slot;
8751 if (var->onepart == ONEPART_VALUE)
8752 changed_values_stack->safe_push (dv_as_value (var->dv));
8753 else if (var->onepart == ONEPART_DEXPR)
8754 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8756 return 1;
8759 /* Remove from changed_variables the entry whose DV corresponds to
8760 value or debug_expr VAL. */
8761 static void
8762 remove_value_from_changed_variables (rtx val)
8764 decl_or_value dv = dv_from_rtx (val);
8765 variable_def **slot;
8766 variable var;
8768 slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8769 NO_INSERT);
8770 var = *slot;
8771 var->in_changed_variables = false;
8772 changed_variables.clear_slot (slot);
8775 /* If VAL (a value or debug_expr) has backlinks to variables actively
8776 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8777 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8778 have dependencies of their own to notify. */
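/* For example (a sketch of the transitive walk; names hypothetical):
   if variable A's location was expanded in terms of VALUE V1, and V1
   in turn in terms of V2, then a change to V2 consumes V2's backlink
   to V1 and pushes V1 onto the stack, and processing V1 then marks A
   itself as changed.  */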
8780 static void
8781 notify_dependents_of_changed_value (rtx val, variable_table_type htab,
8782 vec<rtx, va_stack> *changed_values_stack)
8784 variable_def **slot;
8785 variable var;
8786 loc_exp_dep *led;
8787 decl_or_value dv = dv_from_rtx (val);
8789 slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
8790 NO_INSERT);
8791 if (!slot)
8792 slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8793 if (!slot)
8794 slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
8795 NO_INSERT);
8796 var = *slot;
8798 while ((led = VAR_LOC_DEP_LST (var)))
8800 decl_or_value ldv = led->dv;
8801 variable ivar;
8803 /* Deactivate and remove the backlink, as it was "used up". It
8804 makes no sense to attempt to notify the same entity again:
8805 either it will be recomputed and re-register an active
8806 dependency, or it will still have the changed mark. */
8807 if (led->next)
8808 led->next->pprev = led->pprev;
8809 if (led->pprev)
8810 *led->pprev = led->next;
8811 led->next = NULL;
8812 led->pprev = NULL;
8814 if (dv_changed_p (ldv))
8815 continue;
8817 switch (dv_onepart_p (ldv))
8819 case ONEPART_VALUE:
8820 case ONEPART_DEXPR:
8821 set_dv_changed (ldv, true);
8822 changed_values_stack->safe_push (dv_as_rtx (ldv));
8823 break;
8825 case ONEPART_VDECL:
8826 ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8827 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8828 variable_was_changed (ivar, NULL);
8829 break;
8831 case NOT_ONEPART:
8832 pool_free (loc_exp_dep_pool, led);
8833 ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
8834 if (ivar)
8836 int i = ivar->n_var_parts;
8837 while (i--)
8839 rtx loc = ivar->var_part[i].cur_loc;
8841 if (loc && GET_CODE (loc) == MEM
8842 && XEXP (loc, 0) == val)
8844 variable_was_changed (ivar, NULL);
8845 break;
8849 break;
8851 default:
8852 gcc_unreachable ();
8857 /* Take out of changed_variables any entries that don't refer to user
8858 variables. Back-propagate change notifications from values and
8859 debug_exprs to their active dependencies in HTAB or in
8860 CHANGED_VARIABLES. */
8862 static void
8863 process_changed_values (variable_table_type htab)
8865 int i, n;
8866 rtx val;
8867 vec<rtx, va_stack> changed_values_stack;
8869 vec_stack_alloc (rtx, changed_values_stack, 20);
8871 /* Move values from changed_variables to changed_values_stack. */
8872 changed_variables
8873 .traverse <vec<rtx, va_stack>*, var_track_values_to_stack>
8874 (&changed_values_stack);
8876 /* Back-propagate change notifications in values while popping
8877 them from the stack. */
8878 for (n = i = changed_values_stack.length ();
8879 i > 0; i = changed_values_stack.length ())
8881 val = changed_values_stack.pop ();
8882 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8884 /* This condition will hold when visiting each of the entries
8885 originally in changed_variables. We can't remove them
8886 earlier because this could drop the backlinks before we got a
8887 chance to use them. */
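/* A worked example with hypothetical counts: with n == 3 original
   entries, popping the first may push two dependencies, so the
   following iterations see i != n and treat the popped values as
   dependencies (notified but not removed here); once the stack
   shrinks back so that i == n again, the popped value is one of
   the original entries and is removed, decrementing n.  */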
8888 if (i == n)
8890 remove_value_from_changed_variables (val);
8891 n--;
8895 changed_values_stack.release ();
8898 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8899 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8900 the notes shall be emitted before or after instruction INSN. */
8902 static void
8903 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8904 shared_hash vars)
8906 emit_note_data data;
8907 variable_table_type htab = shared_hash_htab (vars);
8909 if (!changed_variables.elements ())
8910 return;
8912 if (MAY_HAVE_DEBUG_INSNS)
8913 process_changed_values (htab);
8915 data.insn = insn;
8916 data.where = where;
8917 data.vars = htab;
8919 changed_variables
8920 .traverse <emit_note_data*, emit_note_insn_var_location> (&data);
8923 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8924 same variable in hash table NEW_VARS or is not there at all. */
8926 static int
8927 emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
8929 variable old_var, new_var;
8931 old_var = *slot;
8932 new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
8934 if (!new_var)
8936 /* Variable has disappeared. */
8937 variable empty_var = NULL;
8939 if (old_var->onepart == ONEPART_VALUE
8940 || old_var->onepart == ONEPART_DEXPR)
8942 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8943 if (empty_var)
8945 gcc_checking_assert (!empty_var->in_changed_variables);
8946 if (!VAR_LOC_1PAUX (old_var))
8948 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8949 VAR_LOC_1PAUX (empty_var) = NULL;
8951 else
8952 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8956 if (!empty_var)
8958 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8959 empty_var->dv = old_var->dv;
8960 empty_var->refcount = 0;
8961 empty_var->n_var_parts = 0;
8962 empty_var->onepart = old_var->onepart;
8963 empty_var->in_changed_variables = false;
8966 if (empty_var->onepart)
8968 /* Propagate the auxiliary data to (ultimately)
8969 changed_variables. */
8970 empty_var->var_part[0].loc_chain = NULL;
8971 empty_var->var_part[0].cur_loc = NULL;
8972 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8973 VAR_LOC_1PAUX (old_var) = NULL;
8975 variable_was_changed (empty_var, NULL);
8976 /* Continue traversing the hash table. */
8977 return 1;
8979 /* Update cur_loc and one-part auxiliary data, before new_var goes
8980 through variable_was_changed. */
8981 if (old_var != new_var && new_var->onepart)
8983 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8984 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8985 VAR_LOC_1PAUX (old_var) = NULL;
8986 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8988 if (variable_different_p (old_var, new_var))
8989 variable_was_changed (new_var, NULL);
8991 /* Continue traversing the hash table. */
8992 return 1;
8995 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8996 table OLD_VARS. */
8998 static int
8999 emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
9001 variable old_var, new_var;
9003 new_var = *slot;
9004 old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9005 if (!old_var)
9007 int i;
9008 for (i = 0; i < new_var->n_var_parts; i++)
9009 new_var->var_part[i].cur_loc = NULL;
9010 variable_was_changed (new_var, NULL);
9013 /* Continue traversing the hash table. */
9014 return 1;
9017 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9018 NEW_SET. */
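/* The two traversals below are complementary: the first one catches
   variables that disappeared from or changed between OLD_SET and
   NEW_SET, the second one catches variables present in NEW_SET
   only.  */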
9020 static void
9021 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
9022 dataflow_set *new_set)
9024 shared_hash_htab (old_set->vars)
9025 .traverse <variable_table_type, emit_notes_for_differences_1>
9026 (shared_hash_htab (new_set->vars));
9027 shared_hash_htab (new_set->vars)
9028 .traverse <variable_table_type, emit_notes_for_differences_2>
9029 (shared_hash_htab (old_set->vars));
9030 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9033 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9035 static rtx
9036 next_non_note_insn_var_location (rtx insn)
9038 while (insn)
9040 insn = NEXT_INSN (insn);
9041 if (insn == 0
9042 || !NOTE_P (insn)
9043 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9044 break;
9047 return insn;
9050 /* Emit the notes for changes of location parts in the basic block BB. */
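/* A summary of the note placement in the cases below: uses and
   bound locations (MO_USE, MO_VAL_USE, MO_VAL_LOC, MO_USE_NO_VAR)
   emit their notes relative to INSN itself (before or after it),
   the effects of stores (MO_SET, MO_COPY, MO_CLOBBER, MO_VAL_SET)
   are emitted before NEXT_INSN, i.e. textually after INSN, and
   MO_CALL notes are flagged as emitted during the call.  */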
9052 static void
9053 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9055 unsigned int i;
9056 micro_operation *mo;
9058 dataflow_set_clear (set);
9059 dataflow_set_copy (set, &VTI (bb)->in);
9061 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9063 rtx insn = mo->insn;
9064 rtx next_insn = next_non_note_insn_var_location (insn);
9066 switch (mo->type)
9068 case MO_CALL:
9069 dataflow_set_clear_at_call (set);
9070 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9072 rtx arguments = mo->u.loc, *p = &arguments, note;
9073 while (*p)
9075 XEXP (XEXP (*p, 0), 1)
9076 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9077 shared_hash_htab (set->vars));
9078 /* If expansion is successful, keep it in the list. */
9079 if (XEXP (XEXP (*p, 0), 1))
9080 p = &XEXP (*p, 1);
9081 /* Otherwise, if the following item is the data_value for it,
9082 drop it too. */
9083 else if (XEXP (*p, 1)
9084 && REG_P (XEXP (XEXP (*p, 0), 0))
9085 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9086 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9087 0))
9088 && REGNO (XEXP (XEXP (*p, 0), 0))
9089 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9090 0), 0)))
9091 *p = XEXP (XEXP (*p, 1), 1);
9092 /* Just drop this item. */
9093 else
9094 *p = XEXP (*p, 1);
9096 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9097 NOTE_VAR_LOCATION (note) = arguments;
9099 break;
9101 case MO_USE:
9103 rtx loc = mo->u.loc;
9105 if (REG_P (loc))
9106 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9107 else
9108 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9110 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9112 break;
9114 case MO_VAL_LOC:
9116 rtx loc = mo->u.loc;
9117 rtx val, vloc;
9118 tree var;
9120 if (GET_CODE (loc) == CONCAT)
9122 val = XEXP (loc, 0);
9123 vloc = XEXP (loc, 1);
9125 else
9127 val = NULL_RTX;
9128 vloc = loc;
9131 var = PAT_VAR_LOCATION_DECL (vloc);
9133 clobber_variable_part (set, NULL_RTX,
9134 dv_from_decl (var), 0, NULL_RTX);
9135 if (val)
9137 if (VAL_NEEDS_RESOLUTION (loc))
9138 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9139 set_variable_part (set, val, dv_from_decl (var), 0,
9140 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9141 INSERT);
9143 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9144 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9145 dv_from_decl (var), 0,
9146 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9147 INSERT);
9149 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9151 break;
9153 case MO_VAL_USE:
9155 rtx loc = mo->u.loc;
9156 rtx val, vloc, uloc;
9158 vloc = uloc = XEXP (loc, 1);
9159 val = XEXP (loc, 0);
9161 if (GET_CODE (val) == CONCAT)
9163 uloc = XEXP (val, 1);
9164 val = XEXP (val, 0);
9167 if (VAL_NEEDS_RESOLUTION (loc))
9168 val_resolve (set, val, vloc, insn);
9169 else
9170 val_store (set, val, uloc, insn, false);
9172 if (VAL_HOLDS_TRACK_EXPR (loc))
9174 if (GET_CODE (uloc) == REG)
9175 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9176 NULL);
9177 else if (GET_CODE (uloc) == MEM)
9178 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9179 NULL);
9182 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9184 break;
9186 case MO_VAL_SET:
9188 rtx loc = mo->u.loc;
9189 rtx val, vloc, uloc;
9190 rtx dstv, srcv;
9192 vloc = loc;
9193 uloc = XEXP (vloc, 1);
9194 val = XEXP (vloc, 0);
9195 vloc = uloc;
9197 if (GET_CODE (uloc) == SET)
9199 dstv = SET_DEST (uloc);
9200 srcv = SET_SRC (uloc);
9202 else
9204 dstv = uloc;
9205 srcv = NULL;
9208 if (GET_CODE (val) == CONCAT)
9210 dstv = vloc = XEXP (val, 1);
9211 val = XEXP (val, 0);
9214 if (GET_CODE (vloc) == SET)
9216 srcv = SET_SRC (vloc);
9218 gcc_assert (val != srcv);
9219 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9221 dstv = vloc = SET_DEST (vloc);
9223 if (VAL_NEEDS_RESOLUTION (loc))
9224 val_resolve (set, val, srcv, insn);
9226 else if (VAL_NEEDS_RESOLUTION (loc))
9228 gcc_assert (GET_CODE (uloc) == SET
9229 && GET_CODE (SET_SRC (uloc)) == REG);
9230 val_resolve (set, val, SET_SRC (uloc), insn);
9233 if (VAL_HOLDS_TRACK_EXPR (loc))
9235 if (VAL_EXPR_IS_CLOBBERED (loc))
9237 if (REG_P (uloc))
9238 var_reg_delete (set, uloc, true);
9239 else if (MEM_P (uloc))
9241 gcc_assert (MEM_P (dstv));
9242 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9243 var_mem_delete (set, dstv, true);
9246 else
9248 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9249 rtx src = NULL, dst = uloc;
9250 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9252 if (GET_CODE (uloc) == SET)
9254 src = SET_SRC (uloc);
9255 dst = SET_DEST (uloc);
9258 if (copied_p)
9260 status = find_src_status (set, src);
9262 src = find_src_set_src (set, src);
9265 if (REG_P (dst))
9266 var_reg_delete_and_set (set, dst, !copied_p,
9267 status, srcv);
9268 else if (MEM_P (dst))
9270 gcc_assert (MEM_P (dstv));
9271 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9272 var_mem_delete_and_set (set, dstv, !copied_p,
9273 status, srcv);
9277 else if (REG_P (uloc))
9278 var_regno_delete (set, REGNO (uloc));
9279 else if (MEM_P (uloc))
9281 gcc_checking_assert (GET_CODE (vloc) == MEM);
9282 gcc_checking_assert (vloc == dstv);
9283 if (vloc != dstv)
9284 clobber_overlapping_mems (set, vloc);
9287 val_store (set, val, dstv, insn, true);
9289 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9290 set->vars);
9292 break;
9294 case MO_SET:
9296 rtx loc = mo->u.loc;
9297 rtx set_src = NULL;
9299 if (GET_CODE (loc) == SET)
9301 set_src = SET_SRC (loc);
9302 loc = SET_DEST (loc);
9305 if (REG_P (loc))
9306 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9307 set_src);
9308 else
9309 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9310 set_src);
9312 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9313 set->vars);
9315 break;
9317 case MO_COPY:
9319 rtx loc = mo->u.loc;
9320 enum var_init_status src_status;
9321 rtx set_src = NULL;
9323 if (GET_CODE (loc) == SET)
9325 set_src = SET_SRC (loc);
9326 loc = SET_DEST (loc);
9329 src_status = find_src_status (set, set_src);
9330 set_src = find_src_set_src (set, set_src);
9332 if (REG_P (loc))
9333 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9334 else
9335 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9337 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9338 set->vars);
9340 break;
9342 case MO_USE_NO_VAR:
9344 rtx loc = mo->u.loc;
9346 if (REG_P (loc))
9347 var_reg_delete (set, loc, false);
9348 else
9349 var_mem_delete (set, loc, false);
9351 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9353 break;
9355 case MO_CLOBBER:
9357 rtx loc = mo->u.loc;
9359 if (REG_P (loc))
9360 var_reg_delete (set, loc, true);
9361 else
9362 var_mem_delete (set, loc, true);
9364 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9365 set->vars);
9367 break;
9369 case MO_ADJUST:
9370 set->stack_adjust += mo->u.adjust;
9371 break;
9376 /* Emit notes for the whole function. */
9378 static void
9379 vt_emit_notes (void)
9381 basic_block bb;
9382 dataflow_set cur;
9384 gcc_assert (!changed_variables.elements ());
9386 /* Free memory occupied by the out hash tables, as they aren't used
9387 anymore. */
9388 FOR_EACH_BB (bb)
9389 dataflow_set_clear (&VTI (bb)->out);
9391 /* Enable emitting notes by functions (mainly by set_variable_part and
9392 delete_variable_part). */
9393 emit_notes = true;
9395 if (MAY_HAVE_DEBUG_INSNS)
9397 dropped_values.create (cselib_get_next_uid () * 2);
9398 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9399 sizeof (loc_exp_dep), 64);
9402 dataflow_set_init (&cur);
9404 FOR_EACH_BB (bb)
9406 /* Emit the notes for changes of variable locations between two
9407 subsequent basic blocks. */
9408 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9410 if (MAY_HAVE_DEBUG_INSNS)
9411 local_get_addr_cache = pointer_map_create ();
9413 /* Emit the notes for the changes in the basic block itself. */
9414 emit_notes_in_bb (bb, &cur);
9416 if (MAY_HAVE_DEBUG_INSNS)
9417 pointer_map_destroy (local_get_addr_cache);
9418 local_get_addr_cache = NULL;
9420 /* Free memory occupied by the in hash table; we won't need it
9421 again. */
9422 dataflow_set_clear (&VTI (bb)->in);
9424 #ifdef ENABLE_CHECKING
9425 shared_hash_htab (cur.vars)
9426 .traverse <variable_table_type, emit_notes_for_differences_1>
9427 (shared_hash_htab (empty_shared_hash));
9428 #endif
9429 dataflow_set_destroy (&cur);
9431 if (MAY_HAVE_DEBUG_INSNS)
9432 dropped_values.dispose ();
9434 emit_notes = false;
9437 /* If there is a declaration and offset associated with register/memory RTL,
9438 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9440 static bool
9441 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9443 if (REG_P (rtl))
9445 if (REG_ATTRS (rtl))
9447 *declp = REG_EXPR (rtl);
9448 *offsetp = REG_OFFSET (rtl);
9449 return true;
9452 else if (MEM_P (rtl))
9454 if (MEM_ATTRS (rtl))
9456 *declp = MEM_EXPR (rtl);
9457 *offsetp = INT_MEM_OFFSET (rtl);
9458 return true;
9461 return false;
9464 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9465 of VAL. */
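/* E.g. for a parameter arriving in (reg:DI di) (a hypothetical
   register), the VALUE tracking it gains the permanent equivalence
   (entry_value (reg:DI di)), from which entry-value location
   expressions in the debug info can later be derived.  */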
9467 static void
9468 record_entry_value (cselib_val *val, rtx rtl)
9470 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9472 ENTRY_VALUE_EXP (ev) = rtl;
9474 cselib_add_permanent_equiv (val, ev, get_insns ());
9477 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9479 static void
9480 vt_add_function_parameter (tree parm)
9482 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9483 rtx incoming = DECL_INCOMING_RTL (parm);
9484 tree decl;
9485 enum machine_mode mode;
9486 HOST_WIDE_INT offset;
9487 dataflow_set *out;
9488 decl_or_value dv;
9490 if (TREE_CODE (parm) != PARM_DECL)
9491 return;
9493 if (!decl_rtl || !incoming)
9494 return;
9496 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9497 return;
9499 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9500 rewrite the incoming location of parameters passed on the stack
9501 into MEMs based on the argument pointer, so that incoming doesn't
9502 depend on a pseudo. */
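/* For instance (hypothetical offsets, assuming FIRST_PARM_OFFSET
   is 0): an incoming (mem (plus (reg pseudo) (const_int 8))) whose
   base is internal_arg_pointer is rewritten below to
   (mem (plus (reg argp) (const_int 8))).  */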
9503 if (MEM_P (incoming)
9504 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9505 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9506 && XEXP (XEXP (incoming, 0), 0)
9507 == crtl->args.internal_arg_pointer
9508 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9510 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9511 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9512 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9513 incoming
9514 = replace_equiv_address_nv (incoming,
9515 plus_constant (Pmode,
9516 arg_pointer_rtx, off));
9519 #ifdef HAVE_window_save
9520 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9521 If the target machine has an explicit window save instruction, the
9522 actual entry value is the corresponding OUTGOING_REGNO instead. */
9523 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9525 if (REG_P (incoming)
9526 && HARD_REGISTER_P (incoming)
9527 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9529 parm_reg_t p;
9530 p.incoming = incoming;
9531 incoming
9532 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9533 OUTGOING_REGNO (REGNO (incoming)), 0);
9534 p.outgoing = incoming;
9535 vec_safe_push (windowed_parm_regs, p);
9537 else if (MEM_P (incoming)
9538 && REG_P (XEXP (incoming, 0))
9539 && HARD_REGISTER_P (XEXP (incoming, 0)))
9541 rtx reg = XEXP (incoming, 0);
9542 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9544 parm_reg_t p;
9545 p.incoming = reg;
9546 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9547 p.outgoing = reg;
9548 vec_safe_push (windowed_parm_regs, p);
9549 incoming = replace_equiv_address_nv (incoming, reg);
9553 #endif
9555 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9557 if (MEM_P (incoming))
9559 /* This means the argument is passed by invisible reference. */
9560 offset = 0;
9561 decl = parm;
9563 else
9565 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9566 return;
9567 offset += byte_lowpart_offset (GET_MODE (incoming),
9568 GET_MODE (decl_rtl));
9572 if (!decl)
9573 return;
9575 if (parm != decl)
9577 /* If that DECL_RTL wasn't a pseudo that got spilled to
9578 memory, bail out. Otherwise, the spill slot sharing code
9579 will force the memory to reference spill_slot_decl (%sfp),
9580 so we don't match above. That's ok, the pseudo must have
9581 referenced the entire parameter, so just reset OFFSET. */
9582 if (decl != get_spill_slot_decl (false))
9583 return;
9584 offset = 0;
9587 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9588 return;
9590 out = &VTI (ENTRY_BLOCK_PTR)->out;
9592 dv = dv_from_decl (parm);
9594 if (target_for_debug_bind (parm)
9595 /* We can't deal with these right now, because this kind of
9596 variable is single-part. ??? We could handle parallels
9597 that describe multiple locations for the same single
9598 value, but ATM we don't. */
9599 && GET_CODE (incoming) != PARALLEL)
9601 cselib_val *val;
9602 rtx lowpart;
9604 /* ??? We shouldn't ever hit this, but it may happen because
9605 arguments passed by invisible reference aren't dealt with
9606 above: incoming-rtl will have Pmode rather than the
9607 expected mode for the type. */
9608 if (offset)
9609 return;
9611 lowpart = var_lowpart (mode, incoming);
9612 if (!lowpart)
9613 return;
9615 val = cselib_lookup_from_insn (lowpart, mode, true,
9616 VOIDmode, get_insns ());
9618 /* ??? Float-typed values in memory are not handled by
9619 cselib. */
9620 if (val)
9622 preserve_value (val);
9623 set_variable_part (out, val->val_rtx, dv, offset,
9624 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9625 dv = dv_from_value (val->val_rtx);
9628 if (MEM_P (incoming))
9630 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9631 VOIDmode, get_insns ());
9632 if (val)
9634 preserve_value (val);
9635 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9640 if (REG_P (incoming))
9642 incoming = var_lowpart (mode, incoming);
9643 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9644 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9645 incoming);
9646 set_variable_part (out, incoming, dv, offset,
9647 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9648 if (dv_is_value_p (dv))
9650 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9651 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9652 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9654 enum machine_mode indmode
9655 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9656 rtx mem = gen_rtx_MEM (indmode, incoming);
9657 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9658 VOIDmode,
9659 get_insns ());
9660 if (val)
9662 preserve_value (val);
9663 record_entry_value (val, mem);
9664 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9665 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9670 else if (MEM_P (incoming))
9672 incoming = var_lowpart (mode, incoming);
9673 set_variable_part (out, incoming, dv, offset,
9674 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9678 /* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9680 static void
9681 vt_add_function_parameters (void)
9683 tree parm;
9685 for (parm = DECL_ARGUMENTS (current_function_decl);
9686 parm; parm = DECL_CHAIN (parm))
9687 vt_add_function_parameter (parm);
9689 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9691 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9693 if (TREE_CODE (vexpr) == INDIRECT_REF)
9694 vexpr = TREE_OPERAND (vexpr, 0);
9696 if (TREE_CODE (vexpr) == PARM_DECL
9697 && DECL_ARTIFICIAL (vexpr)
9698 && !DECL_IGNORED_P (vexpr)
9699 && DECL_NAMELESS (vexpr))
9700 vt_add_function_parameter (vexpr);
9704 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9705 ensure it isn't flushed during cselib_reset_table.
9706 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9707 has been eliminated. */
9709 static void
9710 vt_init_cfa_base (void)
9712 cselib_val *val;
9714 #ifdef FRAME_POINTER_CFA_OFFSET
9715 cfa_base_rtx = frame_pointer_rtx;
9716 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9717 #else
9718 cfa_base_rtx = arg_pointer_rtx;
9719 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9720 #endif
9721 if (cfa_base_rtx == hard_frame_pointer_rtx
9722 || !fixed_regs[REGNO (cfa_base_rtx)])
9724 cfa_base_rtx = NULL_RTX;
9725 return;
9727 if (!MAY_HAVE_DEBUG_INSNS)
9728 return;
9730 /* Tell alias analysis that cfa_base_rtx should share
9731 find_base_term value with stack pointer or hard frame pointer. */
9732 if (!frame_pointer_needed)
9733 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9734 else if (!crtl->stack_realign_tried)
9735 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9737 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9738 VOIDmode, get_insns ());
9739 preserve_value (val);
9740 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9743 /* Allocate and initialize the data structures for variable tracking
9744 and parse the RTL to get the micro operations. */
9746 static bool
9747 vt_initialize (void)
9749 basic_block bb;
9750 HOST_WIDE_INT fp_cfa_offset = -1;
9752 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9754 attrs_pool = create_alloc_pool ("attrs_def pool",
9755 sizeof (struct attrs_def), 1024);
9756 var_pool = create_alloc_pool ("variable_def pool",
9757 sizeof (struct variable_def)
9758 + (MAX_VAR_PARTS - 1)
9759 * sizeof (((variable)NULL)->var_part[0]), 64);
9760 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9761 sizeof (struct location_chain_def),
9762 1024);
9763 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9764 sizeof (struct shared_hash_def), 256);
9765 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9766 empty_shared_hash->refcount = 1;
9767 empty_shared_hash->htab.create (1);
9768 changed_variables.create (10);
9770 /* Init the IN and OUT sets. */
9771 FOR_ALL_BB (bb)
9773 VTI (bb)->visited = false;
9774 VTI (bb)->flooded = false;
9775 dataflow_set_init (&VTI (bb)->in);
9776 dataflow_set_init (&VTI (bb)->out);
9777 VTI (bb)->permp = NULL;
9780 if (MAY_HAVE_DEBUG_INSNS)
9782 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9783 scratch_regs = BITMAP_ALLOC (NULL);
9784 valvar_pool = create_alloc_pool ("small variable_def pool",
9785 sizeof (struct variable_def), 256);
9786 preserved_values.create (256);
9787 global_get_addr_cache = pointer_map_create ();
9789 else
9791 scratch_regs = NULL;
9792 valvar_pool = NULL;
9793 global_get_addr_cache = NULL;
9796 if (MAY_HAVE_DEBUG_INSNS)
9798 rtx reg, expr;
9799 int ofst;
9800 cselib_val *val;
9802 #ifdef FRAME_POINTER_CFA_OFFSET
9803 reg = frame_pointer_rtx;
9804 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9805 #else
9806 reg = arg_pointer_rtx;
9807 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9808 #endif
9810 ofst -= INCOMING_FRAME_SP_OFFSET;
9812 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9813 VOIDmode, get_insns ());
9814 preserve_value (val);
9815 cselib_preserve_cfa_base_value (val, REGNO (reg));
9816 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9817 stack_pointer_rtx, -ofst);
9818 cselib_add_permanent_equiv (val, expr, get_insns ());
9820 if (ofst)
9822 val = cselib_lookup_from_insn (stack_pointer_rtx,
9823 GET_MODE (stack_pointer_rtx), 1,
9824 VOIDmode, get_insns ());
9825 preserve_value (val);
9826 expr = plus_constant (GET_MODE (reg), reg, ofst);
9827 cselib_add_permanent_equiv (val, expr, get_insns ());
9831 /* In order to factor out the adjustments made to the stack pointer or to
9832 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9833 instead of individual location lists, we're going to rewrite MEMs based
9834 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9835 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9836 resp. arg_pointer_rtx. We can do this either when there is no frame
9837 pointer in the function and stack adjustments are consistent for all
9838 basic blocks or when there is a frame pointer and no stack realignment.
9839 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9840 has been eliminated. */
9841 if (!frame_pointer_needed)
9843 rtx reg, elim;
9845 if (!vt_stack_adjustments ())
9846 return false;
9848 #ifdef FRAME_POINTER_CFA_OFFSET
9849 reg = frame_pointer_rtx;
9850 #else
9851 reg = arg_pointer_rtx;
9852 #endif
9853 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9854 if (elim != reg)
9856 if (GET_CODE (elim) == PLUS)
9857 elim = XEXP (elim, 0);
9858 if (elim == stack_pointer_rtx)
9859 vt_init_cfa_base ();
9862 else if (!crtl->stack_realign_tried)
9864 rtx reg, elim;
9866 #ifdef FRAME_POINTER_CFA_OFFSET
9867 reg = frame_pointer_rtx;
9868 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9869 #else
9870 reg = arg_pointer_rtx;
9871 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9872 #endif
9873 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9874 if (elim != reg)
9876 if (GET_CODE (elim) == PLUS)
9878 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9879 elim = XEXP (elim, 0);
9881 if (elim != hard_frame_pointer_rtx)
9882 fp_cfa_offset = -1;
9884 else
9885 fp_cfa_offset = -1;
9888 /* If the stack is realigned and a DRAP register is used, we're going to
9889 rewrite MEMs based on it representing incoming locations of parameters
9890 passed on the stack into MEMs based on the argument pointer. Although
9891 we aren't going to rewrite other MEMs, we still need to initialize the
9892 virtual CFA pointer in order to ensure that the argument pointer will
9893 be seen as a constant throughout the function.
9895 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9896 else if (stack_realign_drap)
9898 rtx reg, elim;
9900 #ifdef FRAME_POINTER_CFA_OFFSET
9901 reg = frame_pointer_rtx;
9902 #else
9903 reg = arg_pointer_rtx;
9904 #endif
9905 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9906 if (elim != reg)
9908 if (GET_CODE (elim) == PLUS)
9909 elim = XEXP (elim, 0);
9910 if (elim == hard_frame_pointer_rtx)
9911 vt_init_cfa_base ();
9915 hard_frame_pointer_adjustment = -1;
9917 vt_add_function_parameters ();
9919 FOR_EACH_BB (bb)
9921 rtx insn;
9922 HOST_WIDE_INT pre, post = 0;
9923 basic_block first_bb, last_bb;
9925 if (MAY_HAVE_DEBUG_INSNS)
9927 cselib_record_sets_hook = add_with_sets;
9928 if (dump_file && (dump_flags & TDF_DETAILS))
9929 fprintf (dump_file, "first value: %i\n",
9930 cselib_get_next_uid ());
9933 first_bb = bb;
9934 for (;;)
9936 edge e;
9937 if (bb->next_bb == EXIT_BLOCK_PTR
9938 || ! single_pred_p (bb->next_bb))
9939 break;
9940 e = find_edge (bb, bb->next_bb);
9941 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9942 break;
9943 bb = bb->next_bb;
9945 last_bb = bb;
9947 /* Add the micro-operations to the vector. */
9948 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9950 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9951 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9952 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9953 insn = NEXT_INSN (insn))
9955 if (INSN_P (insn))
9957 if (!frame_pointer_needed)
9959 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9960 if (pre)
9962 micro_operation mo;
9963 mo.type = MO_ADJUST;
9964 mo.u.adjust = pre;
9965 mo.insn = insn;
9966 if (dump_file && (dump_flags & TDF_DETAILS))
9967 log_op_type (PATTERN (insn), bb, insn,
9968 MO_ADJUST, dump_file);
9969 VTI (bb)->mos.safe_push (mo);
9970 VTI (bb)->out.stack_adjust += pre;
9974 cselib_hook_called = false;
9975 adjust_insn (bb, insn);
9976 if (MAY_HAVE_DEBUG_INSNS)
9978 if (CALL_P (insn))
9979 prepare_call_arguments (bb, insn);
9980 cselib_process_insn (insn);
9981 if (dump_file && (dump_flags & TDF_DETAILS))
9983 print_rtl_single (dump_file, insn);
9984 dump_cselib_table (dump_file);
9987 if (!cselib_hook_called)
9988 add_with_sets (insn, 0, 0);
9989 cancel_changes (0);
9991 if (!frame_pointer_needed && post)
9993 micro_operation mo;
9994 mo.type = MO_ADJUST;
9995 mo.u.adjust = post;
9996 mo.insn = insn;
9997 if (dump_file && (dump_flags & TDF_DETAILS))
9998 log_op_type (PATTERN (insn), bb, insn,
9999 MO_ADJUST, dump_file);
10000 VTI (bb)->mos.safe_push (mo);
10001 VTI (bb)->out.stack_adjust += post;
10004 if (fp_cfa_offset != -1
10005 && hard_frame_pointer_adjustment == -1
10006 && fp_setter_insn (insn))
10008 vt_init_cfa_base ();
10009 hard_frame_pointer_adjustment = fp_cfa_offset;
10010 /* Disassociate sp from fp now. */
10011 if (MAY_HAVE_DEBUG_INSNS)
10013 cselib_val *v;
10014 cselib_invalidate_rtx (stack_pointer_rtx);
10015 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10016 VOIDmode);
10017 if (v && !cselib_preserved_value_p (v))
10019 cselib_set_value_sp_based (v);
10020 preserve_value (v);
10026 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10029 bb = last_bb;
10031 if (MAY_HAVE_DEBUG_INSNS)
10033 cselib_preserve_only_values ();
10034 cselib_reset_table (cselib_get_next_uid ());
10035 cselib_record_sets_hook = NULL;
10039 hard_frame_pointer_adjustment = -1;
10040 VTI (ENTRY_BLOCK_PTR)->flooded = true;
10041 cfa_base_rtx = NULL_RTX;
10042 return true;
10045 /* This is *not* reset after each function. It gives each
10046 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10047 a unique label number. */
10049 static int debug_label_num = 1;
10051 /* Get rid of all debug insns from the insn stream. */
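/* Debug bind insns for named labels that never got RTL are not
   simply deleted below: they are turned into
   NOTE_INSN_DELETED_DEBUG_LABEL notes carrying the label name, so
   the debug info can still refer to the optimized-away label.  */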
10053 static void
10054 delete_debug_insns (void)
10056 basic_block bb;
10057 rtx insn, next;
10059 if (!MAY_HAVE_DEBUG_INSNS)
10060 return;
10062 FOR_EACH_BB (bb)
10064 FOR_BB_INSNS_SAFE (bb, insn, next)
10065 if (DEBUG_INSN_P (insn))
10067 tree decl = INSN_VAR_LOCATION_DECL (insn);
10068 if (TREE_CODE (decl) == LABEL_DECL
10069 && DECL_NAME (decl)
10070 && !DECL_RTL_SET_P (decl))
10072 PUT_CODE (insn, NOTE);
10073 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10074 NOTE_DELETED_LABEL_NAME (insn)
10075 = IDENTIFIER_POINTER (DECL_NAME (decl));
10076 SET_DECL_RTL (decl, insn);
10077 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10079 else
10080 delete_insn (insn);
10085 /* Run a fast, BB-local only version of var tracking, to take care of
10086 information that we don't do global analysis on, so that not all
10087 information is lost. If SKIPPED holds, we're skipping the global
10088 pass entirely, so we should try to use information it would have
10089 handled as well. */
10091 static void
10092 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10094 /* ??? Just skip it all for now. */
10095 delete_debug_insns ();
10098 /* Free the data structures needed for variable tracking. */
10100 static void
10101 vt_finalize (void)
10103 basic_block bb;
10105 FOR_EACH_BB (bb)
10107 VTI (bb)->mos.release ();
10110 FOR_ALL_BB (bb)
10112 dataflow_set_destroy (&VTI (bb)->in);
10113 dataflow_set_destroy (&VTI (bb)->out);
10114 if (VTI (bb)->permp)
10116 dataflow_set_destroy (VTI (bb)->permp);
10117 XDELETE (VTI (bb)->permp);
10120 free_aux_for_blocks ();
10121 empty_shared_hash->htab.dispose ();
10122 changed_variables.dispose ();
10123 free_alloc_pool (attrs_pool);
10124 free_alloc_pool (var_pool);
10125 free_alloc_pool (loc_chain_pool);
10126 free_alloc_pool (shared_hash_pool);
10128 if (MAY_HAVE_DEBUG_INSNS)
10130 if (global_get_addr_cache)
10131 pointer_map_destroy (global_get_addr_cache);
10132 global_get_addr_cache = NULL;
10133 if (loc_exp_dep_pool)
10134 free_alloc_pool (loc_exp_dep_pool);
10135 loc_exp_dep_pool = NULL;
10136 free_alloc_pool (valvar_pool);
10137 preserved_values.release ();
10138 cselib_finish ();
10139 BITMAP_FREE (scratch_regs);
10140 scratch_regs = NULL;
10143 #ifdef HAVE_window_save
10144 vec_free (windowed_parm_regs);
10145 #endif
10147 if (vui_vec)
10148 XDELETEVEC (vui_vec);
10149 vui_vec = NULL;
10150 vui_allocated = 0;
10153 /* The entry point to the variable tracking pass. */
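/* If vt_find_locations fails to converge while debug insns are in
   use, variable_tracking_main_1 below drops all debug insns, clears
   flag_var_tracking_assignments and reruns the analysis once without
   them; variable_tracking_main restores the flag afterwards.  */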
10155 static inline unsigned int
10156 variable_tracking_main_1 (void)
10158 bool success;
10160 if (flag_var_tracking_assignments < 0)
10162 delete_debug_insns ();
10163 return 0;
10166 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
10168 vt_debug_insns_local (true);
10169 return 0;
10172 mark_dfs_back_edges ();
10173 if (!vt_initialize ())
10175 vt_finalize ();
10176 vt_debug_insns_local (true);
10177 return 0;
10180 success = vt_find_locations ();
10182 if (!success && flag_var_tracking_assignments > 0)
10184 vt_finalize ();
10186 delete_debug_insns ();
10188 /* This is later restored by our caller. */
10189 flag_var_tracking_assignments = 0;
10191 success = vt_initialize ();
10192 gcc_assert (success);
10194 success = vt_find_locations ();
10197 if (!success)
10199 vt_finalize ();
10200 vt_debug_insns_local (false);
10201 return 0;
10204 if (dump_file && (dump_flags & TDF_DETAILS))
10206 dump_dataflow_sets ();
10207 dump_reg_info (dump_file);
10208 dump_flow_info (dump_file, dump_flags);
10211 timevar_push (TV_VAR_TRACKING_EMIT);
10212 vt_emit_notes ();
10213 timevar_pop (TV_VAR_TRACKING_EMIT);
10215 vt_finalize ();
10216 vt_debug_insns_local (false);
10217 return 0;
10220 unsigned int
10221 variable_tracking_main (void)
10223 unsigned int ret;
10224 int save = flag_var_tracking_assignments;
10226 ret = variable_tracking_main_1 ();
10228 flag_var_tracking_assignments = save;
10230 return ret;
10233 static bool
10234 gate_handle_var_tracking (void)
10236 return (flag_var_tracking && !targetm.delay_vartrack);
10241 namespace {
10243 const pass_data pass_data_variable_tracking =
10245 RTL_PASS, /* type */
10246 "vartrack", /* name */
10247 OPTGROUP_NONE, /* optinfo_flags */
10248 true, /* has_gate */
10249 true, /* has_execute */
10250 TV_VAR_TRACKING, /* tv_id */
10251 0, /* properties_required */
10252 0, /* properties_provided */
10253 0, /* properties_destroyed */
10254 0, /* todo_flags_start */
10255 ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
10258 class pass_variable_tracking : public rtl_opt_pass
10260 public:
10261 pass_variable_tracking (gcc::context *ctxt)
10262 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10265 /* opt_pass methods: */
10266 bool gate () { return gate_handle_var_tracking (); }
10267 unsigned int execute () { return variable_tracking_main (); }
10269 }; // class pass_variable_tracking
10271 } // anon namespace
10273 rtl_opt_pass *
10274 make_pass_variable_tracking (gcc::context *ctxt)
10276 return new pass_variable_tracking (ctxt);