1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in the instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effectively deleting the appropriate variable parts when we set or clobber the
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
96 #include "double-int.h"
100 #include "wide-int.h"
104 #include "stor-layout.h"
105 #include "hash-map.h"
106 #include "hash-table.h"
108 #include "hard-reg-set.h"
109 #include "function.h"
110 #include "dominance.h"
114 #include "basic-block.h"
117 #include "insn-config.h"
120 #include "alloc-pool.h"
123 #include "statistics.h"
125 #include "fixed-value.h"
130 #include "emit-rtl.h"
133 #include "tree-pass.h"
135 #include "tree-dfa.h"
136 #include "tree-ssa.h"
140 #include "diagnostic.h"
141 #include "tree-pretty-print.h"
143 #include "rtl-iter.h"
144 #include "fibonacci_heap.h"
146 typedef fibonacci_heap
<long, basic_block_def
> bb_heap_t
;
147 typedef fibonacci_node
<long, basic_block_def
> bb_heap_node_t
;
149 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
150 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
151 Currently the value is the same as IDENTIFIER_NODE, which has such
152 a property. If this compile time assertion ever fails, make sure that
153 the new tree code that equals (int) VALUE has the same property. */
154 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
156 /* Type of micro operation. */
157 enum micro_operation_type
159 MO_USE
, /* Use location (REG or MEM). */
160 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
161 or the variable is not trackable. */
162 MO_VAL_USE
, /* Use location which is associated with a value. */
163 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
164 MO_VAL_SET
, /* Set location associated with a value. */
165 MO_SET
, /* Set location. */
166 MO_COPY
, /* Copy the same portion of a variable from one
167 location to another. */
168 MO_CLOBBER
, /* Clobber location. */
169 MO_CALL
, /* Call insn. */
170 MO_ADJUST
/* Adjust stack pointer. */
174 static const char * const ATTRIBUTE_UNUSED
175 micro_operation_type_name
[] = {
188 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
189 Notes emitted as AFTER_CALL are to take effect during the call,
190 rather than after the call. */
193 EMIT_NOTE_BEFORE_INSN
,
194 EMIT_NOTE_AFTER_INSN
,
195 EMIT_NOTE_AFTER_CALL_INSN
198 /* Structure holding information about micro operation. */
199 typedef struct micro_operation_def
201 /* Type of micro operation. */
202 enum micro_operation_type type
;
204 /* The instruction which the micro operation is in, for MO_USE,
205 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
206 instruction or note in the original flow (before any var-tracking
207 notes are inserted, to simplify emission of notes), for MO_SET
212 /* Location. For MO_SET and MO_COPY, this is the SET that
213 performs the assignment, if known, otherwise it is the target
214 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
215 CONCAT of the VALUE and the LOC associated with it. For
216 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
217 associated with it. */
220 /* Stack adjustment. */
221 HOST_WIDE_INT adjust
;
226 /* A declaration of a variable, or an RTL value being handled like a
228 typedef void *decl_or_value
;
230 /* Return true if a decl_or_value DV is a DECL or NULL. */
232 dv_is_decl_p (decl_or_value dv
)
234 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
237 /* Return true if a decl_or_value is a VALUE rtl. */
239 dv_is_value_p (decl_or_value dv
)
241 return dv
&& !dv_is_decl_p (dv
);
244 /* Return the decl in the decl_or_value. */
246 dv_as_decl (decl_or_value dv
)
248 gcc_checking_assert (dv_is_decl_p (dv
));
252 /* Return the value in the decl_or_value. */
254 dv_as_value (decl_or_value dv
)
256 gcc_checking_assert (dv_is_value_p (dv
));
260 /* Return the opaque pointer in the decl_or_value. */
262 dv_as_opaque (decl_or_value dv
)
268 /* Description of location of a part of a variable. The content of a physical
269 register is described by a chain of these structures.
270 The chains are pretty short (usually 1 or 2 elements) and thus
271 chain is the best data structure. */
272 typedef struct attrs_def
274 /* Pointer to next member of the list. */
275 struct attrs_def
*next
;
277 /* The rtx of register. */
280 /* The declaration corresponding to LOC. */
283 /* Offset from start of DECL. */
284 HOST_WIDE_INT offset
;
286 /* Pool allocation new operator. */
287 inline void *operator new (size_t)
289 return pool
.allocate ();
292 /* Delete operator utilizing pool allocation. */
293 inline void operator delete (void *ptr
)
295 pool
.remove ((attrs_def
*) ptr
);
298 /* Memory allocation pool. */
299 static pool_allocator
<attrs_def
> pool
;
302 /* Structure for chaining the locations. */
303 typedef struct location_chain_def
305 /* Next element in the chain. */
306 struct location_chain_def
*next
;
308 /* The location (REG, MEM or VALUE). */
311 /* The "value" stored in this location. */
315 enum var_init_status init
;
317 /* Pool allocation new operator. */
318 inline void *operator new (size_t)
320 return pool
.allocate ();
323 /* Delete operator utilizing pool allocation. */
324 inline void operator delete (void *ptr
)
326 pool
.remove ((location_chain_def
*) ptr
);
329 /* Memory allocation pool. */
330 static pool_allocator
<location_chain_def
> pool
;
333 /* A vector of loc_exp_dep holds the active dependencies of a one-part
334 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
335 location of DV. Each entry is also part of VALUE' s linked-list of
336 backlinks back to DV. */
337 typedef struct loc_exp_dep_s
339 /* The dependent DV. */
341 /* The dependency VALUE or DECL_DEBUG. */
343 /* The next entry in VALUE's backlinks list. */
344 struct loc_exp_dep_s
*next
;
345 /* A pointer to the pointer to this entry (head or prev's next) in
346 the doubly-linked list. */
347 struct loc_exp_dep_s
**pprev
;
349 /* Pool allocation new operator. */
350 inline void *operator new (size_t)
352 return pool
.allocate ();
355 /* Delete operator utilizing pool allocation. */
356 inline void operator delete (void *ptr
)
358 pool
.remove ((loc_exp_dep_s
*) ptr
);
361 /* Memory allocation pool. */
362 static pool_allocator
<loc_exp_dep_s
> pool
;
366 /* This data structure holds information about the depth of a variable
368 typedef struct expand_depth_struct
370 /* This measures the complexity of the expanded expression. It
371 grows by one for each level of expansion that adds more than one
374 /* This counts the number of ENTRY_VALUE expressions in an
375 expansion. We want to minimize their use. */
379 /* This data structure is allocated for one-part variables at the time
380 of emitting notes. */
383 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
384 computation used the expansion of this variable, and that ought
385 to be notified should this variable change. If the DV's cur_loc
386 expanded to NULL, all components of the loc list are regarded as
387 active, so that any changes in them give us a chance to get a
388 location. Otherwise, only components of the loc that expanded to
389 non-NULL are regarded as active dependencies. */
390 loc_exp_dep
*backlinks
;
391 /* This holds the LOC that was expanded into cur_loc. We need only
392 mark a one-part variable as changed if the FROM loc is removed,
393 or if it has no known location and a loc is added, or if it gets
394 a change notification from any of its active dependencies. */
396 /* The depth of the cur_loc expression. */
398 /* Dependencies actively used when expand FROM into cur_loc. */
399 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
402 /* Structure describing one part of variable. */
403 typedef struct variable_part_def
405 /* Chain of locations of the part. */
406 location_chain loc_chain
;
408 /* Location which was last emitted to location list. */
413 /* The offset in the variable, if !var->onepart. */
414 HOST_WIDE_INT offset
;
416 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
417 struct onepart_aux
*onepaux
;
421 /* Maximum number of location parts. */
422 #define MAX_VAR_PARTS 16
424 /* Enumeration type used to discriminate various types of one-part
426 typedef enum onepart_enum
428 /* Not a one-part variable. */
430 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
432 /* A DEBUG_EXPR_DECL. */
438 /* Structure describing where the variable is located. */
439 typedef struct variable_def
441 /* The declaration of the variable, or an RTL value being handled
442 like a declaration. */
445 /* Reference count. */
448 /* Number of variable parts. */
451 /* What type of DV this is, according to enum onepart_enum. */
452 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
454 /* True if this variable_def struct is currently in the
455 changed_variables hash table. */
456 bool in_changed_variables
;
458 /* The variable parts. */
459 variable_part var_part
[1];
461 typedef const struct variable_def
*const_variable
;
463 /* Pointer to the BB's information specific to variable tracking pass. */
464 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
466 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
467 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
469 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
471 /* Access VAR's Ith part's offset, checking that it's not a one-part
473 #define VAR_PART_OFFSET(var, i) __extension__ \
474 (*({ variable const __v = (var); \
475 gcc_checking_assert (!__v->onepart); \
476 &__v->var_part[(i)].aux.offset; }))
478 /* Access VAR's one-part auxiliary data, checking that it is a
479 one-part variable. */
480 #define VAR_LOC_1PAUX(var) __extension__ \
481 (*({ variable const __v = (var); \
482 gcc_checking_assert (__v->onepart); \
483 &__v->var_part[0].aux.onepaux; }))
486 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
487 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
490 /* These are accessor macros for the one-part auxiliary data. When
491 convenient for users, they're guarded by tests that the data was
493 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
494 ? VAR_LOC_1PAUX (var)->backlinks \
496 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
497 ? &VAR_LOC_1PAUX (var)->backlinks \
499 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
500 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
501 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
502 ? &VAR_LOC_1PAUX (var)->deps \
507 typedef unsigned int dvuid
;
509 /* Return the uid of DV. */
512 dv_uid (decl_or_value dv
)
514 if (dv_is_value_p (dv
))
515 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
517 return DECL_UID (dv_as_decl (dv
));
520 /* Compute the hash from the uid. */
522 static inline hashval_t
523 dv_uid2hash (dvuid uid
)
528 /* The hash function for a mask table in a shared_htab chain. */
530 static inline hashval_t
531 dv_htab_hash (decl_or_value dv
)
533 return dv_uid2hash (dv_uid (dv
));
536 static void variable_htab_free (void *);
538 /* Variable hashtable helpers. */
540 struct variable_hasher
542 typedef variable_def
*value_type
;
543 typedef void *compare_type
;
544 static inline hashval_t
hash (const variable_def
*);
545 static inline bool equal (const variable_def
*, const void *);
546 static inline void remove (variable_def
*);
549 /* The hash function for variable_htab, computes the hash value
550 from the declaration of variable X. */
553 variable_hasher::hash (const variable_def
*v
)
555 return dv_htab_hash (v
->dv
);
558 /* Compare the declaration of variable X with declaration Y. */
561 variable_hasher::equal (const variable_def
*v
, const void *y
)
563 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
565 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
568 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
571 variable_hasher::remove (variable_def
*var
)
573 variable_htab_free (var
);
576 typedef hash_table
<variable_hasher
> variable_table_type
;
577 typedef variable_table_type::iterator variable_iterator_type
;
579 /* Structure for passing some other parameters to function
580 emit_note_insn_var_location. */
581 typedef struct emit_note_data_def
583 /* The instruction which the note will be emitted before/after. */
586 /* Where the note will be emitted (before/after insn)? */
587 enum emit_note_where where
;
589 /* The variables and values active at this point. */
590 variable_table_type
*vars
;
593 /* Structure holding a refcounted hash table. If refcount > 1,
594 it must be first unshared before modified. */
595 typedef struct shared_hash_def
597 /* Reference count. */
600 /* Actual hash table. */
601 variable_table_type
*htab
;
603 /* Pool allocation new operator. */
604 inline void *operator new (size_t)
606 return pool
.allocate ();
609 /* Delete operator utilizing pool allocation. */
610 inline void operator delete (void *ptr
)
612 pool
.remove ((shared_hash_def
*) ptr
);
615 /* Memory allocation pool. */
616 static pool_allocator
<shared_hash_def
> pool
;
619 /* Structure holding the IN or OUT set for a basic block. */
620 typedef struct dataflow_set_def
622 /* Adjustment of stack offset. */
623 HOST_WIDE_INT stack_adjust
;
625 /* Attributes for registers (lists of attrs). */
626 attrs regs
[FIRST_PSEUDO_REGISTER
];
628 /* Variable locations. */
631 /* Vars that is being traversed. */
632 shared_hash traversed_vars
;
635 /* The structure (one for each basic block) containing the information
636 needed for variable tracking. */
637 typedef struct variable_tracking_info_def
639 /* The vector of micro operations. */
640 vec
<micro_operation
> mos
;
642 /* The IN and OUT set for dataflow analysis. */
646 /* The permanent-in dataflow set for this block. This is used to
647 hold values for which we had to compute entry values. ??? This
648 should probably be dynamically allocated, to avoid using more
649 memory in non-debug builds. */
652 /* Has the block been visited in DFS? */
655 /* Has the block been flooded in VTA? */
658 } *variable_tracking_info
;
660 /* Alloc pool for struct attrs_def. */
661 pool_allocator
<attrs_def
> attrs_def::pool ("attrs_def pool", 1024);
663 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
665 static pool_allocator
<variable_def
> var_pool
666 ("variable_def pool", 64,
667 (MAX_VAR_PARTS
- 1) * sizeof (((variable
)NULL
)->var_part
[0]));
669 /* Alloc pool for struct variable_def with a single var_part entry. */
670 static pool_allocator
<variable_def
> valvar_pool
671 ("small variable_def pool", 256);
673 /* Alloc pool for struct location_chain_def. */
674 pool_allocator
<location_chain_def
> location_chain_def::pool
675 ("location_chain_def pool", 1024);
677 /* Alloc pool for struct shared_hash_def. */
678 pool_allocator
<shared_hash_def
> shared_hash_def::pool
679 ("shared_hash_def pool", 256);
681 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
682 pool_allocator
<loc_exp_dep
> loc_exp_dep::pool ("loc_exp_dep pool", 64);
684 /* Changed variables, notes will be emitted for them. */
685 static variable_table_type
*changed_variables
;
687 /* Shall notes be emitted? */
688 static bool emit_notes
;
690 /* Values whose dynamic location lists have gone empty, but whose
691 cselib location lists are still usable. Use this to hold the
692 current location, the backlinks, etc, during emit_notes. */
693 static variable_table_type
*dropped_values
;
695 /* Empty shared hashtable. */
696 static shared_hash empty_shared_hash
;
698 /* Scratch register bitmap used by cselib_expand_value_rtx. */
699 static bitmap scratch_regs
= NULL
;
701 #ifdef HAVE_window_save
702 typedef struct GTY(()) parm_reg
{
708 /* Vector of windowed parameter registers, if any. */
709 static vec
<parm_reg_t
, va_gc
> *windowed_parm_regs
= NULL
;
712 /* Variable used to tell whether cselib_process_insn called our hook. */
713 static bool cselib_hook_called
;
715 /* Local function prototypes. */
716 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
718 static void insn_stack_adjust_offset_pre_post (rtx_insn
*, HOST_WIDE_INT
*,
720 static bool vt_stack_adjustments (void);
722 static void init_attrs_list_set (attrs
*);
723 static void attrs_list_clear (attrs
*);
724 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
725 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
726 static void attrs_list_copy (attrs
*, attrs
);
727 static void attrs_list_union (attrs
*, attrs
);
729 static variable_def
**unshare_variable (dataflow_set
*set
, variable_def
**slot
,
730 variable var
, enum var_init_status
);
731 static void vars_copy (variable_table_type
*, variable_table_type
*);
732 static tree
var_debug_decl (tree
);
733 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
734 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
735 enum var_init_status
, rtx
);
736 static void var_reg_delete (dataflow_set
*, rtx
, bool);
737 static void var_regno_delete (dataflow_set
*, int);
738 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
739 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
740 enum var_init_status
, rtx
);
741 static void var_mem_delete (dataflow_set
*, rtx
, bool);
743 static void dataflow_set_init (dataflow_set
*);
744 static void dataflow_set_clear (dataflow_set
*);
745 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
746 static int variable_union_info_cmp_pos (const void *, const void *);
747 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
748 static location_chain
find_loc_in_1pdv (rtx
, variable
, variable_table_type
*);
749 static bool canon_value_cmp (rtx
, rtx
);
750 static int loc_cmp (rtx
, rtx
);
751 static bool variable_part_different_p (variable_part
*, variable_part
*);
752 static bool onepart_variable_different_p (variable
, variable
);
753 static bool variable_different_p (variable
, variable
);
754 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
755 static void dataflow_set_destroy (dataflow_set
*);
757 static bool contains_symbol_ref (rtx
);
758 static bool track_expr_p (tree
, bool);
759 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
760 static void add_uses_1 (rtx
*, void *);
761 static void add_stores (rtx
, const_rtx
, void *);
762 static bool compute_bb_dataflow (basic_block
);
763 static bool vt_find_locations (void);
765 static void dump_attrs_list (attrs
);
766 static void dump_var (variable
);
767 static void dump_vars (variable_table_type
*);
768 static void dump_dataflow_set (dataflow_set
*);
769 static void dump_dataflow_sets (void);
771 static void set_dv_changed (decl_or_value
, bool);
772 static void variable_was_changed (variable
, dataflow_set
*);
773 static variable_def
**set_slot_part (dataflow_set
*, rtx
, variable_def
**,
774 decl_or_value
, HOST_WIDE_INT
,
775 enum var_init_status
, rtx
);
776 static void set_variable_part (dataflow_set
*, rtx
,
777 decl_or_value
, HOST_WIDE_INT
,
778 enum var_init_status
, rtx
, enum insert_option
);
779 static variable_def
**clobber_slot_part (dataflow_set
*, rtx
,
780 variable_def
**, HOST_WIDE_INT
, rtx
);
781 static void clobber_variable_part (dataflow_set
*, rtx
,
782 decl_or_value
, HOST_WIDE_INT
, rtx
);
783 static variable_def
**delete_slot_part (dataflow_set
*, rtx
, variable_def
**,
785 static void delete_variable_part (dataflow_set
*, rtx
,
786 decl_or_value
, HOST_WIDE_INT
);
787 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
788 static void vt_emit_notes (void);
790 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
791 static void vt_add_function_parameters (void);
792 static bool vt_initialize (void);
793 static void vt_finalize (void);
795 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */
798 stack_adjust_offset_pre_post_cb (rtx
, rtx op
, rtx dest
, rtx src
, rtx srcoff
,
801 if (dest
!= stack_pointer_rtx
)
804 switch (GET_CODE (op
))
808 ((HOST_WIDE_INT
*)arg
)[0] -= INTVAL (srcoff
);
812 ((HOST_WIDE_INT
*)arg
)[1] -= INTVAL (srcoff
);
816 /* We handle only adjustments by constant amount. */
817 gcc_assert (GET_CODE (src
) == PLUS
818 && CONST_INT_P (XEXP (src
, 1))
819 && XEXP (src
, 0) == stack_pointer_rtx
);
820 ((HOST_WIDE_INT
*)arg
)[GET_CODE (op
) == POST_MODIFY
]
821 -= INTVAL (XEXP (src
, 1));
828 /* Given a SET, calculate the amount of stack adjustment it contains
829 PRE- and POST-modifying stack pointer.
830 This function is similar to stack_adjust_offset. */
833 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
836 rtx src
= SET_SRC (pattern
);
837 rtx dest
= SET_DEST (pattern
);
840 if (dest
== stack_pointer_rtx
)
842 /* (set (reg sp) (plus (reg sp) (const_int))) */
843 code
= GET_CODE (src
);
844 if (! (code
== PLUS
|| code
== MINUS
)
845 || XEXP (src
, 0) != stack_pointer_rtx
846 || !CONST_INT_P (XEXP (src
, 1)))
850 *post
+= INTVAL (XEXP (src
, 1));
852 *post
-= INTVAL (XEXP (src
, 1));
855 HOST_WIDE_INT res
[2] = { 0, 0 };
856 for_each_inc_dec (pattern
, stack_adjust_offset_pre_post_cb
, res
);
861 /* Given an INSN, calculate the amount of stack adjustment it contains
862 PRE- and POST-modifying stack pointer. */
865 insn_stack_adjust_offset_pre_post (rtx_insn
*insn
, HOST_WIDE_INT
*pre
,
873 pattern
= PATTERN (insn
);
874 if (RTX_FRAME_RELATED_P (insn
))
876 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
878 pattern
= XEXP (expr
, 0);
881 if (GET_CODE (pattern
) == SET
)
882 stack_adjust_offset_pre_post (pattern
, pre
, post
);
883 else if (GET_CODE (pattern
) == PARALLEL
884 || GET_CODE (pattern
) == SEQUENCE
)
888 /* There may be stack adjustments inside compound insns. Search
890 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
891 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
892 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
896 /* Compute stack adjustments for all blocks by traversing DFS tree.
897 Return true when the adjustments on all incoming edges are consistent.
898 Heavily borrowed from pre_and_rev_post_order_compute. */
901 vt_stack_adjustments (void)
903 edge_iterator
*stack
;
906 /* Initialize entry block. */
907 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->visited
= true;
908 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->in
.stack_adjust
909 = INCOMING_FRAME_SP_OFFSET
;
910 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
.stack_adjust
911 = INCOMING_FRAME_SP_OFFSET
;
913 /* Allocate stack for back-tracking up CFG. */
914 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
917 /* Push the first edge on to the stack. */
918 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
926 /* Look at the edge on the top of the stack. */
928 src
= ei_edge (ei
)->src
;
929 dest
= ei_edge (ei
)->dest
;
931 /* Check if the edge destination has been visited yet. */
932 if (!VTI (dest
)->visited
)
935 HOST_WIDE_INT pre
, post
, offset
;
936 VTI (dest
)->visited
= true;
937 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
939 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
940 for (insn
= BB_HEAD (dest
);
941 insn
!= NEXT_INSN (BB_END (dest
));
942 insn
= NEXT_INSN (insn
))
945 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
946 offset
+= pre
+ post
;
949 VTI (dest
)->out
.stack_adjust
= offset
;
951 if (EDGE_COUNT (dest
->succs
) > 0)
952 /* Since the DEST node has been visited for the first
953 time, check its successors. */
954 stack
[sp
++] = ei_start (dest
->succs
);
958 /* We can end up with different stack adjustments for the exit block
959 of a shrink-wrapped function if stack_adjust_offset_pre_post
960 doesn't understand the rtx pattern used to restore the stack
961 pointer in the epilogue. For example, on s390(x), the stack
962 pointer is often restored via a load-multiple instruction
963 and so no stack_adjust offset is recorded for it. This means
964 that the stack offset at the end of the epilogue block is the
965 same as the offset before the epilogue, whereas other paths
966 to the exit block will have the correct stack_adjust.
968 It is safe to ignore these differences because (a) we never
969 use the stack_adjust for the exit block in this pass and
970 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
971 function are correct.
973 We must check whether the adjustments on other edges are
975 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
976 && VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
982 if (! ei_one_before_end_p (ei
))
983 /* Go to the next edge. */
984 ei_next (&stack
[sp
- 1]);
986 /* Return to previous level if there are no more edges. */
995 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
996 hard_frame_pointer_rtx is being mapped to it and offset for it. */
997 static rtx cfa_base_rtx
;
998 static HOST_WIDE_INT cfa_base_offset
;
1000 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
1001 or hard_frame_pointer_rtx. */
1004 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
1006 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
1009 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
1010 or -1 if the replacement shouldn't be done. */
1011 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
1013 /* Data for adjust_mems callback. */
1015 struct adjust_mem_data
1018 machine_mode mem_mode
;
1019 HOST_WIDE_INT stack_adjust
;
1020 rtx_expr_list
*side_effects
;
1023 /* Helper for adjust_mems. Return true if X is suitable for
1024 transformation of wider mode arithmetics to narrower mode. */
1027 use_narrower_mode_test (rtx x
, const_rtx subreg
)
1029 subrtx_var_iterator::array_type array
;
1030 FOR_EACH_SUBRTX_VAR (iter
, array
, x
, NONCONST
)
1034 iter
.skip_subrtxes ();
1036 switch (GET_CODE (x
))
1039 if (cselib_lookup (x
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
1041 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (x
), x
,
1042 subreg_lowpart_offset (GET_MODE (subreg
),
1051 iter
.substitute (XEXP (x
, 0));
1060 /* Transform X into narrower mode MODE from wider mode WMODE. */
1063 use_narrower_mode (rtx x
, machine_mode mode
, machine_mode wmode
)
1067 return lowpart_subreg (mode
, x
, wmode
);
1068 switch (GET_CODE (x
))
1071 return lowpart_subreg (mode
, x
, wmode
);
1075 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1076 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
1077 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
1079 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1081 /* Ensure shift amount is not wider than mode. */
1082 if (GET_MODE (op1
) == VOIDmode
)
1083 op1
= lowpart_subreg (mode
, op1
, wmode
);
1084 else if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (GET_MODE (op1
)))
1085 op1
= lowpart_subreg (mode
, op1
, GET_MODE (op1
));
1086 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
1092 /* Helper function for adjusting used MEMs. */
1095 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1097 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1098 rtx mem
, addr
= loc
, tem
;
1099 machine_mode mem_mode_save
;
1101 switch (GET_CODE (loc
))
1104 /* Don't do any sp or fp replacements outside of MEM addresses
1106 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1108 if (loc
== stack_pointer_rtx
1109 && !frame_pointer_needed
1111 return compute_cfa_pointer (amd
->stack_adjust
);
1112 else if (loc
== hard_frame_pointer_rtx
1113 && frame_pointer_needed
1114 && hard_frame_pointer_adjustment
!= -1
1116 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1117 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1123 mem
= targetm
.delegitimize_address (mem
);
1124 if (mem
!= loc
&& !MEM_P (mem
))
1125 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1128 addr
= XEXP (mem
, 0);
1129 mem_mode_save
= amd
->mem_mode
;
1130 amd
->mem_mode
= GET_MODE (mem
);
1131 store_save
= amd
->store
;
1133 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1134 amd
->store
= store_save
;
1135 amd
->mem_mode
= mem_mode_save
;
1137 addr
= targetm
.delegitimize_address (addr
);
1138 if (addr
!= XEXP (mem
, 0))
1139 mem
= replace_equiv_address_nv (mem
, addr
);
1141 mem
= avoid_constant_pool_reference (mem
);
1145 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1146 gen_int_mode (GET_CODE (loc
) == PRE_INC
1147 ? GET_MODE_SIZE (amd
->mem_mode
)
1148 : -GET_MODE_SIZE (amd
->mem_mode
),
1153 addr
= XEXP (loc
, 0);
1154 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1155 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1156 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1157 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1158 || GET_CODE (loc
) == POST_INC
)
1159 ? GET_MODE_SIZE (amd
->mem_mode
)
1160 : -GET_MODE_SIZE (amd
->mem_mode
),
1162 store_save
= amd
->store
;
1164 tem
= simplify_replace_fn_rtx (tem
, old_rtx
, adjust_mems
, data
);
1165 amd
->store
= store_save
;
1166 amd
->side_effects
= alloc_EXPR_LIST (0,
1167 gen_rtx_SET (XEXP (loc
, 0), tem
),
1171 addr
= XEXP (loc
, 1);
1174 addr
= XEXP (loc
, 0);
1175 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1176 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1177 store_save
= amd
->store
;
1179 tem
= simplify_replace_fn_rtx (XEXP (loc
, 1), old_rtx
,
1181 amd
->store
= store_save
;
1182 amd
->side_effects
= alloc_EXPR_LIST (0,
1183 gen_rtx_SET (XEXP (loc
, 0), tem
),
1187 /* First try without delegitimization of whole MEMs and
1188 avoid_constant_pool_reference, which is more likely to succeed. */
1189 store_save
= amd
->store
;
1191 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1193 amd
->store
= store_save
;
1194 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1195 if (mem
== SUBREG_REG (loc
))
1200 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1201 GET_MODE (SUBREG_REG (loc
)),
1205 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1206 GET_MODE (SUBREG_REG (loc
)),
1208 if (tem
== NULL_RTX
)
1209 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1211 if (MAY_HAVE_DEBUG_INSNS
1212 && GET_CODE (tem
) == SUBREG
1213 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1214 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1215 || GET_CODE (SUBREG_REG (tem
)) == MULT
1216 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1217 && (GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
1218 || GET_MODE_CLASS (GET_MODE (tem
)) == MODE_PARTIAL_INT
)
1219 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
1220 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_PARTIAL_INT
)
1221 && GET_MODE_PRECISION (GET_MODE (tem
))
1222 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem
)))
1223 && subreg_lowpart_p (tem
)
1224 && use_narrower_mode_test (SUBREG_REG (tem
), tem
))
1225 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
1226 GET_MODE (SUBREG_REG (tem
)));
1229 /* Don't do any replacements in second and following
1230 ASM_OPERANDS of inline-asm with multiple sets.
1231 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1232 and ASM_OPERANDS_LABEL_VEC need to be equal between
1233 all the ASM_OPERANDs in the insn and adjust_insn will
1235 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1244 /* Helper function for replacement of uses. */
1247 adjust_mem_uses (rtx
*x
, void *data
)
1249 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1251 validate_change (NULL_RTX
, x
, new_x
, true);
1254 /* Helper function for replacement of stores. */
1257 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1261 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1263 if (new_dest
!= SET_DEST (expr
))
1265 rtx xexpr
= CONST_CAST_RTX (expr
);
1266 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1271 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1272 replace them with their value in the insn and add the side-effects
1273 as other sets to the insn. */
1276 adjust_insn (basic_block bb
, rtx_insn
*insn
)
1278 struct adjust_mem_data amd
;
1281 #ifdef HAVE_window_save
1282 /* If the target machine has an explicit window save instruction, the
1283 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1284 if (RTX_FRAME_RELATED_P (insn
)
1285 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1287 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1288 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1291 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1293 XVECEXP (rtl
, 0, i
* 2)
1294 = gen_rtx_SET (p
->incoming
, p
->outgoing
);
1295 /* Do not clobber the attached DECL, but only the REG. */
1296 XVECEXP (rtl
, 0, i
* 2 + 1)
1297 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1298 gen_raw_REG (GET_MODE (p
->outgoing
),
1299 REGNO (p
->outgoing
)));
1302 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1307 amd
.mem_mode
= VOIDmode
;
1308 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1309 amd
.side_effects
= NULL
;
1312 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1315 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1316 && asm_noperands (PATTERN (insn
)) > 0
1317 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1322 /* inline-asm with multiple sets is tiny bit more complicated,
1323 because the 3 vectors in ASM_OPERANDS need to be shared between
1324 all ASM_OPERANDS in the instruction. adjust_mems will
1325 not touch ASM_OPERANDS other than the first one, asm_noperands
1326 test above needs to be called before that (otherwise it would fail)
1327 and afterwards this code fixes it up. */
1328 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1329 body
= PATTERN (insn
);
1330 set0
= XVECEXP (body
, 0, 0);
1331 gcc_checking_assert (GET_CODE (set0
) == SET
1332 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1333 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1334 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1335 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1339 set
= XVECEXP (body
, 0, i
);
1340 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1341 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1343 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1344 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1345 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1346 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1347 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1348 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1350 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1351 ASM_OPERANDS_INPUT_VEC (newsrc
)
1352 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1353 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1354 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1355 ASM_OPERANDS_LABEL_VEC (newsrc
)
1356 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1357 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1362 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1364 /* For read-only MEMs containing some constant, prefer those
1366 set
= single_set (insn
);
1367 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1369 rtx note
= find_reg_equal_equiv_note (insn
);
1371 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1372 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1375 if (amd
.side_effects
)
1377 rtx
*pat
, new_pat
, s
;
1380 pat
= &PATTERN (insn
);
1381 if (GET_CODE (*pat
) == COND_EXEC
)
1382 pat
= &COND_EXEC_CODE (*pat
);
1383 if (GET_CODE (*pat
) == PARALLEL
)
1384 oldn
= XVECLEN (*pat
, 0);
1387 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1389 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1390 if (GET_CODE (*pat
) == PARALLEL
)
1391 for (i
= 0; i
< oldn
; i
++)
1392 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1394 XVECEXP (new_pat
, 0, 0) = *pat
;
1395 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1396 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1397 free_EXPR_LIST_list (&amd
.side_effects
);
1398 validate_change (NULL_RTX
, pat
, new_pat
, true);
1402 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1404 dv_as_rtx (decl_or_value dv
)
1408 if (dv_is_value_p (dv
))
1409 return dv_as_value (dv
);
1411 decl
= dv_as_decl (dv
);
1413 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1414 return DECL_RTL_KNOWN_SET (decl
);
1417 /* Return nonzero if a decl_or_value must not have more than one
1418 variable part. The returned value discriminates among various
1419 kinds of one-part DVs ccording to enum onepart_enum. */
1420 static inline onepart_enum_t
1421 dv_onepart_p (decl_or_value dv
)
1425 if (!MAY_HAVE_DEBUG_INSNS
)
1428 if (dv_is_value_p (dv
))
1429 return ONEPART_VALUE
;
1431 decl
= dv_as_decl (dv
);
1433 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1434 return ONEPART_DEXPR
;
1436 if (target_for_debug_bind (decl
) != NULL_TREE
)
1437 return ONEPART_VDECL
;
1442 /* Return the variable pool to be used for a dv of type ONEPART. */
1443 static inline pool_allocator
<variable_def
> &
1444 onepart_pool (onepart_enum_t onepart
)
1446 return onepart
? valvar_pool
: var_pool
;
1449 /* Build a decl_or_value out of a decl. */
1450 static inline decl_or_value
1451 dv_from_decl (tree decl
)
1455 gcc_checking_assert (dv_is_decl_p (dv
));
1459 /* Build a decl_or_value out of a value. */
1460 static inline decl_or_value
1461 dv_from_value (rtx value
)
1465 gcc_checking_assert (dv_is_value_p (dv
));
1469 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1470 static inline decl_or_value
1475 switch (GET_CODE (x
))
1478 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1479 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1483 dv
= dv_from_value (x
);
1493 extern void debug_dv (decl_or_value dv
);
1496 debug_dv (decl_or_value dv
)
1498 if (dv_is_value_p (dv
))
1499 debug_rtx (dv_as_value (dv
));
1501 debug_generic_stmt (dv_as_decl (dv
));
1504 static void loc_exp_dep_clear (variable var
);
1506 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1509 variable_htab_free (void *elem
)
1512 variable var
= (variable
) elem
;
1513 location_chain node
, next
;
1515 gcc_checking_assert (var
->refcount
> 0);
1518 if (var
->refcount
> 0)
1521 for (i
= 0; i
< var
->n_var_parts
; i
++)
1523 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1528 var
->var_part
[i
].loc_chain
= NULL
;
1530 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1532 loc_exp_dep_clear (var
);
1533 if (VAR_LOC_DEP_LST (var
))
1534 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1535 XDELETE (VAR_LOC_1PAUX (var
));
1536 /* These may be reused across functions, so reset
1538 if (var
->onepart
== ONEPART_DEXPR
)
1539 set_dv_changed (var
->dv
, true);
1541 onepart_pool (var
->onepart
).remove (var
);
1544 /* Initialize the set (array) SET of attrs to empty lists. */
1547 init_attrs_list_set (attrs
*set
)
1551 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1555 /* Make the list *LISTP empty. */
1558 attrs_list_clear (attrs
*listp
)
1562 for (list
= *listp
; list
; list
= next
)
1570 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1573 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1575 for (; list
; list
= list
->next
)
1576 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1581 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1584 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1585 HOST_WIDE_INT offset
, rtx loc
)
1587 attrs list
= new attrs_def
;
1590 list
->offset
= offset
;
1591 list
->next
= *listp
;
1595 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1598 attrs_list_copy (attrs
*dstp
, attrs src
)
1600 attrs_list_clear (dstp
);
1601 for (; src
; src
= src
->next
)
1603 attrs n
= new attrs_def
;
1606 n
->offset
= src
->offset
;
1612 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1615 attrs_list_union (attrs
*dstp
, attrs src
)
1617 for (; src
; src
= src
->next
)
1619 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1620 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1624 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1628 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1630 gcc_assert (!*dstp
);
1631 for (; src
; src
= src
->next
)
1633 if (!dv_onepart_p (src
->dv
))
1634 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1636 for (src
= src2
; src
; src
= src
->next
)
1638 if (!dv_onepart_p (src
->dv
)
1639 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1640 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1644 /* Shared hashtable support. */
1646 /* Return true if VARS is shared. */
1649 shared_hash_shared (shared_hash vars
)
1651 return vars
->refcount
> 1;
1654 /* Return the hash table for VARS. */
1656 static inline variable_table_type
*
1657 shared_hash_htab (shared_hash vars
)
1662 /* Return true if VAR is shared, or maybe because VARS is shared. */
1665 shared_var_p (variable var
, shared_hash vars
)
1667 /* Don't count an entry in the changed_variables table as a duplicate. */
1668 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1669 || shared_hash_shared (vars
));
1672 /* Copy variables into a new hash table. */
1675 shared_hash_unshare (shared_hash vars
)
1677 shared_hash new_vars
= new shared_hash_def
;
1678 gcc_assert (vars
->refcount
> 1);
1679 new_vars
->refcount
= 1;
1680 new_vars
->htab
= new variable_table_type (vars
->htab
->elements () + 3);
1681 vars_copy (new_vars
->htab
, vars
->htab
);
1686 /* Increment reference counter on VARS and return it. */
1688 static inline shared_hash
1689 shared_hash_copy (shared_hash vars
)
1695 /* Decrement reference counter and destroy hash table if not shared
1699 shared_hash_destroy (shared_hash vars
)
1701 gcc_checking_assert (vars
->refcount
> 0);
1702 if (--vars
->refcount
== 0)
1709 /* Unshare *PVARS if shared and return slot for DV. If INS is
1710 INSERT, insert it if not already present. */
1712 static inline variable_def
**
1713 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1714 hashval_t dvhash
, enum insert_option ins
)
1716 if (shared_hash_shared (*pvars
))
1717 *pvars
= shared_hash_unshare (*pvars
);
1718 return shared_hash_htab (*pvars
)->find_slot_with_hash (dv
, dvhash
, ins
);
1721 static inline variable_def
**
1722 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1723 enum insert_option ins
)
1725 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1728 /* Return slot for DV, if it is already present in the hash table.
1729 If it is not present, insert it only VARS is not shared, otherwise
1732 static inline variable_def
**
1733 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1735 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
,
1736 shared_hash_shared (vars
)
1737 ? NO_INSERT
: INSERT
);
1740 static inline variable_def
**
1741 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1743 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1746 /* Return slot for DV only if it is already present in the hash table. */
1748 static inline variable_def
**
1749 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1752 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1755 static inline variable_def
**
1756 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1758 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1761 /* Return variable for DV or NULL if not already present in the hash
1764 static inline variable
1765 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1767 return shared_hash_htab (vars
)->find_with_hash (dv
, dvhash
);
1770 static inline variable
1771 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1773 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1776 /* Return true if TVAL is better than CVAL as a canonival value. We
1777 choose lowest-numbered VALUEs, using the RTX address as a
1778 tie-breaker. The idea is to arrange them into a star topology,
1779 such that all of them are at most one step away from the canonical
1780 value, and the canonical value has backlinks to all of them, in
1781 addition to all the actual locations. We don't enforce this
1782 topology throughout the entire dataflow analysis, though.
1786 canon_value_cmp (rtx tval
, rtx cval
)
1789 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1792 static bool dst_can_be_shared
;
1794 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1796 static variable_def
**
1797 unshare_variable (dataflow_set
*set
, variable_def
**slot
, variable var
,
1798 enum var_init_status initialized
)
1803 new_var
= onepart_pool (var
->onepart
).allocate ();
1804 new_var
->dv
= var
->dv
;
1805 new_var
->refcount
= 1;
1807 new_var
->n_var_parts
= var
->n_var_parts
;
1808 new_var
->onepart
= var
->onepart
;
1809 new_var
->in_changed_variables
= false;
1811 if (! flag_var_tracking_uninit
)
1812 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1814 for (i
= 0; i
< var
->n_var_parts
; i
++)
1816 location_chain node
;
1817 location_chain
*nextp
;
1819 if (i
== 0 && var
->onepart
)
1821 /* One-part auxiliary data is only used while emitting
1822 notes, so propagate it to the new variable in the active
1823 dataflow set. If we're not emitting notes, this will be
1825 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1826 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1827 VAR_LOC_1PAUX (var
) = NULL
;
1830 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1831 nextp
= &new_var
->var_part
[i
].loc_chain
;
1832 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1834 location_chain new_lc
;
1836 new_lc
= new location_chain_def
;
1837 new_lc
->next
= NULL
;
1838 if (node
->init
> initialized
)
1839 new_lc
->init
= node
->init
;
1841 new_lc
->init
= initialized
;
1842 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1843 new_lc
->set_src
= node
->set_src
;
1845 new_lc
->set_src
= NULL
;
1846 new_lc
->loc
= node
->loc
;
1849 nextp
= &new_lc
->next
;
1852 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1855 dst_can_be_shared
= false;
1856 if (shared_hash_shared (set
->vars
))
1857 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1858 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1859 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1861 if (var
->in_changed_variables
)
1863 variable_def
**cslot
1864 = changed_variables
->find_slot_with_hash (var
->dv
,
1865 dv_htab_hash (var
->dv
),
1867 gcc_assert (*cslot
== (void *) var
);
1868 var
->in_changed_variables
= false;
1869 variable_htab_free (var
);
1871 new_var
->in_changed_variables
= true;
1876 /* Copy all variables from hash table SRC to hash table DST. */
1879 vars_copy (variable_table_type
*dst
, variable_table_type
*src
)
1881 variable_iterator_type hi
;
1884 FOR_EACH_HASH_TABLE_ELEMENT (*src
, var
, variable
, hi
)
1886 variable_def
**dstp
;
1888 dstp
= dst
->find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
),
1894 /* Map a decl to its main debug decl. */
1897 var_debug_decl (tree decl
)
1899 if (decl
&& TREE_CODE (decl
) == VAR_DECL
1900 && DECL_HAS_DEBUG_EXPR_P (decl
))
1902 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1903 if (DECL_P (debugdecl
))
1910 /* Set the register LOC to contain DV, OFFSET. */
1913 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1914 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1915 enum insert_option iopt
)
1918 bool decl_p
= dv_is_decl_p (dv
);
1921 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1923 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1924 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1925 && node
->offset
== offset
)
1928 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1929 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1932 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1935 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1938 tree decl
= REG_EXPR (loc
);
1939 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1941 var_reg_decl_set (set
, loc
, initialized
,
1942 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1945 static enum var_init_status
1946 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1950 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1952 if (! flag_var_tracking_uninit
)
1953 return VAR_INIT_STATUS_INITIALIZED
;
1955 var
= shared_hash_find (set
->vars
, dv
);
1958 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1960 location_chain nextp
;
1961 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1962 if (rtx_equal_p (nextp
->loc
, loc
))
1964 ret_val
= nextp
->init
;
1973 /* Delete current content of register LOC in dataflow set SET and set
1974 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1975 MODIFY is true, any other live copies of the same variable part are
1976 also deleted from the dataflow set, otherwise the variable part is
1977 assumed to be copied from another location holding the same
1981 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1982 enum var_init_status initialized
, rtx set_src
)
1984 tree decl
= REG_EXPR (loc
);
1985 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1989 decl
= var_debug_decl (decl
);
1991 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1992 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1994 nextp
= &set
->regs
[REGNO (loc
)];
1995 for (node
= *nextp
; node
; node
= next
)
1998 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
2000 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2007 nextp
= &node
->next
;
2011 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
2012 var_reg_set (set
, loc
, initialized
, set_src
);
2015 /* Delete the association of register LOC in dataflow set SET with any
2016 variables that aren't onepart. If CLOBBER is true, also delete any
2017 other live copies of the same variable part, and delete the
2018 association with onepart dvs too. */
2021 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2023 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
2028 tree decl
= REG_EXPR (loc
);
2029 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
2031 decl
= var_debug_decl (decl
);
2033 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2036 for (node
= *nextp
; node
; node
= next
)
2039 if (clobber
|| !dv_onepart_p (node
->dv
))
2041 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2046 nextp
= &node
->next
;
2050 /* Delete content of register with number REGNO in dataflow set SET. */
2053 var_regno_delete (dataflow_set
*set
, int regno
)
2055 attrs
*reg
= &set
->regs
[regno
];
2058 for (node
= *reg
; node
; node
= next
)
2061 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
2067 /* Return true if I is the negated value of a power of two. */
2069 negative_power_of_two_p (HOST_WIDE_INT i
)
2071 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
2072 return x
== (x
& -x
);
2075 /* Strip constant offsets and alignments off of LOC. Return the base
2079 vt_get_canonicalize_base (rtx loc
)
2081 while ((GET_CODE (loc
) == PLUS
2082 || GET_CODE (loc
) == AND
)
2083 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2084 && (GET_CODE (loc
) != AND
2085 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
2086 loc
= XEXP (loc
, 0);
2091 /* This caches canonicalized addresses for VALUEs, computed using
2092 information in the global cselib table. */
2093 static hash_map
<rtx
, rtx
> *global_get_addr_cache
;
2095 /* This caches canonicalized addresses for VALUEs, computed using
2096 information from the global cache and information pertaining to a
2097 basic block being analyzed. */
2098 static hash_map
<rtx
, rtx
> *local_get_addr_cache
;
2100 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2102 /* Return the canonical address for LOC, that must be a VALUE, using a
2103 cached global equivalence or computing it and storing it in the
2107 get_addr_from_global_cache (rtx
const loc
)
2111 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2114 rtx
*slot
= &global_get_addr_cache
->get_or_insert (loc
, &existed
);
2118 x
= canon_rtx (get_addr (loc
));
2120 /* Tentative, avoiding infinite recursion. */
2125 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2128 /* The table may have moved during recursion, recompute
2130 *global_get_addr_cache
->get (loc
) = x
= nx
;
2137 /* Return the canonical address for LOC, that must be a VALUE, using a
2138 cached local equivalence or computing it and storing it in the
2142 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2149 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2152 rtx
*slot
= &local_get_addr_cache
->get_or_insert (loc
, &existed
);
2156 x
= get_addr_from_global_cache (loc
);
2158 /* Tentative, avoiding infinite recursion. */
2161 /* Recurse to cache local expansion of X, or if we need to search
2162 for a VALUE in the expansion. */
2165 rtx nx
= vt_canonicalize_addr (set
, x
);
2168 slot
= local_get_addr_cache
->get (loc
);
2174 dv
= dv_from_rtx (x
);
2175 var
= shared_hash_find (set
->vars
, dv
);
2179 /* Look for an improved equivalent expression. */
2180 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2182 rtx base
= vt_get_canonicalize_base (l
->loc
);
2183 if (GET_CODE (base
) == VALUE
2184 && canon_value_cmp (base
, loc
))
2186 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2189 slot
= local_get_addr_cache
->get (loc
);
2199 /* Canonicalize LOC using equivalences from SET in addition to those
2200 in the cselib static table. It expects a VALUE-based expression,
2201 and it will only substitute VALUEs with other VALUEs or
2202 function-global equivalences, so that, if two addresses have base
2203 VALUEs that are locally or globally related in ways that
2204 memrefs_conflict_p cares about, they will both canonicalize to
2205 expressions that have the same base VALUE.
2207 The use of VALUEs as canonical base addresses enables the canonical
2208 RTXs to remain unchanged globally, if they resolve to a constant,
2209 or throughout a basic block otherwise, so that they can be cached
2210 and the cache needs not be invalidated when REGs, MEMs or such
2214 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2216 HOST_WIDE_INT ofst
= 0;
2217 machine_mode mode
= GET_MODE (oloc
);
2224 while (GET_CODE (loc
) == PLUS
2225 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2227 ofst
+= INTVAL (XEXP (loc
, 1));
2228 loc
= XEXP (loc
, 0);
2231 /* Alignment operations can't normally be combined, so just
2232 canonicalize the base and we're done. We'll normally have
2233 only one stack alignment anyway. */
2234 if (GET_CODE (loc
) == AND
2235 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2236 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2238 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2239 if (x
!= XEXP (loc
, 0))
2240 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2244 if (GET_CODE (loc
) == VALUE
)
2247 loc
= get_addr_from_local_cache (set
, loc
);
2249 loc
= get_addr_from_global_cache (loc
);
2251 /* Consolidate plus_constants. */
2252 while (ofst
&& GET_CODE (loc
) == PLUS
2253 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2255 ofst
+= INTVAL (XEXP (loc
, 1));
2256 loc
= XEXP (loc
, 0);
2263 x
= canon_rtx (loc
);
2270 /* Add OFST back in. */
2273 /* Don't build new RTL if we can help it. */
2274 if (GET_CODE (oloc
) == PLUS
2275 && XEXP (oloc
, 0) == loc
2276 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2279 loc
= plus_constant (mode
, loc
, ofst
);
2285 /* Return true iff there's a true dependence between MLOC and LOC.
2286 MADDR must be a canonicalized version of MLOC's address. */
2289 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2291 if (GET_CODE (loc
) != MEM
)
2294 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2295 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2301 /* Hold parameters for the hashtab traversal function
2302 drop_overlapping_mem_locs, see below. */
2304 struct overlapping_mems
2310 /* Remove all MEMs that overlap with COMS->LOC from the location list
2311 of a hash table entry for a value. COMS->ADDR must be a
2312 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2313 canonicalized itself. */
2316 drop_overlapping_mem_locs (variable_def
**slot
, overlapping_mems
*coms
)
2318 dataflow_set
*set
= coms
->set
;
2319 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2320 variable var
= *slot
;
2322 if (var
->onepart
== ONEPART_VALUE
)
2324 location_chain loc
, *locp
;
2325 bool changed
= false;
2328 gcc_assert (var
->n_var_parts
== 1);
2330 if (shared_var_p (var
, set
->vars
))
2332 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2333 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2339 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2341 gcc_assert (var
->n_var_parts
== 1);
2344 if (VAR_LOC_1PAUX (var
))
2345 cur_loc
= VAR_LOC_FROM (var
);
2347 cur_loc
= var
->var_part
[0].cur_loc
;
2349 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2352 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2359 /* If we have deleted the location which was last emitted
2360 we have to emit new location so add the variable to set
2361 of changed variables. */
2362 if (cur_loc
== loc
->loc
)
2365 var
->var_part
[0].cur_loc
= NULL
;
2366 if (VAR_LOC_1PAUX (var
))
2367 VAR_LOC_FROM (var
) = NULL
;
2372 if (!var
->var_part
[0].loc_chain
)
2378 variable_was_changed (var
, set
);
2384 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2387 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2389 struct overlapping_mems coms
;
2391 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2394 coms
.loc
= canon_rtx (loc
);
2395 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2397 set
->traversed_vars
= set
->vars
;
2398 shared_hash_htab (set
->vars
)
2399 ->traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2400 set
->traversed_vars
= NULL
;
2403 /* Set the location of DV, OFFSET as the MEM LOC. */
2406 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2407 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2408 enum insert_option iopt
)
2410 if (dv_is_decl_p (dv
))
2411 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2413 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2416 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2418 Adjust the address first if it is stack pointer based. */
2421 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2424 tree decl
= MEM_EXPR (loc
);
2425 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2427 var_mem_decl_set (set
, loc
, initialized
,
2428 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2431 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2432 dataflow set SET to LOC. If MODIFY is true, any other live copies
2433 of the same variable part are also deleted from the dataflow set,
2434 otherwise the variable part is assumed to be copied from another
2435 location holding the same part.
2436 Adjust the address first if it is stack pointer based. */
2439 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2440 enum var_init_status initialized
, rtx set_src
)
2442 tree decl
= MEM_EXPR (loc
);
2443 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2445 clobber_overlapping_mems (set
, loc
);
2446 decl
= var_debug_decl (decl
);
2448 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2449 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2452 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2453 var_mem_set (set
, loc
, initialized
, set_src
);
2456 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2457 true, also delete any other live copies of the same variable part.
2458 Adjust the address first if it is stack pointer based. */
2461 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2463 tree decl
= MEM_EXPR (loc
);
2464 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2466 clobber_overlapping_mems (set
, loc
);
2467 decl
= var_debug_decl (decl
);
2469 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2470 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2473 /* Return true if LOC should not be expanded for location expressions,
2477 unsuitable_loc (rtx loc
)
2479 switch (GET_CODE (loc
))
2493 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2497 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2502 var_regno_delete (set
, REGNO (loc
));
2503 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2504 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2506 else if (MEM_P (loc
))
2508 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2511 clobber_overlapping_mems (set
, loc
);
2513 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2514 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2516 /* If this MEM is a global constant, we don't need it in the
2517 dynamic tables. ??? We should test this before emitting the
2518 micro-op in the first place. */
2520 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2526 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2527 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2531 /* Other kinds of equivalences are necessarily static, at least
2532 so long as we do not perform substitutions while merging
2535 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2536 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2540 /* Bind a value to a location it was just stored in. If MODIFIED
2541 holds, assume the location was modified, detaching it from any
2542 values bound to it. */
2545 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
,
2548 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2550 gcc_assert (cselib_preserved_value_p (v
));
2554 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2555 print_inline_rtx (dump_file
, loc
, 0);
2556 fprintf (dump_file
, " evaluates to ");
2557 print_inline_rtx (dump_file
, val
, 0);
2560 struct elt_loc_list
*l
;
2561 for (l
= v
->locs
; l
; l
= l
->next
)
2563 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2564 print_inline_rtx (dump_file
, l
->loc
, 0);
2567 fprintf (dump_file
, "\n");
2570 gcc_checking_assert (!unsuitable_loc (loc
));
2572 val_bind (set
, val
, loc
, modified
);
2575 /* Clear (canonical address) slots that reference X. */
2578 local_get_addr_clear_given_value (rtx
const &, rtx
*slot
, rtx x
)
2580 if (vt_get_canonicalize_base (*slot
) == x
)
2585 /* Reset this node, detaching all its equivalences. Return the slot
2586 in the variable hash table that holds dv, if there is one. */
2589 val_reset (dataflow_set
*set
, decl_or_value dv
)
2591 variable var
= shared_hash_find (set
->vars
, dv
) ;
2592 location_chain node
;
2595 if (!var
|| !var
->n_var_parts
)
2598 gcc_assert (var
->n_var_parts
== 1);
2600 if (var
->onepart
== ONEPART_VALUE
)
2602 rtx x
= dv_as_value (dv
);
2604 /* Relationships in the global cache don't change, so reset the
2605 local cache entry only. */
2606 rtx
*slot
= local_get_addr_cache
->get (x
);
2609 /* If the value resolved back to itself, odds are that other
2610 values may have cached it too. These entries now refer
2611 to the old X, so detach them too. Entries that used the
2612 old X but resolved to something else remain ok as long as
2613 that something else isn't also reset. */
2615 local_get_addr_cache
2616 ->traverse
<rtx
, local_get_addr_clear_given_value
> (x
);
2622 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2623 if (GET_CODE (node
->loc
) == VALUE
2624 && canon_value_cmp (node
->loc
, cval
))
2627 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2628 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2630 /* Redirect the equivalence link to the new canonical
2631 value, or simply remove it if it would point at
2634 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2635 0, node
->init
, node
->set_src
, NO_INSERT
);
2636 delete_variable_part (set
, dv_as_value (dv
),
2637 dv_from_value (node
->loc
), 0);
2642 decl_or_value cdv
= dv_from_value (cval
);
2644 /* Keep the remaining values connected, accummulating links
2645 in the canonical value. */
2646 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2648 if (node
->loc
== cval
)
2650 else if (GET_CODE (node
->loc
) == REG
)
2651 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2652 node
->set_src
, NO_INSERT
);
2653 else if (GET_CODE (node
->loc
) == MEM
)
2654 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2655 node
->set_src
, NO_INSERT
);
2657 set_variable_part (set
, node
->loc
, cdv
, 0,
2658 node
->init
, node
->set_src
, NO_INSERT
);
2662 /* We remove this last, to make sure that the canonical value is not
2663 removed to the point of requiring reinsertion. */
2665 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
2667 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2670 /* Find the values in a given location and map the val to another
2671 value, if it is unique, or add the location as one holding the
2675 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
)
2677 decl_or_value dv
= dv_from_value (val
);
2679 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2682 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2684 fprintf (dump_file
, "head: ");
2685 print_inline_rtx (dump_file
, val
, 0);
2686 fputs (" is at ", dump_file
);
2687 print_inline_rtx (dump_file
, loc
, 0);
2688 fputc ('\n', dump_file
);
2691 val_reset (set
, dv
);
2693 gcc_checking_assert (!unsuitable_loc (loc
));
2697 attrs node
, found
= NULL
;
2699 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2700 if (dv_is_value_p (node
->dv
)
2701 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2705 /* Map incoming equivalences. ??? Wouldn't it be nice if
2706 we just started sharing the location lists? Maybe a
2707 circular list ending at the value itself or some
2709 set_variable_part (set
, dv_as_value (node
->dv
),
2710 dv_from_value (val
), node
->offset
,
2711 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2712 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2713 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2716 /* If we didn't find any equivalence, we need to remember that
2717 this value is held in the named register. */
2721 /* ??? Attempt to find and merge equivalent MEMs or other
2724 val_bind (set
, val
, loc
, false);
2727 /* Initialize dataflow set SET to be empty.
2728 VARS_SIZE is the initial size of hash table VARS. */
2731 dataflow_set_init (dataflow_set
*set
)
2733 init_attrs_list_set (set
->regs
);
2734 set
->vars
= shared_hash_copy (empty_shared_hash
);
2735 set
->stack_adjust
= 0;
2736 set
->traversed_vars
= NULL
;
2739 /* Delete the contents of dataflow set SET. */
2742 dataflow_set_clear (dataflow_set
*set
)
2746 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2747 attrs_list_clear (&set
->regs
[i
]);
2749 shared_hash_destroy (set
->vars
);
2750 set
->vars
= shared_hash_copy (empty_shared_hash
);
2753 /* Copy the contents of dataflow set SRC to DST. */
2756 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2760 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2761 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2763 shared_hash_destroy (dst
->vars
);
2764 dst
->vars
= shared_hash_copy (src
->vars
);
2765 dst
->stack_adjust
= src
->stack_adjust
;
2768 /* Information for merging lists of locations for a given offset of variable.
2770 struct variable_union_info
2772 /* Node of the location chain. */
2775 /* The sum of positions in the input chains. */
2778 /* The position in the chain of DST dataflow set. */
2782 /* Buffer for location list sorting and its allocated size. */
2783 static struct variable_union_info
*vui_vec
;
2784 static int vui_allocated
;
2786 /* Compare function for qsort, order the structures by POS element. */
2789 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2791 const struct variable_union_info
*const i1
=
2792 (const struct variable_union_info
*) n1
;
2793 const struct variable_union_info
*const i2
=
2794 ( const struct variable_union_info
*) n2
;
2796 if (i1
->pos
!= i2
->pos
)
2797 return i1
->pos
- i2
->pos
;
2799 return (i1
->pos_dst
- i2
->pos_dst
);
2802 /* Compute union of location parts of variable *SLOT and the same variable
2803 from hash table DATA. Compute "sorted" union of the location chains
2804 for common offsets, i.e. the locations of a variable part are sorted by
2805 a priority where the priority is the sum of the positions in the 2 chains
2806 (if a location is only in one list the position in the second list is
2807 defined to be larger than the length of the chains).
2808 When we are updating the location parts the newest location is in the
2809 beginning of the chain, so when we do the described "sorted" union
2810 we keep the newest locations in the beginning. */
2813 variable_union (variable src
, dataflow_set
*set
)
2816 variable_def
**dstp
;
2819 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2820 if (!dstp
|| !*dstp
)
2824 dst_can_be_shared
= false;
2826 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2830 /* Continue traversing the hash table. */
2836 gcc_assert (src
->n_var_parts
);
2837 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2839 /* We can combine one-part variables very efficiently, because their
2840 entries are in canonical order. */
2843 location_chain
*nodep
, dnode
, snode
;
2845 gcc_assert (src
->n_var_parts
== 1
2846 && dst
->n_var_parts
== 1);
2848 snode
= src
->var_part
[0].loc_chain
;
2851 restart_onepart_unshared
:
2852 nodep
= &dst
->var_part
[0].loc_chain
;
2858 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2862 location_chain nnode
;
2864 if (shared_var_p (dst
, set
->vars
))
2866 dstp
= unshare_variable (set
, dstp
, dst
,
2867 VAR_INIT_STATUS_INITIALIZED
);
2869 goto restart_onepart_unshared
;
2872 *nodep
= nnode
= new location_chain_def
;
2873 nnode
->loc
= snode
->loc
;
2874 nnode
->init
= snode
->init
;
2875 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2876 nnode
->set_src
= NULL
;
2878 nnode
->set_src
= snode
->set_src
;
2879 nnode
->next
= dnode
;
2883 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2886 snode
= snode
->next
;
2888 nodep
= &dnode
->next
;
2895 gcc_checking_assert (!src
->onepart
);
2897 /* Count the number of location parts, result is K. */
2898 for (i
= 0, j
= 0, k
= 0;
2899 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2901 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2906 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2911 k
+= src
->n_var_parts
- i
;
2912 k
+= dst
->n_var_parts
- j
;
2914 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2915 thus there are at most MAX_VAR_PARTS different offsets. */
2916 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2918 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2920 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2924 i
= src
->n_var_parts
- 1;
2925 j
= dst
->n_var_parts
- 1;
2926 dst
->n_var_parts
= k
;
2928 for (k
--; k
>= 0; k
--)
2930 location_chain node
, node2
;
2932 if (i
>= 0 && j
>= 0
2933 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2935 /* Compute the "sorted" union of the chains, i.e. the locations which
2936 are in both chains go first, they are sorted by the sum of
2937 positions in the chains. */
2940 struct variable_union_info
*vui
;
2942 /* If DST is shared compare the location chains.
2943 If they are different we will modify the chain in DST with
2944 high probability so make a copy of DST. */
2945 if (shared_var_p (dst
, set
->vars
))
2947 for (node
= src
->var_part
[i
].loc_chain
,
2948 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2949 node
= node
->next
, node2
= node2
->next
)
2951 if (!((REG_P (node2
->loc
)
2952 && REG_P (node
->loc
)
2953 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2954 || rtx_equal_p (node2
->loc
, node
->loc
)))
2956 if (node2
->init
< node
->init
)
2957 node2
->init
= node
->init
;
2963 dstp
= unshare_variable (set
, dstp
, dst
,
2964 VAR_INIT_STATUS_UNKNOWN
);
2965 dst
= (variable
)*dstp
;
2970 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2973 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2978 /* The most common case, much simpler, no qsort is needed. */
2979 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2980 dst
->var_part
[k
].loc_chain
= dstnode
;
2981 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2983 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2984 if (!((REG_P (dstnode
->loc
)
2985 && REG_P (node
->loc
)
2986 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2987 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2989 location_chain new_node
;
2991 /* Copy the location from SRC. */
2992 new_node
= new location_chain_def
;
2993 new_node
->loc
= node
->loc
;
2994 new_node
->init
= node
->init
;
2995 if (!node
->set_src
|| MEM_P (node
->set_src
))
2996 new_node
->set_src
= NULL
;
2998 new_node
->set_src
= node
->set_src
;
2999 node2
->next
= new_node
;
3006 if (src_l
+ dst_l
> vui_allocated
)
3008 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
3009 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
3014 /* Fill in the locations from DST. */
3015 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
3016 node
= node
->next
, jj
++)
3019 vui
[jj
].pos_dst
= jj
;
3021 /* Pos plus value larger than a sum of 2 valid positions. */
3022 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
3025 /* Fill in the locations from SRC. */
3027 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
3028 node
= node
->next
, ii
++)
3030 /* Find location from NODE. */
3031 for (jj
= 0; jj
< dst_l
; jj
++)
3033 if ((REG_P (vui
[jj
].lc
->loc
)
3034 && REG_P (node
->loc
)
3035 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
3036 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
3038 vui
[jj
].pos
= jj
+ ii
;
3042 if (jj
>= dst_l
) /* The location has not been found. */
3044 location_chain new_node
;
3046 /* Copy the location from SRC. */
3047 new_node
= new location_chain_def
;
3048 new_node
->loc
= node
->loc
;
3049 new_node
->init
= node
->init
;
3050 if (!node
->set_src
|| MEM_P (node
->set_src
))
3051 new_node
->set_src
= NULL
;
3053 new_node
->set_src
= node
->set_src
;
3054 vui
[n
].lc
= new_node
;
3055 vui
[n
].pos_dst
= src_l
+ dst_l
;
3056 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
3063 /* Special case still very common case. For dst_l == 2
3064 all entries dst_l ... n-1 are sorted, with for i >= dst_l
3065 vui[i].pos == i + src_l + dst_l. */
3066 if (vui
[0].pos
> vui
[1].pos
)
3068 /* Order should be 1, 0, 2... */
3069 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
3070 vui
[1].lc
->next
= vui
[0].lc
;
3073 vui
[0].lc
->next
= vui
[2].lc
;
3074 vui
[n
- 1].lc
->next
= NULL
;
3077 vui
[0].lc
->next
= NULL
;
3082 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3083 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
3085 /* Order should be 0, 2, 1, 3... */
3086 vui
[0].lc
->next
= vui
[2].lc
;
3087 vui
[2].lc
->next
= vui
[1].lc
;
3090 vui
[1].lc
->next
= vui
[3].lc
;
3091 vui
[n
- 1].lc
->next
= NULL
;
3094 vui
[1].lc
->next
= NULL
;
3099 /* Order should be 0, 1, 2... */
3101 vui
[n
- 1].lc
->next
= NULL
;
3104 for (; ii
< n
; ii
++)
3105 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3109 qsort (vui
, n
, sizeof (struct variable_union_info
),
3110 variable_union_info_cmp_pos
);
3112 /* Reconnect the nodes in sorted order. */
3113 for (ii
= 1; ii
< n
; ii
++)
3114 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3115 vui
[n
- 1].lc
->next
= NULL
;
3116 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3119 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3124 else if ((i
>= 0 && j
>= 0
3125 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3128 dst
->var_part
[k
] = dst
->var_part
[j
];
3131 else if ((i
>= 0 && j
>= 0
3132 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3135 location_chain
*nextp
;
3137 /* Copy the chain from SRC. */
3138 nextp
= &dst
->var_part
[k
].loc_chain
;
3139 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3141 location_chain new_lc
;
3143 new_lc
= new location_chain_def
;
3144 new_lc
->next
= NULL
;
3145 new_lc
->init
= node
->init
;
3146 if (!node
->set_src
|| MEM_P (node
->set_src
))
3147 new_lc
->set_src
= NULL
;
3149 new_lc
->set_src
= node
->set_src
;
3150 new_lc
->loc
= node
->loc
;
3153 nextp
= &new_lc
->next
;
3156 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3159 dst
->var_part
[k
].cur_loc
= NULL
;
3162 if (flag_var_tracking_uninit
)
3163 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3165 location_chain node
, node2
;
3166 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3167 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3168 if (rtx_equal_p (node
->loc
, node2
->loc
))
3170 if (node
->init
> node2
->init
)
3171 node2
->init
= node
->init
;
3175 /* Continue traversing the hash table. */
3179 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3182 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
3186 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3187 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
3189 if (dst
->vars
== empty_shared_hash
)
3191 shared_hash_destroy (dst
->vars
);
3192 dst
->vars
= shared_hash_copy (src
->vars
);
3196 variable_iterator_type hi
;
3199 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src
->vars
),
3201 variable_union (var
, dst
);
3205 /* Whether the value is currently being expanded. */
3206 #define VALUE_RECURSED_INTO(x) \
3207 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3209 /* Whether no expansion was found, saving useless lookups.
3210 It must only be set when VALUE_CHANGED is clear. */
3211 #define NO_LOC_P(x) \
3212 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3214 /* Whether cur_loc in the value needs to be (re)computed. */
3215 #define VALUE_CHANGED(x) \
3216 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3217 /* Whether cur_loc in the decl needs to be (re)computed. */
3218 #define DECL_CHANGED(x) TREE_VISITED (x)
3220 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3221 user DECLs, this means they're in changed_variables. Values and
3222 debug exprs may be left with this flag set if no user variable
3223 requires them to be evaluated. */
3226 set_dv_changed (decl_or_value dv
, bool newv
)
3228 switch (dv_onepart_p (dv
))
3232 NO_LOC_P (dv_as_value (dv
)) = false;
3233 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
3238 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3239 /* Fall through... */
3242 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3247 /* Return true if DV needs to have its cur_loc recomputed. */
3250 dv_changed_p (decl_or_value dv
)
3252 return (dv_is_value_p (dv
)
3253 ? VALUE_CHANGED (dv_as_value (dv
))
3254 : DECL_CHANGED (dv_as_decl (dv
)));
3257 /* Return a location list node whose loc is rtx_equal to LOC, in the
3258 location list of a one-part variable or value VAR, or in that of
3259 any values recursively mentioned in the location lists. VARS must
3260 be in star-canonical form. */
3262 static location_chain
3263 find_loc_in_1pdv (rtx loc
, variable var
, variable_table_type
*vars
)
3265 location_chain node
;
3266 enum rtx_code loc_code
;
3271 gcc_checking_assert (var
->onepart
);
3273 if (!var
->n_var_parts
)
3276 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3278 loc_code
= GET_CODE (loc
);
3279 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3284 if (GET_CODE (node
->loc
) != loc_code
)
3286 if (GET_CODE (node
->loc
) != VALUE
)
3289 else if (loc
== node
->loc
)
3291 else if (loc_code
!= VALUE
)
3293 if (rtx_equal_p (loc
, node
->loc
))
3298 /* Since we're in star-canonical form, we don't need to visit
3299 non-canonical nodes: one-part variables and non-canonical
3300 values would only point back to the canonical node. */
3301 if (dv_is_value_p (var
->dv
)
3302 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3304 /* Skip all subsequent VALUEs. */
3305 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3308 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3309 dv_as_value (var
->dv
)));
3310 if (loc
== node
->loc
)
3316 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3317 gcc_checking_assert (!node
->next
);
3319 dv
= dv_from_value (node
->loc
);
3320 rvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
3321 return find_loc_in_1pdv (loc
, rvar
, vars
);
3324 /* ??? Gotta look in cselib_val locations too. */
3329 /* Hash table iteration argument passed to variable_merge. */
3332 /* The set in which the merge is to be inserted. */
3334 /* The set that we're iterating in. */
3336 /* The set that may contain the other dv we are to merge with. */
3338 /* Number of onepart dvs in src. */
3339 int src_onepart_cnt
;
3342 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3343 loc_cmp order, and it is maintained as such. */
3346 insert_into_intersection (location_chain
*nodep
, rtx loc
,
3347 enum var_init_status status
)
3349 location_chain node
;
3352 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3353 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
3355 node
->init
= MIN (node
->init
, status
);
3361 node
= new location_chain_def
;
3364 node
->set_src
= NULL
;
3365 node
->init
= status
;
3366 node
->next
= *nodep
;
3370 /* Insert in DEST the intersection of the locations present in both
3371 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3372 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3376 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
3377 location_chain s1node
, variable s2var
)
3379 dataflow_set
*s1set
= dsm
->cur
;
3380 dataflow_set
*s2set
= dsm
->src
;
3381 location_chain found
;
3385 location_chain s2node
;
3387 gcc_checking_assert (s2var
->onepart
);
3389 if (s2var
->n_var_parts
)
3391 s2node
= s2var
->var_part
[0].loc_chain
;
3393 for (; s1node
&& s2node
;
3394 s1node
= s1node
->next
, s2node
= s2node
->next
)
3395 if (s1node
->loc
!= s2node
->loc
)
3397 else if (s1node
->loc
== val
)
3400 insert_into_intersection (dest
, s1node
->loc
,
3401 MIN (s1node
->init
, s2node
->init
));
3405 for (; s1node
; s1node
= s1node
->next
)
3407 if (s1node
->loc
== val
)
3410 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3411 shared_hash_htab (s2set
->vars
))))
3413 insert_into_intersection (dest
, s1node
->loc
,
3414 MIN (s1node
->init
, found
->init
));
3418 if (GET_CODE (s1node
->loc
) == VALUE
3419 && !VALUE_RECURSED_INTO (s1node
->loc
))
3421 decl_or_value dv
= dv_from_value (s1node
->loc
);
3422 variable svar
= shared_hash_find (s1set
->vars
, dv
);
3425 if (svar
->n_var_parts
== 1)
3427 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3428 intersect_loc_chains (val
, dest
, dsm
,
3429 svar
->var_part
[0].loc_chain
,
3431 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3436 /* ??? gotta look in cselib_val locations too. */
3438 /* ??? if the location is equivalent to any location in src,
3439 searched recursively
3441 add to dst the values needed to represent the equivalence
3443 telling whether locations S is equivalent to another dv's
3446 for each location D in the list
3448 if S and D satisfy rtx_equal_p, then it is present
3450 else if D is a value, recurse without cycles
3452 else if S and D have the same CODE and MODE
3454 for each operand oS and the corresponding oD
3456 if oS and oD are not equivalent, then S an D are not equivalent
3458 else if they are RTX vectors
3460 if any vector oS element is not equivalent to its respective oD,
3461 then S and D are not equivalent
3469 /* Return -1 if X should be before Y in a location list for a 1-part
3470 variable, 1 if Y should be before X, and 0 if they're equivalent
3471 and should not appear in the list. */
3474 loc_cmp (rtx x
, rtx y
)
3477 RTX_CODE code
= GET_CODE (x
);
3487 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3488 if (REGNO (x
) == REGNO (y
))
3490 else if (REGNO (x
) < REGNO (y
))
3503 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3504 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
3510 if (GET_CODE (x
) == VALUE
)
3512 if (GET_CODE (y
) != VALUE
)
3514 /* Don't assert the modes are the same, that is true only
3515 when not recursing. (subreg:QI (value:SI 1:1) 0)
3516 and (subreg:QI (value:DI 2:2) 0) can be compared,
3517 even when the modes are different. */
3518 if (canon_value_cmp (x
, y
))
3524 if (GET_CODE (y
) == VALUE
)
3527 /* Entry value is the least preferable kind of expression. */
3528 if (GET_CODE (x
) == ENTRY_VALUE
)
3530 if (GET_CODE (y
) != ENTRY_VALUE
)
3532 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3533 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3536 if (GET_CODE (y
) == ENTRY_VALUE
)
3539 if (GET_CODE (x
) == GET_CODE (y
))
3540 /* Compare operands below. */;
3541 else if (GET_CODE (x
) < GET_CODE (y
))
3546 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3548 if (GET_CODE (x
) == DEBUG_EXPR
)
3550 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3551 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3553 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3554 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
3558 fmt
= GET_RTX_FORMAT (code
);
3559 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3563 if (XWINT (x
, i
) == XWINT (y
, i
))
3565 else if (XWINT (x
, i
) < XWINT (y
, i
))
3572 if (XINT (x
, i
) == XINT (y
, i
))
3574 else if (XINT (x
, i
) < XINT (y
, i
))
3581 /* Compare the vector length first. */
3582 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3583 /* Compare the vectors elements. */;
3584 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3589 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3590 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3591 XVECEXP (y
, i
, j
))))
3596 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3602 if (XSTR (x
, i
) == XSTR (y
, i
))
3608 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3616 /* These are just backpointers, so they don't matter. */
3623 /* It is believed that rtx's at this level will never
3624 contain anything but integers and other rtx's,
3625 except for within LABEL_REFs and SYMBOL_REFs. */
3629 if (CONST_WIDE_INT_P (x
))
3631 /* Compare the vector length first. */
3632 if (CONST_WIDE_INT_NUNITS (x
) >= CONST_WIDE_INT_NUNITS (y
))
3634 else if (CONST_WIDE_INT_NUNITS (x
) < CONST_WIDE_INT_NUNITS (y
))
3637 /* Compare the vectors elements. */;
3638 for (j
= CONST_WIDE_INT_NUNITS (x
) - 1; j
>= 0 ; j
--)
3640 if (CONST_WIDE_INT_ELT (x
, j
) < CONST_WIDE_INT_ELT (y
, j
))
3642 if (CONST_WIDE_INT_ELT (x
, j
) > CONST_WIDE_INT_ELT (y
, j
))
3651 /* Check the order of entries in one-part variables. */
3654 canonicalize_loc_order_check (variable_def
**slot
,
3655 dataflow_set
*data ATTRIBUTE_UNUSED
)
3657 variable var
= *slot
;
3658 location_chain node
, next
;
3660 #ifdef ENABLE_RTL_CHECKING
3662 for (i
= 0; i
< var
->n_var_parts
; i
++)
3663 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3664 gcc_assert (!var
->in_changed_variables
);
3670 gcc_assert (var
->n_var_parts
== 1);
3671 node
= var
->var_part
[0].loc_chain
;
3674 while ((next
= node
->next
))
3676 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3684 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3685 more likely to be chosen as canonical for an equivalence set.
3686 Ensure less likely values can reach more likely neighbors, making
3687 the connections bidirectional. */
3690 canonicalize_values_mark (variable_def
**slot
, dataflow_set
*set
)
3692 variable var
= *slot
;
3693 decl_or_value dv
= var
->dv
;
3695 location_chain node
;
3697 if (!dv_is_value_p (dv
))
3700 gcc_checking_assert (var
->n_var_parts
== 1);
3702 val
= dv_as_value (dv
);
3704 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3705 if (GET_CODE (node
->loc
) == VALUE
)
3707 if (canon_value_cmp (node
->loc
, val
))
3708 VALUE_RECURSED_INTO (val
) = true;
3711 decl_or_value odv
= dv_from_value (node
->loc
);
3712 variable_def
**oslot
;
3713 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3715 set_slot_part (set
, val
, oslot
, odv
, 0,
3716 node
->init
, NULL_RTX
);
3718 VALUE_RECURSED_INTO (node
->loc
) = true;
3725 /* Remove redundant entries from equivalence lists in onepart
3726 variables, canonicalizing equivalence sets into star shapes. */
3729 canonicalize_values_star (variable_def
**slot
, dataflow_set
*set
)
3731 variable var
= *slot
;
3732 decl_or_value dv
= var
->dv
;
3733 location_chain node
;
3736 variable_def
**cslot
;
3743 gcc_checking_assert (var
->n_var_parts
== 1);
3745 if (dv_is_value_p (dv
))
3747 cval
= dv_as_value (dv
);
3748 if (!VALUE_RECURSED_INTO (cval
))
3750 VALUE_RECURSED_INTO (cval
) = false;
3760 gcc_assert (var
->n_var_parts
== 1);
3762 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3763 if (GET_CODE (node
->loc
) == VALUE
)
3766 if (VALUE_RECURSED_INTO (node
->loc
))
3768 if (canon_value_cmp (node
->loc
, cval
))
3777 if (!has_marks
|| dv_is_decl_p (dv
))
3780 /* Keep it marked so that we revisit it, either after visiting a
3781 child node, or after visiting a new parent that might be
3783 VALUE_RECURSED_INTO (val
) = true;
3785 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3786 if (GET_CODE (node
->loc
) == VALUE
3787 && VALUE_RECURSED_INTO (node
->loc
))
3791 VALUE_RECURSED_INTO (cval
) = false;
3792 dv
= dv_from_value (cval
);
3793 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3796 gcc_assert (dv_is_decl_p (var
->dv
));
3797 /* The canonical value was reset and dropped.
3799 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3803 gcc_assert (dv_is_value_p (var
->dv
));
3804 if (var
->n_var_parts
== 0)
3806 gcc_assert (var
->n_var_parts
== 1);
3810 VALUE_RECURSED_INTO (val
) = false;
3815 /* Push values to the canonical one. */
3816 cdv
= dv_from_value (cval
);
3817 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3819 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3820 if (node
->loc
!= cval
)
3822 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3823 node
->init
, NULL_RTX
);
3824 if (GET_CODE (node
->loc
) == VALUE
)
3826 decl_or_value ndv
= dv_from_value (node
->loc
);
3828 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3831 if (canon_value_cmp (node
->loc
, val
))
3833 /* If it could have been a local minimum, it's not any more,
3834 since it's now neighbor to cval, so it may have to push
3835 to it. Conversely, if it wouldn't have prevailed over
3836 val, then whatever mark it has is fine: if it was to
3837 push, it will now push to a more canonical node, but if
3838 it wasn't, then it has already pushed any values it might
3840 VALUE_RECURSED_INTO (node
->loc
) = true;
3841 /* Make sure we visit node->loc by ensuring we cval is
3843 VALUE_RECURSED_INTO (cval
) = true;
3845 else if (!VALUE_RECURSED_INTO (node
->loc
))
3846 /* If we have no need to "recurse" into this node, it's
3847 already "canonicalized", so drop the link to the old
3849 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3851 else if (GET_CODE (node
->loc
) == REG
)
3853 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3855 /* Change an existing attribute referring to dv so that it
3856 refers to cdv, removing any duplicate this might
3857 introduce, and checking that no previous duplicates
3858 existed, all in a single pass. */
3862 if (list
->offset
== 0
3863 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3864 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3871 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3874 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3879 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3881 *listp
= list
->next
;
3887 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3890 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3892 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3897 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3899 *listp
= list
->next
;
3905 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3914 if (list
->offset
== 0
3915 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3916 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3926 set_slot_part (set
, val
, cslot
, cdv
, 0,
3927 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3929 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3931 /* Variable may have been unshared. */
3933 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3934 && var
->var_part
[0].loc_chain
->next
== NULL
);
3936 if (VALUE_RECURSED_INTO (cval
))
3937 goto restart_with_cval
;
3942 /* Bind one-part variables to the canonical value in an equivalence
3943 set. Not doing this causes dataflow convergence failure in rare
3944 circumstances, see PR42873. Unfortunately we can't do this
3945 efficiently as part of canonicalize_values_star, since we may not
3946 have determined or even seen the canonical value of a set when we
3947 get to a variable that references another member of the set. */
3950 canonicalize_vars_star (variable_def
**slot
, dataflow_set
*set
)
3952 variable var
= *slot
;
3953 decl_or_value dv
= var
->dv
;
3954 location_chain node
;
3957 variable_def
**cslot
;
3959 location_chain cnode
;
3961 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3964 gcc_assert (var
->n_var_parts
== 1);
3966 node
= var
->var_part
[0].loc_chain
;
3968 if (GET_CODE (node
->loc
) != VALUE
)
3971 gcc_assert (!node
->next
);
3974 /* Push values to the canonical one. */
3975 cdv
= dv_from_value (cval
);
3976 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3980 gcc_assert (cvar
->n_var_parts
== 1);
3982 cnode
= cvar
->var_part
[0].loc_chain
;
3984 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3985 that are not “more canonical” than it. */
3986 if (GET_CODE (cnode
->loc
) != VALUE
3987 || !canon_value_cmp (cnode
->loc
, cval
))
3990 /* CVAL was found to be non-canonical. Change the variable to point
3991 to the canonical VALUE. */
3992 gcc_assert (!cnode
->next
);
3995 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3996 node
->init
, node
->set_src
);
3997 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
4002 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
4003 corresponding entry in DSM->src. Multi-part variables are combined
4004 with variable_union, whereas onepart dvs are combined with
4008 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
4010 dataflow_set
*dst
= dsm
->dst
;
4011 variable_def
**dstslot
;
4012 variable s2var
, dvar
= NULL
;
4013 decl_or_value dv
= s1var
->dv
;
4014 onepart_enum_t onepart
= s1var
->onepart
;
4017 location_chain node
, *nodep
;
4019 /* If the incoming onepart variable has an empty location list, then
4020 the intersection will be just as empty. For other variables,
4021 it's always union. */
4022 gcc_checking_assert (s1var
->n_var_parts
4023 && s1var
->var_part
[0].loc_chain
);
4026 return variable_union (s1var
, dst
);
4028 gcc_checking_assert (s1var
->n_var_parts
== 1);
4030 dvhash
= dv_htab_hash (dv
);
4031 if (dv_is_value_p (dv
))
4032 val
= dv_as_value (dv
);
4036 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
4039 dst_can_be_shared
= false;
4043 dsm
->src_onepart_cnt
--;
4044 gcc_assert (s2var
->var_part
[0].loc_chain
4045 && s2var
->onepart
== onepart
4046 && s2var
->n_var_parts
== 1);
4048 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4052 gcc_assert (dvar
->refcount
== 1
4053 && dvar
->onepart
== onepart
4054 && dvar
->n_var_parts
== 1);
4055 nodep
= &dvar
->var_part
[0].loc_chain
;
4063 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
4065 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
4067 *dstslot
= dvar
= s2var
;
4072 dst_can_be_shared
= false;
4074 intersect_loc_chains (val
, nodep
, dsm
,
4075 s1var
->var_part
[0].loc_chain
, s2var
);
4081 dvar
= onepart_pool (onepart
).allocate ();
4084 dvar
->n_var_parts
= 1;
4085 dvar
->onepart
= onepart
;
4086 dvar
->in_changed_variables
= false;
4087 dvar
->var_part
[0].loc_chain
= node
;
4088 dvar
->var_part
[0].cur_loc
= NULL
;
4090 VAR_LOC_1PAUX (dvar
) = NULL
;
4092 VAR_PART_OFFSET (dvar
, 0) = 0;
4095 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
4097 gcc_assert (!*dstslot
);
4105 nodep
= &dvar
->var_part
[0].loc_chain
;
4106 while ((node
= *nodep
))
4108 location_chain
*nextp
= &node
->next
;
4110 if (GET_CODE (node
->loc
) == REG
)
4114 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4115 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4116 && dv_is_value_p (list
->dv
))
4120 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4122 /* If this value became canonical for another value that had
4123 this register, we want to leave it alone. */
4124 else if (dv_as_value (list
->dv
) != val
)
4126 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4128 node
->init
, NULL_RTX
);
4129 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4131 /* Since nextp points into the removed node, we can't
4132 use it. The pointer to the next node moved to nodep.
4133 However, if the variable we're walking is unshared
4134 during our walk, we'll keep walking the location list
4135 of the previously-shared variable, in which case the
4136 node won't have been removed, and we'll want to skip
4137 it. That's why we test *nodep here. */
4143 /* Canonicalization puts registers first, so we don't have to
4149 if (dvar
!= *dstslot
)
4151 nodep
= &dvar
->var_part
[0].loc_chain
;
4155 /* Mark all referenced nodes for canonicalization, and make sure
4156 we have mutual equivalence links. */
4157 VALUE_RECURSED_INTO (val
) = true;
4158 for (node
= *nodep
; node
; node
= node
->next
)
4159 if (GET_CODE (node
->loc
) == VALUE
)
4161 VALUE_RECURSED_INTO (node
->loc
) = true;
4162 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4163 node
->init
, NULL
, INSERT
);
4166 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4167 gcc_assert (*dstslot
== dvar
);
4168 canonicalize_values_star (dstslot
, dst
);
4169 gcc_checking_assert (dstslot
4170 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4176 bool has_value
= false, has_other
= false;
4178 /* If we have one value and anything else, we're going to
4179 canonicalize this, so make sure all values have an entry in
4180 the table and are marked for canonicalization. */
4181 for (node
= *nodep
; node
; node
= node
->next
)
4183 if (GET_CODE (node
->loc
) == VALUE
)
4185 /* If this was marked during register canonicalization,
4186 we know we have to canonicalize values. */
4201 if (has_value
&& has_other
)
4203 for (node
= *nodep
; node
; node
= node
->next
)
4205 if (GET_CODE (node
->loc
) == VALUE
)
4207 decl_or_value dv
= dv_from_value (node
->loc
);
4208 variable_def
**slot
= NULL
;
4210 if (shared_hash_shared (dst
->vars
))
4211 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4213 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4217 variable var
= onepart_pool (ONEPART_VALUE
).allocate ();
4220 var
->n_var_parts
= 1;
4221 var
->onepart
= ONEPART_VALUE
;
4222 var
->in_changed_variables
= false;
4223 var
->var_part
[0].loc_chain
= NULL
;
4224 var
->var_part
[0].cur_loc
= NULL
;
4225 VAR_LOC_1PAUX (var
) = NULL
;
4229 VALUE_RECURSED_INTO (node
->loc
) = true;
4233 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4234 gcc_assert (*dstslot
== dvar
);
4235 canonicalize_values_star (dstslot
, dst
);
4236 gcc_checking_assert (dstslot
4237 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4243 if (!onepart_variable_different_p (dvar
, s2var
))
4245 variable_htab_free (dvar
);
4246 *dstslot
= dvar
= s2var
;
4249 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4251 variable_htab_free (dvar
);
4252 *dstslot
= dvar
= s1var
;
4254 dst_can_be_shared
= false;
4257 dst_can_be_shared
= false;
4262 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4263 multi-part variable. Unions of multi-part variables and
4264 intersections of one-part ones will be handled in
4265 variable_merge_over_cur(). */
4268 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
4270 dataflow_set
*dst
= dsm
->dst
;
4271 decl_or_value dv
= s2var
->dv
;
4273 if (!s2var
->onepart
)
4275 variable_def
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4281 dsm
->src_onepart_cnt
++;
4285 /* Combine dataflow set information from SRC2 into DST, using PDST
4286 to carry over information across passes. */
4289 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4291 dataflow_set cur
= *dst
;
4292 dataflow_set
*src1
= &cur
;
4293 struct dfset_merge dsm
;
4295 size_t src1_elems
, src2_elems
;
4296 variable_iterator_type hi
;
4299 src1_elems
= shared_hash_htab (src1
->vars
)->elements ();
4300 src2_elems
= shared_hash_htab (src2
->vars
)->elements ();
4301 dataflow_set_init (dst
);
4302 dst
->stack_adjust
= cur
.stack_adjust
;
4303 shared_hash_destroy (dst
->vars
);
4304 dst
->vars
= new shared_hash_def
;
4305 dst
->vars
->refcount
= 1;
4306 dst
->vars
->htab
= new variable_table_type (MAX (src1_elems
, src2_elems
));
4308 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4309 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4314 dsm
.src_onepart_cnt
= 0;
4316 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.src
->vars
),
4318 variable_merge_over_src (var
, &dsm
);
4319 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.cur
->vars
),
4321 variable_merge_over_cur (var
, &dsm
);
4323 if (dsm
.src_onepart_cnt
)
4324 dst_can_be_shared
= false;
4326 dataflow_set_destroy (src1
);
4329 /* Mark register equivalences. */
4332 dataflow_set_equiv_regs (dataflow_set
*set
)
4337 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4339 rtx canon
[NUM_MACHINE_MODES
];
4341 /* If the list is empty or one entry, no need to canonicalize
4343 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4346 memset (canon
, 0, sizeof (canon
));
4348 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4349 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4351 rtx val
= dv_as_value (list
->dv
);
4352 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4355 if (canon_value_cmp (val
, cval
))
4359 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4360 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4362 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4367 if (dv_is_value_p (list
->dv
))
4369 rtx val
= dv_as_value (list
->dv
);
4374 VALUE_RECURSED_INTO (val
) = true;
4375 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4376 VAR_INIT_STATUS_INITIALIZED
,
4380 VALUE_RECURSED_INTO (cval
) = true;
4381 set_variable_part (set
, cval
, list
->dv
, 0,
4382 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4385 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4386 listp
= list
? &list
->next
: listp
)
4387 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4389 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4390 variable_def
**slot
;
4395 if (dv_is_value_p (list
->dv
))
4397 rtx val
= dv_as_value (list
->dv
);
4398 if (!VALUE_RECURSED_INTO (val
))
4402 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4403 canonicalize_values_star (slot
, set
);
4410 /* Remove any redundant values in the location list of VAR, which must
4411 be unshared and 1-part. */
4414 remove_duplicate_values (variable var
)
4416 location_chain node
, *nodep
;
4418 gcc_assert (var
->onepart
);
4419 gcc_assert (var
->n_var_parts
== 1);
4420 gcc_assert (var
->refcount
== 1);
4422 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4424 if (GET_CODE (node
->loc
) == VALUE
)
4426 if (VALUE_RECURSED_INTO (node
->loc
))
4428 /* Remove duplicate value node. */
4429 *nodep
= node
->next
;
4434 VALUE_RECURSED_INTO (node
->loc
) = true;
4436 nodep
= &node
->next
;
4439 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4440 if (GET_CODE (node
->loc
) == VALUE
)
4442 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4443 VALUE_RECURSED_INTO (node
->loc
) = false;
4448 /* Hash table iteration argument passed to variable_post_merge. */
4449 struct dfset_post_merge
4451 /* The new input set for the current block. */
4453 /* Pointer to the permanent input set for the current block, or
4455 dataflow_set
**permp
;
4458 /* Create values for incoming expressions associated with one-part
4459 variables that don't have value numbers for them. */
4462 variable_post_merge_new_vals (variable_def
**slot
, dfset_post_merge
*dfpm
)
4464 dataflow_set
*set
= dfpm
->set
;
4465 variable var
= *slot
;
4466 location_chain node
;
4468 if (!var
->onepart
|| !var
->n_var_parts
)
4471 gcc_assert (var
->n_var_parts
== 1);
4473 if (dv_is_decl_p (var
->dv
))
4475 bool check_dupes
= false;
4478 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4480 if (GET_CODE (node
->loc
) == VALUE
)
4481 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4482 else if (GET_CODE (node
->loc
) == REG
)
4484 attrs att
, *attp
, *curp
= NULL
;
4486 if (var
->refcount
!= 1)
4488 slot
= unshare_variable (set
, slot
, var
,
4489 VAR_INIT_STATUS_INITIALIZED
);
4494 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4496 if (att
->offset
== 0
4497 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4499 if (dv_is_value_p (att
->dv
))
4501 rtx cval
= dv_as_value (att
->dv
);
4506 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4514 if ((*curp
)->offset
== 0
4515 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4516 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4519 curp
= &(*curp
)->next
;
4530 *dfpm
->permp
= XNEW (dataflow_set
);
4531 dataflow_set_init (*dfpm
->permp
);
4534 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4535 att
; att
= att
->next
)
4536 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4538 gcc_assert (att
->offset
== 0
4539 && dv_is_value_p (att
->dv
));
4540 val_reset (set
, att
->dv
);
4547 cval
= dv_as_value (cdv
);
4551 /* Create a unique value to hold this register,
4552 that ought to be found and reused in
4553 subsequent rounds. */
4555 gcc_assert (!cselib_lookup (node
->loc
,
4556 GET_MODE (node
->loc
), 0,
4558 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4560 cselib_preserve_value (v
);
4561 cselib_invalidate_rtx (node
->loc
);
4563 cdv
= dv_from_value (cval
);
4566 "Created new value %u:%u for reg %i\n",
4567 v
->uid
, v
->hash
, REGNO (node
->loc
));
4570 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4571 VAR_INIT_STATUS_INITIALIZED
,
4572 cdv
, 0, NULL
, INSERT
);
4578 /* Remove attribute referring to the decl, which now
4579 uses the value for the register, already existing or
4580 to be added when we bring perm in. */
4588 remove_duplicate_values (var
);
4594 /* Reset values in the permanent set that are not associated with the
4595 chosen expression. */
4598 variable_post_merge_perm_vals (variable_def
**pslot
, dfset_post_merge
*dfpm
)
4600 dataflow_set
*set
= dfpm
->set
;
4601 variable pvar
= *pslot
, var
;
4602 location_chain pnode
;
4606 gcc_assert (dv_is_value_p (pvar
->dv
)
4607 && pvar
->n_var_parts
== 1);
4608 pnode
= pvar
->var_part
[0].loc_chain
;
4611 && REG_P (pnode
->loc
));
4615 var
= shared_hash_find (set
->vars
, dv
);
4618 /* Although variable_post_merge_new_vals may have made decls
4619 non-star-canonical, values that pre-existed in canonical form
4620 remain canonical, and newly-created values reference a single
4621 REG, so they are canonical as well. Since VAR has the
4622 location list for a VALUE, using find_loc_in_1pdv for it is
4623 fine, since VALUEs don't map back to DECLs. */
4624 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4626 val_reset (set
, dv
);
4629 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4630 if (att
->offset
== 0
4631 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4632 && dv_is_value_p (att
->dv
))
4635 /* If there is a value associated with this register already, create
4637 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4639 rtx cval
= dv_as_value (att
->dv
);
4640 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4641 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4646 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4648 variable_union (pvar
, set
);
4654 /* Just checking stuff and registering register attributes for
4658 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4660 struct dfset_post_merge dfpm
;
4665 shared_hash_htab (set
->vars
)
4666 ->traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4668 shared_hash_htab ((*permp
)->vars
)
4669 ->traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4670 shared_hash_htab (set
->vars
)
4671 ->traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4672 shared_hash_htab (set
->vars
)
4673 ->traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4676 /* Return a node whose loc is a MEM that refers to EXPR in the
4677 location list of a one-part variable or value VAR, or in that of
4678 any values recursively mentioned in the location lists. */
4680 static location_chain
4681 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type
*vars
)
4683 location_chain node
;
4686 location_chain where
= NULL
;
4691 gcc_assert (GET_CODE (val
) == VALUE
4692 && !VALUE_RECURSED_INTO (val
));
4694 dv
= dv_from_value (val
);
4695 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
4700 gcc_assert (var
->onepart
);
4702 if (!var
->n_var_parts
)
4705 VALUE_RECURSED_INTO (val
) = true;
4707 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4708 if (MEM_P (node
->loc
)
4709 && MEM_EXPR (node
->loc
) == expr
4710 && INT_MEM_OFFSET (node
->loc
) == 0)
4715 else if (GET_CODE (node
->loc
) == VALUE
4716 && !VALUE_RECURSED_INTO (node
->loc
)
4717 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4720 VALUE_RECURSED_INTO (val
) = false;
4725 /* Return TRUE if the value of MEM may vary across a call. */
4728 mem_dies_at_call (rtx mem
)
4730 tree expr
= MEM_EXPR (mem
);
4736 decl
= get_base_address (expr
);
4744 return (may_be_aliased (decl
)
4745 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4748 /* Remove all MEMs from the location list of a hash table entry for a
4749 one-part variable, except those whose MEM attributes map back to
4750 the variable itself, directly or within a VALUE. */
4753 dataflow_set_preserve_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4755 variable var
= *slot
;
4757 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4759 tree decl
= dv_as_decl (var
->dv
);
4760 location_chain loc
, *locp
;
4761 bool changed
= false;
4763 if (!var
->n_var_parts
)
4766 gcc_assert (var
->n_var_parts
== 1);
4768 if (shared_var_p (var
, set
->vars
))
4770 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4772 /* We want to remove dying MEMs that doesn't refer to DECL. */
4773 if (GET_CODE (loc
->loc
) == MEM
4774 && (MEM_EXPR (loc
->loc
) != decl
4775 || INT_MEM_OFFSET (loc
->loc
) != 0)
4776 && !mem_dies_at_call (loc
->loc
))
4778 /* We want to move here MEMs that do refer to DECL. */
4779 else if (GET_CODE (loc
->loc
) == VALUE
4780 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4781 shared_hash_htab (set
->vars
)))
4788 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4790 gcc_assert (var
->n_var_parts
== 1);
4793 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4796 rtx old_loc
= loc
->loc
;
4797 if (GET_CODE (old_loc
) == VALUE
)
4799 location_chain mem_node
4800 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4801 shared_hash_htab (set
->vars
));
4803 /* ??? This picks up only one out of multiple MEMs that
4804 refer to the same variable. Do we ever need to be
4805 concerned about dealing with more than one, or, given
4806 that they should all map to the same variable
4807 location, their addresses will have been merged and
4808 they will be regarded as equivalent? */
4811 loc
->loc
= mem_node
->loc
;
4812 loc
->set_src
= mem_node
->set_src
;
4813 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4817 if (GET_CODE (loc
->loc
) != MEM
4818 || (MEM_EXPR (loc
->loc
) == decl
4819 && INT_MEM_OFFSET (loc
->loc
) == 0)
4820 || !mem_dies_at_call (loc
->loc
))
4822 if (old_loc
!= loc
->loc
&& emit_notes
)
4824 if (old_loc
== var
->var_part
[0].cur_loc
)
4827 var
->var_part
[0].cur_loc
= NULL
;
4836 if (old_loc
== var
->var_part
[0].cur_loc
)
4839 var
->var_part
[0].cur_loc
= NULL
;
4846 if (!var
->var_part
[0].loc_chain
)
4852 variable_was_changed (var
, set
);
4858 /* Remove all MEMs from the location list of a hash table entry for a
4862 dataflow_set_remove_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4864 variable var
= *slot
;
4866 if (var
->onepart
== ONEPART_VALUE
)
4868 location_chain loc
, *locp
;
4869 bool changed
= false;
4872 gcc_assert (var
->n_var_parts
== 1);
4874 if (shared_var_p (var
, set
->vars
))
4876 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4877 if (GET_CODE (loc
->loc
) == MEM
4878 && mem_dies_at_call (loc
->loc
))
4884 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4886 gcc_assert (var
->n_var_parts
== 1);
4889 if (VAR_LOC_1PAUX (var
))
4890 cur_loc
= VAR_LOC_FROM (var
);
4892 cur_loc
= var
->var_part
[0].cur_loc
;
4894 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4897 if (GET_CODE (loc
->loc
) != MEM
4898 || !mem_dies_at_call (loc
->loc
))
4905 /* If we have deleted the location which was last emitted
4906 we have to emit new location so add the variable to set
4907 of changed variables. */
4908 if (cur_loc
== loc
->loc
)
4911 var
->var_part
[0].cur_loc
= NULL
;
4912 if (VAR_LOC_1PAUX (var
))
4913 VAR_LOC_FROM (var
) = NULL
;
4918 if (!var
->var_part
[0].loc_chain
)
4924 variable_was_changed (var
, set
);
4930 /* Remove all variable-location information about call-clobbered
4931 registers, as well as associations between MEMs and VALUEs. */
4934 dataflow_set_clear_at_call (dataflow_set
*set
)
4937 hard_reg_set_iterator hrsi
;
4939 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call
, 0, r
, hrsi
)
4940 var_regno_delete (set
, r
);
4942 if (MAY_HAVE_DEBUG_INSNS
)
4944 set
->traversed_vars
= set
->vars
;
4945 shared_hash_htab (set
->vars
)
4946 ->traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4947 set
->traversed_vars
= set
->vars
;
4948 shared_hash_htab (set
->vars
)
4949 ->traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4950 set
->traversed_vars
= NULL
;
4955 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4957 location_chain lc1
, lc2
;
4959 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4961 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4963 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4965 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4968 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4977 /* Return true if one-part variables VAR1 and VAR2 are different.
4978 They must be in canonical order. */
4981 onepart_variable_different_p (variable var1
, variable var2
)
4983 location_chain lc1
, lc2
;
4988 gcc_assert (var1
->n_var_parts
== 1
4989 && var2
->n_var_parts
== 1);
4991 lc1
= var1
->var_part
[0].loc_chain
;
4992 lc2
= var2
->var_part
[0].loc_chain
;
4994 gcc_assert (lc1
&& lc2
);
4998 if (loc_cmp (lc1
->loc
, lc2
->loc
))
5007 /* Return true if variables VAR1 and VAR2 are different. */
5010 variable_different_p (variable var1
, variable var2
)
5017 if (var1
->onepart
!= var2
->onepart
)
5020 if (var1
->n_var_parts
!= var2
->n_var_parts
)
5023 if (var1
->onepart
&& var1
->n_var_parts
)
5025 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
5026 && var1
->n_var_parts
== 1);
5027 /* One-part values have locations in a canonical order. */
5028 return onepart_variable_different_p (var1
, var2
);
5031 for (i
= 0; i
< var1
->n_var_parts
; i
++)
5033 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
5035 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
5037 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
5043 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5046 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
5048 variable_iterator_type hi
;
5051 if (old_set
->vars
== new_set
->vars
)
5054 if (shared_hash_htab (old_set
->vars
)->elements ()
5055 != shared_hash_htab (new_set
->vars
)->elements ())
5058 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set
->vars
),
5061 variable_table_type
*htab
= shared_hash_htab (new_set
->vars
);
5062 variable var2
= htab
->find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
5065 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5067 fprintf (dump_file
, "dataflow difference found: removal of:\n");
5073 if (variable_different_p (var1
, var2
))
5075 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5077 fprintf (dump_file
, "dataflow difference found: "
5078 "old and new follow:\n");
5086 /* No need to traverse the second hashtab, if both have the same number
5087 of elements and the second one had all entries found in the first one,
5088 then it can't have any extra entries. */
5092 /* Free the contents of dataflow set SET. */
5095 dataflow_set_destroy (dataflow_set
*set
)
5099 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5100 attrs_list_clear (&set
->regs
[i
]);
5102 shared_hash_destroy (set
->vars
);
5106 /* Return true if RTL X contains a SYMBOL_REF. */
5109 contains_symbol_ref (rtx x
)
5118 code
= GET_CODE (x
);
5119 if (code
== SYMBOL_REF
)
5122 fmt
= GET_RTX_FORMAT (code
);
5123 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5127 if (contains_symbol_ref (XEXP (x
, i
)))
5130 else if (fmt
[i
] == 'E')
5133 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5134 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
5142 /* Shall EXPR be tracked? */
5145 track_expr_p (tree expr
, bool need_rtl
)
5150 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5151 return DECL_RTL_SET_P (expr
);
5153 /* If EXPR is not a parameter or a variable do not track it. */
5154 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
5157 /* It also must have a name... */
5158 if (!DECL_NAME (expr
) && need_rtl
)
5161 /* ... and a RTL assigned to it. */
5162 decl_rtl
= DECL_RTL_IF_SET (expr
);
5163 if (!decl_rtl
&& need_rtl
)
5166 /* If this expression is really a debug alias of some other declaration, we
5167 don't need to track this expression if the ultimate declaration is
5170 if (TREE_CODE (realdecl
) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (realdecl
))
5172 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5173 if (!DECL_P (realdecl
))
5175 if (handled_component_p (realdecl
)
5176 || (TREE_CODE (realdecl
) == MEM_REF
5177 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5179 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5181 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5183 if (!DECL_P (innerdecl
)
5184 || DECL_IGNORED_P (innerdecl
)
5185 /* Do not track declarations for parts of tracked parameters
5186 since we want to track them as a whole instead. */
5187 || (TREE_CODE (innerdecl
) == PARM_DECL
5188 && DECL_MODE (innerdecl
) != BLKmode
5189 && TREE_CODE (TREE_TYPE (innerdecl
)) != UNION_TYPE
)
5190 || TREE_STATIC (innerdecl
)
5192 || bitpos
+ bitsize
> 256
5193 || bitsize
!= maxsize
)
5203 /* Do not track EXPR if REALDECL it should be ignored for debugging
5205 if (DECL_IGNORED_P (realdecl
))
5208 /* Do not track global variables until we are able to emit correct location
5210 if (TREE_STATIC (realdecl
))
5213 /* When the EXPR is a DECL for alias of some variable (see example)
5214 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5215 DECL_RTL contains SYMBOL_REF.
5218 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5221 if (decl_rtl
&& MEM_P (decl_rtl
)
5222 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
5225 /* If RTX is a memory it should not be very large (because it would be
5226 an array or struct). */
5227 if (decl_rtl
&& MEM_P (decl_rtl
))
5229 /* Do not track structures and arrays. */
5230 if (GET_MODE (decl_rtl
) == BLKmode
5231 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5233 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5234 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5238 DECL_CHANGED (expr
) = 0;
5239 DECL_CHANGED (realdecl
) = 0;
5243 /* Determine whether a given LOC refers to the same variable part as
5247 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
5250 HOST_WIDE_INT offset2
;
5252 if (! DECL_P (expr
))
5257 expr2
= REG_EXPR (loc
);
5258 offset2
= REG_OFFSET (loc
);
5260 else if (MEM_P (loc
))
5262 expr2
= MEM_EXPR (loc
);
5263 offset2
= INT_MEM_OFFSET (loc
);
5268 if (! expr2
|| ! DECL_P (expr2
))
5271 expr
= var_debug_decl (expr
);
5272 expr2
= var_debug_decl (expr2
);
5274 return (expr
== expr2
&& offset
== offset2
);
5277 /* LOC is a REG or MEM that we would like to track if possible.
5278 If EXPR is null, we don't know what expression LOC refers to,
5279 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5280 LOC is an lvalue register.
5282 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5283 is something we can track. When returning true, store the mode of
5284 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5285 from EXPR in *OFFSET_OUT (if nonnull). */
5288 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
5289 machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5293 if (expr
== NULL
|| !track_expr_p (expr
, true))
5296 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5297 whole subreg, but only the old inner part is really relevant. */
5298 mode
= GET_MODE (loc
);
5299 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5301 machine_mode pseudo_mode
;
5303 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5304 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
5306 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5311 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5312 Do the same if we are storing to a register and EXPR occupies
5313 the whole of register LOC; in that case, the whole of EXPR is
5314 being changed. We exclude complex modes from the second case
5315 because the real and imaginary parts are represented as separate
5316 pseudo registers, even if the whole complex value fits into one
5318 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
5320 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5321 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
5322 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
5324 mode
= DECL_MODE (expr
);
5328 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
5334 *offset_out
= offset
;
5338 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5339 want to track. When returning nonnull, make sure that the attributes
5340 on the returned value are updated. */
5343 var_lowpart (machine_mode mode
, rtx loc
)
5345 unsigned int offset
, reg_offset
, regno
;
5347 if (GET_MODE (loc
) == mode
)
5350 if (!REG_P (loc
) && !MEM_P (loc
))
5353 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5356 return adjust_address_nv (loc
, mode
, offset
);
5358 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5359 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5361 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5364 /* Carry information about uses and stores while walking rtx. */
5366 struct count_use_info
5368 /* The insn where the RTX is. */
5371 /* The basic block where insn is. */
5374 /* The array of n_sets sets in the insn, as determined by cselib. */
5375 struct cselib_set
*sets
;
5378 /* True if we're counting stores, false otherwise. */
5382 /* Find a VALUE corresponding to X. */
5384 static inline cselib_val
*
5385 find_use_val (rtx x
, machine_mode mode
, struct count_use_info
*cui
)
5391 /* This is called after uses are set up and before stores are
5392 processed by cselib, so it's safe to look up srcs, but not
5393 dsts. So we look up expressions that appear in srcs or in
5394 dest expressions, but we search the sets array for dests of
5398 /* Some targets represent memset and memcpy patterns
5399 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5400 (set (mem:BLK ...) (const_int ...)) or
5401 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5402 in that case, otherwise we end up with mode mismatches. */
5403 if (mode
== BLKmode
&& MEM_P (x
))
5405 for (i
= 0; i
< cui
->n_sets
; i
++)
5406 if (cui
->sets
[i
].dest
== x
)
5407 return cui
->sets
[i
].src_elt
;
5410 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5416 /* Replace all registers and addresses in an expression with VALUE
5417 expressions that map back to them, unless the expression is a
5418 register. If no mapping is or can be performed, returns NULL. */
5421 replace_expr_with_values (rtx loc
)
5423 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5425 else if (MEM_P (loc
))
5427 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5428 get_address_mode (loc
), 0,
5431 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5436 return cselib_subst_to_values (loc
, VOIDmode
);
5439 /* Return true if X contains a DEBUG_EXPR. */
5442 rtx_debug_expr_p (const_rtx x
)
5444 subrtx_iterator::array_type array
;
5445 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5446 if (GET_CODE (*iter
) == DEBUG_EXPR
)
5451 /* Determine what kind of micro operation to choose for a USE. Return
5452 MO_CLOBBER if no micro operation is to be generated. */
/* NOTE(review): many interior lines of this function are elided in this
   excerpt (gaps in the embedded original numbering), so several
   conditions, braces and return statements are not visible here.  */
5454 static enum micro_operation_type
5455 use_type (rtx loc
, struct count_use_info
*cui
, machine_mode
*modep
)
5459 if (cui
&& cui
->sets
)
5461 if (GET_CODE (loc
) == VAR_LOCATION
)
5463 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5465 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5466 if (! VAR_LOC_UNKNOWN_P (ploc
))
5468 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5471 /* ??? flag_float_store and volatile mems are never
5472 given values, but we could in theory use them for
   locations.  */
5474 gcc_assert (val
|| 1);
/* REG/MEM uses while scanning sets: decide between value tracking,
   plain variable tracking, or no-var tracking.  */
5482 if (REG_P (loc
) || MEM_P (loc
))
5485 *modep
= GET_MODE (loc
);
5489 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5490 && cselib_lookup (XEXP (loc
, 0),
5491 get_address_mode (loc
), 0,
5497 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5499 if (val
&& !cselib_preserved_value_p (val
))
/* Hard registers only are expected at this point.  */
5507 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5509 if (loc
== cfa_base_rtx
)
5511 expr
= REG_EXPR (loc
);
5514 return MO_USE_NO_VAR
;
5515 else if (target_for_debug_bind (var_debug_decl (expr
)))
5517 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5518 false, modep
, NULL
))
5521 return MO_USE_NO_VAR
;
5523 else if (MEM_P (loc
))
5525 expr
= MEM_EXPR (loc
);
5529 else if (target_for_debug_bind (var_debug_decl (expr
)))
5531 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
5533 /* Multi-part variables shouldn't refer to one-part
5534 variable names such as VALUEs (never happens) or
5535 DEBUG_EXPRs (only happens in the presence of debug
   insns).  */
5537 && (!MAY_HAVE_DEBUG_INSNS
5538 || !rtx_debug_expr_p (XEXP (loc
, 0))))
5547 /* Log to OUT information about micro-operation MOPT involving X in
   insn INSN of basic block BB.  */
5551 log_op_type (rtx x
, basic_block bb
, rtx_insn
*insn
,
5552 enum micro_operation_type mopt
, FILE *out
)
/* Emit "bb <index> op <mo-count> insn <uid> <type-name> ", then X.  */
5554 fprintf (out
, "bb %i op %i insn %i %s ",
5555 bb
->index
, VTI (bb
)->mos
.length (),
5556 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5557 print_inline_rtx (out
, x
, 2);
5561 /* Tell whether the CONCAT used to hold a VALUE and its location
5562 needs value resolution, i.e., an attempt of mapping the location
5563 back to other incoming values. */
5564 #define VAL_NEEDS_RESOLUTION(x) \
5565 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5566 /* Whether the location in the CONCAT is a tracked expression, that
5567 should also be handled like a MO_USE. */
5568 #define VAL_HOLDS_TRACK_EXPR(x) \
5569 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5570 /* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
5572 #define VAL_EXPR_IS_COPIED(x) \
5573 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5574 /* Whether the location in the CONCAT should be handled like a
5575 MO_CLOBBER as well. */
5576 #define VAL_EXPR_IS_CLOBBERED(x) \
5577 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5579 /* All preserved VALUEs. */
5580 static vec
<rtx
> preserved_values
;
5582 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
/* NOTE(review): the "static void" line and the function braces are
   elided in this excerpt.  */
5585 preserve_value (cselib_val
*val
)
/* Mark VAL preserved in cselib, then record its VALUE rtx so later
   note emission can iterate over all preserved values.  */
5587 cselib_preserve_value (val
);
5588 preserved_values
.safe_push (val
->val_rtx
);
5591 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5592 any rtxes not suitable for CONST use not replaced by VALUEs
   are present in X.  */
5596 non_suitable_const (const_rtx x
)
5598 subrtx_iterator::array_type array
;
5599 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
/* The inner X intentionally shadows the parameter while inspecting
   each sub-rtx.  */
5601 const_rtx x
= *iter
;
5602 switch (GET_CODE (x
))
/* A MEM is only acceptable in CONST context when read-only.  */
5613 if (!MEM_READONLY_P (x
))
5623 /* Add uses (register and memory references) LOC which will be tracked
5624 to VTI (bb)->mos. */
/* NOTE(review): many interior lines of this function are elided in this
   excerpt (gaps in the embedded original numbering) — braces, some
   declarations and conditions are missing below.  Only comments are
   added here.  */
5627 add_uses (rtx loc
, struct count_use_info
*cui
)
5629 machine_mode mode
= VOIDmode
;
5630 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5632 if (type
!= MO_CLOBBER
)
5634 basic_block bb
= cui
->bb
;
5638 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5639 mo
.insn
= cui
->insn
;
/* MO_VAL_LOC: a VAR_LOCATION pattern in a debug insn.  */
5641 if (type
== MO_VAL_LOC
)
5644 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5647 gcc_assert (cui
->sets
);
5650 && !REG_P (XEXP (vloc
, 0))
5651 && !MEM_P (XEXP (vloc
, 0)))
5654 machine_mode address_mode
= get_address_mode (mloc
);
5656 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5659 if (val
&& !cselib_preserved_value_p (val
))
5660 preserve_value (val
);
5663 if (CONSTANT_P (vloc
)
5664 && (GET_CODE (vloc
) != CONST
|| non_suitable_const (vloc
)))
5665 /* For constants don't look up any value. */;
5666 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5667 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5670 enum micro_operation_type type2
;
5672 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5675 nloc
= replace_expr_with_values (vloc
);
5679 oloc
= shallow_copy_rtx (oloc
);
5680 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5683 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5685 type2
= use_type (vloc
, 0, &mode2
);
5687 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5688 || type2
== MO_CLOBBER
);
5690 if (type2
== MO_CLOBBER
5691 && !cselib_preserved_value_p (val
))
5693 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5694 preserve_value (val
);
5697 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5699 oloc
= shallow_copy_rtx (oloc
);
5700 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
/* MO_VAL_USE: a use that also needs its VALUE tracked.  */
5705 else if (type
== MO_VAL_USE
)
5707 machine_mode mode2
= VOIDmode
;
5708 enum micro_operation_type type2
;
5709 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5710 rtx vloc
, oloc
= loc
, nloc
;
5712 gcc_assert (cui
->sets
);
5715 && !REG_P (XEXP (oloc
, 0))
5716 && !MEM_P (XEXP (oloc
, 0)))
5719 machine_mode address_mode
= get_address_mode (mloc
);
5721 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5724 if (val
&& !cselib_preserved_value_p (val
))
5725 preserve_value (val
);
5728 type2
= use_type (loc
, 0, &mode2
);
5730 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5731 || type2
== MO_CLOBBER
);
5733 if (type2
== MO_USE
)
5734 vloc
= var_lowpart (mode2
, loc
);
5738 /* The loc of a MO_VAL_USE may have two forms:
5740 (concat val src): val is at src, a value-based
5743 (concat (concat val use) src): same as above, with use as
5744 the MO_USE tracked value, if it differs from src.
   */
5748 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5749 nloc
= replace_expr_with_values (loc
);
5754 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5756 oloc
= val
->val_rtx
;
5758 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5760 if (type2
== MO_USE
)
5761 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5762 if (!cselib_preserved_value_p (val
))
5764 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5765 preserve_value (val
);
/* Plain MO_USE / MO_USE_NO_VAR: push the micro operation as-is.  */
5769 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5771 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5772 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5773 VTI (bb
)->mos
.safe_push (mo
);
5777 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
/* NOTE(review): the "static void" line and the function braces are
   elided in this excerpt.  */
5780 add_uses_1 (rtx
*x
, void *cui
)
5782 subrtx_var_iterator::array_type array
;
/* Visit each non-constant sub-rtx of *X and record it as a use.  */
5783 FOR_EACH_SUBRTX_VAR (iter
, array
, *x
, NONCONST
)
5784 add_uses (*iter
, (struct count_use_info
*) cui
);
5787 /* This is the value used during expansion of locations. We want it
5788 to be unbounded, so that variables expanded deep in a recursion
5789 nest are fully evaluated, so that their values are cached
5790 correctly. We avoid recursion cycles through other means, and we
5791 don't unshare RTL, so excess complexity is not a problem. */
5792 #define EXPR_DEPTH (INT_MAX)
5793 /* We use this to keep too-complex expressions from being emitted as
5794 location notes, and then to debug information. Users can trade
5795 compile time for ridiculously complex expressions, although they're
5796 seldom useful, and they may often have to be discarded as not
5797 representable anyway. */
/* User-tunable via the PARAM_MAX_VARTRACK_EXPR_DEPTH --param.  */
5798 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5800 /* Attempt to reverse the EXPR operation in the debug info and record
5801 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5802 no longer live we can express its value as VAL - 6. */
/* NOTE(review): interior lines of this function are elided in this
   excerpt (gaps in the embedded original numbering) — the early
   returns, some case labels and braces are missing below.  */
5805 reverse_op (rtx val
, const_rtx expr
, rtx_insn
*insn
)
5809 struct elt_loc_list
*l
;
/* Only plain register SETs in VAL's own mode are candidates.  */
5813 if (GET_CODE (expr
) != SET
)
5816 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5819 src
= SET_SRC (expr
);
5820 switch (GET_CODE (src
))
5827 if (!REG_P (XEXP (src
, 0)))
5832 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5839 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
/* The source operand must already have a preserved VALUE.  */
5842 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5843 if (!v
|| !cselib_preserved_value_p (v
))
5846 /* Use canonical V to avoid creating multiple redundant expressions
5847 for different VALUES equivalent to V. */
5848 v
= canonical_cselib_val (v
);
5850 /* Adding a reverse op isn't useful if V already has an always valid
5851 location. Ignore ENTRY_VALUE, while it is always constant, we should
5852 prefer non-ENTRY_VALUE locations whenever possible. */
5853 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5854 if (CONSTANT_P (l
->loc
)
5855 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5857 /* Avoid creating too large locs lists. */
5858 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
/* Build the inverse expression RET for each supported operation.  */
5861 switch (GET_CODE (src
))
5865 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5867 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5871 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5883 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5885 arg
= XEXP (src
, 1);
5886 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5888 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5889 if (arg
== NULL_RTX
)
5891 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5894 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5896 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5897 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5898 breaks a lot of routines during var-tracking. */
5899 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
/* Record RET as a permanent equivalence of V.  */
5905 cselib_add_permanent_equiv (v
, ret
, insn
);
5908 /* Add stores (register and memory references) LOC which will be tracked
5909 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5910 CUIP->insn is instruction which the LOC is part of. */
5913 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5915 machine_mode mode
= VOIDmode
, mode2
;
5916 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5917 basic_block bb
= cui
->bb
;
5919 rtx oloc
= loc
, nloc
, src
= NULL
;
5920 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5921 bool track_p
= false;
5923 bool resolve
, preserve
;
5925 if (type
== MO_CLOBBER
)
5932 gcc_assert (loc
!= cfa_base_rtx
);
5933 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5934 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5935 || GET_CODE (expr
) == CLOBBER
)
5937 mo
.type
= MO_CLOBBER
;
5939 if (GET_CODE (expr
) == SET
5940 && SET_DEST (expr
) == loc
5941 && !unsuitable_loc (SET_SRC (expr
))
5942 && find_use_val (loc
, mode
, cui
))
5944 gcc_checking_assert (type
== MO_VAL_SET
);
5945 mo
.u
.loc
= gen_rtx_SET (loc
, SET_SRC (expr
));
5950 if (GET_CODE (expr
) == SET
5951 && SET_DEST (expr
) == loc
5952 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5953 src
= var_lowpart (mode2
, SET_SRC (expr
));
5954 loc
= var_lowpart (mode2
, loc
);
5963 rtx xexpr
= gen_rtx_SET (loc
, src
);
5964 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5966 /* If this is an instruction copying (part of) a parameter
5967 passed by invisible reference to its register location,
5968 pretend it's a SET so that the initial memory location
5969 is discarded, as the parameter register can be reused
5970 for other purposes and we do not track locations based
5971 on generic registers. */
5974 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5975 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5976 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5977 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
5988 mo
.insn
= cui
->insn
;
5990 else if (MEM_P (loc
)
5991 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5994 if (MEM_P (loc
) && type
== MO_VAL_SET
5995 && !REG_P (XEXP (loc
, 0))
5996 && !MEM_P (XEXP (loc
, 0)))
5999 machine_mode address_mode
= get_address_mode (mloc
);
6000 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
6004 if (val
&& !cselib_preserved_value_p (val
))
6005 preserve_value (val
);
6008 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
6010 mo
.type
= MO_CLOBBER
;
6011 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
6015 if (GET_CODE (expr
) == SET
6016 && SET_DEST (expr
) == loc
6017 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
6018 src
= var_lowpart (mode2
, SET_SRC (expr
));
6019 loc
= var_lowpart (mode2
, loc
);
6028 rtx xexpr
= gen_rtx_SET (loc
, src
);
6029 if (same_variable_part_p (SET_SRC (xexpr
),
6031 INT_MEM_OFFSET (loc
)))
6038 mo
.insn
= cui
->insn
;
6043 if (type
!= MO_VAL_SET
)
6044 goto log_and_return
;
6046 v
= find_use_val (oloc
, mode
, cui
);
6049 goto log_and_return
;
6051 resolve
= preserve
= !cselib_preserved_value_p (v
);
6053 /* We cannot track values for multiple-part variables, so we track only
6054 locations for tracked parameters passed either by invisible reference
6055 or directly in multiple locations. */
6059 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
6060 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
6061 && TREE_CODE (TREE_TYPE (REG_EXPR (loc
))) != UNION_TYPE
6062 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
6063 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) != arg_pointer_rtx
)
6064 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc
))) == PARALLEL
6065 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) > 1)))
6067 /* Although we don't use the value here, it could be used later by the
6068 mere virtue of its existence as the operand of the reverse operation
6069 that gave rise to it (typically extension/truncation). Make sure it
6070 is preserved as required by vt_expand_var_loc_chain. */
6073 goto log_and_return
;
6076 if (loc
== stack_pointer_rtx
6077 && hard_frame_pointer_adjustment
!= -1
6079 cselib_set_value_sp_based (v
);
6081 nloc
= replace_expr_with_values (oloc
);
6085 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
6087 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
6091 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
6093 if (oval
&& !cselib_preserved_value_p (oval
))
6095 micro_operation moa
;
6097 preserve_value (oval
);
6099 moa
.type
= MO_VAL_USE
;
6100 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
6101 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
6102 moa
.insn
= cui
->insn
;
6104 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6105 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
6106 moa
.type
, dump_file
);
6107 VTI (bb
)->mos
.safe_push (moa
);
6112 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
6114 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
6115 nloc
= replace_expr_with_values (SET_SRC (expr
));
6119 /* Avoid the mode mismatch between oexpr and expr. */
6120 if (!nloc
&& mode
!= mode2
)
6122 nloc
= SET_SRC (expr
);
6123 gcc_assert (oloc
== SET_DEST (expr
));
6126 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
6127 oloc
= gen_rtx_SET (oloc
, nloc
);
6130 if (oloc
== SET_DEST (mo
.u
.loc
))
6131 /* No point in duplicating. */
6133 if (!REG_P (SET_SRC (mo
.u
.loc
)))
6139 if (GET_CODE (mo
.u
.loc
) == SET
6140 && oloc
== SET_DEST (mo
.u
.loc
))
6141 /* No point in duplicating. */
6147 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6149 if (mo
.u
.loc
!= oloc
)
6150 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6152 /* The loc of a MO_VAL_SET may have various forms:
6154 (concat val dst): dst now holds val
6156 (concat val (set dst src)): dst now holds val, copied from src
6158 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6159 after replacing mems and non-top-level regs with values.
6161 (concat (concat val dstv) (set dst src)): dst now holds val,
6162 copied from src. dstv is a value-based representation of dst, if
6163 it differs from dst. If resolution is needed, src is a REG, and
6164 its mode is the same as that of val.
6166 (concat (concat val (set dstv srcv)) (set dst src)): src
6167 copied to dst, holding val. dstv and srcv are value-based
6168 representations of dst and src, respectively.
6172 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6173 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6178 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6181 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6184 if (mo
.type
== MO_CLOBBER
)
6185 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6186 if (mo
.type
== MO_COPY
)
6187 VAL_EXPR_IS_COPIED (loc
) = 1;
6189 mo
.type
= MO_VAL_SET
;
6192 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6193 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6194 VTI (bb
)->mos
.safe_push (mo
);
6197 /* Arguments to the call. */
/* Built by prepare_call_arguments as an EXPR_LIST; consumed (stored
   into a micro operation and reset to NULL_RTX) in add_with_sets.  */
6198 static rtx call_arguments
;
6200 /* Compute call_arguments. */
6203 prepare_call_arguments (basic_block bb
, rtx_insn
*insn
)
6206 rtx prev
, cur
, next
;
6207 rtx this_arg
= NULL_RTX
;
6208 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6209 tree obj_type_ref
= NULL_TREE
;
6210 CUMULATIVE_ARGS args_so_far_v
;
6211 cumulative_args_t args_so_far
;
6213 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6214 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6215 call
= get_call_rtx_from (insn
);
6218 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6220 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6221 if (SYMBOL_REF_DECL (symbol
))
6222 fndecl
= SYMBOL_REF_DECL (symbol
);
6224 if (fndecl
== NULL_TREE
)
6225 fndecl
= MEM_EXPR (XEXP (call
, 0));
6227 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6228 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6230 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6231 type
= TREE_TYPE (fndecl
);
6232 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6234 if (TREE_CODE (fndecl
) == INDIRECT_REF
6235 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6236 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6241 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6243 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6244 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6246 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6250 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6251 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6252 #ifndef PCC_STATIC_STRUCT_RETURN
6253 if (aggregate_value_p (TREE_TYPE (type
), type
)
6254 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6256 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6257 machine_mode mode
= TYPE_MODE (struct_addr
);
6259 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6261 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6263 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6265 if (reg
== NULL_RTX
)
6267 for (; link
; link
= XEXP (link
, 1))
6268 if (GET_CODE (XEXP (link
, 0)) == USE
6269 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6271 link
= XEXP (link
, 1);
6278 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6280 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6283 t
= TYPE_ARG_TYPES (type
);
6284 mode
= TYPE_MODE (TREE_VALUE (t
));
6285 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6286 TREE_VALUE (t
), true);
6287 if (this_arg
&& !REG_P (this_arg
))
6288 this_arg
= NULL_RTX
;
6289 else if (this_arg
== NULL_RTX
)
6291 for (; link
; link
= XEXP (link
, 1))
6292 if (GET_CODE (XEXP (link
, 0)) == USE
6293 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6295 this_arg
= XEXP (XEXP (link
, 0), 0);
6303 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6305 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6306 if (GET_CODE (XEXP (link
, 0)) == USE
)
6308 rtx item
= NULL_RTX
;
6309 x
= XEXP (XEXP (link
, 0), 0);
6310 if (GET_MODE (link
) == VOIDmode
6311 || GET_MODE (link
) == BLKmode
6312 || (GET_MODE (link
) != GET_MODE (x
)
6313 && ((GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6314 && GET_MODE_CLASS (GET_MODE (link
)) != MODE_PARTIAL_INT
)
6315 || (GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
6316 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_PARTIAL_INT
))))
6317 /* Can't do anything for these, if the original type mode
6318 isn't known or can't be converted. */;
6321 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6322 if (val
&& cselib_preserved_value_p (val
))
6323 item
= val
->val_rtx
;
6324 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
6325 || GET_MODE_CLASS (GET_MODE (x
)) == MODE_PARTIAL_INT
)
6327 machine_mode mode
= GET_MODE (x
);
6329 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
6330 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
6332 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6334 if (reg
== NULL_RTX
|| !REG_P (reg
))
6336 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6337 if (val
&& cselib_preserved_value_p (val
))
6339 item
= val
->val_rtx
;
6350 if (!frame_pointer_needed
)
6352 struct adjust_mem_data amd
;
6353 amd
.mem_mode
= VOIDmode
;
6354 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6355 amd
.side_effects
= NULL
;
6357 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6359 gcc_assert (amd
.side_effects
== NULL_RTX
);
6361 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6362 if (val
&& cselib_preserved_value_p (val
))
6363 item
= val
->val_rtx
;
6364 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
6365 && GET_MODE_CLASS (GET_MODE (mem
)) != MODE_PARTIAL_INT
)
6367 /* For non-integer stack argument see also if they weren't
6368 initialized by integers. */
6369 machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
6370 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
6372 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6373 imode
, 0, VOIDmode
);
6374 if (val
&& cselib_preserved_value_p (val
))
6375 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6383 if (GET_MODE (item
) != GET_MODE (link
))
6384 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6385 if (GET_MODE (x2
) != GET_MODE (link
))
6386 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6387 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6389 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6391 if (t
&& t
!= void_list_node
)
6393 tree argtype
= TREE_VALUE (t
);
6394 machine_mode mode
= TYPE_MODE (argtype
);
6396 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6398 argtype
= build_pointer_type (argtype
);
6399 mode
= TYPE_MODE (argtype
);
6401 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6403 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6404 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6407 && GET_MODE (reg
) == mode
6408 && (GET_MODE_CLASS (mode
) == MODE_INT
6409 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
6411 && REGNO (x
) == REGNO (reg
)
6412 && GET_MODE (x
) == mode
6415 machine_mode indmode
6416 = TYPE_MODE (TREE_TYPE (argtype
));
6417 rtx mem
= gen_rtx_MEM (indmode
, x
);
6418 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6419 if (val
&& cselib_preserved_value_p (val
))
6421 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6422 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6427 struct elt_loc_list
*l
;
6430 /* Try harder, when passing address of a constant
6431 pool integer it can be easily read back. */
6432 item
= XEXP (item
, 1);
6433 if (GET_CODE (item
) == SUBREG
)
6434 item
= SUBREG_REG (item
);
6435 gcc_assert (GET_CODE (item
) == VALUE
);
6436 val
= CSELIB_VAL_PTR (item
);
6437 for (l
= val
->locs
; l
; l
= l
->next
)
6438 if (GET_CODE (l
->loc
) == SYMBOL_REF
6439 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6440 && SYMBOL_REF_DECL (l
->loc
)
6441 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6443 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6444 if (tree_fits_shwi_p (initial
))
6446 item
= GEN_INT (tree_to_shwi (initial
));
6447 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6449 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6456 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6462 /* Add debug arguments. */
6464 && TREE_CODE (fndecl
) == FUNCTION_DECL
6465 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6467 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6472 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6475 tree dtemp
= (**debug_args
)[ix
+ 1];
6476 machine_mode mode
= DECL_MODE (dtemp
);
6477 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6478 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6479 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6485 /* Reverse call_arguments chain. */
6487 for (cur
= call_arguments
; cur
; cur
= next
)
6489 next
= XEXP (cur
, 1);
6490 XEXP (cur
, 1) = prev
;
6493 call_arguments
= prev
;
6495 x
= get_call_rtx_from (insn
);
6498 x
= XEXP (XEXP (x
, 0), 0);
6499 if (GET_CODE (x
) == SYMBOL_REF
)
6500 /* Don't record anything. */;
6501 else if (CONSTANT_P (x
))
6503 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6506 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6510 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6511 if (val
&& cselib_preserved_value_p (val
))
6513 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6515 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6522 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6523 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6525 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6527 clobbered
= plus_constant (mode
, clobbered
,
6528 token
* GET_MODE_SIZE (mode
));
6529 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6530 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6532 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6536 /* Callback for cselib_record_sets_hook, that records as micro
6537 operations uses and stores in an insn after cselib_record_sets has
6538 analyzed the sets in an insn, but before it modifies the stored
6539 values in the internal tables, unless cselib_record_sets doesn't
6540 call it directly (perhaps because we're not doing cselib in the
6541 first place, in which case sets and n_sets will be 0). */
/* NOTE(review): interior lines of this function are elided in this
   excerpt (gaps in the embedded original numbering) — braces, some
   declarations and loop bodies are missing below.  */
6544 add_with_sets (rtx_insn
*insn
, struct cselib_set
*sets
, int n_sets
)
6546 basic_block bb
= BLOCK_FOR_INSN (insn
);
6548 struct count_use_info cui
;
6549 micro_operation
*mos
;
6551 cselib_hook_called
= true;
6556 cui
.n_sets
= n_sets
;
/* First pass: record uses via note_uses/add_uses_1.  */
6558 n1
= VTI (bb
)->mos
.length ();
6559 cui
.store_p
= false;
6560 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6561 n2
= VTI (bb
)->mos
.length () - 1;
6562 mos
= VTI (bb
)->mos
.address ();
6564 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
   move the MO_VAL_LOCs last.  */
6568 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6570 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6573 std::swap (mos
[n1
], mos
[n2
]);
6576 n2
= VTI (bb
)->mos
.length () - 1;
6579 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6581 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6584 std::swap (mos
[n1
], mos
[n2
]);
/* Hand the collected call arguments to this micro operation and
   clear the global for the next call insn.  */
6593 mo
.u
.loc
= call_arguments
;
6594 call_arguments
= NULL_RTX
;
6596 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6597 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6598 VTI (bb
)->mos
.safe_push (mo
);
/* Second pass: record stores via note_stores/add_stores.  */
6601 n1
= VTI (bb
)->mos
.length ();
6602 /* This will record NEXT_INSN (insn), such that we can
6603 insert notes before it without worrying about any
6604 notes that MO_USEs might emit after the insn. */
6606 note_stores (PATTERN (insn
), add_stores
, &cui
);
6607 n2
= VTI (bb
)->mos
.length () - 1;
6608 mos
= VTI (bb
)->mos
.address ();
6610 /* Order the MO_VAL_USEs first (note_stores does nothing
6611 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6612 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6615 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6617 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6620 std::swap (mos
[n1
], mos
[n2
]);
6623 n2
= VTI (bb
)->mos
.length () - 1;
6626 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6628 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6631 std::swap (mos
[n1
], mos
[n2
]);
/* Return the initialization status recorded in IN for the variable
   whose location SRC (a REG or MEM) refers to.  When uninitialized-use
   tracking is disabled, everything is reported initialized.
   NOTE(review): braces and some lines are elided in this excerpt.  */
6635 static enum var_init_status
6636 find_src_status (dataflow_set
*in
, rtx src
)
6638 tree decl
= NULL_TREE
;
6639 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6641 if (! flag_var_tracking_uninit
)
6642 status
= VAR_INIT_STATUS_INITIALIZED
;
/* Map SRC back to the user variable it describes, if any.  */
6644 if (src
&& REG_P (src
))
6645 decl
= var_debug_decl (REG_EXPR (src
));
6646 else if (src
&& MEM_P (src
))
6647 decl
= var_debug_decl (MEM_EXPR (src
));
6650 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6655 /* SRC is the source of an assignment. Use SET to try to find what
6656 was ultimately assigned to SRC. Return that value if known,
6657 otherwise return SRC itself. */
/* NOTE(review): braces, some declarations and the final return are
   elided in this excerpt (gaps in the embedded original numbering).  */
6660 find_src_set_src (dataflow_set
*set
, rtx src
)
6662 tree decl
= NULL_TREE
; /* The variable being copied around. */
6663 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6665 location_chain nextp
;
/* Map SRC back to the user variable it describes, if any.  */
6669 if (src
&& REG_P (src
))
6670 decl
= var_debug_decl (REG_EXPR (src
));
6671 else if (src
&& MEM_P (src
))
6672 decl
= var_debug_decl (MEM_EXPR (src
));
6676 decl_or_value dv
= dv_from_decl (decl
);
6678 var
= shared_hash_find (set
->vars
, dv
);
/* Scan every location chain of every part of the variable for a
   location rtx equal to SRC, and take its recorded set_src.  */
6682 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6683 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6684 nextp
= nextp
->next
)
6685 if (rtx_equal_p (nextp
->loc
, src
))
6687 set_src
= nextp
->set_src
;
6697 /* Compute the changes of variable locations in the basic block BB. */
6700 compute_bb_dataflow (basic_block bb
)
6703 micro_operation
*mo
;
6705 dataflow_set old_out
;
6706 dataflow_set
*in
= &VTI (bb
)->in
;
6707 dataflow_set
*out
= &VTI (bb
)->out
;
6709 dataflow_set_init (&old_out
);
6710 dataflow_set_copy (&old_out
, out
);
6711 dataflow_set_copy (out
, in
);
6713 if (MAY_HAVE_DEBUG_INSNS
)
6714 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
6716 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6718 rtx_insn
*insn
= mo
->insn
;
6723 dataflow_set_clear_at_call (out
);
6728 rtx loc
= mo
->u
.loc
;
6731 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6732 else if (MEM_P (loc
))
6733 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6739 rtx loc
= mo
->u
.loc
;
6743 if (GET_CODE (loc
) == CONCAT
)
6745 val
= XEXP (loc
, 0);
6746 vloc
= XEXP (loc
, 1);
6754 var
= PAT_VAR_LOCATION_DECL (vloc
);
6756 clobber_variable_part (out
, NULL_RTX
,
6757 dv_from_decl (var
), 0, NULL_RTX
);
6760 if (VAL_NEEDS_RESOLUTION (loc
))
6761 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6762 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6763 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6766 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6767 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6768 dv_from_decl (var
), 0,
6769 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6776 rtx loc
= mo
->u
.loc
;
6777 rtx val
, vloc
, uloc
;
6779 vloc
= uloc
= XEXP (loc
, 1);
6780 val
= XEXP (loc
, 0);
6782 if (GET_CODE (val
) == CONCAT
)
6784 uloc
= XEXP (val
, 1);
6785 val
= XEXP (val
, 0);
6788 if (VAL_NEEDS_RESOLUTION (loc
))
6789 val_resolve (out
, val
, vloc
, insn
);
6791 val_store (out
, val
, uloc
, insn
, false);
6793 if (VAL_HOLDS_TRACK_EXPR (loc
))
6795 if (GET_CODE (uloc
) == REG
)
6796 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6798 else if (GET_CODE (uloc
) == MEM
)
6799 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6807 rtx loc
= mo
->u
.loc
;
6808 rtx val
, vloc
, uloc
;
6812 uloc
= XEXP (vloc
, 1);
6813 val
= XEXP (vloc
, 0);
6816 if (GET_CODE (uloc
) == SET
)
6818 dstv
= SET_DEST (uloc
);
6819 srcv
= SET_SRC (uloc
);
6827 if (GET_CODE (val
) == CONCAT
)
6829 dstv
= vloc
= XEXP (val
, 1);
6830 val
= XEXP (val
, 0);
6833 if (GET_CODE (vloc
) == SET
)
6835 srcv
= SET_SRC (vloc
);
6837 gcc_assert (val
!= srcv
);
6838 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6840 dstv
= vloc
= SET_DEST (vloc
);
6842 if (VAL_NEEDS_RESOLUTION (loc
))
6843 val_resolve (out
, val
, srcv
, insn
);
6845 else if (VAL_NEEDS_RESOLUTION (loc
))
6847 gcc_assert (GET_CODE (uloc
) == SET
6848 && GET_CODE (SET_SRC (uloc
)) == REG
);
6849 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6852 if (VAL_HOLDS_TRACK_EXPR (loc
))
6854 if (VAL_EXPR_IS_CLOBBERED (loc
))
6857 var_reg_delete (out
, uloc
, true);
6858 else if (MEM_P (uloc
))
6860 gcc_assert (MEM_P (dstv
));
6861 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6862 var_mem_delete (out
, dstv
, true);
6867 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6868 rtx src
= NULL
, dst
= uloc
;
6869 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6871 if (GET_CODE (uloc
) == SET
)
6873 src
= SET_SRC (uloc
);
6874 dst
= SET_DEST (uloc
);
6879 if (flag_var_tracking_uninit
)
6881 status
= find_src_status (in
, src
);
6883 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6884 status
= find_src_status (out
, src
);
6887 src
= find_src_set_src (in
, src
);
6891 var_reg_delete_and_set (out
, dst
, !copied_p
,
6893 else if (MEM_P (dst
))
6895 gcc_assert (MEM_P (dstv
));
6896 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6897 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6902 else if (REG_P (uloc
))
6903 var_regno_delete (out
, REGNO (uloc
));
6904 else if (MEM_P (uloc
))
6906 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6907 gcc_checking_assert (dstv
== vloc
);
6909 clobber_overlapping_mems (out
, vloc
);
6912 val_store (out
, val
, dstv
, insn
, true);
6918 rtx loc
= mo
->u
.loc
;
6921 if (GET_CODE (loc
) == SET
)
6923 set_src
= SET_SRC (loc
);
6924 loc
= SET_DEST (loc
);
6928 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6930 else if (MEM_P (loc
))
6931 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6938 rtx loc
= mo
->u
.loc
;
6939 enum var_init_status src_status
;
6942 if (GET_CODE (loc
) == SET
)
6944 set_src
= SET_SRC (loc
);
6945 loc
= SET_DEST (loc
);
6948 if (! flag_var_tracking_uninit
)
6949 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6952 src_status
= find_src_status (in
, set_src
);
6954 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6955 src_status
= find_src_status (out
, set_src
);
6958 set_src
= find_src_set_src (in
, set_src
);
6961 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6962 else if (MEM_P (loc
))
6963 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6969 rtx loc
= mo
->u
.loc
;
6972 var_reg_delete (out
, loc
, false);
6973 else if (MEM_P (loc
))
6974 var_mem_delete (out
, loc
, false);
6980 rtx loc
= mo
->u
.loc
;
6983 var_reg_delete (out
, loc
, true);
6984 else if (MEM_P (loc
))
6985 var_mem_delete (out
, loc
, true);
6990 out
->stack_adjust
+= mo
->u
.adjust
;
6995 if (MAY_HAVE_DEBUG_INSNS
)
6997 delete local_get_addr_cache
;
6998 local_get_addr_cache
= NULL
;
7000 dataflow_set_equiv_regs (out
);
7001 shared_hash_htab (out
->vars
)
7002 ->traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
7003 shared_hash_htab (out
->vars
)
7004 ->traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
7006 shared_hash_htab (out
->vars
)
7007 ->traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
7010 changed
= dataflow_set_different (&old_out
, out
);
7011 dataflow_set_destroy (&old_out
);
7015 /* Find the locations of variables in the whole function. */
7018 vt_find_locations (void)
7020 bb_heap_t
*worklist
= new bb_heap_t (LONG_MIN
);
7021 bb_heap_t
*pending
= new bb_heap_t (LONG_MIN
);
7022 sbitmap visited
, in_worklist
, in_pending
;
7029 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
7030 bool success
= true;
7032 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
7033 /* Compute reverse completion order of depth first search of the CFG
7034 so that the data-flow runs faster. */
7035 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
7036 bb_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
7037 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
7038 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
7039 bb_order
[rc_order
[i
]] = i
;
7042 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7043 in_worklist
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7044 in_pending
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7045 bitmap_clear (in_worklist
);
7047 FOR_EACH_BB_FN (bb
, cfun
)
7048 pending
->insert (bb_order
[bb
->index
], bb
);
7049 bitmap_ones (in_pending
);
7051 while (success
&& !pending
->empty ())
7053 std::swap (worklist
, pending
);
7054 std::swap (in_worklist
, in_pending
);
7056 bitmap_clear (visited
);
7058 while (!worklist
->empty ())
7060 bb
= worklist
->extract_min ();
7061 bitmap_clear_bit (in_worklist
, bb
->index
);
7062 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
7063 if (!bitmap_bit_p (visited
, bb
->index
))
7067 int oldinsz
, oldoutsz
;
7069 bitmap_set_bit (visited
, bb
->index
);
7071 if (VTI (bb
)->in
.vars
)
7074 -= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7075 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7076 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
)->elements ();
7078 = shared_hash_htab (VTI (bb
)->out
.vars
)->elements ();
7081 oldinsz
= oldoutsz
= 0;
7083 if (MAY_HAVE_DEBUG_INSNS
)
7085 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
7086 bool first
= true, adjust
= false;
7088 /* Calculate the IN set as the intersection of
7089 predecessor OUT sets. */
7091 dataflow_set_clear (in
);
7092 dst_can_be_shared
= true;
7094 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7095 if (!VTI (e
->src
)->flooded
)
7096 gcc_assert (bb_order
[bb
->index
]
7097 <= bb_order
[e
->src
->index
]);
7100 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
7101 first_out
= &VTI (e
->src
)->out
;
7106 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
7112 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
7114 /* Merge and merge_adjust should keep entries in
7116 shared_hash_htab (in
->vars
)
7117 ->traverse
<dataflow_set
*,
7118 canonicalize_loc_order_check
> (in
);
7120 if (dst_can_be_shared
)
7122 shared_hash_destroy (in
->vars
);
7123 in
->vars
= shared_hash_copy (first_out
->vars
);
7127 VTI (bb
)->flooded
= true;
7131 /* Calculate the IN set as union of predecessor OUT sets. */
7132 dataflow_set_clear (&VTI (bb
)->in
);
7133 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7134 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7137 changed
= compute_bb_dataflow (bb
);
7138 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7139 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7141 if (htabmax
&& htabsz
> htabmax
)
7143 if (MAY_HAVE_DEBUG_INSNS
)
7144 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7145 "variable tracking size limit exceeded with "
7146 "-fvar-tracking-assignments, retrying without");
7148 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7149 "variable tracking size limit exceeded");
7156 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7158 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7161 if (bitmap_bit_p (visited
, e
->dest
->index
))
7163 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7165 /* Send E->DEST to next round. */
7166 bitmap_set_bit (in_pending
, e
->dest
->index
);
7167 pending
->insert (bb_order
[e
->dest
->index
],
7171 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7173 /* Add E->DEST to current round. */
7174 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7175 worklist
->insert (bb_order
[e
->dest
->index
],
7183 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7185 (int)shared_hash_htab (VTI (bb
)->in
.vars
)->size (),
7187 (int)shared_hash_htab (VTI (bb
)->out
.vars
)->size (),
7189 (int)worklist
->nodes (), (int)pending
->nodes (),
7192 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7194 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7195 dump_dataflow_set (&VTI (bb
)->in
);
7196 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7197 dump_dataflow_set (&VTI (bb
)->out
);
7203 if (success
&& MAY_HAVE_DEBUG_INSNS
)
7204 FOR_EACH_BB_FN (bb
, cfun
)
7205 gcc_assert (VTI (bb
)->flooded
);
7210 sbitmap_free (visited
);
7211 sbitmap_free (in_worklist
);
7212 sbitmap_free (in_pending
);
7214 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7218 /* Print the content of the LIST to dump file. */
7221 dump_attrs_list (attrs list
)
7223 for (; list
; list
= list
->next
)
7225 if (dv_is_decl_p (list
->dv
))
7226 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
7228 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
7229 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7231 fprintf (dump_file
, "\n");
7234 /* Print the information about variable *SLOT to dump file. */
7237 dump_var_tracking_slot (variable_def
**slot
, void *data ATTRIBUTE_UNUSED
)
7239 variable var
= *slot
;
7243 /* Continue traversing the hash table. */
7247 /* Print the information about variable VAR to dump file. */
7250 dump_var (variable var
)
7253 location_chain node
;
7255 if (dv_is_decl_p (var
->dv
))
7257 const_tree decl
= dv_as_decl (var
->dv
);
7259 if (DECL_NAME (decl
))
7261 fprintf (dump_file
, " name: %s",
7262 IDENTIFIER_POINTER (DECL_NAME (decl
)));
7263 if (dump_flags
& TDF_UID
)
7264 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
7266 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7267 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7269 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7270 fprintf (dump_file
, "\n");
7274 fputc (' ', dump_file
);
7275 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
7278 for (i
= 0; i
< var
->n_var_parts
; i
++)
7280 fprintf (dump_file
, " offset %ld\n",
7281 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7282 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7284 fprintf (dump_file
, " ");
7285 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7286 fprintf (dump_file
, "[uninit]");
7287 print_rtl_single (dump_file
, node
->loc
);
7292 /* Print the information about variables from hash table VARS to dump file. */
7295 dump_vars (variable_table_type
*vars
)
7297 if (vars
->elements () > 0)
7299 fprintf (dump_file
, "Variables:\n");
7300 vars
->traverse
<void *, dump_var_tracking_slot
> (NULL
);
7304 /* Print the dataflow set SET to dump file. */
7307 dump_dataflow_set (dataflow_set
*set
)
7311 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
7313 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7317 fprintf (dump_file
, "Reg %d:", i
);
7318 dump_attrs_list (set
->regs
[i
]);
7321 dump_vars (shared_hash_htab (set
->vars
));
7322 fprintf (dump_file
, "\n");
7325 /* Print the IN and OUT sets for each basic block to dump file. */
7328 dump_dataflow_sets (void)
7332 FOR_EACH_BB_FN (bb
, cfun
)
7334 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
7335 fprintf (dump_file
, "IN:\n");
7336 dump_dataflow_set (&VTI (bb
)->in
);
7337 fprintf (dump_file
, "OUT:\n");
7338 dump_dataflow_set (&VTI (bb
)->out
);
7342 /* Return the variable for DV in dropped_values, inserting one if
7343 requested with INSERT. */
7345 static inline variable
7346 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7348 variable_def
**slot
;
7350 onepart_enum_t onepart
;
7352 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
7360 gcc_checking_assert (insert
== INSERT
);
7362 onepart
= dv_onepart_p (dv
);
7364 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
7366 empty_var
= onepart_pool (onepart
).allocate ();
7368 empty_var
->refcount
= 1;
7369 empty_var
->n_var_parts
= 0;
7370 empty_var
->onepart
= onepart
;
7371 empty_var
->in_changed_variables
= false;
7372 empty_var
->var_part
[0].loc_chain
= NULL
;
7373 empty_var
->var_part
[0].cur_loc
= NULL
;
7374 VAR_LOC_1PAUX (empty_var
) = NULL
;
7375 set_dv_changed (dv
, true);
7382 /* Recover the one-part aux from dropped_values. */
7384 static struct onepart_aux
*
7385 recover_dropped_1paux (variable var
)
7389 gcc_checking_assert (var
->onepart
);
7391 if (VAR_LOC_1PAUX (var
))
7392 return VAR_LOC_1PAUX (var
);
7394 if (var
->onepart
== ONEPART_VDECL
)
7397 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
7402 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7403 VAR_LOC_1PAUX (dvar
) = NULL
;
7405 return VAR_LOC_1PAUX (var
);
7408 /* Add variable VAR to the hash table of changed variables and
7409 if it has no locations delete it from SET's hash table. */
7412 variable_was_changed (variable var
, dataflow_set
*set
)
7414 hashval_t hash
= dv_htab_hash (var
->dv
);
7418 variable_def
**slot
;
7420 /* Remember this decl or VALUE has been added to changed_variables. */
7421 set_dv_changed (var
->dv
, true);
7423 slot
= changed_variables
->find_slot_with_hash (var
->dv
, hash
, INSERT
);
7427 variable old_var
= *slot
;
7428 gcc_assert (old_var
->in_changed_variables
);
7429 old_var
->in_changed_variables
= false;
7430 if (var
!= old_var
&& var
->onepart
)
7432 /* Restore the auxiliary info from an empty variable
7433 previously created for changed_variables, so it is
7435 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7436 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7437 VAR_LOC_1PAUX (old_var
) = NULL
;
7439 variable_htab_free (*slot
);
7442 if (set
&& var
->n_var_parts
== 0)
7444 onepart_enum_t onepart
= var
->onepart
;
7445 variable empty_var
= NULL
;
7446 variable_def
**dslot
= NULL
;
7448 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7450 dslot
= dropped_values
->find_slot_with_hash (var
->dv
,
7451 dv_htab_hash (var
->dv
),
7457 gcc_checking_assert (!empty_var
->in_changed_variables
);
7458 if (!VAR_LOC_1PAUX (var
))
7460 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7461 VAR_LOC_1PAUX (empty_var
) = NULL
;
7464 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7470 empty_var
= onepart_pool (onepart
).allocate ();
7471 empty_var
->dv
= var
->dv
;
7472 empty_var
->refcount
= 1;
7473 empty_var
->n_var_parts
= 0;
7474 empty_var
->onepart
= onepart
;
7477 empty_var
->refcount
++;
7482 empty_var
->refcount
++;
7483 empty_var
->in_changed_variables
= true;
7487 empty_var
->var_part
[0].loc_chain
= NULL
;
7488 empty_var
->var_part
[0].cur_loc
= NULL
;
7489 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7490 VAR_LOC_1PAUX (var
) = NULL
;
7496 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7497 recover_dropped_1paux (var
);
7499 var
->in_changed_variables
= true;
7506 if (var
->n_var_parts
== 0)
7508 variable_def
**slot
;
7511 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7514 if (shared_hash_shared (set
->vars
))
7515 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7517 shared_hash_htab (set
->vars
)->clear_slot (slot
);
7523 /* Look for the index in VAR->var_part corresponding to OFFSET.
7524 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7525 referenced int will be set to the index that the part has or should
7526 have, if it should be inserted. */
7529 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
7530 int *insertion_point
)
7539 if (insertion_point
)
7540 *insertion_point
= 0;
7542 return var
->n_var_parts
- 1;
7545 /* Find the location part. */
7547 high
= var
->n_var_parts
;
7550 pos
= (low
+ high
) / 2;
7551 if (VAR_PART_OFFSET (var
, pos
) < offset
)
7558 if (insertion_point
)
7559 *insertion_point
= pos
;
7561 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
7567 static variable_def
**
7568 set_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7569 decl_or_value dv
, HOST_WIDE_INT offset
,
7570 enum var_init_status initialized
, rtx set_src
)
7573 location_chain node
, next
;
7574 location_chain
*nextp
;
7576 onepart_enum_t onepart
;
7581 onepart
= var
->onepart
;
7583 onepart
= dv_onepart_p (dv
);
7585 gcc_checking_assert (offset
== 0 || !onepart
);
7586 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7588 if (! flag_var_tracking_uninit
)
7589 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7593 /* Create new variable information. */
7594 var
= onepart_pool (onepart
).allocate ();
7597 var
->n_var_parts
= 1;
7598 var
->onepart
= onepart
;
7599 var
->in_changed_variables
= false;
7601 VAR_LOC_1PAUX (var
) = NULL
;
7603 VAR_PART_OFFSET (var
, 0) = offset
;
7604 var
->var_part
[0].loc_chain
= NULL
;
7605 var
->var_part
[0].cur_loc
= NULL
;
7608 nextp
= &var
->var_part
[0].loc_chain
;
7614 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
7618 if (GET_CODE (loc
) == VALUE
)
7620 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7621 nextp
= &node
->next
)
7622 if (GET_CODE (node
->loc
) == VALUE
)
7624 if (node
->loc
== loc
)
7629 if (canon_value_cmp (node
->loc
, loc
))
7637 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
7645 else if (REG_P (loc
))
7647 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7648 nextp
= &node
->next
)
7649 if (REG_P (node
->loc
))
7651 if (REGNO (node
->loc
) < REGNO (loc
))
7655 if (REGNO (node
->loc
) == REGNO (loc
))
7668 else if (MEM_P (loc
))
7670 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7671 nextp
= &node
->next
)
7672 if (REG_P (node
->loc
))
7674 else if (MEM_P (node
->loc
))
7676 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
7688 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7689 nextp
= &node
->next
)
7690 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
7698 if (shared_var_p (var
, set
->vars
))
7700 slot
= unshare_variable (set
, slot
, var
, initialized
);
7702 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7703 nextp
= &(*nextp
)->next
)
7705 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7712 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7714 pos
= find_variable_location_part (var
, offset
, &inspos
);
7718 node
= var
->var_part
[pos
].loc_chain
;
7721 && ((REG_P (node
->loc
) && REG_P (loc
)
7722 && REGNO (node
->loc
) == REGNO (loc
))
7723 || rtx_equal_p (node
->loc
, loc
)))
7725 /* LOC is in the beginning of the chain so we have nothing
7727 if (node
->init
< initialized
)
7728 node
->init
= initialized
;
7729 if (set_src
!= NULL
)
7730 node
->set_src
= set_src
;
7736 /* We have to make a copy of a shared variable. */
7737 if (shared_var_p (var
, set
->vars
))
7739 slot
= unshare_variable (set
, slot
, var
, initialized
);
7746 /* We have not found the location part, new one will be created. */
7748 /* We have to make a copy of the shared variable. */
7749 if (shared_var_p (var
, set
->vars
))
7751 slot
= unshare_variable (set
, slot
, var
, initialized
);
7755 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7756 thus there are at most MAX_VAR_PARTS different offsets. */
7757 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7758 && (!var
->n_var_parts
|| !onepart
));
7760 /* We have to move the elements of array starting at index
7761 inspos to the next position. */
7762 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7763 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7766 gcc_checking_assert (!onepart
);
7767 VAR_PART_OFFSET (var
, pos
) = offset
;
7768 var
->var_part
[pos
].loc_chain
= NULL
;
7769 var
->var_part
[pos
].cur_loc
= NULL
;
7772 /* Delete the location from the list. */
7773 nextp
= &var
->var_part
[pos
].loc_chain
;
7774 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7777 if ((REG_P (node
->loc
) && REG_P (loc
)
7778 && REGNO (node
->loc
) == REGNO (loc
))
7779 || rtx_equal_p (node
->loc
, loc
))
7781 /* Save these values, to assign to the new node, before
7782 deleting this one. */
7783 if (node
->init
> initialized
)
7784 initialized
= node
->init
;
7785 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7786 set_src
= node
->set_src
;
7787 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7788 var
->var_part
[pos
].cur_loc
= NULL
;
7794 nextp
= &node
->next
;
7797 nextp
= &var
->var_part
[pos
].loc_chain
;
7800 /* Add the location to the beginning. */
7801 node
= new location_chain_def
;
7803 node
->init
= initialized
;
7804 node
->set_src
= set_src
;
7805 node
->next
= *nextp
;
7808 /* If no location was emitted do so. */
7809 if (var
->var_part
[pos
].cur_loc
== NULL
)
7810 variable_was_changed (var
, set
);
7815 /* Set the part of variable's location in the dataflow set SET. The
7816 variable part is specified by variable's declaration in DV and
7817 offset OFFSET and the part's location by LOC. IOPT should be
7818 NO_INSERT if the variable is known to be in SET already and the
7819 variable hash table must not be resized, and INSERT otherwise. */
7822 set_variable_part (dataflow_set
*set
, rtx loc
,
7823 decl_or_value dv
, HOST_WIDE_INT offset
,
7824 enum var_init_status initialized
, rtx set_src
,
7825 enum insert_option iopt
)
7827 variable_def
**slot
;
7829 if (iopt
== NO_INSERT
)
7830 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7833 slot
= shared_hash_find_slot (set
->vars
, dv
);
7835 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7837 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7840 /* Remove all recorded register locations for the given variable part
7841 from dataflow set SET, except for those that are identical to loc.
7842 The variable part is specified by variable's declaration or value
7843 DV and offset OFFSET. */
7845 static variable_def
**
7846 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7847 HOST_WIDE_INT offset
, rtx set_src
)
7849 variable var
= *slot
;
7850 int pos
= find_variable_location_part (var
, offset
, NULL
);
7854 location_chain node
, next
;
7856 /* Remove the register locations from the dataflow set. */
7857 next
= var
->var_part
[pos
].loc_chain
;
7858 for (node
= next
; node
; node
= next
)
7861 if (node
->loc
!= loc
7862 && (!flag_var_tracking_uninit
7865 || !rtx_equal_p (set_src
, node
->set_src
)))
7867 if (REG_P (node
->loc
))
7872 /* Remove the variable part from the register's
7873 list, but preserve any other variable parts
7874 that might be regarded as live in that same
7876 anextp
= &set
->regs
[REGNO (node
->loc
)];
7877 for (anode
= *anextp
; anode
; anode
= anext
)
7879 anext
= anode
->next
;
7880 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7881 && anode
->offset
== offset
)
7887 anextp
= &anode
->next
;
7891 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7899 /* Remove all recorded register locations for the given variable part
7900 from dataflow set SET, except for those that are identical to loc.
7901 The variable part is specified by variable's declaration or value
7902 DV and offset OFFSET. */
7905 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7906 HOST_WIDE_INT offset
, rtx set_src
)
7908 variable_def
**slot
;
7910 if (!dv_as_opaque (dv
)
7911 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7914 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7918 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7921 /* Delete the part of variable's location from dataflow set SET. The
7922 variable part is specified by its SET->vars slot SLOT and offset
7923 OFFSET and the part's location by LOC. */
7925 static variable_def
**
7926 delete_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7927 HOST_WIDE_INT offset
)
7929 variable var
= *slot
;
7930 int pos
= find_variable_location_part (var
, offset
, NULL
);
7934 location_chain node
, next
;
7935 location_chain
*nextp
;
7939 if (shared_var_p (var
, set
->vars
))
7941 /* If the variable contains the location part we have to
7942 make a copy of the variable. */
7943 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7946 if ((REG_P (node
->loc
) && REG_P (loc
)
7947 && REGNO (node
->loc
) == REGNO (loc
))
7948 || rtx_equal_p (node
->loc
, loc
))
7950 slot
= unshare_variable (set
, slot
, var
,
7951 VAR_INIT_STATUS_UNKNOWN
);
7958 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7959 cur_loc
= VAR_LOC_FROM (var
);
7961 cur_loc
= var
->var_part
[pos
].cur_loc
;
7963 /* Delete the location part. */
7965 nextp
= &var
->var_part
[pos
].loc_chain
;
7966 for (node
= *nextp
; node
; node
= next
)
7969 if ((REG_P (node
->loc
) && REG_P (loc
)
7970 && REGNO (node
->loc
) == REGNO (loc
))
7971 || rtx_equal_p (node
->loc
, loc
))
7973 /* If we have deleted the location which was last emitted
7974 we have to emit new location so add the variable to set
7975 of changed variables. */
7976 if (cur_loc
== node
->loc
)
7979 var
->var_part
[pos
].cur_loc
= NULL
;
7980 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7981 VAR_LOC_FROM (var
) = NULL
;
7988 nextp
= &node
->next
;
7991 if (var
->var_part
[pos
].loc_chain
== NULL
)
7995 while (pos
< var
->n_var_parts
)
7997 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
8002 variable_was_changed (var
, set
);
8008 /* Delete the part of variable's location from dataflow set SET. The
8009 variable part is specified by variable's declaration or value DV
8010 and offset OFFSET and the part's location by LOC. */
8013 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
8014 HOST_WIDE_INT offset
)
8016 variable_def
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
8020 delete_slot_part (set
, loc
, slot
, offset
);
8024 /* Structure for passing some other parameters to function
8025 vt_expand_loc_callback. */
8026 struct expand_loc_callback_data
8028 /* The variables and values active at this point. */
8029 variable_table_type
*vars
;
8031 /* Stack of values and debug_exprs under expansion, and their
8033 auto_vec
<rtx
, 4> expanding
;
8035 /* Stack of values and debug_exprs whose expansion hit recursion
8036 cycles. They will have VALUE_RECURSED_INTO marked when added to
8037 this list. This flag will be cleared if any of its dependencies
8038 resolves to a valid location. So, if the flag remains set at the
8039 end of the search, we know no valid location for this one can
8041 auto_vec
<rtx
, 4> pending
;
8043 /* The maximum depth among the sub-expressions under expansion.
8044 Zero indicates no expansion so far. */
8048 /* Allocate the one-part auxiliary data structure for VAR, with enough
8049 room for COUNT dependencies. */
8052 loc_exp_dep_alloc (variable var
, int count
)
8056 gcc_checking_assert (var
->onepart
);
8058 /* We can be called with COUNT == 0 to allocate the data structure
8059 without any dependencies, e.g. for the backlinks only. However,
8060 if we are specifying a COUNT, then the dependency list must have
8061 been emptied before. It would be possible to adjust pointers or
8062 force it empty here, but this is better done at an earlier point
8063 in the algorithm, so we instead leave an assertion to catch
8065 gcc_checking_assert (!count
8066 || VAR_LOC_DEP_VEC (var
) == NULL
8067 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8069 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
8072 allocsize
= offsetof (struct onepart_aux
, deps
)
8073 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
8075 if (VAR_LOC_1PAUX (var
))
8077 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
8078 VAR_LOC_1PAUX (var
), allocsize
);
8079 /* If the reallocation moves the onepaux structure, the
8080 back-pointer to BACKLINKS in the first list member will still
8081 point to its old location. Adjust it. */
8082 if (VAR_LOC_DEP_LST (var
))
8083 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
8087 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
8088 *VAR_LOC_DEP_LSTP (var
) = NULL
;
8089 VAR_LOC_FROM (var
) = NULL
;
8090 VAR_LOC_DEPTH (var
).complexity
= 0;
8091 VAR_LOC_DEPTH (var
).entryvals
= 0;
8093 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
8096 /* Remove all entries from the vector of active dependencies of VAR,
8097 removing them from the back-links lists too. */
8100 loc_exp_dep_clear (variable var
)
8102 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
8104 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
8106 led
->next
->pprev
= led
->pprev
;
8108 *led
->pprev
= led
->next
;
8109 VAR_LOC_DEP_VEC (var
)->pop ();
8113 /* Insert an active dependency from VAR on X to the vector of
8114 dependencies, and add the corresponding back-link to X's list of
8115 back-links in VARS. */
8118 loc_exp_insert_dep (variable var
, rtx x
, variable_table_type
*vars
)
8124 dv
= dv_from_rtx (x
);
8126 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8127 an additional look up? */
8128 xvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8132 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8133 gcc_checking_assert (xvar
);
8136 /* No point in adding the same backlink more than once. This may
8137 arise if say the same value appears in two complex expressions in
8138 the same loc_list, or even more than once in a single
8140 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
8143 if (var
->onepart
== NOT_ONEPART
)
8144 led
= new loc_exp_dep
;
8148 memset (&empty
, 0, sizeof (empty
));
8149 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8150 led
= &VAR_LOC_DEP_VEC (var
)->last ();
8155 loc_exp_dep_alloc (xvar
, 0);
8156 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8157 led
->next
= *led
->pprev
;
8159 led
->next
->pprev
= &led
->next
;
8163 /* Create active dependencies of VAR on COUNT values starting at
8164 VALUE, and corresponding back-links to the entries in VARS. Return
8165 true if we found any pending-recursion results. */
8168 loc_exp_dep_set (variable var
, rtx result
, rtx
*value
, int count
,
8169 variable_table_type
*vars
)
8171 bool pending_recursion
= false;
8173 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8174 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8176 /* Set up all dependencies from last_child (as set up at the end of
8177 the loop above) to the end. */
8178 loc_exp_dep_alloc (var
, count
);
8184 if (!pending_recursion
)
8185 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8187 loc_exp_insert_dep (var
, x
, vars
);
8190 return pending_recursion
;
8193 /* Notify the back-links of IVAR that are pending recursion that we
8194 have found a non-NIL value for it, so they are cleared for another
8195 attempt to compute a current location. */
8198 notify_dependents_of_resolved_value (variable ivar
, variable_table_type
*vars
)
8200 loc_exp_dep
*led
, *next
;
8202 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8204 decl_or_value dv
= led
->dv
;
8209 if (dv_is_value_p (dv
))
8211 rtx value
= dv_as_value (dv
);
8213 /* If we have already resolved it, leave it alone. */
8214 if (!VALUE_RECURSED_INTO (value
))
8217 /* Check that VALUE_RECURSED_INTO, true from the test above,
8218 implies NO_LOC_P. */
8219 gcc_checking_assert (NO_LOC_P (value
));
8221 /* We won't notify variables that are being expanded,
8222 because their dependency list is cleared before
8224 NO_LOC_P (value
) = false;
8225 VALUE_RECURSED_INTO (value
) = false;
8227 gcc_checking_assert (dv_changed_p (dv
));
8231 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8232 if (!dv_changed_p (dv
))
8236 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8239 var
= variable_from_dropped (dv
, NO_INSERT
);
8242 notify_dependents_of_resolved_value (var
, vars
);
8245 next
->pprev
= led
->pprev
;
8253 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8254 int max_depth
, void *data
);
/* NOTE(review): mangled extraction — original lines 8261, 8264-8265,
   8269, 8271-8272, 8277, 8279-8283 (braces and the return statements)
   are missing from this residue.  Code left byte-identical.  */
8256 /* Return the combined depth, when one sub-expression evaluated to
8257 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8259 static inline expand_depth
8260 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8262 /* If we didn't find anything, stick with what we had. */
8263 if (!best_depth
.complexity
)
8266 /* If we hadn't found anything before, use the depth of the current
8267 expression. Do NOT add one extra level, we want to compute the
8268 maximum depth among sub-expressions. We'll increment it later,
8270 if (!saved_depth
.complexity
)
8273 /* Combine the entryval count so that regardless of which one we
8274 return, the entryval count is accurate. */
8275 best_depth
.entryvals
= saved_depth
.entryvals
8276 = best_depth
.entryvals
+ saved_depth
.entryvals
;
/* Return whichever depth has the greater complexity (the tail with the
   actual returns is among the missing lines).  */
8278 if (saved_depth
.complexity
< best_depth
.complexity
)
/* NOTE(review): mangled extraction — statements split across lines and
   several original lines missing (see gaps in the embedded numbering,
   e.g. 8288-8289, 8302, 8306-8307).  Code left byte-identical.  */
8284 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8285 DATA for cselib expand callback. If PENDRECP is given, indicate in
8286 it whether any sub-expression couldn't be fully evaluated because
8287 it is pending recursion resolution. */
8290 vt_expand_var_loc_chain (variable var
, bitmap regs
, void *data
, bool *pendrecp
)
8292 struct expand_loc_callback_data
*elcd
8293 = (struct expand_loc_callback_data
*) data
;
8294 location_chain loc
, next
;
8296 int first_child
, result_first_child
, last_child
;
8297 bool pending_recursion
;
8298 rtx loc_from
= NULL
;
8299 struct elt_loc_list
*cloc
= NULL
;
/* DEPTH accumulates this expansion's depth; SAVED_DEPTH preserves the
   caller's running depth so it can be combined via update_depth below.  */
8300 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8301 int wanted_entryvals
, found_entryvals
= 0;
8303 /* Clear all backlinks pointing at this, so that we're not notified
8304 while we're active. */
8305 loc_exp_dep_clear (var
);
8308 if (var
->onepart
== ONEPART_VALUE
)
8310 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8312 gcc_checking_assert (cselib_preserved_value_p (val
));
/* Record the current top of the "expanding" stack; entries pushed by
   recursive expansion between FIRST_CHILD and LAST_CHILD are the
   dependencies of this variable.  */
8317 first_child
= result_first_child
= last_child
8318 = elcd
->expanding
.length ();
8320 wanted_entryvals
= found_entryvals
;
8322 /* Attempt to expand each available location in turn. */
8323 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8324 loc
|| cloc
; loc
= next
)
8326 result_first_child
= last_child
;
8330 loc_from
= cloc
->loc
;
8333 if (unsuitable_loc (loc_from
))
8338 loc_from
= loc
->loc
;
8342 gcc_checking_assert (!unsuitable_loc (loc_from
));
/* Reset the per-attempt depth before recursing into cselib.  */
8344 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8345 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8346 vt_expand_loc_callback
, data
);
8347 last_child
= elcd
->expanding
.length ();
8351 depth
= elcd
->depth
;
8353 gcc_checking_assert (depth
.complexity
8354 || result_first_child
== last_child
);
8356 if (last_child
- result_first_child
!= 1)
8358 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8363 if (depth
.complexity
<= EXPR_USE_DEPTH
)
/* Prefer expansions with no more ENTRY_VALUEs than we asked for;
   otherwise remember the smallest ENTRY_VALUE count seen.  */
8365 if (depth
.entryvals
<= wanted_entryvals
)
8367 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8368 found_entryvals
= depth
.entryvals
;
8374 /* Set it up in case we leave the loop. */
8375 depth
.complexity
= depth
.entryvals
= 0;
8377 result_first_child
= first_child
;
8380 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8382 /* We found entries with ENTRY_VALUEs and skipped them. Since
8383 we could not find any expansions without ENTRY_VALUEs, but we
8384 found at least one with them, go back and get an entry with
8385 the minimum number ENTRY_VALUE count that we found. We could
8386 avoid looping, but since each sub-loc is already resolved,
8387 the re-expansion should be trivial. ??? Should we record all
8388 attempted locs as dependencies, so that we retry the
8389 expansion should any of them change, in the hope it can give
8390 us a new entry without an ENTRY_VALUE? */
8391 elcd
->expanding
.truncate (first_child
);
8395 /* Register all encountered dependencies as active. */
8396 pending_recursion
= loc_exp_dep_set
8397 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8398 last_child
- result_first_child
, elcd
->vars
);
/* Pop this variable's dependencies off the shared stack.  */
8400 elcd
->expanding
.truncate (first_child
);
8402 /* Record where the expansion came from. */
8403 gcc_checking_assert (!result
|| !pending_recursion
);
8404 VAR_LOC_FROM (var
) = loc_from
;
8405 VAR_LOC_DEPTH (var
) = depth
;
8407 gcc_checking_assert (!depth
.complexity
== !result
);
8409 elcd
->depth
= update_depth (saved_depth
, depth
);
8411 /* Indicate whether any of the dependencies are pending recursion
8414 *pendrecp
= pending_recursion
;
8416 if (!pendrecp
|| !pending_recursion
)
8417 var
->var_part
[0].cur_loc
= result
;
/* NOTE(review): mangled extraction — statements split across lines;
   gaps in the embedded numbering (8425-8426, 8429-8430, 8433-8435,
   8438, 8440-8441, ...) indicate missing lines (case labels, braces,
   returns).  Code left byte-identical.  */
8422 /* Callback for cselib_expand_value, that looks for expressions
8423 holding the value in the var-tracking hash tables. Return X for
8424 standard processing, anything else is to be used as-is. */
8427 vt_expand_loc_callback (rtx x
, bitmap regs
,
8428 int max_depth ATTRIBUTE_UNUSED
,
8431 struct expand_loc_callback_data
*elcd
8432 = (struct expand_loc_callback_data
*) data
;
8436 bool pending_recursion
= false;
8437 bool from_empty
= false;
8439 switch (GET_CODE (x
))
/* SUBREG case: expand the inner expression first, then re-wrap.  */
8442 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8444 vt_expand_loc_callback
, data
);
8449 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8450 GET_MODE (SUBREG_REG (x
)),
8453 /* Invalid SUBREGs are ok in debug info. ??? We could try
8454 alternate expansions for the VALUE as well. */
8456 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
/* DEBUG_EXPR / VALUE case.  */
8462 dv
= dv_from_rtx (x
);
8469 elcd
->expanding
.safe_push (x
);
8471 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8472 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8476 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8480 var
= elcd
->vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8485 var
= variable_from_dropped (dv
, INSERT
);
8488 gcc_checking_assert (var
);
/* Fast path: unchanged variable with a cached cur_loc — just fold
   its recorded depth into ours and return the cached location.  */
8490 if (!dv_changed_p (dv
))
8492 gcc_checking_assert (!NO_LOC_P (x
));
8493 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8494 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8495 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8497 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8499 return var
->var_part
[0].cur_loc
;
/* Mark X as being expanded to catch recursion cycles.  */
8502 VALUE_RECURSED_INTO (x
) = true;
8503 /* This is tentative, but it makes some tests simpler. */
8504 NO_LOC_P (x
) = true;
8506 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8508 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8510 if (pending_recursion
)
8512 gcc_checking_assert (!result
);
8513 elcd
->pending
.safe_push (x
);
/* Not pending: finalize X's state and wake up anything that was
   waiting on it.  */
8517 NO_LOC_P (x
) = !result
;
8518 VALUE_RECURSED_INTO (x
) = false;
8519 set_dv_changed (dv
, false);
8522 notify_dependents_of_resolved_value (var
, elcd
->vars
);
/* NOTE(review): mangled extraction — statements split across lines and
   some original lines (8544, 8546, 8548-8549, 8551-8552, 8558-8559)
   are missing.  Code left byte-identical.  */
8528 /* While expanding variables, we may encounter recursion cycles
8529 because of mutual (possibly indirect) dependencies between two
8530 particular variables (or values), say A and B. If we're trying to
8531 expand A when we get to B, which in turn attempts to expand A, if
8532 we can't find any other expansion for B, we'll add B to this
8533 pending-recursion stack, and tentatively return NULL for its
8534 location. This tentative value will be used for any other
8535 occurrences of B, unless A gets some other location, in which case
8536 it will notify B that it is worth another try at computing a
8537 location for it, and it will use the location computed for A then.
8538 At the end of the expansion, the tentative NULL locations become
8539 final for all members of PENDING that didn't get a notification.
8540 This function performs this finalization of NULL locations. */
8543 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8545 while (!pending
->is_empty ())
8547 rtx x
= pending
->pop ();
/* Entries that were notified in the meantime have already been
   cleared; skip them.  */
8550 if (!VALUE_RECURSED_INTO (x
))
8553 gcc_checking_assert (NO_LOC_P (x
));
8554 VALUE_RECURSED_INTO (x
) = false;
8555 dv
= dv_from_rtx (x
);
8556 gcc_checking_assert (dv_changed_p (dv
));
8557 set_dv_changed (dv
, false);
/* NOTE(review): mangled extraction — the macro bodies below are missing
   several continuation lines (8564-8566, 8568-8569, 8572-8573, 8577,
   8580-8582: the vec initializations and the delegitimize guard).  No
   comments are inserted inside the backslash-continued bodies, since a
   comment line there would alter the macro text.  */
8561 /* Initialize expand_loc_callback_data D with variable hash table V.
8562 It must be a macro because of alloca (vec stack). */
8563 #define INIT_ELCD(d, v) \
8567 (d).depth.complexity = (d).depth.entryvals = 0; \
8570 /* Finalize expand_loc_callback_data D, resolved to location L. */
8571 #define FINI_ELCD(d, l) \
8574 resolve_expansions_pending_recursion (&(d).pending); \
8575 (d).pending.release (); \
8576 (d).expanding.release (); \
8578 if ((l) && MEM_P (l)) \
8579 (l) = targetm.delegitimize_address (l); \
/* NOTE(review): mangled extraction — missing lines (8588, 8590-8591,
   8593-8594, 8596, 8599, 8601-8604: braces, early return, final
   return).  Code left byte-identical.  */
8583 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8584 equivalences in VARS, updating their CUR_LOCs in the process. */
8587 vt_expand_loc (rtx loc
, variable_table_type
*vars
)
8589 struct expand_loc_callback_data data
;
/* Without debug insns there are no VALUEs to expand.  */
8592 if (!MAY_HAVE_DEBUG_INSNS
)
8595 INIT_ELCD (data
, vars
);
8597 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8598 vt_expand_loc_callback
, &data
);
8600 FINI_ELCD (data
, result
);
/* NOTE(review): mangled extraction — missing lines (8610, 8612-8613,
   8615, 8618, 8620, 8622, 8624, 8626-8629: braces, locals, returns).
   Code left byte-identical.  */
8605 /* Expand the one-part VARiable to a location, using the equivalences
8606 in VARS, updating their CUR_LOCs in the process. */
8609 vt_expand_1pvar (variable var
, variable_table_type
*vars
)
8611 struct expand_loc_callback_data data
;
8614 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
/* Unchanged variables keep their cached location.  */
8616 if (!dv_changed_p (var
->dv
))
8617 return var
->var_part
[0].cur_loc
;
8619 INIT_ELCD (data
, vars
);
/* Expand directly through the loc chain rather than the callback,
   since VAR itself is the root of this expansion.  */
8621 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8623 gcc_checking_assert (data
.expanding
.is_empty ());
8625 FINI_ELCD (data
, loc
);
/* NOTE(review): mangled extraction — statements split across lines and
   many original lines missing (declarations, braces, several statements;
   see gaps in the embedded numbering such as 8641-8642, 8650-8652,
   8657-8661, 8666-8670, ...).  Code left byte-identical; only comments
   were added.  */
8630 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8631 additional parameters: WHERE specifies whether the note shall be emitted
8632 before or after instruction INSN. */
8635 emit_note_insn_var_location (variable_def
**varp
, emit_note_data
*data
)
8637 variable var
= *varp
;
8638 rtx_insn
*insn
= data
->insn
;
8639 enum emit_note_where where
= data
->where
;
8640 variable_table_type
*vars
= data
->vars
;
8643 int i
, j
, n_var_parts
;
8645 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8646 HOST_WIDE_INT last_limit
;
8647 tree type_size_unit
;
8648 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8649 rtx loc
[MAX_VAR_PARTS
];
8653 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8654 || var
->onepart
== ONEPART_VDECL
);
8656 decl
= dv_as_decl (var
->dv
);
/* Fill in missing cur_locs from each part's loc chain.  */
8662 for (i
= 0; i
< var
->n_var_parts
; i
++)
8663 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8664 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
/* Main loop: collect the location of each variable part into
   OFFSETS[]/LOC[], merging adjacent registers/memory when possible.  */
8665 for (i
= 0; i
< var
->n_var_parts
; i
++)
8667 machine_mode mode
, wider_mode
;
8669 HOST_WIDE_INT offset
;
8671 if (i
== 0 && var
->onepart
)
8673 gcc_checking_assert (var
->n_var_parts
== 1);
8675 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8676 loc2
= vt_expand_1pvar (var
, vars
);
8680 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8685 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8687 offset
= VAR_PART_OFFSET (var
, i
);
8688 loc2
= var
->var_part
[i
].cur_loc
;
/* A MEM whose address is a VALUE still needs expanding; register a
   dependency so we are notified if the value's location changes.  */
8689 if (loc2
&& GET_CODE (loc2
) == MEM
8690 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8692 rtx depval
= XEXP (loc2
, 0);
8694 loc2
= vt_expand_loc (loc2
, vars
);
8697 loc_exp_insert_dep (var
, depval
, vars
);
8704 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
/* Find the initialization status of this part's current location.  */
8705 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8706 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8708 initialized
= lc
->init
;
8714 offsets
[n_var_parts
] = offset
;
8720 loc
[n_var_parts
] = loc2
;
8721 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8722 if (mode
== VOIDmode
&& var
->onepart
)
8723 mode
= DECL_MODE (decl
);
8724 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8726 /* Attempt to merge adjacent registers or memory. */
8727 wider_mode
= GET_MODE_WIDER_MODE (mode
);
8728 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8729 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8731 if (j
< var
->n_var_parts
8732 && wider_mode
!= VOIDmode
8733 && var
->var_part
[j
].cur_loc
8734 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8735 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8736 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8737 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8738 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
/* Register merge: the two halves must occupy consecutive hard
   registers that together form one register in WIDER_MODE.  */
8742 if (REG_P (loc
[n_var_parts
])
8743 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
8744 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
8745 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8748 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8749 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8751 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8752 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8755 if (!REG_P (new_loc
)
8756 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8759 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
/* Memory merge: the second MEM's address must be the first's
   address plus GET_MODE_SIZE (mode).  */
8762 else if (MEM_P (loc
[n_var_parts
])
8763 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8764 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8765 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8767 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8768 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8769 XEXP (XEXP (loc2
, 0), 0))
8770 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8771 == GET_MODE_SIZE (mode
))
8772 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8773 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8774 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8775 XEXP (XEXP (loc2
, 0), 0))
8776 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8777 + GET_MODE_SIZE (mode
)
8778 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8779 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8785 loc
[n_var_parts
] = new_loc
;
8787 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
/* After collecting all parts, decide which form of VAR_LOCATION to
   emit: no location, a single EXPR_LIST, or a PARALLEL.  */
8793 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8794 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8797 if (! flag_var_tracking_uninit
)
8798 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8802 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
, initialized
);
8803 else if (n_var_parts
== 1)
8807 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8808 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8812 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
, initialized
);
8814 else if (n_var_parts
)
8818 for (i
= 0; i
< n_var_parts
; i
++)
8820 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8822 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8823 gen_rtvec_v (n_var_parts
, loc
));
8824 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8825 parallel
, initialized
);
/* Emit the note at the requested position relative to INSN.  */
8828 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8830 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8831 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8832 NOTE_DURING_CALL_P (note
) = true;
8836 /* Make sure that the call related notes come first. */
8837 while (NEXT_INSN (insn
)
8839 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8840 && NOTE_DURING_CALL_P (insn
))
8841 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8842 insn
= NEXT_INSN (insn
);
8844 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8845 && NOTE_DURING_CALL_P (insn
))
8846 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8847 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8849 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8851 NOTE_VAR_LOCATION (note
) = note_vl
;
/* VAR has been emitted: clear its changed mark and remove it from the
   changed_variables table.  */
8853 set_dv_changed (var
->dv
, false);
8854 gcc_assert (var
->in_changed_variables
);
8855 var
->in_changed_variables
= false;
8856 changed_variables
->clear_slot (varp
);
8858 /* Continue traversing the hash table. */
/* NOTE(review): mangled extraction — missing lines (8864-8865, 8868,
   8870, 8875-8878: signature fragments, braces, return).  Code left
   byte-identical.  */
8862 /* While traversing changed_variables, push onto DATA (a stack of RTX
8863 values) entries that aren't user variables. */
8866 var_track_values_to_stack (variable_def
**slot
,
8867 vec
<rtx
, va_heap
> *changed_values_stack
)
8869 variable var
= *slot
;
/* Only VALUEs and DEBUG_EXPRs go on the stack; user-variable entries
   stay in changed_variables for note emission.  */
8871 if (var
->onepart
== ONEPART_VALUE
)
8872 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8873 else if (var
->onepart
== ONEPART_DEXPR
)
8874 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
/* NOTE(review): mangled extraction — missing lines (8881, 8883, 8886-8887,
   8889-8890, 8893: return type, braces, the var assignment).  Code left
   byte-identical.  */
8879 /* Remove from changed_variables the entry whose DV corresponds to
8880 value or debug_expr VAL. */
8882 remove_value_from_changed_variables (rtx val
)
8884 decl_or_value dv
= dv_from_rtx (val
);
8885 variable_def
**slot
;
8888 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8891 var
->in_changed_variables
= false;
8892 changed_variables
->clear_slot (slot
);
/* NOTE(review): mangled extraction — statements split across lines and
   several originals missing (8900, 8903, 8905-8906, 8908, 8910-8911,
   8913, 8915-8917, ...: braces, locals, case labels).  Code left
   byte-identical; a mis-encoded pair of smart quotes in the comment at
   original line 8923 was fixed.  */
8895 /* If VAL (a value or debug_expr) has backlinks to variables actively
8896 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8897 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8898 have dependencies of their own to notify. */
8901 notify_dependents_of_changed_value (rtx val
, variable_table_type
*htab
,
8902 vec
<rtx
, va_heap
> *changed_values_stack
)
8904 variable_def
**slot
;
8907 decl_or_value dv
= dv_from_rtx (val
);
/* Look for VAL's variable first in changed_variables, then in the
   current hash table, then among dropped values.  */
8909 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8912 slot
= htab
->find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8914 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8918 while ((led
= VAR_LOC_DEP_LST (var
)))
8920 decl_or_value ldv
= led
->dv
;
8923 /* Deactivate and remove the backlink, as it was "used up". It
8924 makes no sense to attempt to notify the same entity again:
8925 either it will be recomputed and re-register an active
8926 dependency, or it will still have the changed mark. */
8928 led
->next
->pprev
= led
->pprev
;
8930 *led
->pprev
= led
->next
;
8934 if (dv_changed_p (ldv
))
/* Dispatch on what kind of entity the dependent is.  */
8937 switch (dv_onepart_p (ldv
))
8941 set_dv_changed (ldv
, true);
8942 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8946 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8947 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8948 variable_was_changed (ivar
, NULL
);
8953 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8956 int i
= ivar
->n_var_parts
;
/* Multi-part variable: only changed if some part's cur_loc is a
   MEM whose address is exactly VAL.  */
8959 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8961 if (loc
&& GET_CODE (loc
) == MEM
8962 && XEXP (loc
, 0) == val
)
8964 variable_was_changed (ivar
, NULL
);
/* NOTE(review): mangled extraction — missing lines (8982, 8984-8986,
   8988, 8990, 8993, 8998, 9001, 9006-9007, 9009-9013: return type,
   braces, locals, the changed_variables traversal prefix).  Code left
   byte-identical.  */
8977 /* Take out of changed_variables any entries that don't refer to use
8978 variables. Back-propagate change notifications from values and
8979 debug_exprs to their active dependencies in HTAB or in
8980 CHANGED_VARIABLES. */
8983 process_changed_values (variable_table_type
*htab
)
8987 auto_vec
<rtx
, 20> changed_values_stack
;
8989 /* Move values from changed_variables to changed_values_stack. */
8991 ->traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
8992 (&changed_values_stack
);
8994 /* Back-propagate change notifications in values while popping
8995 them from the stack. */
8996 for (n
= i
= changed_values_stack
.length ();
8997 i
> 0; i
= changed_values_stack
.length ())
8999 val
= changed_values_stack
.pop ();
9000 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
9002 /* This condition will hold when visiting each of the entries
9003 originally in changed_variables. We can't remove them
9004 earlier because this could drop the backlinks before we got a
9005 chance to use them. */
9008 remove_value_from_changed_variables (val
);
/* NOTE(review): mangled extraction — missing lines (9017-9018, 9020-9021,
   9024, 9026-9027, 9030-9035, 9037: signature tail, braces, data-struct
   setup, the traversal prefix).  Code left byte-identical.  */
9014 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
9015 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
9016 the notes shall be emitted before of after instruction INSN. */
9019 emit_notes_for_changes (rtx_insn
*insn
, enum emit_note_where where
,
9022 emit_note_data data
;
9023 variable_table_type
*htab
= shared_hash_htab (vars
);
/* Nothing to do if no variable changed since the last emission.  */
9025 if (!changed_variables
->elements ())
9028 if (MAY_HAVE_DEBUG_INSNS
)
9029 process_changed_values (htab
);
9036 ->traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
/* NOTE(review): mangled extraction — statements split across lines and
   several originals missing (9044, 9046-9047, 9049-9051, 9054, 9057,
   9059-9060, 9063, 9066-9067, ...: braces, assignments, else arms).
   Code left byte-identical.  */
9039 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9040 same variable in hash table DATA or is not there at all. */
9043 emit_notes_for_differences_1 (variable_def
**slot
, variable_table_type
*new_vars
)
9045 variable old_var
, new_var
;
9048 new_var
= new_vars
->find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
9052 /* Variable has disappeared. */
9053 variable empty_var
= NULL
;
/* For values/debug_exprs an empty placeholder may already exist in
   the dropped-values table; reuse it to keep the auxiliary data.  */
9055 if (old_var
->onepart
== ONEPART_VALUE
9056 || old_var
->onepart
== ONEPART_DEXPR
)
9058 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
9061 gcc_checking_assert (!empty_var
->in_changed_variables
);
9062 if (!VAR_LOC_1PAUX (old_var
))
9064 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
9065 VAR_LOC_1PAUX (empty_var
) = NULL
;
9068 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
/* Otherwise allocate a fresh empty variable to represent the
   disappearance.  */
9074 empty_var
= onepart_pool (old_var
->onepart
).allocate ();
9075 empty_var
->dv
= old_var
->dv
;
9076 empty_var
->refcount
= 0;
9077 empty_var
->n_var_parts
= 0;
9078 empty_var
->onepart
= old_var
->onepart
;
9079 empty_var
->in_changed_variables
= false;
9082 if (empty_var
->onepart
)
9084 /* Propagate the auxiliary data to (ultimately)
9085 changed_variables. */
9086 empty_var
->var_part
[0].loc_chain
= NULL
;
9087 empty_var
->var_part
[0].cur_loc
= NULL
;
9088 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
9089 VAR_LOC_1PAUX (old_var
) = NULL
;
9091 variable_was_changed (empty_var
, NULL
);
9092 /* Continue traversing the hash table. */
9095 /* Update cur_loc and one-part auxiliary data, before new_var goes
9096 through variable_was_changed. */
9097 if (old_var
!= new_var
&& new_var
->onepart
)
9099 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
9100 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
9101 VAR_LOC_1PAUX (old_var
) = NULL
;
9102 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
9104 if (variable_different_p (old_var
, new_var
))
9105 variable_was_changed (new_var
, NULL
);
9107 /* Continue traversing the hash table. */
/* NOTE(review): mangled extraction — missing lines (9112-9114, 9116,
   9118-9119, 9121-9123, 9127-9128, 9130: comment tail, braces, the
   old_var-not-found test).  Code left byte-identical.  */
9111 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9115 emit_notes_for_differences_2 (variable_def
**slot
, variable_table_type
*old_vars
)
9117 variable old_var
, new_var
;
9120 old_var
= old_vars
->find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
/* Variable appeared only in the new set: invalidate its cached
   locations and record it as changed.  */
9124 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9125 new_var
->var_part
[i
].cur_loc
= NULL
;
9126 variable_was_changed (new_var
, NULL
);
9129 /* Continue traversing the hash table. */
/* NOTE(review): mangled extraction — missing lines (9134-9136, 9139,
   9147: comment tail, opening brace, closing brace).  Code left
   byte-identical.  */
9133 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9137 emit_notes_for_differences (rtx_insn
*insn
, dataflow_set
*old_set
,
9138 dataflow_set
*new_set
)
/* Pass 1: variables in OLD_SET that changed or vanished in NEW_SET.  */
9140 shared_hash_htab (old_set
->vars
)
9141 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9142 (shared_hash_htab (new_set
->vars
));
/* Pass 2: variables that appear only in NEW_SET.  */
9143 shared_hash_htab (new_set
->vars
)
9144 ->traverse
<variable_table_type
*, emit_notes_for_differences_2
>
9145 (shared_hash_htab (old_set
->vars
));
9146 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
/* NOTE(review): mangled extraction — missing lines (9150-9151, 9153-9155,
   9157-9158, 9160-9165: signature prefix, braces, loop structure,
   return).  Code left byte-identical.  */
9149 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9152 next_non_note_insn_var_location (rtx_insn
*insn
)
9156 insn
= NEXT_INSN (insn
);
9159 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
/* NOTE(review): mangled extraction — statements split across lines and
   many originals missing (the switch statement on mo->type, its case
   labels, braces; see gaps such as 9167-9168, 9170-9171, 9173, 9176,
   9178, 9181-9184, ...).  Code left byte-identical; only comments were
   added.  The per-case bodies below correspond to the micro-operation
   kinds described at the top of this file (call, use, val-use,
   val-set, set, copy, use-no-var, clobber, adjust).  */
9166 /* Emit the notes for changes of location parts in the basic block BB. */
9169 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9172 micro_operation
*mo
;
9174 dataflow_set_clear (set
);
9175 dataflow_set_copy (set
, &VTI (bb
)->in
);
9177 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9179 rtx_insn
*insn
= mo
->insn
;
9180 rtx_insn
*next_insn
= next_non_note_insn_var_location (insn
);
/* Call insn: locations in call-clobbered resources are invalidated;
   the call-argument note is assembled from mo->u.loc.  */
9185 dataflow_set_clear_at_call (set
);
9186 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9188 rtx arguments
= mo
->u
.loc
, *p
= &arguments
;
9192 XEXP (XEXP (*p
, 0), 1)
9193 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9194 shared_hash_htab (set
->vars
));
9195 /* If expansion is successful, keep it in the list. */
9196 if (XEXP (XEXP (*p
, 0), 1))
9198 /* Otherwise, if the following item is data_value for it,
9200 else if (XEXP (*p
, 1)
9201 && REG_P (XEXP (XEXP (*p
, 0), 0))
9202 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9203 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9205 && REGNO (XEXP (XEXP (*p
, 0), 0))
9206 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9208 *p
= XEXP (XEXP (*p
, 1), 1);
9209 /* Just drop this item. */
9213 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9214 NOTE_VAR_LOCATION (note
) = arguments
;
/* Plain use of a register or memory.  */
9220 rtx loc
= mo
->u
.loc
;
9223 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9225 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9227 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
/* Use of a value (debug insn binding).  */
9233 rtx loc
= mo
->u
.loc
;
9237 if (GET_CODE (loc
) == CONCAT
)
9239 val
= XEXP (loc
, 0);
9240 vloc
= XEXP (loc
, 1);
9248 var
= PAT_VAR_LOCATION_DECL (vloc
);
9250 clobber_variable_part (set
, NULL_RTX
,
9251 dv_from_decl (var
), 0, NULL_RTX
);
9254 if (VAL_NEEDS_RESOLUTION (loc
))
9255 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9256 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9257 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9260 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9261 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9262 dv_from_decl (var
), 0,
9263 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9266 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
/* Value bound to a location (MO_VAL_LOC-style record).  */
9272 rtx loc
= mo
->u
.loc
;
9273 rtx val
, vloc
, uloc
;
9275 vloc
= uloc
= XEXP (loc
, 1);
9276 val
= XEXP (loc
, 0);
9278 if (GET_CODE (val
) == CONCAT
)
9280 uloc
= XEXP (val
, 1);
9281 val
= XEXP (val
, 0);
9284 if (VAL_NEEDS_RESOLUTION (loc
))
9285 val_resolve (set
, val
, vloc
, insn
);
9287 val_store (set
, val
, uloc
, insn
, false);
9289 if (VAL_HOLDS_TRACK_EXPR (loc
))
9291 if (GET_CODE (uloc
) == REG
)
9292 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9294 else if (GET_CODE (uloc
) == MEM
)
9295 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9299 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
/* Value set (MO_VAL_SET): unpack the nested CONCAT/SET structure
   into val/vloc/uloc/dstv/srcv before updating the dataflow set.  */
9305 rtx loc
= mo
->u
.loc
;
9306 rtx val
, vloc
, uloc
;
9310 uloc
= XEXP (vloc
, 1);
9311 val
= XEXP (vloc
, 0);
9314 if (GET_CODE (uloc
) == SET
)
9316 dstv
= SET_DEST (uloc
);
9317 srcv
= SET_SRC (uloc
);
9325 if (GET_CODE (val
) == CONCAT
)
9327 dstv
= vloc
= XEXP (val
, 1);
9328 val
= XEXP (val
, 0);
9331 if (GET_CODE (vloc
) == SET
)
9333 srcv
= SET_SRC (vloc
);
9335 gcc_assert (val
!= srcv
);
9336 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9338 dstv
= vloc
= SET_DEST (vloc
);
9340 if (VAL_NEEDS_RESOLUTION (loc
))
9341 val_resolve (set
, val
, srcv
, insn
);
9343 else if (VAL_NEEDS_RESOLUTION (loc
))
9345 gcc_assert (GET_CODE (uloc
) == SET
9346 && GET_CODE (SET_SRC (uloc
)) == REG
);
9347 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9350 if (VAL_HOLDS_TRACK_EXPR (loc
))
9352 if (VAL_EXPR_IS_CLOBBERED (loc
))
9355 var_reg_delete (set
, uloc
, true);
9356 else if (MEM_P (uloc
))
9358 gcc_assert (MEM_P (dstv
));
9359 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9360 var_mem_delete (set
, dstv
, true);
/* Not clobbered: a set or copy of a tracked expression.  */
9365 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9366 rtx src
= NULL
, dst
= uloc
;
9367 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9369 if (GET_CODE (uloc
) == SET
)
9371 src
= SET_SRC (uloc
);
9372 dst
= SET_DEST (uloc
);
9377 status
= find_src_status (set
, src
);
9379 src
= find_src_set_src (set
, src
);
9383 var_reg_delete_and_set (set
, dst
, !copied_p
,
9385 else if (MEM_P (dst
))
9387 gcc_assert (MEM_P (dstv
));
9388 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9389 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9394 else if (REG_P (uloc
))
9395 var_regno_delete (set
, REGNO (uloc
));
9396 else if (MEM_P (uloc
))
9398 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9399 gcc_checking_assert (vloc
== dstv
);
9401 clobber_overlapping_mems (set
, vloc
);
9404 val_store (set
, val
, dstv
, insn
, true);
9406 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
/* Plain set.  */
9413 rtx loc
= mo
->u
.loc
;
9416 if (GET_CODE (loc
) == SET
)
9418 set_src
= SET_SRC (loc
);
9419 loc
= SET_DEST (loc
);
9423 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9426 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9429 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
/* Copy: like a set, but the source's init status is propagated.  */
9436 rtx loc
= mo
->u
.loc
;
9437 enum var_init_status src_status
;
9440 if (GET_CODE (loc
) == SET
)
9442 set_src
= SET_SRC (loc
);
9443 loc
= SET_DEST (loc
);
9446 src_status
= find_src_status (set
, set_src
);
9447 set_src
= find_src_set_src (set
, set_src
);
9450 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9452 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9454 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
/* Use with no variable attached.  */
9461 rtx loc
= mo
->u
.loc
;
9464 var_reg_delete (set
, loc
, false);
9466 var_mem_delete (set
, loc
, false);
9468 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
/* Clobber.  */
9474 rtx loc
= mo
->u
.loc
;
9477 var_reg_delete (set
, loc
, true);
9479 var_mem_delete (set
, loc
, true);
9481 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
/* Stack adjustment.  */
9487 set
->stack_adjust
+= mo
->u
.adjust
;
/* NOTE(review): mangled extraction — missing lines (9494-9495, 9497-9500,
   9502, 9504, 9507, 9510-9511, 9513, 9515-9516, ...: braces, locals,
   #endif, the final emit_notes guard).  Code left byte-identical.  */
9493 /* Emit notes for the whole function. */
9496 vt_emit_notes (void)
9501 gcc_assert (!changed_variables
->elements ());
9503 /* Free memory occupied by the out hash tables, as they aren't used
9505 FOR_EACH_BB_FN (bb
, cfun
)
9506 dataflow_set_clear (&VTI (bb
)->out
);
9508 /* Enable emitting notes by functions (mainly by set_variable_part and
9509 delete_variable_part). */
9512 if (MAY_HAVE_DEBUG_INSNS
)
9514 dropped_values
= new variable_table_type (cselib_get_next_uid () * 2);
/* CUR tracks the running location state across basic blocks.  */
9517 dataflow_set_init (&cur
);
9519 FOR_EACH_BB_FN (bb
, cfun
)
9521 /* Emit the notes for changes of variable locations between two
9522 subsequent basic blocks. */
9523 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
9525 if (MAY_HAVE_DEBUG_INSNS
)
9526 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9528 /* Emit the notes for the changes in the basic block itself. */
9529 emit_notes_in_bb (bb
, &cur
);
9531 if (MAY_HAVE_DEBUG_INSNS
)
9532 delete local_get_addr_cache
;
9533 local_get_addr_cache
= NULL
;
9535 /* Free memory occupied by the in hash table, we won't need it
9537 dataflow_set_clear (&VTI (bb
)->in
);
9539 #ifdef ENABLE_CHECKING
9540 shared_hash_htab (cur
.vars
)
9541 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9542 (shared_hash_htab (empty_shared_hash
));
9544 dataflow_set_destroy (&cur
);
9546 if (MAY_HAVE_DEBUG_INSNS
)
9547 delete dropped_values
;
9548 dropped_values
= NULL
;
/* NOTE(review): mangled extraction — missing lines (9555-9556, 9558-9560,
   9562, 9565-9567, 9569, 9573, 9575, 9578-9579, 9582, 9585-9593, 9595,
   9597, 9600-9605: return type, braces, REG_P test, the PARALLEL
   success path, returns).  Code left byte-identical.  */
9553 /* If there is a declaration and offset associated with register/memory RTL
9554 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9557 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
9561 if (REG_ATTRS (rtl
))
9563 *declp
= REG_EXPR (rtl
);
9564 *offsetp
= REG_OFFSET (rtl
);
/* PARALLEL: all member registers must agree on one decl; the offset
   is the minimum member offset.  */
9568 else if (GET_CODE (rtl
) == PARALLEL
)
9570 tree decl
= NULL_TREE
;
9571 HOST_WIDE_INT offset
= MAX_VAR_PARTS
;
9572 int len
= XVECLEN (rtl
, 0), i
;
9574 for (i
= 0; i
< len
; i
++)
9576 rtx reg
= XEXP (XVECEXP (rtl
, 0, i
), 0);
9577 if (!REG_P (reg
) || !REG_ATTRS (reg
))
9580 decl
= REG_EXPR (reg
);
9581 if (REG_EXPR (reg
) != decl
)
9583 if (REG_OFFSET (reg
) < offset
)
9584 offset
= REG_OFFSET (reg
);
9594 else if (MEM_P (rtl
))
9596 if (MEM_ATTRS (rtl
))
9598 *declp
= MEM_EXPR (rtl
);
9599 *offsetp
= INT_MEM_OFFSET (rtl
);
/* NOTE(review): mangled extraction — missing lines (9607-9609, 9611,
   9613, 9615, 9617: comment tail, return type, braces).  Code left
   byte-identical.  */
9606 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9610 record_entry_value (cselib_val
*val
, rtx rtl
)
9612 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9614 ENTRY_VALUE_EXP (ev
) = rtl
;
/* Permanent equivalence: VAL == ENTRY_VALUE(RTL) from function entry.  */
9616 cselib_add_permanent_equiv (val
, ev
, get_insns ());
9619 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9622 vt_add_function_parameter (tree parm
)
9624 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9625 rtx incoming
= DECL_INCOMING_RTL (parm
);
9628 HOST_WIDE_INT offset
;
9632 if (TREE_CODE (parm
) != PARM_DECL
)
9635 if (!decl_rtl
|| !incoming
)
9638 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9641 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9642 rewrite the incoming location of parameters passed on the stack
9643 into MEMs based on the argument pointer, so that incoming doesn't
9644 depend on a pseudo. */
9645 if (MEM_P (incoming
)
9646 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9647 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9648 && XEXP (XEXP (incoming
, 0), 0)
9649 == crtl
->args
.internal_arg_pointer
9650 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9652 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9653 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9654 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
9656 = replace_equiv_address_nv (incoming
,
9657 plus_constant (Pmode
,
9658 arg_pointer_rtx
, off
));
9661 #ifdef HAVE_window_save
9662 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9663 If the target machine has an explicit window save instruction, the
9664 actual entry value is the corresponding OUTGOING_REGNO instead. */
9665 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
9667 if (REG_P (incoming
)
9668 && HARD_REGISTER_P (incoming
)
9669 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9672 p
.incoming
= incoming
;
9674 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9675 OUTGOING_REGNO (REGNO (incoming
)), 0);
9676 p
.outgoing
= incoming
;
9677 vec_safe_push (windowed_parm_regs
, p
);
9679 else if (GET_CODE (incoming
) == PARALLEL
)
9682 = gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (XVECLEN (incoming
, 0)));
9685 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9687 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9690 reg
= gen_rtx_REG_offset (reg
, GET_MODE (reg
),
9691 OUTGOING_REGNO (REGNO (reg
)), 0);
9693 XVECEXP (outgoing
, 0, i
)
9694 = gen_rtx_EXPR_LIST (VOIDmode
, reg
,
9695 XEXP (XVECEXP (incoming
, 0, i
), 1));
9696 vec_safe_push (windowed_parm_regs
, p
);
9699 incoming
= outgoing
;
9701 else if (MEM_P (incoming
)
9702 && REG_P (XEXP (incoming
, 0))
9703 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9705 rtx reg
= XEXP (incoming
, 0);
9706 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9710 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9712 vec_safe_push (windowed_parm_regs
, p
);
9713 incoming
= replace_equiv_address_nv (incoming
, reg
);
9719 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9721 if (MEM_P (incoming
))
9723 /* This means argument is passed by invisible reference. */
9729 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9731 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9732 GET_MODE (decl_rtl
));
9741 /* If that DECL_RTL wasn't a pseudo that got spilled to
9742 memory, bail out. Otherwise, the spill slot sharing code
9743 will force the memory to reference spill_slot_decl (%sfp),
9744 so we don't match above. That's ok, the pseudo must have
9745 referenced the entire parameter, so just reset OFFSET. */
9746 if (decl
!= get_spill_slot_decl (false))
9751 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
9754 out
= &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
;
9756 dv
= dv_from_decl (parm
);
9758 if (target_for_debug_bind (parm
)
9759 /* We can't deal with these right now, because this kind of
9760 variable is single-part. ??? We could handle parallels
9761 that describe multiple locations for the same single
9762 value, but ATM we don't. */
9763 && GET_CODE (incoming
) != PARALLEL
)
9768 /* ??? We shouldn't ever hit this, but it may happen because
9769 arguments passed by invisible reference aren't dealt with
9770 above: incoming-rtl will have Pmode rather than the
9771 expected mode for the type. */
9775 lowpart
= var_lowpart (mode
, incoming
);
9779 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9780 VOIDmode
, get_insns ());
9782 /* ??? Float-typed values in memory are not handled by
9786 preserve_value (val
);
9787 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
9788 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9789 dv
= dv_from_value (val
->val_rtx
);
9792 if (MEM_P (incoming
))
9794 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9795 VOIDmode
, get_insns ());
9798 preserve_value (val
);
9799 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
9804 if (REG_P (incoming
))
9806 incoming
= var_lowpart (mode
, incoming
);
9807 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9808 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
9810 set_variable_part (out
, incoming
, dv
, offset
,
9811 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9812 if (dv_is_value_p (dv
))
9814 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
9815 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9816 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9818 machine_mode indmode
9819 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9820 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9821 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9826 preserve_value (val
);
9827 record_entry_value (val
, mem
);
9828 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9829 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9834 else if (GET_CODE (incoming
) == PARALLEL
&& !dv_onepart_p (dv
))
9838 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9840 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9841 offset
= REG_OFFSET (reg
);
9842 gcc_assert (REGNO (reg
) < FIRST_PSEUDO_REGISTER
);
9843 attrs_list_insert (&out
->regs
[REGNO (reg
)], dv
, offset
, reg
);
9844 set_variable_part (out
, reg
, dv
, offset
,
9845 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9848 else if (MEM_P (incoming
))
9850 incoming
= var_lowpart (mode
, incoming
);
9851 set_variable_part (out
, incoming
, dv
, offset
,
9852 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9856 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9859 vt_add_function_parameters (void)
9863 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9864 parm
; parm
= DECL_CHAIN (parm
))
9865 if (!POINTER_BOUNDS_P (parm
))
9866 vt_add_function_parameter (parm
);
9868 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9870 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
9872 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9873 vexpr
= TREE_OPERAND (vexpr
, 0);
9875 if (TREE_CODE (vexpr
) == PARM_DECL
9876 && DECL_ARTIFICIAL (vexpr
)
9877 && !DECL_IGNORED_P (vexpr
)
9878 && DECL_NAMELESS (vexpr
))
9879 vt_add_function_parameter (vexpr
);
9883 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9884 ensure it isn't flushed during cselib_reset_table.
9885 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9886 has been eliminated. */
9889 vt_init_cfa_base (void)
9893 #ifdef FRAME_POINTER_CFA_OFFSET
9894 cfa_base_rtx
= frame_pointer_rtx
;
9895 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9897 cfa_base_rtx
= arg_pointer_rtx
;
9898 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
9900 if (cfa_base_rtx
== hard_frame_pointer_rtx
9901 || !fixed_regs
[REGNO (cfa_base_rtx
)])
9903 cfa_base_rtx
= NULL_RTX
;
9906 if (!MAY_HAVE_DEBUG_INSNS
)
9909 /* Tell alias analysis that cfa_base_rtx should share
9910 find_base_term value with stack pointer or hard frame pointer. */
9911 if (!frame_pointer_needed
)
9912 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
9913 else if (!crtl
->stack_realign_tried
)
9914 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
9916 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
9917 VOIDmode
, get_insns ());
9918 preserve_value (val
);
9919 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
9922 /* Allocate and initialize the data structures for variable tracking
9923 and parse the RTL to get the micro operations. */
9926 vt_initialize (void)
9929 HOST_WIDE_INT fp_cfa_offset
= -1;
9931 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
9933 empty_shared_hash
= new shared_hash_def
;
9934 empty_shared_hash
->refcount
= 1;
9935 empty_shared_hash
->htab
= new variable_table_type (1);
9936 changed_variables
= new variable_table_type (10);
9938 /* Init the IN and OUT sets. */
9939 FOR_ALL_BB_FN (bb
, cfun
)
9941 VTI (bb
)->visited
= false;
9942 VTI (bb
)->flooded
= false;
9943 dataflow_set_init (&VTI (bb
)->in
);
9944 dataflow_set_init (&VTI (bb
)->out
);
9945 VTI (bb
)->permp
= NULL
;
9948 if (MAY_HAVE_DEBUG_INSNS
)
9950 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
9951 scratch_regs
= BITMAP_ALLOC (NULL
);
9952 preserved_values
.create (256);
9953 global_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9957 scratch_regs
= NULL
;
9958 global_get_addr_cache
= NULL
;
9961 if (MAY_HAVE_DEBUG_INSNS
)
9967 #ifdef FRAME_POINTER_CFA_OFFSET
9968 reg
= frame_pointer_rtx
;
9969 ofst
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9971 reg
= arg_pointer_rtx
;
9972 ofst
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
9975 ofst
-= INCOMING_FRAME_SP_OFFSET
;
9977 val
= cselib_lookup_from_insn (reg
, GET_MODE (reg
), 1,
9978 VOIDmode
, get_insns ());
9979 preserve_value (val
);
9980 if (reg
!= hard_frame_pointer_rtx
&& fixed_regs
[REGNO (reg
)])
9981 cselib_preserve_cfa_base_value (val
, REGNO (reg
));
9982 expr
= plus_constant (GET_MODE (stack_pointer_rtx
),
9983 stack_pointer_rtx
, -ofst
);
9984 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9988 val
= cselib_lookup_from_insn (stack_pointer_rtx
,
9989 GET_MODE (stack_pointer_rtx
), 1,
9990 VOIDmode
, get_insns ());
9991 preserve_value (val
);
9992 expr
= plus_constant (GET_MODE (reg
), reg
, ofst
);
9993 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9997 /* In order to factor out the adjustments made to the stack pointer or to
9998 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9999 instead of individual location lists, we're going to rewrite MEMs based
10000 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
10001 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
10002 resp. arg_pointer_rtx. We can do this either when there is no frame
10003 pointer in the function and stack adjustments are consistent for all
10004 basic blocks or when there is a frame pointer and no stack realignment.
10005 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
10006 has been eliminated. */
10007 if (!frame_pointer_needed
)
10011 if (!vt_stack_adjustments ())
10014 #ifdef FRAME_POINTER_CFA_OFFSET
10015 reg
= frame_pointer_rtx
;
10017 reg
= arg_pointer_rtx
;
10019 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10022 if (GET_CODE (elim
) == PLUS
)
10023 elim
= XEXP (elim
, 0);
10024 if (elim
== stack_pointer_rtx
)
10025 vt_init_cfa_base ();
10028 else if (!crtl
->stack_realign_tried
)
10032 #ifdef FRAME_POINTER_CFA_OFFSET
10033 reg
= frame_pointer_rtx
;
10034 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
10036 reg
= arg_pointer_rtx
;
10037 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
10039 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10042 if (GET_CODE (elim
) == PLUS
)
10044 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
10045 elim
= XEXP (elim
, 0);
10047 if (elim
!= hard_frame_pointer_rtx
)
10048 fp_cfa_offset
= -1;
10051 fp_cfa_offset
= -1;
10054 /* If the stack is realigned and a DRAP register is used, we're going to
10055 rewrite MEMs based on it representing incoming locations of parameters
10056 passed on the stack into MEMs based on the argument pointer. Although
10057 we aren't going to rewrite other MEMs, we still need to initialize the
10058 virtual CFA pointer in order to ensure that the argument pointer will
10059 be seen as a constant throughout the function.
10061 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10062 else if (stack_realign_drap
)
10066 #ifdef FRAME_POINTER_CFA_OFFSET
10067 reg
= frame_pointer_rtx
;
10069 reg
= arg_pointer_rtx
;
10071 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10074 if (GET_CODE (elim
) == PLUS
)
10075 elim
= XEXP (elim
, 0);
10076 if (elim
== hard_frame_pointer_rtx
)
10077 vt_init_cfa_base ();
10081 hard_frame_pointer_adjustment
= -1;
10083 vt_add_function_parameters ();
10085 FOR_EACH_BB_FN (bb
, cfun
)
10088 HOST_WIDE_INT pre
, post
= 0;
10089 basic_block first_bb
, last_bb
;
10091 if (MAY_HAVE_DEBUG_INSNS
)
10093 cselib_record_sets_hook
= add_with_sets
;
10094 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10095 fprintf (dump_file
, "first value: %i\n",
10096 cselib_get_next_uid ());
10103 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
10104 || ! single_pred_p (bb
->next_bb
))
10106 e
= find_edge (bb
, bb
->next_bb
);
10107 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
10113 /* Add the micro-operations to the vector. */
10114 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
10116 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
10117 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
10118 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
10119 insn
= NEXT_INSN (insn
))
10123 if (!frame_pointer_needed
)
10125 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
10128 micro_operation mo
;
10129 mo
.type
= MO_ADJUST
;
10132 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10133 log_op_type (PATTERN (insn
), bb
, insn
,
10134 MO_ADJUST
, dump_file
);
10135 VTI (bb
)->mos
.safe_push (mo
);
10136 VTI (bb
)->out
.stack_adjust
+= pre
;
10140 cselib_hook_called
= false;
10141 adjust_insn (bb
, insn
);
10142 if (MAY_HAVE_DEBUG_INSNS
)
10145 prepare_call_arguments (bb
, insn
);
10146 cselib_process_insn (insn
);
10147 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10149 print_rtl_single (dump_file
, insn
);
10150 dump_cselib_table (dump_file
);
10153 if (!cselib_hook_called
)
10154 add_with_sets (insn
, 0, 0);
10155 cancel_changes (0);
10157 if (!frame_pointer_needed
&& post
)
10159 micro_operation mo
;
10160 mo
.type
= MO_ADJUST
;
10161 mo
.u
.adjust
= post
;
10163 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10164 log_op_type (PATTERN (insn
), bb
, insn
,
10165 MO_ADJUST
, dump_file
);
10166 VTI (bb
)->mos
.safe_push (mo
);
10167 VTI (bb
)->out
.stack_adjust
+= post
;
10170 if (fp_cfa_offset
!= -1
10171 && hard_frame_pointer_adjustment
== -1
10172 && fp_setter_insn (insn
))
10174 vt_init_cfa_base ();
10175 hard_frame_pointer_adjustment
= fp_cfa_offset
;
10176 /* Disassociate sp from fp now. */
10177 if (MAY_HAVE_DEBUG_INSNS
)
10180 cselib_invalidate_rtx (stack_pointer_rtx
);
10181 v
= cselib_lookup (stack_pointer_rtx
, Pmode
, 1,
10183 if (v
&& !cselib_preserved_value_p (v
))
10185 cselib_set_value_sp_based (v
);
10186 preserve_value (v
);
10192 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
10197 if (MAY_HAVE_DEBUG_INSNS
)
10199 cselib_preserve_only_values ();
10200 cselib_reset_table (cselib_get_next_uid ());
10201 cselib_record_sets_hook
= NULL
;
10205 hard_frame_pointer_adjustment
= -1;
10206 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->flooded
= true;
10207 cfa_base_rtx
= NULL_RTX
;
10211 /* This is *not* reset after each function. It gives each
10212 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10213 a unique label number. */
10215 static int debug_label_num
= 1;
10217 /* Get rid of all debug insns from the insn stream. */
10220 delete_debug_insns (void)
10223 rtx_insn
*insn
, *next
;
10225 if (!MAY_HAVE_DEBUG_INSNS
)
10228 FOR_EACH_BB_FN (bb
, cfun
)
10230 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
10231 if (DEBUG_INSN_P (insn
))
10233 tree decl
= INSN_VAR_LOCATION_DECL (insn
);
10234 if (TREE_CODE (decl
) == LABEL_DECL
10235 && DECL_NAME (decl
)
10236 && !DECL_RTL_SET_P (decl
))
10238 PUT_CODE (insn
, NOTE
);
10239 NOTE_KIND (insn
) = NOTE_INSN_DELETED_DEBUG_LABEL
;
10240 NOTE_DELETED_LABEL_NAME (insn
)
10241 = IDENTIFIER_POINTER (DECL_NAME (decl
));
10242 SET_DECL_RTL (decl
, insn
);
10243 CODE_LABEL_NUMBER (insn
) = debug_label_num
++;
10246 delete_insn (insn
);
10251 /* Run a fast, BB-local only version of var tracking, to take care of
10252 information that we don't do global analysis on, such that not all
10253 information is lost. If SKIPPED holds, we're skipping the global
10254 pass entirely, so we should try to use information it would have
10255 handled as well.. */
10258 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
10260 /* ??? Just skip it all for now. */
10261 delete_debug_insns ();
10264 /* Free the data structures needed for variable tracking. */
10271 FOR_EACH_BB_FN (bb
, cfun
)
10273 VTI (bb
)->mos
.release ();
10276 FOR_ALL_BB_FN (bb
, cfun
)
10278 dataflow_set_destroy (&VTI (bb
)->in
);
10279 dataflow_set_destroy (&VTI (bb
)->out
);
10280 if (VTI (bb
)->permp
)
10282 dataflow_set_destroy (VTI (bb
)->permp
);
10283 XDELETE (VTI (bb
)->permp
);
10286 free_aux_for_blocks ();
10287 delete empty_shared_hash
->htab
;
10288 empty_shared_hash
->htab
= NULL
;
10289 delete changed_variables
;
10290 changed_variables
= NULL
;
10291 attrs_def::pool
.release ();
10292 var_pool
.release ();
10293 location_chain_def::pool
.release ();
10294 shared_hash_def::pool
.release ();
10296 if (MAY_HAVE_DEBUG_INSNS
)
10298 if (global_get_addr_cache
)
10299 delete global_get_addr_cache
;
10300 global_get_addr_cache
= NULL
;
10301 loc_exp_dep::pool
.release ();
10302 valvar_pool
.release ();
10303 preserved_values
.release ();
10305 BITMAP_FREE (scratch_regs
);
10306 scratch_regs
= NULL
;
10309 #ifdef HAVE_window_save
10310 vec_free (windowed_parm_regs
);
10314 XDELETEVEC (vui_vec
);
10319 /* The entry point to variable tracking pass. */
10321 static inline unsigned int
10322 variable_tracking_main_1 (void)
10326 if (flag_var_tracking_assignments
< 0
10327 /* Var-tracking right now assumes the IR doesn't contain
10328 any pseudos at this point. */
10329 || targetm
.no_register_allocation
)
10331 delete_debug_insns ();
10335 if (n_basic_blocks_for_fn (cfun
) > 500 &&
10336 n_edges_for_fn (cfun
) / n_basic_blocks_for_fn (cfun
) >= 20)
10338 vt_debug_insns_local (true);
10342 mark_dfs_back_edges ();
10343 if (!vt_initialize ())
10346 vt_debug_insns_local (true);
10350 success
= vt_find_locations ();
10352 if (!success
&& flag_var_tracking_assignments
> 0)
10356 delete_debug_insns ();
10358 /* This is later restored by our caller. */
10359 flag_var_tracking_assignments
= 0;
10361 success
= vt_initialize ();
10362 gcc_assert (success
);
10364 success
= vt_find_locations ();
10370 vt_debug_insns_local (false);
10374 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10376 dump_dataflow_sets ();
10377 dump_reg_info (dump_file
);
10378 dump_flow_info (dump_file
, dump_flags
);
10381 timevar_push (TV_VAR_TRACKING_EMIT
);
10383 timevar_pop (TV_VAR_TRACKING_EMIT
);
10386 vt_debug_insns_local (false);
10391 variable_tracking_main (void)
10394 int save
= flag_var_tracking_assignments
;
10396 ret
= variable_tracking_main_1 ();
10398 flag_var_tracking_assignments
= save
;
10405 const pass_data pass_data_variable_tracking
=
10407 RTL_PASS
, /* type */
10408 "vartrack", /* name */
10409 OPTGROUP_NONE
, /* optinfo_flags */
10410 TV_VAR_TRACKING
, /* tv_id */
10411 0, /* properties_required */
10412 0, /* properties_provided */
10413 0, /* properties_destroyed */
10414 0, /* todo_flags_start */
10415 0, /* todo_flags_finish */
10418 class pass_variable_tracking
: public rtl_opt_pass
10421 pass_variable_tracking (gcc::context
*ctxt
)
10422 : rtl_opt_pass (pass_data_variable_tracking
, ctxt
)
10425 /* opt_pass methods: */
10426 virtual bool gate (function
*)
10428 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
10431 virtual unsigned int execute (function
*)
10433 return variable_tracking_main ();
10436 }; // class pass_variable_tracking
10438 } // anon namespace
10441 make_pass_variable_tracking (gcc::context
*ctxt
)
10443 return new pass_variable_tracking (ctxt
);