1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and, for each physical register, a linked list of its attributes.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
95 #include "stor-layout.h"
97 #include "hash-table.h"
98 #include "basic-block.h"
100 #include "hard-reg-set.h"
102 #include "insn-config.h"
105 #include "alloc-pool.h"
109 #include "tree-pass.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
121 #include "rtl-iter.h"
123 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
124 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE
, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE
, /* Use location which is associated with a value. */
137 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
138 MO_VAL_SET
, /* Set location associated with a value. */
139 MO_SET
, /* Set location. */
140 MO_COPY
, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER
, /* Clobber location. */
143 MO_CALL
, /* Call insn. */
144 MO_ADJUST
/* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name
[] = {
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
167 EMIT_NOTE_BEFORE_INSN
,
168 EMIT_NOTE_AFTER_INSN
,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type
;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust
;
200 /* A declaration of a variable, or an RTL value being handled like a
202 typedef void *decl_or_value
;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
206 dv_is_decl_p (decl_or_value dv
)
208 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
211 /* Return true if a decl_or_value is a VALUE rtl. */
213 dv_is_value_p (decl_or_value dv
)
215 return dv
&& !dv_is_decl_p (dv
);
218 /* Return the decl in the decl_or_value. */
220 dv_as_decl (decl_or_value dv
)
222 gcc_checking_assert (dv_is_decl_p (dv
));
226 /* Return the value in the decl_or_value. */
228 dv_as_value (decl_or_value dv
)
230 gcc_checking_assert (dv_is_value_p (dv
));
234 /* Return the opaque pointer in the decl_or_value. */
236 dv_as_opaque (decl_or_value dv
)
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 chain is the best data structure. */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def
*next
;
251 /* The rtx of register. */
254 /* The declaration corresponding to LOC. */
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset
;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def
*next
;
267 /* The location (REG, MEM or VALUE). */
270 /* The "value" stored in this location. */
274 enum var_init_status init
;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE' s linked-list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
285 /* The dependency VALUE or DECL_DEBUG. */
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s
*next
;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s
**pprev
;
295 /* This data structure holds information about the depth of a variable
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep
*backlinks
;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
325 /* The depth of the cur_loc expression. */
327 /* Dependencies actively used when expand FROM into cur_loc. */
328 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain
;
337 /* Location which was last emitted to location list. */
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset
;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux
*onepaux
;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
361 /* A DEBUG_EXPR_DECL. */
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
374 /* Reference count. */
377 /* Number of variable parts. */
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables
;
387 /* The variable parts. */
388 variable_part var_part
[1];
390 typedef const struct variable_def
*const_variable
;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
436 typedef unsigned int dvuid
;
438 /* Return the uid of DV. */
441 dv_uid (decl_or_value dv
)
443 if (dv_is_value_p (dv
))
444 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
446 return DECL_UID (dv_as_decl (dv
));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid
)
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv
)
462 return dv_uid2hash (dv_uid (dv
));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type
;
472 typedef void compare_type
;
473 static inline hashval_t
hash (const value_type
*);
474 static inline bool equal (const value_type
*, const compare_type
*);
475 static inline void remove (value_type
*);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
482 variable_hasher::hash (const value_type
*v
)
484 return dv_htab_hash (v
->dv
);
487 /* Compare the declaration of variable X with declaration Y. */
490 variable_hasher::equal (const value_type
*v
, const compare_type
*y
)
492 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
494 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
500 variable_hasher::remove (value_type
*var
)
502 variable_htab_free (var
);
505 typedef hash_table
<variable_hasher
> variable_table_type
;
506 typedef variable_table_type::iterator variable_iterator_type
;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where
;
518 /* The variables and values active at this point. */
519 variable_table_type
*vars
;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
529 /* Actual hash table. */
530 variable_table_type
*htab
;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust
;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs
[FIRST_PSEUDO_REGISTER
];
542 /* Variable locations. */
545 /* Vars that is being traversed. */
546 shared_hash traversed_vars
;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec
<micro_operation
> mos
;
556 /* The IN and OUT set for dataflow analysis. */
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
566 /* Has the block been visited in DFS? */
569 /* Has the block been flooded in VTA? */
572 } *variable_tracking_info
;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool
;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool
;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool
;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool
;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool
;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool
;
592 /* Changed variables, notes will be emitted for them. */
593 static variable_table_type
*changed_variables
;
595 /* Shall notes be emitted? */
596 static bool emit_notes
;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type
*dropped_values
;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash
;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs
= NULL
;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg
{
616 /* Vector of windowed parameter registers, if any. */
617 static vec
<parm_reg_t
, va_gc
> *windowed_parm_regs
= NULL
;
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called
;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
626 static void insn_stack_adjust_offset_pre_post (rtx_insn
*, HOST_WIDE_INT
*,
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs
*);
631 static void attrs_list_clear (attrs
*);
632 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
633 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
634 static void attrs_list_copy (attrs
*, attrs
);
635 static void attrs_list_union (attrs
*, attrs
);
637 static variable_def
**unshare_variable (dataflow_set
*set
, variable_def
**slot
,
638 variable var
, enum var_init_status
);
639 static void vars_copy (variable_table_type
*, variable_table_type
*);
640 static tree
var_debug_decl (tree
);
641 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
642 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
643 enum var_init_status
, rtx
);
644 static void var_reg_delete (dataflow_set
*, rtx
, bool);
645 static void var_regno_delete (dataflow_set
*, int);
646 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
647 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
648 enum var_init_status
, rtx
);
649 static void var_mem_delete (dataflow_set
*, rtx
, bool);
651 static void dataflow_set_init (dataflow_set
*);
652 static void dataflow_set_clear (dataflow_set
*);
653 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
656 static location_chain
find_loc_in_1pdv (rtx
, variable
, variable_table_type
*);
657 static bool canon_value_cmp (rtx
, rtx
);
658 static int loc_cmp (rtx
, rtx
);
659 static bool variable_part_different_p (variable_part
*, variable_part
*);
660 static bool onepart_variable_different_p (variable
, variable
);
661 static bool variable_different_p (variable
, variable
);
662 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
663 static void dataflow_set_destroy (dataflow_set
*);
665 static bool contains_symbol_ref (rtx
);
666 static bool track_expr_p (tree
, bool);
667 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
668 static void add_uses_1 (rtx
*, void *);
669 static void add_stores (rtx
, const_rtx
, void *);
670 static bool compute_bb_dataflow (basic_block
);
671 static bool vt_find_locations (void);
673 static void dump_attrs_list (attrs
);
674 static void dump_var (variable
);
675 static void dump_vars (variable_table_type
*);
676 static void dump_dataflow_set (dataflow_set
*);
677 static void dump_dataflow_sets (void);
679 static void set_dv_changed (decl_or_value
, bool);
680 static void variable_was_changed (variable
, dataflow_set
*);
681 static variable_def
**set_slot_part (dataflow_set
*, rtx
, variable_def
**,
682 decl_or_value
, HOST_WIDE_INT
,
683 enum var_init_status
, rtx
);
684 static void set_variable_part (dataflow_set
*, rtx
,
685 decl_or_value
, HOST_WIDE_INT
,
686 enum var_init_status
, rtx
, enum insert_option
);
687 static variable_def
**clobber_slot_part (dataflow_set
*, rtx
,
688 variable_def
**, HOST_WIDE_INT
, rtx
);
689 static void clobber_variable_part (dataflow_set
*, rtx
,
690 decl_or_value
, HOST_WIDE_INT
, rtx
);
691 static variable_def
**delete_slot_part (dataflow_set
*, rtx
, variable_def
**,
693 static void delete_variable_part (dataflow_set
*, rtx
,
694 decl_or_value
, HOST_WIDE_INT
);
695 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
696 static void vt_emit_notes (void);
698 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
699 static void vt_add_function_parameters (void);
700 static bool vt_initialize (void);
701 static void vt_finalize (void);
703 /* Given a SET, calculate the amount of stack adjustment it contains
704 PRE- and POST-modifying stack pointer.
705 This function is similar to stack_adjust_offset. */
708 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
711 rtx src
= SET_SRC (pattern
);
712 rtx dest
= SET_DEST (pattern
);
715 if (dest
== stack_pointer_rtx
)
717 /* (set (reg sp) (plus (reg sp) (const_int))) */
718 code
= GET_CODE (src
);
719 if (! (code
== PLUS
|| code
== MINUS
)
720 || XEXP (src
, 0) != stack_pointer_rtx
721 || !CONST_INT_P (XEXP (src
, 1)))
725 *post
+= INTVAL (XEXP (src
, 1));
727 *post
-= INTVAL (XEXP (src
, 1));
729 else if (MEM_P (dest
))
731 /* (set (mem (pre_dec (reg sp))) (foo)) */
732 src
= XEXP (dest
, 0);
733 code
= GET_CODE (src
);
739 if (XEXP (src
, 0) == stack_pointer_rtx
)
741 rtx val
= XEXP (XEXP (src
, 1), 1);
742 /* We handle only adjustments by constant amount. */
743 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
746 if (code
== PRE_MODIFY
)
747 *pre
-= INTVAL (val
);
749 *post
-= INTVAL (val
);
755 if (XEXP (src
, 0) == stack_pointer_rtx
)
757 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
763 if (XEXP (src
, 0) == stack_pointer_rtx
)
765 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
771 if (XEXP (src
, 0) == stack_pointer_rtx
)
773 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
779 if (XEXP (src
, 0) == stack_pointer_rtx
)
781 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
792 /* Given an INSN, calculate the amount of stack adjustment it contains
793 PRE- and POST-modifying stack pointer. */
796 insn_stack_adjust_offset_pre_post (rtx_insn
*insn
, HOST_WIDE_INT
*pre
,
804 pattern
= PATTERN (insn
);
805 if (RTX_FRAME_RELATED_P (insn
))
807 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
809 pattern
= XEXP (expr
, 0);
812 if (GET_CODE (pattern
) == SET
)
813 stack_adjust_offset_pre_post (pattern
, pre
, post
);
814 else if (GET_CODE (pattern
) == PARALLEL
815 || GET_CODE (pattern
) == SEQUENCE
)
819 /* There may be stack adjustments inside compound insns. Search
821 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
822 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
823 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
827 /* Compute stack adjustments for all blocks by traversing DFS tree.
828 Return true when the adjustments on all incoming edges are consistent.
829 Heavily borrowed from pre_and_rev_post_order_compute. */
832 vt_stack_adjustments (void)
834 edge_iterator
*stack
;
837 /* Initialize entry block. */
838 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->visited
= true;
839 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->in
.stack_adjust
=
840 INCOMING_FRAME_SP_OFFSET
;
841 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
.stack_adjust
=
842 INCOMING_FRAME_SP_OFFSET
;
844 /* Allocate stack for back-tracking up CFG. */
845 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
848 /* Push the first edge on to the stack. */
849 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
857 /* Look at the edge on the top of the stack. */
859 src
= ei_edge (ei
)->src
;
860 dest
= ei_edge (ei
)->dest
;
862 /* Check if the edge destination has been visited yet. */
863 if (!VTI (dest
)->visited
)
866 HOST_WIDE_INT pre
, post
, offset
;
867 VTI (dest
)->visited
= true;
868 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
870 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
871 for (insn
= BB_HEAD (dest
);
872 insn
!= NEXT_INSN (BB_END (dest
));
873 insn
= NEXT_INSN (insn
))
876 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
877 offset
+= pre
+ post
;
880 VTI (dest
)->out
.stack_adjust
= offset
;
882 if (EDGE_COUNT (dest
->succs
) > 0)
883 /* Since the DEST node has been visited for the first
884 time, check its successors. */
885 stack
[sp
++] = ei_start (dest
->succs
);
889 /* We can end up with different stack adjustments for the exit block
890 of a shrink-wrapped function if stack_adjust_offset_pre_post
891 doesn't understand the rtx pattern used to restore the stack
892 pointer in the epilogue. For example, on s390(x), the stack
893 pointer is often restored via a load-multiple instruction
894 and so no stack_adjust offset is recorded for it. This means
895 that the stack offset at the end of the epilogue block is
896 the same as the offset before the epilogue, whereas other paths
897 to the exit block will have the correct stack_adjust.
899 It is safe to ignore these differences because (a) we never
900 use the stack_adjust for the exit block in this pass and
901 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
902 function are correct.
904 We must check whether the adjustments on other edges are
906 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
)
907 && VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
913 if (! ei_one_before_end_p (ei
))
914 /* Go to the next edge. */
915 ei_next (&stack
[sp
- 1]);
917 /* Return to previous level if there are no more edges. */
926 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
927 hard_frame_pointer_rtx is being mapped to it and offset for it. */
928 static rtx cfa_base_rtx
;
929 static HOST_WIDE_INT cfa_base_offset
;
931 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
932 or hard_frame_pointer_rtx. */
935 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
937 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
940 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
941 or -1 if the replacement shouldn't be done. */
942 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
944 /* Data for adjust_mems callback. */
946 struct adjust_mem_data
949 enum machine_mode mem_mode
;
950 HOST_WIDE_INT stack_adjust
;
951 rtx_expr_list
*side_effects
;
954 /* Helper for adjust_mems. Return true if X is suitable for
955 transformation of wider mode arithmetics to narrower mode. */
958 use_narrower_mode_test (rtx x
, const_rtx subreg
)
960 subrtx_var_iterator::array_type array
;
961 FOR_EACH_SUBRTX_VAR (iter
, array
, x
, NONCONST
)
965 iter
.skip_subrtxes ();
967 switch (GET_CODE (x
))
970 if (cselib_lookup (x
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
972 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (x
), x
,
973 subreg_lowpart_offset (GET_MODE (subreg
),
982 iter
.substitute (XEXP (x
, 0));
991 /* Transform X into narrower mode MODE from wider mode WMODE. */
994 use_narrower_mode (rtx x
, enum machine_mode mode
, enum machine_mode wmode
)
998 return lowpart_subreg (mode
, x
, wmode
);
999 switch (GET_CODE (x
))
1002 return lowpart_subreg (mode
, x
, wmode
);
1006 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1007 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
1008 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
1010 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
1011 return simplify_gen_binary (ASHIFT
, mode
, op0
, XEXP (x
, 1));
1017 /* Helper function for adjusting used MEMs. */
1020 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1022 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1023 rtx mem
, addr
= loc
, tem
;
1024 enum machine_mode mem_mode_save
;
1026 switch (GET_CODE (loc
))
1029 /* Don't do any sp or fp replacements outside of MEM addresses
1031 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1033 if (loc
== stack_pointer_rtx
1034 && !frame_pointer_needed
1036 return compute_cfa_pointer (amd
->stack_adjust
);
1037 else if (loc
== hard_frame_pointer_rtx
1038 && frame_pointer_needed
1039 && hard_frame_pointer_adjustment
!= -1
1041 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1042 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1048 mem
= targetm
.delegitimize_address (mem
);
1049 if (mem
!= loc
&& !MEM_P (mem
))
1050 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1053 addr
= XEXP (mem
, 0);
1054 mem_mode_save
= amd
->mem_mode
;
1055 amd
->mem_mode
= GET_MODE (mem
);
1056 store_save
= amd
->store
;
1058 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1059 amd
->store
= store_save
;
1060 amd
->mem_mode
= mem_mode_save
;
1062 addr
= targetm
.delegitimize_address (addr
);
1063 if (addr
!= XEXP (mem
, 0))
1064 mem
= replace_equiv_address_nv (mem
, addr
);
1066 mem
= avoid_constant_pool_reference (mem
);
1070 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1071 gen_int_mode (GET_CODE (loc
) == PRE_INC
1072 ? GET_MODE_SIZE (amd
->mem_mode
)
1073 : -GET_MODE_SIZE (amd
->mem_mode
),
1078 addr
= XEXP (loc
, 0);
1079 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1080 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1081 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1082 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1083 || GET_CODE (loc
) == POST_INC
)
1084 ? GET_MODE_SIZE (amd
->mem_mode
)
1085 : -GET_MODE_SIZE (amd
->mem_mode
),
1087 store_save
= amd
->store
;
1089 tem
= simplify_replace_fn_rtx (tem
, old_rtx
, adjust_mems
, data
);
1090 amd
->store
= store_save
;
1091 amd
->side_effects
= alloc_EXPR_LIST (0,
1092 gen_rtx_SET (VOIDmode
,
1093 XEXP (loc
, 0), tem
),
1097 addr
= XEXP (loc
, 1);
1100 addr
= XEXP (loc
, 0);
1101 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1102 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1103 store_save
= amd
->store
;
1105 tem
= simplify_replace_fn_rtx (XEXP (loc
, 1), old_rtx
,
1107 amd
->store
= store_save
;
1108 amd
->side_effects
= alloc_EXPR_LIST (0,
1109 gen_rtx_SET (VOIDmode
,
1110 XEXP (loc
, 0), tem
),
1114 /* First try without delegitimization of whole MEMs and
1115 avoid_constant_pool_reference, which is more likely to succeed. */
1116 store_save
= amd
->store
;
1118 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1120 amd
->store
= store_save
;
1121 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1122 if (mem
== SUBREG_REG (loc
))
1127 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1128 GET_MODE (SUBREG_REG (loc
)),
1132 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1133 GET_MODE (SUBREG_REG (loc
)),
1135 if (tem
== NULL_RTX
)
1136 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1138 if (MAY_HAVE_DEBUG_INSNS
1139 && GET_CODE (tem
) == SUBREG
1140 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1141 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1142 || GET_CODE (SUBREG_REG (tem
)) == MULT
1143 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1144 && (GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
1145 || GET_MODE_CLASS (GET_MODE (tem
)) == MODE_PARTIAL_INT
)
1146 && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
1147 || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_PARTIAL_INT
)
1148 && GET_MODE_PRECISION (GET_MODE (tem
))
1149 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem
)))
1150 && subreg_lowpart_p (tem
)
1151 && use_narrower_mode_test (SUBREG_REG (tem
), tem
))
1152 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
1153 GET_MODE (SUBREG_REG (tem
)));
1156 /* Don't do any replacements in second and following
1157 ASM_OPERANDS of inline-asm with multiple sets.
1158 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1159 and ASM_OPERANDS_LABEL_VEC need to be equal between
1160 all the ASM_OPERANDs in the insn and adjust_insn will
1162 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1171 /* Helper function for replacement of uses. */
1174 adjust_mem_uses (rtx
*x
, void *data
)
1176 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1178 validate_change (NULL_RTX
, x
, new_x
, true);
1181 /* Helper function for replacement of stores. */
1184 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1188 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1190 if (new_dest
!= SET_DEST (expr
))
1192 rtx xexpr
= CONST_CAST_RTX (expr
);
1193 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1198 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1199 replace them with their value in the insn and add the side-effects
1200 as other sets to the insn. */
1203 adjust_insn (basic_block bb
, rtx_insn
*insn
)
1205 struct adjust_mem_data amd
;
1208 #ifdef HAVE_window_save
1209 /* If the target machine has an explicit window save instruction, the
1210 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1211 if (RTX_FRAME_RELATED_P (insn
)
1212 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1214 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1215 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1218 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1220 XVECEXP (rtl
, 0, i
* 2)
1221 = gen_rtx_SET (VOIDmode
, p
->incoming
, p
->outgoing
);
1222 /* Do not clobber the attached DECL, but only the REG. */
1223 XVECEXP (rtl
, 0, i
* 2 + 1)
1224 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1225 gen_raw_REG (GET_MODE (p
->outgoing
),
1226 REGNO (p
->outgoing
)));
1229 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1234 amd
.mem_mode
= VOIDmode
;
1235 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1236 amd
.side_effects
= NULL
;
1239 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1242 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1243 && asm_noperands (PATTERN (insn
)) > 0
1244 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1249 /* inline-asm with multiple sets is tiny bit more complicated,
1250 because the 3 vectors in ASM_OPERANDS need to be shared between
1251 all ASM_OPERANDS in the instruction. adjust_mems will
1252 not touch ASM_OPERANDS other than the first one, asm_noperands
1253 test above needs to be called before that (otherwise it would fail)
1254 and afterwards this code fixes it up. */
1255 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1256 body
= PATTERN (insn
);
1257 set0
= XVECEXP (body
, 0, 0);
1258 gcc_checking_assert (GET_CODE (set0
) == SET
1259 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1260 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1261 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1262 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1266 set
= XVECEXP (body
, 0, i
);
1267 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1268 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1270 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1271 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1272 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1273 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1274 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1275 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1277 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1278 ASM_OPERANDS_INPUT_VEC (newsrc
)
1279 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1280 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1281 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1282 ASM_OPERANDS_LABEL_VEC (newsrc
)
1283 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1284 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1289 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1291 /* For read-only MEMs containing some constant, prefer those
1293 set
= single_set (insn
);
1294 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1296 rtx note
= find_reg_equal_equiv_note (insn
);
1298 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1299 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1302 if (amd
.side_effects
)
1304 rtx
*pat
, new_pat
, s
;
1307 pat
= &PATTERN (insn
);
1308 if (GET_CODE (*pat
) == COND_EXEC
)
1309 pat
= &COND_EXEC_CODE (*pat
);
1310 if (GET_CODE (*pat
) == PARALLEL
)
1311 oldn
= XVECLEN (*pat
, 0);
1314 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1316 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1317 if (GET_CODE (*pat
) == PARALLEL
)
1318 for (i
= 0; i
< oldn
; i
++)
1319 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1321 XVECEXP (new_pat
, 0, 0) = *pat
;
1322 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1323 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1324 free_EXPR_LIST_list (&amd
.side_effects
);
1325 validate_change (NULL_RTX
, pat
, new_pat
, true);
1329 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1331 dv_as_rtx (decl_or_value dv
)
1335 if (dv_is_value_p (dv
))
1336 return dv_as_value (dv
);
1338 decl
= dv_as_decl (dv
);
1340 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1341 return DECL_RTL_KNOWN_SET (decl
);
1344 /* Return nonzero if a decl_or_value must not have more than one
1345 variable part. The returned value discriminates among various
1346 kinds of one-part DVs ccording to enum onepart_enum. */
1347 static inline onepart_enum_t
1348 dv_onepart_p (decl_or_value dv
)
1352 if (!MAY_HAVE_DEBUG_INSNS
)
1355 if (dv_is_value_p (dv
))
1356 return ONEPART_VALUE
;
1358 decl
= dv_as_decl (dv
);
1360 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1361 return ONEPART_DEXPR
;
1363 if (target_for_debug_bind (decl
) != NULL_TREE
)
1364 return ONEPART_VDECL
;
1369 /* Return the variable pool to be used for a dv of type ONEPART. */
1370 static inline alloc_pool
1371 onepart_pool (onepart_enum_t onepart
)
1373 return onepart
? valvar_pool
: var_pool
;
1376 /* Build a decl_or_value out of a decl. */
1377 static inline decl_or_value
1378 dv_from_decl (tree decl
)
1382 gcc_checking_assert (dv_is_decl_p (dv
));
1386 /* Build a decl_or_value out of a value. */
1387 static inline decl_or_value
1388 dv_from_value (rtx value
)
1392 gcc_checking_assert (dv_is_value_p (dv
));
1396 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1397 static inline decl_or_value
1402 switch (GET_CODE (x
))
1405 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1406 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1410 dv
= dv_from_value (x
);
1420 extern void debug_dv (decl_or_value dv
);
1423 debug_dv (decl_or_value dv
)
1425 if (dv_is_value_p (dv
))
1426 debug_rtx (dv_as_value (dv
));
1428 debug_generic_stmt (dv_as_decl (dv
));
1431 static void loc_exp_dep_clear (variable var
);
1433 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1436 variable_htab_free (void *elem
)
1439 variable var
= (variable
) elem
;
1440 location_chain node
, next
;
1442 gcc_checking_assert (var
->refcount
> 0);
1445 if (var
->refcount
> 0)
1448 for (i
= 0; i
< var
->n_var_parts
; i
++)
1450 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1453 pool_free (loc_chain_pool
, node
);
1455 var
->var_part
[i
].loc_chain
= NULL
;
1457 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1459 loc_exp_dep_clear (var
);
1460 if (VAR_LOC_DEP_LST (var
))
1461 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1462 XDELETE (VAR_LOC_1PAUX (var
));
1463 /* These may be reused across functions, so reset
1465 if (var
->onepart
== ONEPART_DEXPR
)
1466 set_dv_changed (var
->dv
, true);
1468 pool_free (onepart_pool (var
->onepart
), var
);
1471 /* Initialize the set (array) SET of attrs to empty lists. */
1474 init_attrs_list_set (attrs
*set
)
1478 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1482 /* Make the list *LISTP empty. */
1485 attrs_list_clear (attrs
*listp
)
1489 for (list
= *listp
; list
; list
= next
)
1492 pool_free (attrs_pool
, list
);
1497 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1500 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1502 for (; list
; list
= list
->next
)
1503 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1508 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1511 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1512 HOST_WIDE_INT offset
, rtx loc
)
1516 list
= (attrs
) pool_alloc (attrs_pool
);
1519 list
->offset
= offset
;
1520 list
->next
= *listp
;
1524 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1527 attrs_list_copy (attrs
*dstp
, attrs src
)
1531 attrs_list_clear (dstp
);
1532 for (; src
; src
= src
->next
)
1534 n
= (attrs
) pool_alloc (attrs_pool
);
1537 n
->offset
= src
->offset
;
1543 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1546 attrs_list_union (attrs
*dstp
, attrs src
)
1548 for (; src
; src
= src
->next
)
1550 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1551 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1555 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1559 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1561 gcc_assert (!*dstp
);
1562 for (; src
; src
= src
->next
)
1564 if (!dv_onepart_p (src
->dv
))
1565 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1567 for (src
= src2
; src
; src
= src
->next
)
1569 if (!dv_onepart_p (src
->dv
)
1570 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1571 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1575 /* Shared hashtable support. */
1577 /* Return true if VARS is shared. */
1580 shared_hash_shared (shared_hash vars
)
1582 return vars
->refcount
> 1;
1585 /* Return the hash table for VARS. */
1587 static inline variable_table_type
*
1588 shared_hash_htab (shared_hash vars
)
1593 /* Return true if VAR is shared, or maybe because VARS is shared. */
1596 shared_var_p (variable var
, shared_hash vars
)
1598 /* Don't count an entry in the changed_variables table as a duplicate. */
1599 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1600 || shared_hash_shared (vars
));
1603 /* Copy variables into a new hash table. */
1606 shared_hash_unshare (shared_hash vars
)
1608 shared_hash new_vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
1609 gcc_assert (vars
->refcount
> 1);
1610 new_vars
->refcount
= 1;
1611 new_vars
->htab
= new variable_table_type (vars
->htab
->elements () + 3);
1612 vars_copy (new_vars
->htab
, vars
->htab
);
1617 /* Increment reference counter on VARS and return it. */
1619 static inline shared_hash
1620 shared_hash_copy (shared_hash vars
)
1626 /* Decrement reference counter and destroy hash table if not shared
1630 shared_hash_destroy (shared_hash vars
)
1632 gcc_checking_assert (vars
->refcount
> 0);
1633 if (--vars
->refcount
== 0)
1636 pool_free (shared_hash_pool
, vars
);
1640 /* Unshare *PVARS if shared and return slot for DV. If INS is
1641 INSERT, insert it if not already present. */
1643 static inline variable_def
**
1644 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1645 hashval_t dvhash
, enum insert_option ins
)
1647 if (shared_hash_shared (*pvars
))
1648 *pvars
= shared_hash_unshare (*pvars
);
1649 return shared_hash_htab (*pvars
)->find_slot_with_hash (dv
, dvhash
, ins
);
1652 static inline variable_def
**
1653 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1654 enum insert_option ins
)
1656 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1659 /* Return slot for DV, if it is already present in the hash table.
1660 If it is not present, insert it only VARS is not shared, otherwise
1663 static inline variable_def
**
1664 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1666 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
,
1667 shared_hash_shared (vars
)
1668 ? NO_INSERT
: INSERT
);
1671 static inline variable_def
**
1672 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1674 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1677 /* Return slot for DV only if it is already present in the hash table. */
1679 static inline variable_def
**
1680 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1683 return shared_hash_htab (vars
)->find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1686 static inline variable_def
**
1687 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1689 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1692 /* Return variable for DV or NULL if not already present in the hash
1695 static inline variable
1696 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1698 return shared_hash_htab (vars
)->find_with_hash (dv
, dvhash
);
1701 static inline variable
1702 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1704 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1707 /* Return true if TVAL is better than CVAL as a canonival value. We
1708 choose lowest-numbered VALUEs, using the RTX address as a
1709 tie-breaker. The idea is to arrange them into a star topology,
1710 such that all of them are at most one step away from the canonical
1711 value, and the canonical value has backlinks to all of them, in
1712 addition to all the actual locations. We don't enforce this
1713 topology throughout the entire dataflow analysis, though.
1717 canon_value_cmp (rtx tval
, rtx cval
)
1720 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1723 static bool dst_can_be_shared
;
1725 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1727 static variable_def
**
1728 unshare_variable (dataflow_set
*set
, variable_def
**slot
, variable var
,
1729 enum var_init_status initialized
)
1734 new_var
= (variable
) pool_alloc (onepart_pool (var
->onepart
));
1735 new_var
->dv
= var
->dv
;
1736 new_var
->refcount
= 1;
1738 new_var
->n_var_parts
= var
->n_var_parts
;
1739 new_var
->onepart
= var
->onepart
;
1740 new_var
->in_changed_variables
= false;
1742 if (! flag_var_tracking_uninit
)
1743 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1745 for (i
= 0; i
< var
->n_var_parts
; i
++)
1747 location_chain node
;
1748 location_chain
*nextp
;
1750 if (i
== 0 && var
->onepart
)
1752 /* One-part auxiliary data is only used while emitting
1753 notes, so propagate it to the new variable in the active
1754 dataflow set. If we're not emitting notes, this will be
1756 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1757 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1758 VAR_LOC_1PAUX (var
) = NULL
;
1761 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1762 nextp
= &new_var
->var_part
[i
].loc_chain
;
1763 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1765 location_chain new_lc
;
1767 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1768 new_lc
->next
= NULL
;
1769 if (node
->init
> initialized
)
1770 new_lc
->init
= node
->init
;
1772 new_lc
->init
= initialized
;
1773 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1774 new_lc
->set_src
= node
->set_src
;
1776 new_lc
->set_src
= NULL
;
1777 new_lc
->loc
= node
->loc
;
1780 nextp
= &new_lc
->next
;
1783 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1786 dst_can_be_shared
= false;
1787 if (shared_hash_shared (set
->vars
))
1788 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1789 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1790 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1792 if (var
->in_changed_variables
)
1794 variable_def
**cslot
1795 = changed_variables
->find_slot_with_hash (var
->dv
,
1796 dv_htab_hash (var
->dv
),
1798 gcc_assert (*cslot
== (void *) var
);
1799 var
->in_changed_variables
= false;
1800 variable_htab_free (var
);
1802 new_var
->in_changed_variables
= true;
1807 /* Copy all variables from hash table SRC to hash table DST. */
1810 vars_copy (variable_table_type
*dst
, variable_table_type
*src
)
1812 variable_iterator_type hi
;
1815 FOR_EACH_HASH_TABLE_ELEMENT (*src
, var
, variable
, hi
)
1817 variable_def
**dstp
;
1819 dstp
= dst
->find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
),
1825 /* Map a decl to its main debug decl. */
1828 var_debug_decl (tree decl
)
1830 if (decl
&& TREE_CODE (decl
) == VAR_DECL
1831 && DECL_HAS_DEBUG_EXPR_P (decl
))
1833 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1834 if (DECL_P (debugdecl
))
1841 /* Set the register LOC to contain DV, OFFSET. */
1844 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1845 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1846 enum insert_option iopt
)
1849 bool decl_p
= dv_is_decl_p (dv
);
1852 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1854 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1855 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1856 && node
->offset
== offset
)
1859 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1860 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1863 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1866 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1869 tree decl
= REG_EXPR (loc
);
1870 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1872 var_reg_decl_set (set
, loc
, initialized
,
1873 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1876 static enum var_init_status
1877 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1881 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1883 if (! flag_var_tracking_uninit
)
1884 return VAR_INIT_STATUS_INITIALIZED
;
1886 var
= shared_hash_find (set
->vars
, dv
);
1889 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1891 location_chain nextp
;
1892 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1893 if (rtx_equal_p (nextp
->loc
, loc
))
1895 ret_val
= nextp
->init
;
1904 /* Delete current content of register LOC in dataflow set SET and set
1905 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1906 MODIFY is true, any other live copies of the same variable part are
1907 also deleted from the dataflow set, otherwise the variable part is
1908 assumed to be copied from another location holding the same
1912 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1913 enum var_init_status initialized
, rtx set_src
)
1915 tree decl
= REG_EXPR (loc
);
1916 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1920 decl
= var_debug_decl (decl
);
1922 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1923 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1925 nextp
= &set
->regs
[REGNO (loc
)];
1926 for (node
= *nextp
; node
; node
= next
)
1929 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1931 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1932 pool_free (attrs_pool
, node
);
1938 nextp
= &node
->next
;
1942 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1943 var_reg_set (set
, loc
, initialized
, set_src
);
1946 /* Delete the association of register LOC in dataflow set SET with any
1947 variables that aren't onepart. If CLOBBER is true, also delete any
1948 other live copies of the same variable part, and delete the
1949 association with onepart dvs too. */
1952 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1954 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1959 tree decl
= REG_EXPR (loc
);
1960 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1962 decl
= var_debug_decl (decl
);
1964 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1967 for (node
= *nextp
; node
; node
= next
)
1970 if (clobber
|| !dv_onepart_p (node
->dv
))
1972 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1973 pool_free (attrs_pool
, node
);
1977 nextp
= &node
->next
;
1981 /* Delete content of register with number REGNO in dataflow set SET. */
1984 var_regno_delete (dataflow_set
*set
, int regno
)
1986 attrs
*reg
= &set
->regs
[regno
];
1989 for (node
= *reg
; node
; node
= next
)
1992 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1993 pool_free (attrs_pool
, node
);
1998 /* Return true if I is the negated value of a power of two. */
2000 negative_power_of_two_p (HOST_WIDE_INT i
)
2002 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
2003 return x
== (x
& -x
);
2006 /* Strip constant offsets and alignments off of LOC. Return the base
2010 vt_get_canonicalize_base (rtx loc
)
2012 while ((GET_CODE (loc
) == PLUS
2013 || GET_CODE (loc
) == AND
)
2014 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2015 && (GET_CODE (loc
) != AND
2016 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
2017 loc
= XEXP (loc
, 0);
2022 /* This caches canonicalized addresses for VALUEs, computed using
2023 information in the global cselib table. */
2024 static hash_map
<rtx
, rtx
> *global_get_addr_cache
;
2026 /* This caches canonicalized addresses for VALUEs, computed using
2027 information from the global cache and information pertaining to a
2028 basic block being analyzed. */
2029 static hash_map
<rtx
, rtx
> *local_get_addr_cache
;
2031 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2033 /* Return the canonical address for LOC, that must be a VALUE, using a
2034 cached global equivalence or computing it and storing it in the
2038 get_addr_from_global_cache (rtx
const loc
)
2042 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2045 rtx
*slot
= &global_get_addr_cache
->get_or_insert (loc
, &existed
);
2049 x
= canon_rtx (get_addr (loc
));
2051 /* Tentative, avoiding infinite recursion. */
2056 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2059 /* The table may have moved during recursion, recompute
2061 *global_get_addr_cache
->get (loc
) = x
= nx
;
2068 /* Return the canonical address for LOC, that must be a VALUE, using a
2069 cached local equivalence or computing it and storing it in the
2073 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2080 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2083 rtx
*slot
= &local_get_addr_cache
->get_or_insert (loc
, &existed
);
2087 x
= get_addr_from_global_cache (loc
);
2089 /* Tentative, avoiding infinite recursion. */
2092 /* Recurse to cache local expansion of X, or if we need to search
2093 for a VALUE in the expansion. */
2096 rtx nx
= vt_canonicalize_addr (set
, x
);
2099 slot
= local_get_addr_cache
->get (loc
);
2105 dv
= dv_from_rtx (x
);
2106 var
= shared_hash_find (set
->vars
, dv
);
2110 /* Look for an improved equivalent expression. */
2111 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2113 rtx base
= vt_get_canonicalize_base (l
->loc
);
2114 if (GET_CODE (base
) == VALUE
2115 && canon_value_cmp (base
, loc
))
2117 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2120 slot
= local_get_addr_cache
->get (loc
);
2130 /* Canonicalize LOC using equivalences from SET in addition to those
2131 in the cselib static table. It expects a VALUE-based expression,
2132 and it will only substitute VALUEs with other VALUEs or
2133 function-global equivalences, so that, if two addresses have base
2134 VALUEs that are locally or globally related in ways that
2135 memrefs_conflict_p cares about, they will both canonicalize to
2136 expressions that have the same base VALUE.
2138 The use of VALUEs as canonical base addresses enables the canonical
2139 RTXs to remain unchanged globally, if they resolve to a constant,
2140 or throughout a basic block otherwise, so that they can be cached
2141 and the cache needs not be invalidated when REGs, MEMs or such
2145 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2147 HOST_WIDE_INT ofst
= 0;
2148 enum machine_mode mode
= GET_MODE (oloc
);
2155 while (GET_CODE (loc
) == PLUS
2156 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2158 ofst
+= INTVAL (XEXP (loc
, 1));
2159 loc
= XEXP (loc
, 0);
2162 /* Alignment operations can't normally be combined, so just
2163 canonicalize the base and we're done. We'll normally have
2164 only one stack alignment anyway. */
2165 if (GET_CODE (loc
) == AND
2166 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2167 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2169 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2170 if (x
!= XEXP (loc
, 0))
2171 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2175 if (GET_CODE (loc
) == VALUE
)
2178 loc
= get_addr_from_local_cache (set
, loc
);
2180 loc
= get_addr_from_global_cache (loc
);
2182 /* Consolidate plus_constants. */
2183 while (ofst
&& GET_CODE (loc
) == PLUS
2184 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2186 ofst
+= INTVAL (XEXP (loc
, 1));
2187 loc
= XEXP (loc
, 0);
2194 x
= canon_rtx (loc
);
2201 /* Add OFST back in. */
2204 /* Don't build new RTL if we can help it. */
2205 if (GET_CODE (oloc
) == PLUS
2206 && XEXP (oloc
, 0) == loc
2207 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2210 loc
= plus_constant (mode
, loc
, ofst
);
2216 /* Return true iff there's a true dependence between MLOC and LOC.
2217 MADDR must be a canonicalized version of MLOC's address. */
2220 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2222 if (GET_CODE (loc
) != MEM
)
2225 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2226 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2232 /* Hold parameters for the hashtab traversal function
2233 drop_overlapping_mem_locs, see below. */
2235 struct overlapping_mems
2241 /* Remove all MEMs that overlap with COMS->LOC from the location list
2242 of a hash table entry for a value. COMS->ADDR must be a
2243 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2244 canonicalized itself. */
2247 drop_overlapping_mem_locs (variable_def
**slot
, overlapping_mems
*coms
)
2249 dataflow_set
*set
= coms
->set
;
2250 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2251 variable var
= *slot
;
2253 if (var
->onepart
== ONEPART_VALUE
)
2255 location_chain loc
, *locp
;
2256 bool changed
= false;
2259 gcc_assert (var
->n_var_parts
== 1);
2261 if (shared_var_p (var
, set
->vars
))
2263 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2264 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2270 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2272 gcc_assert (var
->n_var_parts
== 1);
2275 if (VAR_LOC_1PAUX (var
))
2276 cur_loc
= VAR_LOC_FROM (var
);
2278 cur_loc
= var
->var_part
[0].cur_loc
;
2280 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2283 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2290 /* If we have deleted the location which was last emitted
2291 we have to emit new location so add the variable to set
2292 of changed variables. */
2293 if (cur_loc
== loc
->loc
)
2296 var
->var_part
[0].cur_loc
= NULL
;
2297 if (VAR_LOC_1PAUX (var
))
2298 VAR_LOC_FROM (var
) = NULL
;
2300 pool_free (loc_chain_pool
, loc
);
2303 if (!var
->var_part
[0].loc_chain
)
2309 variable_was_changed (var
, set
);
2315 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2318 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2320 struct overlapping_mems coms
;
2322 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2325 coms
.loc
= canon_rtx (loc
);
2326 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2328 set
->traversed_vars
= set
->vars
;
2329 shared_hash_htab (set
->vars
)
2330 ->traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2331 set
->traversed_vars
= NULL
;
2334 /* Set the location of DV, OFFSET as the MEM LOC. */
2337 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2338 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2339 enum insert_option iopt
)
2341 if (dv_is_decl_p (dv
))
2342 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2344 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2347 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2349 Adjust the address first if it is stack pointer based. */
2352 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2355 tree decl
= MEM_EXPR (loc
);
2356 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2358 var_mem_decl_set (set
, loc
, initialized
,
2359 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2362 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2363 dataflow set SET to LOC. If MODIFY is true, any other live copies
2364 of the same variable part are also deleted from the dataflow set,
2365 otherwise the variable part is assumed to be copied from another
2366 location holding the same part.
2367 Adjust the address first if it is stack pointer based. */
2370 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2371 enum var_init_status initialized
, rtx set_src
)
2373 tree decl
= MEM_EXPR (loc
);
2374 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2376 clobber_overlapping_mems (set
, loc
);
2377 decl
= var_debug_decl (decl
);
2379 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2380 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2383 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2384 var_mem_set (set
, loc
, initialized
, set_src
);
2387 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2388 true, also delete any other live copies of the same variable part.
2389 Adjust the address first if it is stack pointer based. */
2392 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2394 tree decl
= MEM_EXPR (loc
);
2395 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2397 clobber_overlapping_mems (set
, loc
);
2398 decl
= var_debug_decl (decl
);
2400 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2401 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2404 /* Return true if LOC should not be expanded for location expressions,
2408 unsuitable_loc (rtx loc
)
2410 switch (GET_CODE (loc
))
2424 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2428 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2433 var_regno_delete (set
, REGNO (loc
));
2434 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2435 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2437 else if (MEM_P (loc
))
2439 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2442 clobber_overlapping_mems (set
, loc
);
2444 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2445 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2447 /* If this MEM is a global constant, we don't need it in the
2448 dynamic tables. ??? We should test this before emitting the
2449 micro-op in the first place. */
2451 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2457 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2458 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2462 /* Other kinds of equivalences are necessarily static, at least
2463 so long as we do not perform substitutions while merging
2466 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2467 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2471 /* Bind a value to a location it was just stored in. If MODIFIED
2472 holds, assume the location was modified, detaching it from any
2473 values bound to it. */
2476 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
,
2479 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2481 gcc_assert (cselib_preserved_value_p (v
));
2485 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2486 print_inline_rtx (dump_file
, loc
, 0);
2487 fprintf (dump_file
, " evaluates to ");
2488 print_inline_rtx (dump_file
, val
, 0);
2491 struct elt_loc_list
*l
;
2492 for (l
= v
->locs
; l
; l
= l
->next
)
2494 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2495 print_inline_rtx (dump_file
, l
->loc
, 0);
2498 fprintf (dump_file
, "\n");
2501 gcc_checking_assert (!unsuitable_loc (loc
));
2503 val_bind (set
, val
, loc
, modified
);
2506 /* Clear (canonical address) slots that reference X. */
2509 local_get_addr_clear_given_value (rtx
const &, rtx
*slot
, rtx x
)
2511 if (vt_get_canonicalize_base (*slot
) == x
)
2516 /* Reset this node, detaching all its equivalences. Return the slot
2517 in the variable hash table that holds dv, if there is one. */
2520 val_reset (dataflow_set
*set
, decl_or_value dv
)
2522 variable var
= shared_hash_find (set
->vars
, dv
) ;
2523 location_chain node
;
2526 if (!var
|| !var
->n_var_parts
)
2529 gcc_assert (var
->n_var_parts
== 1);
2531 if (var
->onepart
== ONEPART_VALUE
)
2533 rtx x
= dv_as_value (dv
);
2535 /* Relationships in the global cache don't change, so reset the
2536 local cache entry only. */
2537 rtx
*slot
= local_get_addr_cache
->get (x
);
2540 /* If the value resolved back to itself, odds are that other
2541 values may have cached it too. These entries now refer
2542 to the old X, so detach them too. Entries that used the
2543 old X but resolved to something else remain ok as long as
2544 that something else isn't also reset. */
2546 local_get_addr_cache
2547 ->traverse
<rtx
, local_get_addr_clear_given_value
> (x
);
2553 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2554 if (GET_CODE (node
->loc
) == VALUE
2555 && canon_value_cmp (node
->loc
, cval
))
2558 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2559 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2561 /* Redirect the equivalence link to the new canonical
2562 value, or simply remove it if it would point at
2565 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2566 0, node
->init
, node
->set_src
, NO_INSERT
);
2567 delete_variable_part (set
, dv_as_value (dv
),
2568 dv_from_value (node
->loc
), 0);
2573 decl_or_value cdv
= dv_from_value (cval
);
2575 /* Keep the remaining values connected, accummulating links
2576 in the canonical value. */
2577 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2579 if (node
->loc
== cval
)
2581 else if (GET_CODE (node
->loc
) == REG
)
2582 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2583 node
->set_src
, NO_INSERT
);
2584 else if (GET_CODE (node
->loc
) == MEM
)
2585 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2586 node
->set_src
, NO_INSERT
);
2588 set_variable_part (set
, node
->loc
, cdv
, 0,
2589 node
->init
, node
->set_src
, NO_INSERT
);
2593 /* We remove this last, to make sure that the canonical value is not
2594 removed to the point of requiring reinsertion. */
2596 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
2598 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2601 /* Find the values in a given location and map the val to another
2602 value, if it is unique, or add the location as one holding the
2606 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx_insn
*insn
)
2608 decl_or_value dv
= dv_from_value (val
);
2610 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2613 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2615 fprintf (dump_file
, "head: ");
2616 print_inline_rtx (dump_file
, val
, 0);
2617 fputs (" is at ", dump_file
);
2618 print_inline_rtx (dump_file
, loc
, 0);
2619 fputc ('\n', dump_file
);
2622 val_reset (set
, dv
);
2624 gcc_checking_assert (!unsuitable_loc (loc
));
2628 attrs node
, found
= NULL
;
2630 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2631 if (dv_is_value_p (node
->dv
)
2632 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2636 /* Map incoming equivalences. ??? Wouldn't it be nice if
2637 we just started sharing the location lists? Maybe a
2638 circular list ending at the value itself or some
2640 set_variable_part (set
, dv_as_value (node
->dv
),
2641 dv_from_value (val
), node
->offset
,
2642 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2643 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2644 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2647 /* If we didn't find any equivalence, we need to remember that
2648 this value is held in the named register. */
2652 /* ??? Attempt to find and merge equivalent MEMs or other
2655 val_bind (set
, val
, loc
, false);
2658 /* Initialize dataflow set SET to be empty.
2659 VARS_SIZE is the initial size of hash table VARS. */
2662 dataflow_set_init (dataflow_set
*set
)
2664 init_attrs_list_set (set
->regs
);
2665 set
->vars
= shared_hash_copy (empty_shared_hash
);
2666 set
->stack_adjust
= 0;
2667 set
->traversed_vars
= NULL
;
2670 /* Delete the contents of dataflow set SET. */
2673 dataflow_set_clear (dataflow_set
*set
)
2677 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2678 attrs_list_clear (&set
->regs
[i
]);
2680 shared_hash_destroy (set
->vars
);
2681 set
->vars
= shared_hash_copy (empty_shared_hash
);
2684 /* Copy the contents of dataflow set SRC to DST. */
2687 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2691 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2692 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2694 shared_hash_destroy (dst
->vars
);
2695 dst
->vars
= shared_hash_copy (src
->vars
);
2696 dst
->stack_adjust
= src
->stack_adjust
;
2699 /* Information for merging lists of locations for a given offset of variable.
2701 struct variable_union_info
2703 /* Node of the location chain. */
2706 /* The sum of positions in the input chains. */
2709 /* The position in the chain of DST dataflow set. */
2713 /* Buffer for location list sorting and its allocated size. */
2714 static struct variable_union_info
*vui_vec
;
2715 static int vui_allocated
;
2717 /* Compare function for qsort, order the structures by POS element. */
2720 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2722 const struct variable_union_info
*const i1
=
2723 (const struct variable_union_info
*) n1
;
2724 const struct variable_union_info
*const i2
=
2725 ( const struct variable_union_info
*) n2
;
2727 if (i1
->pos
!= i2
->pos
)
2728 return i1
->pos
- i2
->pos
;
2730 return (i1
->pos_dst
- i2
->pos_dst
);
2733 /* Compute union of location parts of variable *SLOT and the same variable
2734 from hash table DATA. Compute "sorted" union of the location chains
2735 for common offsets, i.e. the locations of a variable part are sorted by
2736 a priority where the priority is the sum of the positions in the 2 chains
2737 (if a location is only in one list the position in the second list is
2738 defined to be larger than the length of the chains).
2739 When we are updating the location parts the newest location is in the
2740 beginning of the chain, so when we do the described "sorted" union
2741 we keep the newest locations in the beginning. */
2744 variable_union (variable src
, dataflow_set
*set
)
2747 variable_def
**dstp
;
2750 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2751 if (!dstp
|| !*dstp
)
2755 dst_can_be_shared
= false;
2757 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2761 /* Continue traversing the hash table. */
2767 gcc_assert (src
->n_var_parts
);
2768 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2770 /* We can combine one-part variables very efficiently, because their
2771 entries are in canonical order. */
2774 location_chain
*nodep
, dnode
, snode
;
2776 gcc_assert (src
->n_var_parts
== 1
2777 && dst
->n_var_parts
== 1);
2779 snode
= src
->var_part
[0].loc_chain
;
2782 restart_onepart_unshared
:
2783 nodep
= &dst
->var_part
[0].loc_chain
;
2789 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2793 location_chain nnode
;
2795 if (shared_var_p (dst
, set
->vars
))
2797 dstp
= unshare_variable (set
, dstp
, dst
,
2798 VAR_INIT_STATUS_INITIALIZED
);
2800 goto restart_onepart_unshared
;
2803 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2804 nnode
->loc
= snode
->loc
;
2805 nnode
->init
= snode
->init
;
2806 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2807 nnode
->set_src
= NULL
;
2809 nnode
->set_src
= snode
->set_src
;
2810 nnode
->next
= dnode
;
2814 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2817 snode
= snode
->next
;
2819 nodep
= &dnode
->next
;
2826 gcc_checking_assert (!src
->onepart
);
2828 /* Count the number of location parts, result is K. */
2829 for (i
= 0, j
= 0, k
= 0;
2830 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2832 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2837 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2842 k
+= src
->n_var_parts
- i
;
2843 k
+= dst
->n_var_parts
- j
;
2845 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2846 thus there are at most MAX_VAR_PARTS different offsets. */
2847 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2849 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2851 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2855 i
= src
->n_var_parts
- 1;
2856 j
= dst
->n_var_parts
- 1;
2857 dst
->n_var_parts
= k
;
2859 for (k
--; k
>= 0; k
--)
2861 location_chain node
, node2
;
2863 if (i
>= 0 && j
>= 0
2864 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2866 /* Compute the "sorted" union of the chains, i.e. the locations which
2867 are in both chains go first, they are sorted by the sum of
2868 positions in the chains. */
2871 struct variable_union_info
*vui
;
2873 /* If DST is shared compare the location chains.
2874 If they are different we will modify the chain in DST with
2875 high probability so make a copy of DST. */
2876 if (shared_var_p (dst
, set
->vars
))
2878 for (node
= src
->var_part
[i
].loc_chain
,
2879 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2880 node
= node
->next
, node2
= node2
->next
)
2882 if (!((REG_P (node2
->loc
)
2883 && REG_P (node
->loc
)
2884 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2885 || rtx_equal_p (node2
->loc
, node
->loc
)))
2887 if (node2
->init
< node
->init
)
2888 node2
->init
= node
->init
;
2894 dstp
= unshare_variable (set
, dstp
, dst
,
2895 VAR_INIT_STATUS_UNKNOWN
);
2896 dst
= (variable
)*dstp
;
2901 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2904 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2909 /* The most common case, much simpler, no qsort is needed. */
2910 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2911 dst
->var_part
[k
].loc_chain
= dstnode
;
2912 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2914 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2915 if (!((REG_P (dstnode
->loc
)
2916 && REG_P (node
->loc
)
2917 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2918 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2920 location_chain new_node
;
2922 /* Copy the location from SRC. */
2923 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2924 new_node
->loc
= node
->loc
;
2925 new_node
->init
= node
->init
;
2926 if (!node
->set_src
|| MEM_P (node
->set_src
))
2927 new_node
->set_src
= NULL
;
2929 new_node
->set_src
= node
->set_src
;
2930 node2
->next
= new_node
;
2937 if (src_l
+ dst_l
> vui_allocated
)
2939 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2940 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2945 /* Fill in the locations from DST. */
2946 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2947 node
= node
->next
, jj
++)
2950 vui
[jj
].pos_dst
= jj
;
2952 /* Pos plus value larger than a sum of 2 valid positions. */
2953 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2956 /* Fill in the locations from SRC. */
2958 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2959 node
= node
->next
, ii
++)
2961 /* Find location from NODE. */
2962 for (jj
= 0; jj
< dst_l
; jj
++)
2964 if ((REG_P (vui
[jj
].lc
->loc
)
2965 && REG_P (node
->loc
)
2966 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2967 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2969 vui
[jj
].pos
= jj
+ ii
;
2973 if (jj
>= dst_l
) /* The location has not been found. */
2975 location_chain new_node
;
2977 /* Copy the location from SRC. */
2978 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2979 new_node
->loc
= node
->loc
;
2980 new_node
->init
= node
->init
;
2981 if (!node
->set_src
|| MEM_P (node
->set_src
))
2982 new_node
->set_src
= NULL
;
2984 new_node
->set_src
= node
->set_src
;
2985 vui
[n
].lc
= new_node
;
2986 vui
[n
].pos_dst
= src_l
+ dst_l
;
2987 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
2994 /* Special case still very common case. For dst_l == 2
2995 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2996 vui[i].pos == i + src_l + dst_l. */
2997 if (vui
[0].pos
> vui
[1].pos
)
2999 /* Order should be 1, 0, 2... */
3000 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
3001 vui
[1].lc
->next
= vui
[0].lc
;
3004 vui
[0].lc
->next
= vui
[2].lc
;
3005 vui
[n
- 1].lc
->next
= NULL
;
3008 vui
[0].lc
->next
= NULL
;
3013 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3014 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
3016 /* Order should be 0, 2, 1, 3... */
3017 vui
[0].lc
->next
= vui
[2].lc
;
3018 vui
[2].lc
->next
= vui
[1].lc
;
3021 vui
[1].lc
->next
= vui
[3].lc
;
3022 vui
[n
- 1].lc
->next
= NULL
;
3025 vui
[1].lc
->next
= NULL
;
3030 /* Order should be 0, 1, 2... */
3032 vui
[n
- 1].lc
->next
= NULL
;
3035 for (; ii
< n
; ii
++)
3036 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3040 qsort (vui
, n
, sizeof (struct variable_union_info
),
3041 variable_union_info_cmp_pos
);
3043 /* Reconnect the nodes in sorted order. */
3044 for (ii
= 1; ii
< n
; ii
++)
3045 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3046 vui
[n
- 1].lc
->next
= NULL
;
3047 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3050 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3055 else if ((i
>= 0 && j
>= 0
3056 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3059 dst
->var_part
[k
] = dst
->var_part
[j
];
3062 else if ((i
>= 0 && j
>= 0
3063 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3066 location_chain
*nextp
;
3068 /* Copy the chain from SRC. */
3069 nextp
= &dst
->var_part
[k
].loc_chain
;
3070 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3072 location_chain new_lc
;
3074 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
3075 new_lc
->next
= NULL
;
3076 new_lc
->init
= node
->init
;
3077 if (!node
->set_src
|| MEM_P (node
->set_src
))
3078 new_lc
->set_src
= NULL
;
3080 new_lc
->set_src
= node
->set_src
;
3081 new_lc
->loc
= node
->loc
;
3084 nextp
= &new_lc
->next
;
3087 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3090 dst
->var_part
[k
].cur_loc
= NULL
;
3093 if (flag_var_tracking_uninit
)
3094 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3096 location_chain node
, node2
;
3097 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3098 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3099 if (rtx_equal_p (node
->loc
, node2
->loc
))
3101 if (node
->init
> node2
->init
)
3102 node2
->init
= node
->init
;
3106 /* Continue traversing the hash table. */
3110 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3113 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
3117 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3118 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
3120 if (dst
->vars
== empty_shared_hash
)
3122 shared_hash_destroy (dst
->vars
);
3123 dst
->vars
= shared_hash_copy (src
->vars
);
3127 variable_iterator_type hi
;
3130 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src
->vars
),
3132 variable_union (var
, dst
);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3151 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3152 user DECLs, this means they're in changed_variables. Values and
3153 debug exprs may be left with this flag set if no user variable
3154 requires them to be evaluated. */
3157 set_dv_changed (decl_or_value dv
, bool newv
)
3159 switch (dv_onepart_p (dv
))
3163 NO_LOC_P (dv_as_value (dv
)) = false;
3164 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
3169 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3170 /* Fall through... */
3173 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3178 /* Return true if DV needs to have its cur_loc recomputed. */
3181 dv_changed_p (decl_or_value dv
)
3183 return (dv_is_value_p (dv
)
3184 ? VALUE_CHANGED (dv_as_value (dv
))
3185 : DECL_CHANGED (dv_as_decl (dv
)));
3188 /* Return a location list node whose loc is rtx_equal to LOC, in the
3189 location list of a one-part variable or value VAR, or in that of
3190 any values recursively mentioned in the location lists. VARS must
3191 be in star-canonical form. */
3193 static location_chain
3194 find_loc_in_1pdv (rtx loc
, variable var
, variable_table_type
*vars
)
3196 location_chain node
;
3197 enum rtx_code loc_code
;
3202 gcc_checking_assert (var
->onepart
);
3204 if (!var
->n_var_parts
)
3207 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3209 loc_code
= GET_CODE (loc
);
3210 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3215 if (GET_CODE (node
->loc
) != loc_code
)
3217 if (GET_CODE (node
->loc
) != VALUE
)
3220 else if (loc
== node
->loc
)
3222 else if (loc_code
!= VALUE
)
3224 if (rtx_equal_p (loc
, node
->loc
))
3229 /* Since we're in star-canonical form, we don't need to visit
3230 non-canonical nodes: one-part variables and non-canonical
3231 values would only point back to the canonical node. */
3232 if (dv_is_value_p (var
->dv
)
3233 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3235 /* Skip all subsequent VALUEs. */
3236 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3239 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3240 dv_as_value (var
->dv
)));
3241 if (loc
== node
->loc
)
3247 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3248 gcc_checking_assert (!node
->next
);
3250 dv
= dv_from_value (node
->loc
);
3251 rvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
3252 return find_loc_in_1pdv (loc
, rvar
, vars
);
3255 /* ??? Gotta look in cselib_val locations too. */
3260 /* Hash table iteration argument passed to variable_merge. */
3263 /* The set in which the merge is to be inserted. */
3265 /* The set that we're iterating in. */
3267 /* The set that may contain the other dv we are to merge with. */
3269 /* Number of onepart dvs in src. */
3270 int src_onepart_cnt
;
3273 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3274 loc_cmp order, and it is maintained as such. */
3277 insert_into_intersection (location_chain
*nodep
, rtx loc
,
3278 enum var_init_status status
)
3280 location_chain node
;
3283 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3284 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
3286 node
->init
= MIN (node
->init
, status
);
3292 node
= (location_chain
) pool_alloc (loc_chain_pool
);
3295 node
->set_src
= NULL
;
3296 node
->init
= status
;
3297 node
->next
= *nodep
;
3301 /* Insert in DEST the intersection of the locations present in both
3302 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3303 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3307 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
3308 location_chain s1node
, variable s2var
)
3310 dataflow_set
*s1set
= dsm
->cur
;
3311 dataflow_set
*s2set
= dsm
->src
;
3312 location_chain found
;
3316 location_chain s2node
;
3318 gcc_checking_assert (s2var
->onepart
);
3320 if (s2var
->n_var_parts
)
3322 s2node
= s2var
->var_part
[0].loc_chain
;
3324 for (; s1node
&& s2node
;
3325 s1node
= s1node
->next
, s2node
= s2node
->next
)
3326 if (s1node
->loc
!= s2node
->loc
)
3328 else if (s1node
->loc
== val
)
3331 insert_into_intersection (dest
, s1node
->loc
,
3332 MIN (s1node
->init
, s2node
->init
));
3336 for (; s1node
; s1node
= s1node
->next
)
3338 if (s1node
->loc
== val
)
3341 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3342 shared_hash_htab (s2set
->vars
))))
3344 insert_into_intersection (dest
, s1node
->loc
,
3345 MIN (s1node
->init
, found
->init
));
3349 if (GET_CODE (s1node
->loc
) == VALUE
3350 && !VALUE_RECURSED_INTO (s1node
->loc
))
3352 decl_or_value dv
= dv_from_value (s1node
->loc
);
3353 variable svar
= shared_hash_find (s1set
->vars
, dv
);
3356 if (svar
->n_var_parts
== 1)
3358 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3359 intersect_loc_chains (val
, dest
, dsm
,
3360 svar
->var_part
[0].loc_chain
,
3362 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3367 /* ??? gotta look in cselib_val locations too. */
3369 /* ??? if the location is equivalent to any location in src,
3370 searched recursively
3372 add to dst the values needed to represent the equivalence
3374 telling whether locations S is equivalent to another dv's
3377 for each location D in the list
3379 if S and D satisfy rtx_equal_p, then it is present
3381 else if D is a value, recurse without cycles
3383 else if S and D have the same CODE and MODE
3385 for each operand oS and the corresponding oD
3387 if oS and oD are not equivalent, then S an D are not equivalent
3389 else if they are RTX vectors
3391 if any vector oS element is not equivalent to its respective oD,
3392 then S and D are not equivalent
3400 /* Return -1 if X should be before Y in a location list for a 1-part
3401 variable, 1 if Y should be before X, and 0 if they're equivalent
3402 and should not appear in the list. */
3405 loc_cmp (rtx x
, rtx y
)
3408 RTX_CODE code
= GET_CODE (x
);
3418 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3419 if (REGNO (x
) == REGNO (y
))
3421 else if (REGNO (x
) < REGNO (y
))
3434 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3435 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
3441 if (GET_CODE (x
) == VALUE
)
3443 if (GET_CODE (y
) != VALUE
)
3445 /* Don't assert the modes are the same, that is true only
3446 when not recursing. (subreg:QI (value:SI 1:1) 0)
3447 and (subreg:QI (value:DI 2:2) 0) can be compared,
3448 even when the modes are different. */
3449 if (canon_value_cmp (x
, y
))
3455 if (GET_CODE (y
) == VALUE
)
3458 /* Entry value is the least preferable kind of expression. */
3459 if (GET_CODE (x
) == ENTRY_VALUE
)
3461 if (GET_CODE (y
) != ENTRY_VALUE
)
3463 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3464 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3467 if (GET_CODE (y
) == ENTRY_VALUE
)
3470 if (GET_CODE (x
) == GET_CODE (y
))
3471 /* Compare operands below. */;
3472 else if (GET_CODE (x
) < GET_CODE (y
))
3477 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3479 if (GET_CODE (x
) == DEBUG_EXPR
)
3481 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3482 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3484 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3485 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
3489 fmt
= GET_RTX_FORMAT (code
);
3490 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3494 if (XWINT (x
, i
) == XWINT (y
, i
))
3496 else if (XWINT (x
, i
) < XWINT (y
, i
))
3503 if (XINT (x
, i
) == XINT (y
, i
))
3505 else if (XINT (x
, i
) < XINT (y
, i
))
3512 /* Compare the vector length first. */
3513 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3514 /* Compare the vectors elements. */;
3515 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3520 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3521 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3522 XVECEXP (y
, i
, j
))))
3527 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3533 if (XSTR (x
, i
) == XSTR (y
, i
))
3539 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3547 /* These are just backpointers, so they don't matter. */
3554 /* It is believed that rtx's at this level will never
3555 contain anything but integers and other rtx's,
3556 except for within LABEL_REFs and SYMBOL_REFs. */
3560 if (CONST_WIDE_INT_P (x
))
3562 /* Compare the vector length first. */
3563 if (CONST_WIDE_INT_NUNITS (x
) >= CONST_WIDE_INT_NUNITS (y
))
3565 else if (CONST_WIDE_INT_NUNITS (x
) < CONST_WIDE_INT_NUNITS (y
))
3568 /* Compare the vectors elements. */;
3569 for (j
= CONST_WIDE_INT_NUNITS (x
) - 1; j
>= 0 ; j
--)
3571 if (CONST_WIDE_INT_ELT (x
, j
) < CONST_WIDE_INT_ELT (y
, j
))
3573 if (CONST_WIDE_INT_ELT (x
, j
) > CONST_WIDE_INT_ELT (y
, j
))
3582 /* Check the order of entries in one-part variables. */
3585 canonicalize_loc_order_check (variable_def
**slot
,
3586 dataflow_set
*data ATTRIBUTE_UNUSED
)
3588 variable var
= *slot
;
3589 location_chain node
, next
;
3591 #ifdef ENABLE_RTL_CHECKING
3593 for (i
= 0; i
< var
->n_var_parts
; i
++)
3594 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3595 gcc_assert (!var
->in_changed_variables
);
3601 gcc_assert (var
->n_var_parts
== 1);
3602 node
= var
->var_part
[0].loc_chain
;
3605 while ((next
= node
->next
))
3607 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3615 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3616 more likely to be chosen as canonical for an equivalence set.
3617 Ensure less likely values can reach more likely neighbors, making
3618 the connections bidirectional. */
3621 canonicalize_values_mark (variable_def
**slot
, dataflow_set
*set
)
3623 variable var
= *slot
;
3624 decl_or_value dv
= var
->dv
;
3626 location_chain node
;
3628 if (!dv_is_value_p (dv
))
3631 gcc_checking_assert (var
->n_var_parts
== 1);
3633 val
= dv_as_value (dv
);
3635 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3636 if (GET_CODE (node
->loc
) == VALUE
)
3638 if (canon_value_cmp (node
->loc
, val
))
3639 VALUE_RECURSED_INTO (val
) = true;
3642 decl_or_value odv
= dv_from_value (node
->loc
);
3643 variable_def
**oslot
;
3644 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3646 set_slot_part (set
, val
, oslot
, odv
, 0,
3647 node
->init
, NULL_RTX
);
3649 VALUE_RECURSED_INTO (node
->loc
) = true;
3656 /* Remove redundant entries from equivalence lists in onepart
3657 variables, canonicalizing equivalence sets into star shapes. */
3660 canonicalize_values_star (variable_def
**slot
, dataflow_set
*set
)
3662 variable var
= *slot
;
3663 decl_or_value dv
= var
->dv
;
3664 location_chain node
;
3667 variable_def
**cslot
;
3674 gcc_checking_assert (var
->n_var_parts
== 1);
3676 if (dv_is_value_p (dv
))
3678 cval
= dv_as_value (dv
);
3679 if (!VALUE_RECURSED_INTO (cval
))
3681 VALUE_RECURSED_INTO (cval
) = false;
3691 gcc_assert (var
->n_var_parts
== 1);
3693 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3694 if (GET_CODE (node
->loc
) == VALUE
)
3697 if (VALUE_RECURSED_INTO (node
->loc
))
3699 if (canon_value_cmp (node
->loc
, cval
))
3708 if (!has_marks
|| dv_is_decl_p (dv
))
3711 /* Keep it marked so that we revisit it, either after visiting a
3712 child node, or after visiting a new parent that might be
3714 VALUE_RECURSED_INTO (val
) = true;
3716 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3717 if (GET_CODE (node
->loc
) == VALUE
3718 && VALUE_RECURSED_INTO (node
->loc
))
3722 VALUE_RECURSED_INTO (cval
) = false;
3723 dv
= dv_from_value (cval
);
3724 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3727 gcc_assert (dv_is_decl_p (var
->dv
));
3728 /* The canonical value was reset and dropped.
3730 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3734 gcc_assert (dv_is_value_p (var
->dv
));
3735 if (var
->n_var_parts
== 0)
3737 gcc_assert (var
->n_var_parts
== 1);
3741 VALUE_RECURSED_INTO (val
) = false;
3746 /* Push values to the canonical one. */
3747 cdv
= dv_from_value (cval
);
3748 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3750 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3751 if (node
->loc
!= cval
)
3753 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3754 node
->init
, NULL_RTX
);
3755 if (GET_CODE (node
->loc
) == VALUE
)
3757 decl_or_value ndv
= dv_from_value (node
->loc
);
3759 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3762 if (canon_value_cmp (node
->loc
, val
))
3764 /* If it could have been a local minimum, it's not any more,
3765 since it's now neighbor to cval, so it may have to push
3766 to it. Conversely, if it wouldn't have prevailed over
3767 val, then whatever mark it has is fine: if it was to
3768 push, it will now push to a more canonical node, but if
3769 it wasn't, then it has already pushed any values it might
3771 VALUE_RECURSED_INTO (node
->loc
) = true;
3772 /* Make sure we visit node->loc by ensuring we cval is
3774 VALUE_RECURSED_INTO (cval
) = true;
3776 else if (!VALUE_RECURSED_INTO (node
->loc
))
3777 /* If we have no need to "recurse" into this node, it's
3778 already "canonicalized", so drop the link to the old
3780 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3782 else if (GET_CODE (node
->loc
) == REG
)
3784 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3786 /* Change an existing attribute referring to dv so that it
3787 refers to cdv, removing any duplicate this might
3788 introduce, and checking that no previous duplicates
3789 existed, all in a single pass. */
3793 if (list
->offset
== 0
3794 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3795 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3802 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3805 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3810 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3812 *listp
= list
->next
;
3813 pool_free (attrs_pool
, list
);
3818 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3821 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3823 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3828 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3830 *listp
= list
->next
;
3831 pool_free (attrs_pool
, list
);
3836 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3845 if (list
->offset
== 0
3846 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3847 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3857 set_slot_part (set
, val
, cslot
, cdv
, 0,
3858 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3860 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3862 /* Variable may have been unshared. */
3864 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3865 && var
->var_part
[0].loc_chain
->next
== NULL
);
3867 if (VALUE_RECURSED_INTO (cval
))
3868 goto restart_with_cval
;
3873 /* Bind one-part variables to the canonical value in an equivalence
3874 set. Not doing this causes dataflow convergence failure in rare
3875 circumstances, see PR42873. Unfortunately we can't do this
3876 efficiently as part of canonicalize_values_star, since we may not
3877 have determined or even seen the canonical value of a set when we
3878 get to a variable that references another member of the set. */
3881 canonicalize_vars_star (variable_def
**slot
, dataflow_set
*set
)
3883 variable var
= *slot
;
3884 decl_or_value dv
= var
->dv
;
3885 location_chain node
;
3888 variable_def
**cslot
;
3890 location_chain cnode
;
3892 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3895 gcc_assert (var
->n_var_parts
== 1);
3897 node
= var
->var_part
[0].loc_chain
;
3899 if (GET_CODE (node
->loc
) != VALUE
)
3902 gcc_assert (!node
->next
);
3905 /* Push values to the canonical one. */
3906 cdv
= dv_from_value (cval
);
3907 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3911 gcc_assert (cvar
->n_var_parts
== 1);
3913 cnode
= cvar
->var_part
[0].loc_chain
;
3915 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3916 that are not “more canonical” than it. */
3917 if (GET_CODE (cnode
->loc
) != VALUE
3918 || !canon_value_cmp (cnode
->loc
, cval
))
3921 /* CVAL was found to be non-canonical. Change the variable to point
3922 to the canonical VALUE. */
3923 gcc_assert (!cnode
->next
);
3926 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3927 node
->init
, node
->set_src
);
3928 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3933 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3934 corresponding entry in DSM->src. Multi-part variables are combined
3935 with variable_union, whereas onepart dvs are combined with
3939 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
3941 dataflow_set
*dst
= dsm
->dst
;
3942 variable_def
**dstslot
;
3943 variable s2var
, dvar
= NULL
;
3944 decl_or_value dv
= s1var
->dv
;
3945 onepart_enum_t onepart
= s1var
->onepart
;
3948 location_chain node
, *nodep
;
3950 /* If the incoming onepart variable has an empty location list, then
3951 the intersection will be just as empty. For other variables,
3952 it's always union. */
3953 gcc_checking_assert (s1var
->n_var_parts
3954 && s1var
->var_part
[0].loc_chain
);
3957 return variable_union (s1var
, dst
);
3959 gcc_checking_assert (s1var
->n_var_parts
== 1);
3961 dvhash
= dv_htab_hash (dv
);
3962 if (dv_is_value_p (dv
))
3963 val
= dv_as_value (dv
);
3967 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3970 dst_can_be_shared
= false;
3974 dsm
->src_onepart_cnt
--;
3975 gcc_assert (s2var
->var_part
[0].loc_chain
3976 && s2var
->onepart
== onepart
3977 && s2var
->n_var_parts
== 1);
3979 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3983 gcc_assert (dvar
->refcount
== 1
3984 && dvar
->onepart
== onepart
3985 && dvar
->n_var_parts
== 1);
3986 nodep
= &dvar
->var_part
[0].loc_chain
;
3994 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3996 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3998 *dstslot
= dvar
= s2var
;
4003 dst_can_be_shared
= false;
4005 intersect_loc_chains (val
, nodep
, dsm
,
4006 s1var
->var_part
[0].loc_chain
, s2var
);
4012 dvar
= (variable
) pool_alloc (onepart_pool (onepart
));
4015 dvar
->n_var_parts
= 1;
4016 dvar
->onepart
= onepart
;
4017 dvar
->in_changed_variables
= false;
4018 dvar
->var_part
[0].loc_chain
= node
;
4019 dvar
->var_part
[0].cur_loc
= NULL
;
4021 VAR_LOC_1PAUX (dvar
) = NULL
;
4023 VAR_PART_OFFSET (dvar
, 0) = 0;
4026 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
4028 gcc_assert (!*dstslot
);
4036 nodep
= &dvar
->var_part
[0].loc_chain
;
4037 while ((node
= *nodep
))
4039 location_chain
*nextp
= &node
->next
;
4041 if (GET_CODE (node
->loc
) == REG
)
4045 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4046 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4047 && dv_is_value_p (list
->dv
))
4051 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4053 /* If this value became canonical for another value that had
4054 this register, we want to leave it alone. */
4055 else if (dv_as_value (list
->dv
) != val
)
4057 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4059 node
->init
, NULL_RTX
);
4060 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4062 /* Since nextp points into the removed node, we can't
4063 use it. The pointer to the next node moved to nodep.
4064 However, if the variable we're walking is unshared
4065 during our walk, we'll keep walking the location list
4066 of the previously-shared variable, in which case the
4067 node won't have been removed, and we'll want to skip
4068 it. That's why we test *nodep here. */
4074 /* Canonicalization puts registers first, so we don't have to
4080 if (dvar
!= *dstslot
)
4082 nodep
= &dvar
->var_part
[0].loc_chain
;
4086 /* Mark all referenced nodes for canonicalization, and make sure
4087 we have mutual equivalence links. */
4088 VALUE_RECURSED_INTO (val
) = true;
4089 for (node
= *nodep
; node
; node
= node
->next
)
4090 if (GET_CODE (node
->loc
) == VALUE
)
4092 VALUE_RECURSED_INTO (node
->loc
) = true;
4093 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4094 node
->init
, NULL
, INSERT
);
4097 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4098 gcc_assert (*dstslot
== dvar
);
4099 canonicalize_values_star (dstslot
, dst
);
4100 gcc_checking_assert (dstslot
4101 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4107 bool has_value
= false, has_other
= false;
4109 /* If we have one value and anything else, we're going to
4110 canonicalize this, so make sure all values have an entry in
4111 the table and are marked for canonicalization. */
4112 for (node
= *nodep
; node
; node
= node
->next
)
4114 if (GET_CODE (node
->loc
) == VALUE
)
4116 /* If this was marked during register canonicalization,
4117 we know we have to canonicalize values. */
4132 if (has_value
&& has_other
)
4134 for (node
= *nodep
; node
; node
= node
->next
)
4136 if (GET_CODE (node
->loc
) == VALUE
)
4138 decl_or_value dv
= dv_from_value (node
->loc
);
4139 variable_def
**slot
= NULL
;
4141 if (shared_hash_shared (dst
->vars
))
4142 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4144 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4148 variable var
= (variable
) pool_alloc (onepart_pool
4152 var
->n_var_parts
= 1;
4153 var
->onepart
= ONEPART_VALUE
;
4154 var
->in_changed_variables
= false;
4155 var
->var_part
[0].loc_chain
= NULL
;
4156 var
->var_part
[0].cur_loc
= NULL
;
4157 VAR_LOC_1PAUX (var
) = NULL
;
4161 VALUE_RECURSED_INTO (node
->loc
) = true;
4165 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4166 gcc_assert (*dstslot
== dvar
);
4167 canonicalize_values_star (dstslot
, dst
);
4168 gcc_checking_assert (dstslot
4169 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4175 if (!onepart_variable_different_p (dvar
, s2var
))
4177 variable_htab_free (dvar
);
4178 *dstslot
= dvar
= s2var
;
4181 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4183 variable_htab_free (dvar
);
4184 *dstslot
= dvar
= s1var
;
4186 dst_can_be_shared
= false;
4189 dst_can_be_shared
= false;
4194 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4195 multi-part variable. Unions of multi-part variables and
4196 intersections of one-part ones will be handled in
4197 variable_merge_over_cur(). */
4200 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
4202 dataflow_set
*dst
= dsm
->dst
;
4203 decl_or_value dv
= s2var
->dv
;
4205 if (!s2var
->onepart
)
4207 variable_def
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4213 dsm
->src_onepart_cnt
++;
4217 /* Combine dataflow set information from SRC2 into DST, using PDST
4218 to carry over information across passes. */
4221 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4223 dataflow_set cur
= *dst
;
4224 dataflow_set
*src1
= &cur
;
4225 struct dfset_merge dsm
;
4227 size_t src1_elems
, src2_elems
;
4228 variable_iterator_type hi
;
4231 src1_elems
= shared_hash_htab (src1
->vars
)->elements ();
4232 src2_elems
= shared_hash_htab (src2
->vars
)->elements ();
4233 dataflow_set_init (dst
);
4234 dst
->stack_adjust
= cur
.stack_adjust
;
4235 shared_hash_destroy (dst
->vars
);
4236 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
4237 dst
->vars
->refcount
= 1;
4238 dst
->vars
->htab
= new variable_table_type (MAX (src1_elems
, src2_elems
));
4240 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4241 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4246 dsm
.src_onepart_cnt
= 0;
4248 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.src
->vars
),
4250 variable_merge_over_src (var
, &dsm
);
4251 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm
.cur
->vars
),
4253 variable_merge_over_cur (var
, &dsm
);
4255 if (dsm
.src_onepart_cnt
)
4256 dst_can_be_shared
= false;
4258 dataflow_set_destroy (src1
);
4261 /* Mark register equivalences. */
4264 dataflow_set_equiv_regs (dataflow_set
*set
)
4269 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4271 rtx canon
[NUM_MACHINE_MODES
];
4273 /* If the list is empty or one entry, no need to canonicalize
4275 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4278 memset (canon
, 0, sizeof (canon
));
4280 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4281 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4283 rtx val
= dv_as_value (list
->dv
);
4284 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4287 if (canon_value_cmp (val
, cval
))
4291 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4292 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4294 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4299 if (dv_is_value_p (list
->dv
))
4301 rtx val
= dv_as_value (list
->dv
);
4306 VALUE_RECURSED_INTO (val
) = true;
4307 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4308 VAR_INIT_STATUS_INITIALIZED
,
4312 VALUE_RECURSED_INTO (cval
) = true;
4313 set_variable_part (set
, cval
, list
->dv
, 0,
4314 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4317 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4318 listp
= list
? &list
->next
: listp
)
4319 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4321 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4322 variable_def
**slot
;
4327 if (dv_is_value_p (list
->dv
))
4329 rtx val
= dv_as_value (list
->dv
);
4330 if (!VALUE_RECURSED_INTO (val
))
4334 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4335 canonicalize_values_star (slot
, set
);
4342 /* Remove any redundant values in the location list of VAR, which must
4343 be unshared and 1-part. */
4346 remove_duplicate_values (variable var
)
4348 location_chain node
, *nodep
;
4350 gcc_assert (var
->onepart
);
4351 gcc_assert (var
->n_var_parts
== 1);
4352 gcc_assert (var
->refcount
== 1);
4354 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4356 if (GET_CODE (node
->loc
) == VALUE
)
4358 if (VALUE_RECURSED_INTO (node
->loc
))
4360 /* Remove duplicate value node. */
4361 *nodep
= node
->next
;
4362 pool_free (loc_chain_pool
, node
);
4366 VALUE_RECURSED_INTO (node
->loc
) = true;
4368 nodep
= &node
->next
;
4371 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4372 if (GET_CODE (node
->loc
) == VALUE
)
4374 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4375 VALUE_RECURSED_INTO (node
->loc
) = false;
4380 /* Hash table iteration argument passed to variable_post_merge. */
4381 struct dfset_post_merge
4383 /* The new input set for the current block. */
4385 /* Pointer to the permanent input set for the current block, or
4387 dataflow_set
**permp
;
4390 /* Create values for incoming expressions associated with one-part
4391 variables that don't have value numbers for them. */
4394 variable_post_merge_new_vals (variable_def
**slot
, dfset_post_merge
*dfpm
)
4396 dataflow_set
*set
= dfpm
->set
;
4397 variable var
= *slot
;
4398 location_chain node
;
4400 if (!var
->onepart
|| !var
->n_var_parts
)
4403 gcc_assert (var
->n_var_parts
== 1);
4405 if (dv_is_decl_p (var
->dv
))
4407 bool check_dupes
= false;
4410 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4412 if (GET_CODE (node
->loc
) == VALUE
)
4413 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4414 else if (GET_CODE (node
->loc
) == REG
)
4416 attrs att
, *attp
, *curp
= NULL
;
4418 if (var
->refcount
!= 1)
4420 slot
= unshare_variable (set
, slot
, var
,
4421 VAR_INIT_STATUS_INITIALIZED
);
4426 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4428 if (att
->offset
== 0
4429 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4431 if (dv_is_value_p (att
->dv
))
4433 rtx cval
= dv_as_value (att
->dv
);
4438 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4446 if ((*curp
)->offset
== 0
4447 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4448 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4451 curp
= &(*curp
)->next
;
4462 *dfpm
->permp
= XNEW (dataflow_set
);
4463 dataflow_set_init (*dfpm
->permp
);
4466 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4467 att
; att
= att
->next
)
4468 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4470 gcc_assert (att
->offset
== 0
4471 && dv_is_value_p (att
->dv
));
4472 val_reset (set
, att
->dv
);
4479 cval
= dv_as_value (cdv
);
4483 /* Create a unique value to hold this register,
4484 that ought to be found and reused in
4485 subsequent rounds. */
4487 gcc_assert (!cselib_lookup (node
->loc
,
4488 GET_MODE (node
->loc
), 0,
4490 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4492 cselib_preserve_value (v
);
4493 cselib_invalidate_rtx (node
->loc
);
4495 cdv
= dv_from_value (cval
);
4498 "Created new value %u:%u for reg %i\n",
4499 v
->uid
, v
->hash
, REGNO (node
->loc
));
4502 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4503 VAR_INIT_STATUS_INITIALIZED
,
4504 cdv
, 0, NULL
, INSERT
);
4510 /* Remove attribute referring to the decl, which now
4511 uses the value for the register, already existing or
4512 to be added when we bring perm in. */
4515 pool_free (attrs_pool
, att
);
4520 remove_duplicate_values (var
);
4526 /* Reset values in the permanent set that are not associated with the
4527 chosen expression. */
4530 variable_post_merge_perm_vals (variable_def
**pslot
, dfset_post_merge
*dfpm
)
4532 dataflow_set
*set
= dfpm
->set
;
4533 variable pvar
= *pslot
, var
;
4534 location_chain pnode
;
4538 gcc_assert (dv_is_value_p (pvar
->dv
)
4539 && pvar
->n_var_parts
== 1);
4540 pnode
= pvar
->var_part
[0].loc_chain
;
4543 && REG_P (pnode
->loc
));
4547 var
= shared_hash_find (set
->vars
, dv
);
4550 /* Although variable_post_merge_new_vals may have made decls
4551 non-star-canonical, values that pre-existed in canonical form
4552 remain canonical, and newly-created values reference a single
4553 REG, so they are canonical as well. Since VAR has the
4554 location list for a VALUE, using find_loc_in_1pdv for it is
4555 fine, since VALUEs don't map back to DECLs. */
4556 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4558 val_reset (set
, dv
);
4561 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4562 if (att
->offset
== 0
4563 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4564 && dv_is_value_p (att
->dv
))
4567 /* If there is a value associated with this register already, create
4569 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4571 rtx cval
= dv_as_value (att
->dv
);
4572 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4573 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4578 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4580 variable_union (pvar
, set
);
4586 /* Just checking stuff and registering register attributes for
4590 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4592 struct dfset_post_merge dfpm
;
4597 shared_hash_htab (set
->vars
)
4598 ->traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4600 shared_hash_htab ((*permp
)->vars
)
4601 ->traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4602 shared_hash_htab (set
->vars
)
4603 ->traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4604 shared_hash_htab (set
->vars
)
4605 ->traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4608 /* Return a node whose loc is a MEM that refers to EXPR in the
4609 location list of a one-part variable or value VAR, or in that of
4610 any values recursively mentioned in the location lists. */
4612 static location_chain
4613 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type
*vars
)
4615 location_chain node
;
4618 location_chain where
= NULL
;
4623 gcc_assert (GET_CODE (val
) == VALUE
4624 && !VALUE_RECURSED_INTO (val
));
4626 dv
= dv_from_value (val
);
4627 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
4632 gcc_assert (var
->onepart
);
4634 if (!var
->n_var_parts
)
4637 VALUE_RECURSED_INTO (val
) = true;
4639 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4640 if (MEM_P (node
->loc
)
4641 && MEM_EXPR (node
->loc
) == expr
4642 && INT_MEM_OFFSET (node
->loc
) == 0)
4647 else if (GET_CODE (node
->loc
) == VALUE
4648 && !VALUE_RECURSED_INTO (node
->loc
)
4649 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4652 VALUE_RECURSED_INTO (val
) = false;
4657 /* Return TRUE if the value of MEM may vary across a call. */
4660 mem_dies_at_call (rtx mem
)
4662 tree expr
= MEM_EXPR (mem
);
4668 decl
= get_base_address (expr
);
4676 return (may_be_aliased (decl
)
4677 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4680 /* Remove all MEMs from the location list of a hash table entry for a
4681 one-part variable, except those whose MEM attributes map back to
4682 the variable itself, directly or within a VALUE. */
4685 dataflow_set_preserve_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4687 variable var
= *slot
;
4689 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4691 tree decl
= dv_as_decl (var
->dv
);
4692 location_chain loc
, *locp
;
4693 bool changed
= false;
4695 if (!var
->n_var_parts
)
4698 gcc_assert (var
->n_var_parts
== 1);
4700 if (shared_var_p (var
, set
->vars
))
4702 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4704 /* We want to remove dying MEMs that doesn't refer to DECL. */
4705 if (GET_CODE (loc
->loc
) == MEM
4706 && (MEM_EXPR (loc
->loc
) != decl
4707 || INT_MEM_OFFSET (loc
->loc
) != 0)
4708 && !mem_dies_at_call (loc
->loc
))
4710 /* We want to move here MEMs that do refer to DECL. */
4711 else if (GET_CODE (loc
->loc
) == VALUE
4712 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4713 shared_hash_htab (set
->vars
)))
4720 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4722 gcc_assert (var
->n_var_parts
== 1);
4725 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4728 rtx old_loc
= loc
->loc
;
4729 if (GET_CODE (old_loc
) == VALUE
)
4731 location_chain mem_node
4732 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4733 shared_hash_htab (set
->vars
));
4735 /* ??? This picks up only one out of multiple MEMs that
4736 refer to the same variable. Do we ever need to be
4737 concerned about dealing with more than one, or, given
4738 that they should all map to the same variable
4739 location, their addresses will have been merged and
4740 they will be regarded as equivalent? */
4743 loc
->loc
= mem_node
->loc
;
4744 loc
->set_src
= mem_node
->set_src
;
4745 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4749 if (GET_CODE (loc
->loc
) != MEM
4750 || (MEM_EXPR (loc
->loc
) == decl
4751 && INT_MEM_OFFSET (loc
->loc
) == 0)
4752 || !mem_dies_at_call (loc
->loc
))
4754 if (old_loc
!= loc
->loc
&& emit_notes
)
4756 if (old_loc
== var
->var_part
[0].cur_loc
)
4759 var
->var_part
[0].cur_loc
= NULL
;
4768 if (old_loc
== var
->var_part
[0].cur_loc
)
4771 var
->var_part
[0].cur_loc
= NULL
;
4775 pool_free (loc_chain_pool
, loc
);
4778 if (!var
->var_part
[0].loc_chain
)
4784 variable_was_changed (var
, set
);
4790 /* Remove all MEMs from the location list of a hash table entry for a
4794 dataflow_set_remove_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4796 variable var
= *slot
;
4798 if (var
->onepart
== ONEPART_VALUE
)
4800 location_chain loc
, *locp
;
4801 bool changed
= false;
4804 gcc_assert (var
->n_var_parts
== 1);
4806 if (shared_var_p (var
, set
->vars
))
4808 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4809 if (GET_CODE (loc
->loc
) == MEM
4810 && mem_dies_at_call (loc
->loc
))
4816 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4818 gcc_assert (var
->n_var_parts
== 1);
4821 if (VAR_LOC_1PAUX (var
))
4822 cur_loc
= VAR_LOC_FROM (var
);
4824 cur_loc
= var
->var_part
[0].cur_loc
;
4826 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4829 if (GET_CODE (loc
->loc
) != MEM
4830 || !mem_dies_at_call (loc
->loc
))
4837 /* If we have deleted the location which was last emitted
4838 we have to emit new location so add the variable to set
4839 of changed variables. */
4840 if (cur_loc
== loc
->loc
)
4843 var
->var_part
[0].cur_loc
= NULL
;
4844 if (VAR_LOC_1PAUX (var
))
4845 VAR_LOC_FROM (var
) = NULL
;
4847 pool_free (loc_chain_pool
, loc
);
4850 if (!var
->var_part
[0].loc_chain
)
4856 variable_was_changed (var
, set
);
4862 /* Remove all variable-location information about call-clobbered
4863 registers, as well as associations between MEMs and VALUEs. */
4866 dataflow_set_clear_at_call (dataflow_set
*set
)
4869 hard_reg_set_iterator hrsi
;
4871 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call
, 0, r
, hrsi
)
4872 var_regno_delete (set
, r
);
4874 if (MAY_HAVE_DEBUG_INSNS
)
4876 set
->traversed_vars
= set
->vars
;
4877 shared_hash_htab (set
->vars
)
4878 ->traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4879 set
->traversed_vars
= set
->vars
;
4880 shared_hash_htab (set
->vars
)
4881 ->traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4882 set
->traversed_vars
= NULL
;
4887 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4889 location_chain lc1
, lc2
;
4891 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4893 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4895 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4897 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4900 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4909 /* Return true if one-part variables VAR1 and VAR2 are different.
4910 They must be in canonical order. */
4913 onepart_variable_different_p (variable var1
, variable var2
)
4915 location_chain lc1
, lc2
;
4920 gcc_assert (var1
->n_var_parts
== 1
4921 && var2
->n_var_parts
== 1);
4923 lc1
= var1
->var_part
[0].loc_chain
;
4924 lc2
= var2
->var_part
[0].loc_chain
;
4926 gcc_assert (lc1
&& lc2
);
4930 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4939 /* Return true if variables VAR1 and VAR2 are different. */
4942 variable_different_p (variable var1
, variable var2
)
4949 if (var1
->onepart
!= var2
->onepart
)
4952 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4955 if (var1
->onepart
&& var1
->n_var_parts
)
4957 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
4958 && var1
->n_var_parts
== 1);
4959 /* One-part values have locations in a canonical order. */
4960 return onepart_variable_different_p (var1
, var2
);
4963 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4965 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
4967 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4969 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
4975 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4978 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
4980 variable_iterator_type hi
;
4983 if (old_set
->vars
== new_set
->vars
)
4986 if (shared_hash_htab (old_set
->vars
)->elements ()
4987 != shared_hash_htab (new_set
->vars
)->elements ())
4990 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set
->vars
),
4993 variable_table_type
*htab
= shared_hash_htab (new_set
->vars
);
4994 variable var2
= htab
->find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
4997 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4999 fprintf (dump_file
, "dataflow difference found: removal of:\n");
5005 if (variable_different_p (var1
, var2
))
5007 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5009 fprintf (dump_file
, "dataflow difference found: "
5010 "old and new follow:\n");
5018 /* No need to traverse the second hashtab, if both have the same number
5019 of elements and the second one had all entries found in the first one,
5020 then it can't have any extra entries. */
5024 /* Free the contents of dataflow set SET. */
5027 dataflow_set_destroy (dataflow_set
*set
)
5031 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5032 attrs_list_clear (&set
->regs
[i
]);
5034 shared_hash_destroy (set
->vars
);
5038 /* Return true if RTL X contains a SYMBOL_REF. */
5041 contains_symbol_ref (rtx x
)
5050 code
= GET_CODE (x
);
5051 if (code
== SYMBOL_REF
)
5054 fmt
= GET_RTX_FORMAT (code
);
5055 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5059 if (contains_symbol_ref (XEXP (x
, i
)))
5062 else if (fmt
[i
] == 'E')
5065 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5066 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
5074 /* Shall EXPR be tracked? */
5077 track_expr_p (tree expr
, bool need_rtl
)
5082 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5083 return DECL_RTL_SET_P (expr
);
5085 /* If EXPR is not a parameter or a variable do not track it. */
5086 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
5089 /* It also must have a name... */
5090 if (!DECL_NAME (expr
) && need_rtl
)
5093 /* ... and a RTL assigned to it. */
5094 decl_rtl
= DECL_RTL_IF_SET (expr
);
5095 if (!decl_rtl
&& need_rtl
)
5098 /* If this expression is really a debug alias of some other declaration, we
5099 don't need to track this expression if the ultimate declaration is
5102 if (TREE_CODE (realdecl
) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (realdecl
))
5104 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5105 if (!DECL_P (realdecl
))
5107 if (handled_component_p (realdecl
)
5108 || (TREE_CODE (realdecl
) == MEM_REF
5109 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5111 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5114 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5115 &maxsize
, &reverse
);
5116 if (!DECL_P (innerdecl
)
5117 || DECL_IGNORED_P (innerdecl
)
5118 /* Do not track declarations for parts of tracked parameters
5119 since we want to track them as a whole instead. */
5120 || (TREE_CODE (innerdecl
) == PARM_DECL
5121 && DECL_MODE (innerdecl
) != BLKmode
5122 && TREE_CODE (TREE_TYPE (innerdecl
)) != UNION_TYPE
)
5123 || TREE_STATIC (innerdecl
)
5125 || bitpos
+ bitsize
> 256
5126 || bitsize
!= maxsize
)
5136 /* Do not track EXPR if REALDECL it should be ignored for debugging
5138 if (DECL_IGNORED_P (realdecl
))
5141 /* Do not track global variables until we are able to emit correct location
5143 if (TREE_STATIC (realdecl
))
5146 /* When the EXPR is a DECL for alias of some variable (see example)
5147 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5148 DECL_RTL contains SYMBOL_REF.
5151 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5154 if (decl_rtl
&& MEM_P (decl_rtl
)
5155 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
5158 /* If RTX is a memory it should not be very large (because it would be
5159 an array or struct). */
5160 if (decl_rtl
&& MEM_P (decl_rtl
))
5162 /* Do not track structures and arrays. */
5163 if (GET_MODE (decl_rtl
) == BLKmode
5164 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5166 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5167 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5171 DECL_CHANGED (expr
) = 0;
5172 DECL_CHANGED (realdecl
) = 0;
5176 /* Determine whether a given LOC refers to the same variable part as
5180 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
5183 HOST_WIDE_INT offset2
;
5185 if (! DECL_P (expr
))
5190 expr2
= REG_EXPR (loc
);
5191 offset2
= REG_OFFSET (loc
);
5193 else if (MEM_P (loc
))
5195 expr2
= MEM_EXPR (loc
);
5196 offset2
= INT_MEM_OFFSET (loc
);
5201 if (! expr2
|| ! DECL_P (expr2
))
5204 expr
= var_debug_decl (expr
);
5205 expr2
= var_debug_decl (expr2
);
5207 return (expr
== expr2
&& offset
== offset2
);
5210 /* LOC is a REG or MEM that we would like to track if possible.
5211 If EXPR is null, we don't know what expression LOC refers to,
5212 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5213 LOC is an lvalue register.
5215 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5216 is something we can track. When returning true, store the mode of
5217 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5218 from EXPR in *OFFSET_OUT (if nonnull). */
5221 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
5222 enum machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5224 enum machine_mode mode
;
5226 if (expr
== NULL
|| !track_expr_p (expr
, true))
5229 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5230 whole subreg, but only the old inner part is really relevant. */
5231 mode
= GET_MODE (loc
);
5232 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5234 enum machine_mode pseudo_mode
;
5236 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5237 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
5239 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5244 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5245 Do the same if we are storing to a register and EXPR occupies
5246 the whole of register LOC; in that case, the whole of EXPR is
5247 being changed. We exclude complex modes from the second case
5248 because the real and imaginary parts are represented as separate
5249 pseudo registers, even if the whole complex value fits into one
5251 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
5253 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5254 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
5255 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
5257 mode
= DECL_MODE (expr
);
5261 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
5267 *offset_out
= offset
;
5271 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5272 want to track. When returning nonnull, make sure that the attributes
5273 on the returned value are updated. */
5276 var_lowpart (enum machine_mode mode
, rtx loc
)
5278 unsigned int offset
, reg_offset
, regno
;
5280 if (GET_MODE (loc
) == mode
)
5283 if (!REG_P (loc
) && !MEM_P (loc
))
5286 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5289 return adjust_address_nv (loc
, mode
, offset
);
5291 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5292 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5294 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5297 /* Carry information about uses and stores while walking rtx. */
5299 struct count_use_info
5301 /* The insn where the RTX is. */
5304 /* The basic block where insn is. */
5307 /* The array of n_sets sets in the insn, as determined by cselib. */
5308 struct cselib_set
*sets
;
5311 /* True if we're counting stores, false otherwise. */
5315 /* Find a VALUE corresponding to X. */
5317 static inline cselib_val
*
5318 find_use_val (rtx x
, enum machine_mode mode
, struct count_use_info
*cui
)
5324 /* This is called after uses are set up and before stores are
5325 processed by cselib, so it's safe to look up srcs, but not
5326 dsts. So we look up expressions that appear in srcs or in
5327 dest expressions, but we search the sets array for dests of
5331 /* Some targets represent memset and memcpy patterns
5332 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5333 (set (mem:BLK ...) (const_int ...)) or
5334 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5335 in that case, otherwise we end up with mode mismatches. */
5336 if (mode
== BLKmode
&& MEM_P (x
))
5338 for (i
= 0; i
< cui
->n_sets
; i
++)
5339 if (cui
->sets
[i
].dest
== x
)
5340 return cui
->sets
[i
].src_elt
;
5343 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5349 /* Replace all registers and addresses in an expression with VALUE
5350 expressions that map back to them, unless the expression is a
5351 register. If no mapping is or can be performed, returns NULL. */
5354 replace_expr_with_values (rtx loc
)
5356 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5358 else if (MEM_P (loc
))
5360 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5361 get_address_mode (loc
), 0,
5364 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5369 return cselib_subst_to_values (loc
, VOIDmode
);
5372 /* Return true if X contains a DEBUG_EXPR. */
5375 rtx_debug_expr_p (const_rtx x
)
5377 subrtx_iterator::array_type array
;
5378 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5379 if (GET_CODE (*iter
) == DEBUG_EXPR
)
5384 /* Determine what kind of micro operation to choose for a USE. Return
5385 MO_CLOBBER if no micro operation is to be generated. */
5387 static enum micro_operation_type
5388 use_type (rtx loc
, struct count_use_info
*cui
, enum machine_mode
*modep
)
5392 if (cui
&& cui
->sets
)
5394 if (GET_CODE (loc
) == VAR_LOCATION
)
5396 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5398 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5399 if (! VAR_LOC_UNKNOWN_P (ploc
))
5401 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5404 /* ??? flag_float_store and volatile mems are never
5405 given values, but we could in theory use them for
5407 gcc_assert (val
|| 1);
5415 if (REG_P (loc
) || MEM_P (loc
))
5418 *modep
= GET_MODE (loc
);
5422 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5423 && cselib_lookup (XEXP (loc
, 0),
5424 get_address_mode (loc
), 0,
5430 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5432 if (val
&& !cselib_preserved_value_p (val
))
5440 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5442 if (loc
== cfa_base_rtx
)
5444 expr
= REG_EXPR (loc
);
5447 return MO_USE_NO_VAR
;
5448 else if (target_for_debug_bind (var_debug_decl (expr
)))
5450 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5451 false, modep
, NULL
))
5454 return MO_USE_NO_VAR
;
5456 else if (MEM_P (loc
))
5458 expr
= MEM_EXPR (loc
);
5462 else if (target_for_debug_bind (var_debug_decl (expr
)))
5464 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
5466 /* Multi-part variables shouldn't refer to one-part
5467 variable names such as VALUEs (never happens) or
5468 DEBUG_EXPRs (only happens in the presence of debug
5470 && (!MAY_HAVE_DEBUG_INSNS
5471 || !rtx_debug_expr_p (XEXP (loc
, 0))))
5480 /* Log to OUT information about micro-operation MOPT involving X in
5484 log_op_type (rtx x
, basic_block bb
, rtx_insn
*insn
,
5485 enum micro_operation_type mopt
, FILE *out
)
5487 fprintf (out
, "bb %i op %i insn %i %s ",
5488 bb
->index
, VTI (bb
)->mos
.length (),
5489 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5490 print_inline_rtx (out
, x
, 2);
5494 /* Tell whether the CONCAT used to holds a VALUE and its location
5495 needs value resolution, i.e., an attempt of mapping the location
5496 back to other incoming values. */
5497 #define VAL_NEEDS_RESOLUTION(x) \
5498 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5499 /* Whether the location in the CONCAT is a tracked expression, that
5500 should also be handled like a MO_USE. */
5501 #define VAL_HOLDS_TRACK_EXPR(x) \
5502 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5503 /* Whether the location in the CONCAT should be handled like a MO_COPY
5505 #define VAL_EXPR_IS_COPIED(x) \
5506 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5507 /* Whether the location in the CONCAT should be handled like a
5508 MO_CLOBBER as well. */
5509 #define VAL_EXPR_IS_CLOBBERED(x) \
5510 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5512 /* All preserved VALUEs. */
5513 static vec
<rtx
> preserved_values
;
5515 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5518 preserve_value (cselib_val
*val
)
5520 cselib_preserve_value (val
);
5521 preserved_values
.safe_push (val
->val_rtx
);
5524 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5525 any rtxes not suitable for CONST use not replaced by VALUEs
5529 non_suitable_const (const_rtx x
)
5531 subrtx_iterator::array_type array
;
5532 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
5534 const_rtx x
= *iter
;
5535 switch (GET_CODE (x
))
5546 if (!MEM_READONLY_P (x
))
5556 /* Add uses (register and memory references) LOC which will be tracked
5557 to VTI (bb)->mos. */
5560 add_uses (rtx loc
, struct count_use_info
*cui
)
5562 enum machine_mode mode
= VOIDmode
;
5563 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5565 if (type
!= MO_CLOBBER
)
5567 basic_block bb
= cui
->bb
;
5571 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5572 mo
.insn
= cui
->insn
;
5574 if (type
== MO_VAL_LOC
)
5577 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5580 gcc_assert (cui
->sets
);
5583 && !REG_P (XEXP (vloc
, 0))
5584 && !MEM_P (XEXP (vloc
, 0)))
5587 enum machine_mode address_mode
= get_address_mode (mloc
);
5589 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5592 if (val
&& !cselib_preserved_value_p (val
))
5593 preserve_value (val
);
5596 if (CONSTANT_P (vloc
)
5597 && (GET_CODE (vloc
) != CONST
|| non_suitable_const (vloc
)))
5598 /* For constants don't look up any value. */;
5599 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5600 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5602 enum machine_mode mode2
;
5603 enum micro_operation_type type2
;
5605 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5608 nloc
= replace_expr_with_values (vloc
);
5612 oloc
= shallow_copy_rtx (oloc
);
5613 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5616 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5618 type2
= use_type (vloc
, 0, &mode2
);
5620 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5621 || type2
== MO_CLOBBER
);
5623 if (type2
== MO_CLOBBER
5624 && !cselib_preserved_value_p (val
))
5626 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5627 preserve_value (val
);
5630 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5632 oloc
= shallow_copy_rtx (oloc
);
5633 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5638 else if (type
== MO_VAL_USE
)
5640 enum machine_mode mode2
= VOIDmode
;
5641 enum micro_operation_type type2
;
5642 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5643 rtx vloc
, oloc
= loc
, nloc
;
5645 gcc_assert (cui
->sets
);
5648 && !REG_P (XEXP (oloc
, 0))
5649 && !MEM_P (XEXP (oloc
, 0)))
5652 enum machine_mode address_mode
= get_address_mode (mloc
);
5654 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5657 if (val
&& !cselib_preserved_value_p (val
))
5658 preserve_value (val
);
5661 type2
= use_type (loc
, 0, &mode2
);
5663 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5664 || type2
== MO_CLOBBER
);
5666 if (type2
== MO_USE
)
5667 vloc
= var_lowpart (mode2
, loc
);
5671 /* The loc of a MO_VAL_USE may have two forms:
5673 (concat val src): val is at src, a value-based
5676 (concat (concat val use) src): same as above, with use as
5677 the MO_USE tracked value, if it differs from src.
5681 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5682 nloc
= replace_expr_with_values (loc
);
5687 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5689 oloc
= val
->val_rtx
;
5691 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5693 if (type2
== MO_USE
)
5694 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5695 if (!cselib_preserved_value_p (val
))
5697 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5698 preserve_value (val
);
5702 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5704 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5705 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5706 VTI (bb
)->mos
.safe_push (mo
);
5710 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5713 add_uses_1 (rtx
*x
, void *cui
)
5715 subrtx_var_iterator::array_type array
;
5716 FOR_EACH_SUBRTX_VAR (iter
, array
, *x
, NONCONST
)
5717 add_uses (*iter
, (struct count_use_info
*) cui
);
5720 /* This is the value used during expansion of locations. We want it
5721 to be unbounded, so that variables expanded deep in a recursion
5722 nest are fully evaluated, so that their values are cached
5723 correctly. We avoid recursion cycles through other means, and we
5724 don't unshare RTL, so excess complexity is not a problem. */
5725 #define EXPR_DEPTH (INT_MAX)
5726 /* We use this to keep too-complex expressions from being emitted as
5727 location notes, and then to debug information. Users can trade
5728 compile time for ridiculously complex expressions, although they're
5729 seldom useful, and they may often have to be discarded as not
5730 representable anyway. */
5731 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5733 /* Attempt to reverse the EXPR operation in the debug info and record
5734 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5735 no longer live we can express its value as VAL - 6. */
5738 reverse_op (rtx val
, const_rtx expr
, rtx_insn
*insn
)
5742 struct elt_loc_list
*l
;
5746 if (GET_CODE (expr
) != SET
)
5749 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5752 src
= SET_SRC (expr
);
5753 switch (GET_CODE (src
))
5760 if (!REG_P (XEXP (src
, 0)))
5765 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5772 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
5775 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5776 if (!v
|| !cselib_preserved_value_p (v
))
5779 /* Use canonical V to avoid creating multiple redundant expressions
5780 for different VALUES equivalent to V. */
5781 v
= canonical_cselib_val (v
);
5783 /* Adding a reverse op isn't useful if V already has an always valid
5784 location. Ignore ENTRY_VALUE, while it is always constant, we should
5785 prefer non-ENTRY_VALUE locations whenever possible. */
5786 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5787 if (CONSTANT_P (l
->loc
)
5788 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5790 /* Avoid creating too large locs lists. */
5791 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
5794 switch (GET_CODE (src
))
5798 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5800 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5804 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5816 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5818 arg
= XEXP (src
, 1);
5819 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5821 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5822 if (arg
== NULL_RTX
)
5824 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5827 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5829 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5830 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5831 breaks a lot of routines during var-tracking. */
5832 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5838 cselib_add_permanent_equiv (v
, ret
, insn
);
5841 /* Add stores (register and memory references) LOC which will be tracked
5842 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5843 CUIP->insn is instruction which the LOC is part of. */
5846 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5848 enum machine_mode mode
= VOIDmode
, mode2
;
5849 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5850 basic_block bb
= cui
->bb
;
5852 rtx oloc
= loc
, nloc
, src
= NULL
;
5853 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5854 bool track_p
= false;
5856 bool resolve
, preserve
;
5858 if (type
== MO_CLOBBER
)
5865 gcc_assert (loc
!= cfa_base_rtx
);
5866 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5867 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5868 || GET_CODE (expr
) == CLOBBER
)
5870 mo
.type
= MO_CLOBBER
;
5872 if (GET_CODE (expr
) == SET
5873 && SET_DEST (expr
) == loc
5874 && !unsuitable_loc (SET_SRC (expr
))
5875 && find_use_val (loc
, mode
, cui
))
5877 gcc_checking_assert (type
== MO_VAL_SET
);
5878 mo
.u
.loc
= gen_rtx_SET (VOIDmode
, loc
, SET_SRC (expr
));
5883 if (GET_CODE (expr
) == SET
5884 && SET_DEST (expr
) == loc
5885 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5886 src
= var_lowpart (mode2
, SET_SRC (expr
));
5887 loc
= var_lowpart (mode2
, loc
);
5896 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5897 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5899 /* If this is an instruction copying (part of) a parameter
5900 passed by invisible reference to its register location,
5901 pretend it's a SET so that the initial memory location
5902 is discarded, as the parameter register can be reused
5903 for other purposes and we do not track locations based
5904 on generic registers. */
5907 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5908 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5909 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5910 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
5921 mo
.insn
= cui
->insn
;
5923 else if (MEM_P (loc
)
5924 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5927 if (MEM_P (loc
) && type
== MO_VAL_SET
5928 && !REG_P (XEXP (loc
, 0))
5929 && !MEM_P (XEXP (loc
, 0)))
5932 enum machine_mode address_mode
= get_address_mode (mloc
);
5933 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5937 if (val
&& !cselib_preserved_value_p (val
))
5938 preserve_value (val
);
5941 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5943 mo
.type
= MO_CLOBBER
;
5944 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5948 if (GET_CODE (expr
) == SET
5949 && SET_DEST (expr
) == loc
5950 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5951 src
= var_lowpart (mode2
, SET_SRC (expr
));
5952 loc
= var_lowpart (mode2
, loc
);
5961 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5962 if (same_variable_part_p (SET_SRC (xexpr
),
5964 INT_MEM_OFFSET (loc
)))
5971 mo
.insn
= cui
->insn
;
5976 if (type
!= MO_VAL_SET
)
5977 goto log_and_return
;
5979 v
= find_use_val (oloc
, mode
, cui
);
5982 goto log_and_return
;
5984 resolve
= preserve
= !cselib_preserved_value_p (v
);
5986 /* We cannot track values for multiple-part variables, so we track only
5987 locations for tracked parameters passed either by invisible reference
5988 or directly in multiple locations. */
5992 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5993 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5994 && TREE_CODE (TREE_TYPE (REG_EXPR (loc
))) != UNION_TYPE
5995 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5996 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) != arg_pointer_rtx
)
5997 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc
))) == PARALLEL
5998 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0) > 1)))
6000 /* Although we don't use the value here, it could be used later by the
6001 mere virtue of its existence as the operand of the reverse operation
6002 that gave rise to it (typically extension/truncation). Make sure it
6003 is preserved as required by vt_expand_var_loc_chain. */
6006 goto log_and_return
;
6009 if (loc
== stack_pointer_rtx
6010 && hard_frame_pointer_adjustment
!= -1
6012 cselib_set_value_sp_based (v
);
6014 nloc
= replace_expr_with_values (oloc
);
6018 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
6020 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
6024 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
6026 if (oval
&& !cselib_preserved_value_p (oval
))
6028 micro_operation moa
;
6030 preserve_value (oval
);
6032 moa
.type
= MO_VAL_USE
;
6033 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
6034 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
6035 moa
.insn
= cui
->insn
;
6037 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6038 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
6039 moa
.type
, dump_file
);
6040 VTI (bb
)->mos
.safe_push (moa
);
6045 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
6047 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
6048 nloc
= replace_expr_with_values (SET_SRC (expr
));
6052 /* Avoid the mode mismatch between oexpr and expr. */
6053 if (!nloc
&& mode
!= mode2
)
6055 nloc
= SET_SRC (expr
);
6056 gcc_assert (oloc
== SET_DEST (expr
));
6059 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
6060 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
6063 if (oloc
== SET_DEST (mo
.u
.loc
))
6064 /* No point in duplicating. */
6066 if (!REG_P (SET_SRC (mo
.u
.loc
)))
6072 if (GET_CODE (mo
.u
.loc
) == SET
6073 && oloc
== SET_DEST (mo
.u
.loc
))
6074 /* No point in duplicating. */
6080 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6082 if (mo
.u
.loc
!= oloc
)
6083 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6085 /* The loc of a MO_VAL_SET may have various forms:
6087 (concat val dst): dst now holds val
6089 (concat val (set dst src)): dst now holds val, copied from src
6091 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6092 after replacing mems and non-top-level regs with values.
6094 (concat (concat val dstv) (set dst src)): dst now holds val,
6095 copied from src. dstv is a value-based representation of dst, if
6096 it differs from dst. If resolution is needed, src is a REG, and
6097 its mode is the same as that of val.
6099 (concat (concat val (set dstv srcv)) (set dst src)): src
6100 copied to dst, holding val. dstv and srcv are value-based
6101 representations of dst and src, respectively.
6105 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6106 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6111 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6114 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6117 if (mo
.type
== MO_CLOBBER
)
6118 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6119 if (mo
.type
== MO_COPY
)
6120 VAL_EXPR_IS_COPIED (loc
) = 1;
6122 mo
.type
= MO_VAL_SET
;
6125 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6126 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6127 VTI (bb
)->mos
.safe_push (mo
);
6130 /* Arguments to the call. */
6131 static rtx call_arguments
;
6133 /* Compute call_arguments. */
6136 prepare_call_arguments (basic_block bb
, rtx_insn
*insn
)
6139 rtx prev
, cur
, next
;
6140 rtx this_arg
= NULL_RTX
;
6141 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6142 tree obj_type_ref
= NULL_TREE
;
6143 CUMULATIVE_ARGS args_so_far_v
;
6144 cumulative_args_t args_so_far
;
6146 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6147 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6148 call
= get_call_rtx_from (insn
);
6151 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6153 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6154 if (SYMBOL_REF_DECL (symbol
))
6155 fndecl
= SYMBOL_REF_DECL (symbol
);
6157 if (fndecl
== NULL_TREE
)
6158 fndecl
= MEM_EXPR (XEXP (call
, 0));
6160 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6161 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6163 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6164 type
= TREE_TYPE (fndecl
);
6165 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6167 if (TREE_CODE (fndecl
) == INDIRECT_REF
6168 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6169 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6174 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6176 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6177 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6179 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6183 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6184 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6185 #ifndef PCC_STATIC_STRUCT_RETURN
6186 if (aggregate_value_p (TREE_TYPE (type
), type
)
6187 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6189 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6190 enum machine_mode mode
= TYPE_MODE (struct_addr
);
6192 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6194 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6196 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6198 if (reg
== NULL_RTX
)
6200 for (; link
; link
= XEXP (link
, 1))
6201 if (GET_CODE (XEXP (link
, 0)) == USE
6202 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6204 link
= XEXP (link
, 1);
6211 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6213 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6215 enum machine_mode mode
;
6216 t
= TYPE_ARG_TYPES (type
);
6217 mode
= TYPE_MODE (TREE_VALUE (t
));
6218 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6219 TREE_VALUE (t
), true);
6220 if (this_arg
&& !REG_P (this_arg
))
6221 this_arg
= NULL_RTX
;
6222 else if (this_arg
== NULL_RTX
)
6224 for (; link
; link
= XEXP (link
, 1))
6225 if (GET_CODE (XEXP (link
, 0)) == USE
6226 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6228 this_arg
= XEXP (XEXP (link
, 0), 0);
6236 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6238 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6239 if (GET_CODE (XEXP (link
, 0)) == USE
)
6241 rtx item
= NULL_RTX
;
6242 x
= XEXP (XEXP (link
, 0), 0);
6243 if (GET_MODE (link
) == VOIDmode
6244 || GET_MODE (link
) == BLKmode
6245 || (GET_MODE (link
) != GET_MODE (x
)
6246 && ((GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6247 && GET_MODE_CLASS (GET_MODE (link
)) != MODE_PARTIAL_INT
)
6248 || (GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
6249 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_PARTIAL_INT
))))
6250 /* Can't do anything for these, if the original type mode
6251 isn't known or can't be converted. */;
6254 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6255 if (val
&& cselib_preserved_value_p (val
))
6256 item
= val
->val_rtx
;
6257 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
6258 || GET_MODE_CLASS (GET_MODE (x
)) == MODE_PARTIAL_INT
)
6260 enum machine_mode mode
= GET_MODE (x
);
6262 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
6263 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
6265 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6267 if (reg
== NULL_RTX
|| !REG_P (reg
))
6269 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6270 if (val
&& cselib_preserved_value_p (val
))
6272 item
= val
->val_rtx
;
6283 if (!frame_pointer_needed
)
6285 struct adjust_mem_data amd
;
6286 amd
.mem_mode
= VOIDmode
;
6287 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6288 amd
.side_effects
= NULL
;
6290 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6292 gcc_assert (amd
.side_effects
== NULL_RTX
);
6294 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6295 if (val
&& cselib_preserved_value_p (val
))
6296 item
= val
->val_rtx
;
6297 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
6298 && GET_MODE_CLASS (GET_MODE (mem
)) != MODE_PARTIAL_INT
)
6300 /* For non-integer stack argument see also if they weren't
6301 initialized by integers. */
6302 enum machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
6303 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
6305 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6306 imode
, 0, VOIDmode
);
6307 if (val
&& cselib_preserved_value_p (val
))
6308 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6316 if (GET_MODE (item
) != GET_MODE (link
))
6317 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6318 if (GET_MODE (x2
) != GET_MODE (link
))
6319 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6320 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6322 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6324 if (t
&& t
!= void_list_node
)
6326 tree argtype
= TREE_VALUE (t
);
6327 enum machine_mode mode
= TYPE_MODE (argtype
);
6329 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6331 argtype
= build_pointer_type (argtype
);
6332 mode
= TYPE_MODE (argtype
);
6334 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6336 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6337 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6340 && GET_MODE (reg
) == mode
6341 && (GET_MODE_CLASS (mode
) == MODE_INT
6342 || GET_MODE_CLASS (mode
) == MODE_PARTIAL_INT
)
6344 && REGNO (x
) == REGNO (reg
)
6345 && GET_MODE (x
) == mode
6348 enum machine_mode indmode
6349 = TYPE_MODE (TREE_TYPE (argtype
));
6350 rtx mem
= gen_rtx_MEM (indmode
, x
);
6351 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6352 if (val
&& cselib_preserved_value_p (val
))
6354 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6355 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6360 struct elt_loc_list
*l
;
6363 /* Try harder, when passing address of a constant
6364 pool integer it can be easily read back. */
6365 item
= XEXP (item
, 1);
6366 if (GET_CODE (item
) == SUBREG
)
6367 item
= SUBREG_REG (item
);
6368 gcc_assert (GET_CODE (item
) == VALUE
);
6369 val
= CSELIB_VAL_PTR (item
);
6370 for (l
= val
->locs
; l
; l
= l
->next
)
6371 if (GET_CODE (l
->loc
) == SYMBOL_REF
6372 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6373 && SYMBOL_REF_DECL (l
->loc
)
6374 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6376 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6377 if (tree_fits_shwi_p (initial
))
6379 item
= GEN_INT (tree_to_shwi (initial
));
6380 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6382 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6389 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6395 /* Add debug arguments. */
6397 && TREE_CODE (fndecl
) == FUNCTION_DECL
6398 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6400 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6405 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6408 tree dtemp
= (**debug_args
)[ix
+ 1];
6409 enum machine_mode mode
= DECL_MODE (dtemp
);
6410 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6411 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6412 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6418 /* Reverse call_arguments chain. */
6420 for (cur
= call_arguments
; cur
; cur
= next
)
6422 next
= XEXP (cur
, 1);
6423 XEXP (cur
, 1) = prev
;
6426 call_arguments
= prev
;
6428 x
= get_call_rtx_from (insn
);
6431 x
= XEXP (XEXP (x
, 0), 0);
6432 if (GET_CODE (x
) == SYMBOL_REF
)
6433 /* Don't record anything. */;
6434 else if (CONSTANT_P (x
))
6436 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6439 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6443 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6444 if (val
&& cselib_preserved_value_p (val
))
6446 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6448 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6454 enum machine_mode mode
6455 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6456 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6458 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6460 clobbered
= plus_constant (mode
, clobbered
,
6461 token
* GET_MODE_SIZE (mode
));
6462 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6463 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6465 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6469 /* Callback for cselib_record_sets_hook, that records as micro
6470 operations uses and stores in an insn after cselib_record_sets has
6471 analyzed the sets in an insn, but before it modifies the stored
6472 values in the internal tables, unless cselib_record_sets doesn't
6473 call it directly (perhaps because we're not doing cselib in the
6474 first place, in which case sets and n_sets will be 0). */
6477 add_with_sets (rtx_insn
*insn
, struct cselib_set
*sets
, int n_sets
)
6479 basic_block bb
= BLOCK_FOR_INSN (insn
);
6481 struct count_use_info cui
;
6482 micro_operation
*mos
;
6484 cselib_hook_called
= true;
6489 cui
.n_sets
= n_sets
;
6491 n1
= VTI (bb
)->mos
.length ();
6492 cui
.store_p
= false;
6493 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6494 n2
= VTI (bb
)->mos
.length () - 1;
6495 mos
= VTI (bb
)->mos
.address ();
6497 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6501 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6503 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6515 n2
= VTI (bb
)->mos
.length () - 1;
6518 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6520 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6538 mo
.u
.loc
= call_arguments
;
6539 call_arguments
= NULL_RTX
;
6541 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6542 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6543 VTI (bb
)->mos
.safe_push (mo
);
6546 n1
= VTI (bb
)->mos
.length ();
6547 /* This will record NEXT_INSN (insn), such that we can
6548 insert notes before it without worrying about any
6549 notes that MO_USEs might emit after the insn. */
6551 note_stores (PATTERN (insn
), add_stores
, &cui
);
6552 n2
= VTI (bb
)->mos
.length () - 1;
6553 mos
= VTI (bb
)->mos
.address ();
6555 /* Order the MO_VAL_USEs first (note_stores does nothing
6556 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6557 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6560 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6562 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6574 n2
= VTI (bb
)->mos
.length () - 1;
6577 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6579 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6592 static enum var_init_status
6593 find_src_status (dataflow_set
*in
, rtx src
)
6595 tree decl
= NULL_TREE
;
6596 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6598 if (! flag_var_tracking_uninit
)
6599 status
= VAR_INIT_STATUS_INITIALIZED
;
6601 if (src
&& REG_P (src
))
6602 decl
= var_debug_decl (REG_EXPR (src
));
6603 else if (src
&& MEM_P (src
))
6604 decl
= var_debug_decl (MEM_EXPR (src
));
6607 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6612 /* SRC is the source of an assignment. Use SET to try to find what
6613 was ultimately assigned to SRC. Return that value if known,
6614 otherwise return SRC itself. */
6617 find_src_set_src (dataflow_set
*set
, rtx src
)
6619 tree decl
= NULL_TREE
; /* The variable being copied around. */
6620 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6622 location_chain nextp
;
6626 if (src
&& REG_P (src
))
6627 decl
= var_debug_decl (REG_EXPR (src
));
6628 else if (src
&& MEM_P (src
))
6629 decl
= var_debug_decl (MEM_EXPR (src
));
6633 decl_or_value dv
= dv_from_decl (decl
);
6635 var
= shared_hash_find (set
->vars
, dv
);
6639 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6640 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6641 nextp
= nextp
->next
)
6642 if (rtx_equal_p (nextp
->loc
, src
))
6644 set_src
= nextp
->set_src
;
6654 /* Compute the changes of variable locations in the basic block BB. */
/* NOTE(review): this listing is an incomplete extraction -- the function's
   declarations (e.g. the loop index, CHANGED), braces and the MO_* case
   labels of the dispatch over micro operations are missing here.  */
6657 compute_bb_dataflow (basic_block bb
)
6660 micro_operation
*mo
;
6662 dataflow_set old_out
;
6663 dataflow_set
*in
= &VTI (bb
)->in
;
6664 dataflow_set
*out
= &VTI (bb
)->out
;
/* Snapshot the previous OUT set so we can detect changes at the end.  */
6666 dataflow_set_init (&old_out
);
6667 dataflow_set_copy (&old_out
, out
);
/* OUT starts as a copy of IN; the micro operations below update it.  */
6668 dataflow_set_copy (out
, in
);
6670 if (MAY_HAVE_DEBUG_INSNS
)
6671 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
/* Process each recorded micro operation of BB in order.  */
6673 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6675 rtx_insn
*insn
= mo
->insn
;
/* At a call insn, dataflow_set_clear_at_call drops state the call
   invalidates.  */
6680 dataflow_set_clear_at_call (out
);
6685 rtx loc
= mo
->u
.loc
;
6688 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6689 else if (MEM_P (loc
))
6690 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
/* A (CONCAT val vloc) records a value bound to a variable location
   annotation.  */
6696 rtx loc
= mo
->u
.loc
;
6700 if (GET_CODE (loc
) == CONCAT
)
6702 val
= XEXP (loc
, 0);
6703 vloc
= XEXP (loc
, 1);
6711 var
= PAT_VAR_LOCATION_DECL (vloc
);
6713 clobber_variable_part (out
, NULL_RTX
,
6714 dv_from_decl (var
), 0, NULL_RTX
);
6717 if (VAL_NEEDS_RESOLUTION (loc
))
6718 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6719 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6720 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6723 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6724 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6725 dv_from_decl (var
), 0,
6726 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6733 rtx loc
= mo
->u
.loc
;
6734 rtx val
, vloc
, uloc
;
6736 vloc
= uloc
= XEXP (loc
, 1);
6737 val
= XEXP (loc
, 0);
6739 if (GET_CODE (val
) == CONCAT
)
6741 uloc
= XEXP (val
, 1);
6742 val
= XEXP (val
, 0);
6745 if (VAL_NEEDS_RESOLUTION (loc
))
6746 val_resolve (out
, val
, vloc
, insn
);
6748 val_store (out
, val
, uloc
, insn
, false);
6750 if (VAL_HOLDS_TRACK_EXPR (loc
))
6752 if (GET_CODE (uloc
) == REG
)
6753 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6755 else if (GET_CODE (uloc
) == MEM
)
6756 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6764 rtx loc
= mo
->u
.loc
;
6765 rtx val
, vloc
, uloc
;
6769 uloc
= XEXP (vloc
, 1);
6770 val
= XEXP (vloc
, 0);
6773 if (GET_CODE (uloc
) == SET
)
6775 dstv
= SET_DEST (uloc
);
6776 srcv
= SET_SRC (uloc
);
6784 if (GET_CODE (val
) == CONCAT
)
6786 dstv
= vloc
= XEXP (val
, 1);
6787 val
= XEXP (val
, 0);
6790 if (GET_CODE (vloc
) == SET
)
6792 srcv
= SET_SRC (vloc
);
6794 gcc_assert (val
!= srcv
);
6795 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6797 dstv
= vloc
= SET_DEST (vloc
);
6799 if (VAL_NEEDS_RESOLUTION (loc
))
6800 val_resolve (out
, val
, srcv
, insn
);
6802 else if (VAL_NEEDS_RESOLUTION (loc
))
6804 gcc_assert (GET_CODE (uloc
) == SET
6805 && GET_CODE (SET_SRC (uloc
)) == REG
);
6806 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6809 if (VAL_HOLDS_TRACK_EXPR (loc
))
6811 if (VAL_EXPR_IS_CLOBBERED (loc
))
6814 var_reg_delete (out
, uloc
, true);
6815 else if (MEM_P (uloc
))
6817 gcc_assert (MEM_P (dstv
));
6818 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6819 var_mem_delete (out
, dstv
, true);
6824 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6825 rtx src
= NULL
, dst
= uloc
;
6826 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6828 if (GET_CODE (uloc
) == SET
)
6830 src
= SET_SRC (uloc
);
6831 dst
= SET_DEST (uloc
);
/* With -fvar-tracking-uninit, propagate the source's init status
   through the copy; check IN first, then OUT.  */
6836 if (flag_var_tracking_uninit
)
6838 status
= find_src_status (in
, src
);
6840 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6841 status
= find_src_status (out
, src
);
6844 src
= find_src_set_src (in
, src
);
6848 var_reg_delete_and_set (out
, dst
, !copied_p
,
6850 else if (MEM_P (dst
))
6852 gcc_assert (MEM_P (dstv
));
6853 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6854 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6859 else if (REG_P (uloc
))
6860 var_regno_delete (out
, REGNO (uloc
));
6861 else if (MEM_P (uloc
))
6863 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6864 gcc_checking_assert (dstv
== vloc
);
6866 clobber_overlapping_mems (out
, vloc
);
6869 val_store (out
, val
, dstv
, insn
, true);
6875 rtx loc
= mo
->u
.loc
;
6878 if (GET_CODE (loc
) == SET
)
6880 set_src
= SET_SRC (loc
);
6881 loc
= SET_DEST (loc
);
6885 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6887 else if (MEM_P (loc
))
6888 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6895 rtx loc
= mo
->u
.loc
;
6896 enum var_init_status src_status
;
6899 if (GET_CODE (loc
) == SET
)
6901 set_src
= SET_SRC (loc
);
6902 loc
= SET_DEST (loc
);
6905 if (! flag_var_tracking_uninit
)
6906 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6909 src_status
= find_src_status (in
, set_src
);
6911 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6912 src_status
= find_src_status (out
, set_src
);
6915 set_src
= find_src_set_src (in
, set_src
);
6918 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6919 else if (MEM_P (loc
))
6920 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6926 rtx loc
= mo
->u
.loc
;
6929 var_reg_delete (out
, loc
, false);
6930 else if (MEM_P (loc
))
6931 var_mem_delete (out
, loc
, false);
6937 rtx loc
= mo
->u
.loc
;
6940 var_reg_delete (out
, loc
, true);
6941 else if (MEM_P (loc
))
6942 var_mem_delete (out
, loc
, true);
/* Stack adjustment micro operation: track the running adjustment.  */
6947 out
->stack_adjust
+= mo
->u
.adjust
;
/* The per-block address cache is only live during the walk above.  */
6952 if (MAY_HAVE_DEBUG_INSNS
)
6954 delete local_get_addr_cache
;
6955 local_get_addr_cache
= NULL
;
/* With debug insns, canonicalize register equivalences and value
   ordering in OUT before comparing.  */
6957 dataflow_set_equiv_regs (out
);
6958 shared_hash_htab (out
->vars
)
6959 ->traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
6960 shared_hash_htab (out
->vars
)
6961 ->traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
6963 shared_hash_htab (out
->vars
)
6964 ->traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
/* Report whether OUT differs from its previous contents.  */
6967 changed
= dataflow_set_different (&old_out
, out
);
6968 dataflow_set_destroy (&old_out
);
6972 /* Find the locations of variables in the whole function. */
/* NOTE(review): incomplete extraction -- several declarations (bb, e, ei,
   htabsz, changed, rc_order/bb_order declarations) and braces are missing
   from this listing.  */
6975 vt_find_locations (void)
6977 fibheap_t worklist
, pending
, fibheap_swap
;
6978 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
6985 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
6986 bool success
= true;
6988 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
6989 /* Compute reverse completion order of depth first search of the CFG
6990 so that the data-flow runs faster. */
6991 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
6992 bb_order
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
6993 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
6994 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
6995 bb_order
[rc_order
[i
]] = i
;
/* Two heaps: PENDING holds blocks for the next round, WORKLIST for the
   current one; IN_PENDING / IN_WORKLIST mirror their membership.  */
6998 worklist
= fibheap_new ();
6999 pending
= fibheap_new ();
7000 visited
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7001 in_worklist
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7002 in_pending
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
7003 bitmap_clear (in_worklist
);
/* Seed the first round with every block, keyed by RPO order.  */
7005 FOR_EACH_BB_FN (bb
, cfun
)
7006 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
7007 bitmap_ones (in_pending
);
7009 while (success
&& !fibheap_empty (pending
))
7011 fibheap_swap
= pending
;
7013 worklist
= fibheap_swap
;
7014 sbitmap_swap
= in_pending
;
7015 in_pending
= in_worklist
;
7016 in_worklist
= sbitmap_swap
;
7018 bitmap_clear (visited
);
7020 while (!fibheap_empty (worklist
))
7022 bb
= (basic_block
) fibheap_extract_min (worklist
);
7023 bitmap_clear_bit (in_worklist
, bb
->index
);
7024 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
7025 if (!bitmap_bit_p (visited
, bb
->index
))
7029 int oldinsz
, oldoutsz
;
7031 bitmap_set_bit (visited
, bb
->index
);
/* Account the old table sizes so HTABSZ tracks the current total.  */
7033 if (VTI (bb
)->in
.vars
)
7036 -= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7037 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
7038 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
)->elements ();
7040 = shared_hash_htab (VTI (bb
)->out
.vars
)->elements ();
7043 oldinsz
= oldoutsz
= 0;
7045 if (MAY_HAVE_DEBUG_INSNS
)
7047 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
7048 bool first
= true, adjust
= false;
7050 /* Calculate the IN set as the intersection of
7051 predecessor OUT sets. */
7053 dataflow_set_clear (in
);
7054 dst_can_be_shared
= true;
7056 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7057 if (!VTI (e
->src
)->flooded
)
7058 gcc_assert (bb_order
[bb
->index
]
7059 <= bb_order
[e
->src
->index
]);
7062 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
7063 first_out
= &VTI (e
->src
)->out
;
7068 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
7074 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
7076 /* Merge and merge_adjust should keep entries in
7078 shared_hash_htab (in
->vars
)
7079 ->traverse
<dataflow_set
*,
7080 canonicalize_loc_order_check
> (in
);
7082 if (dst_can_be_shared
)
7084 shared_hash_destroy (in
->vars
);
7085 in
->vars
= shared_hash_copy (first_out
->vars
);
7089 VTI (bb
)->flooded
= true;
7093 /* Calculate the IN set as union of predecessor OUT sets. */
7094 dataflow_set_clear (&VTI (bb
)->in
);
7095 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7096 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7099 changed
= compute_bb_dataflow (bb
);
7100 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
)->size ()
7101 + shared_hash_htab (VTI (bb
)->out
.vars
)->size ();
/* Give up when the tables exceed the --param max-vartrack-size
   limit; with debug insns the pass is retried without them.  */
7103 if (htabmax
&& htabsz
> htabmax
)
7105 if (MAY_HAVE_DEBUG_INSNS
)
7106 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7107 "variable tracking size limit exceeded with "
7108 "-fvar-tracking-assignments, retrying without");
7110 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7111 "variable tracking size limit exceeded");
/* OUT changed: requeue successors in this round if not yet
   visited, otherwise into the next round.  */
7118 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7120 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7123 if (bitmap_bit_p (visited
, e
->dest
->index
))
7125 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7127 /* Send E->DEST to next round. */
7128 bitmap_set_bit (in_pending
, e
->dest
->index
);
7129 fibheap_insert (pending
,
7130 bb_order
[e
->dest
->index
],
7134 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7136 /* Add E->DEST to current round. */
7137 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7138 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
7146 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7148 (int)shared_hash_htab (VTI (bb
)->in
.vars
)->size (),
7150 (int)shared_hash_htab (VTI (bb
)->out
.vars
)->size (),
7152 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
7154 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7156 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7157 dump_dataflow_set (&VTI (bb
)->in
);
7158 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7159 dump_dataflow_set (&VTI (bb
)->out
);
7165 if (success
&& MAY_HAVE_DEBUG_INSNS
)
7166 FOR_EACH_BB_FN (bb
, cfun
)
7167 gcc_assert (VTI (bb
)->flooded
);
7170 fibheap_delete (worklist
);
7171 fibheap_delete (pending
);
7172 sbitmap_free (visited
);
7173 sbitmap_free (in_worklist
);
7174 sbitmap_free (in_pending
);
7176 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7180 /* Print the content of the LIST to dump file. */
7183 dump_attrs_list (attrs list
)
7185 for (; list
; list
= list
->next
)
7187 if (dv_is_decl_p (list
->dv
))
7188 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
7190 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
7191 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7193 fprintf (dump_file
, "\n");
7196 /* Print the information about variable *SLOT to dump file. */
7199 dump_var_tracking_slot (variable_def
**slot
, void *data ATTRIBUTE_UNUSED
)
7201 variable var
= *slot
;
7205 /* Continue traversing the hash table. */
7209 /* Print the information about variable VAR to dump file. */
7212 dump_var (variable var
)
7215 location_chain node
;
7217 if (dv_is_decl_p (var
->dv
))
7219 const_tree decl
= dv_as_decl (var
->dv
);
7221 if (DECL_NAME (decl
))
7223 fprintf (dump_file
, " name: %s",
7224 IDENTIFIER_POINTER (DECL_NAME (decl
)));
7225 if (dump_flags
& TDF_UID
)
7226 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
7228 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7229 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7231 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7232 fprintf (dump_file
, "\n");
7236 fputc (' ', dump_file
);
7237 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
7240 for (i
= 0; i
< var
->n_var_parts
; i
++)
7242 fprintf (dump_file
, " offset %ld\n",
7243 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7244 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7246 fprintf (dump_file
, " ");
7247 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7248 fprintf (dump_file
, "[uninit]");
7249 print_rtl_single (dump_file
, node
->loc
);
7254 /* Print the information about variables from hash table VARS to dump file. */
7257 dump_vars (variable_table_type
*vars
)
7259 if (vars
->elements () > 0)
7261 fprintf (dump_file
, "Variables:\n");
7262 vars
->traverse
<void *, dump_var_tracking_slot
> (NULL
);
7266 /* Print the dataflow set SET to dump file. */
7269 dump_dataflow_set (dataflow_set
*set
)
7273 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
7275 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7279 fprintf (dump_file
, "Reg %d:", i
);
7280 dump_attrs_list (set
->regs
[i
]);
7283 dump_vars (shared_hash_htab (set
->vars
));
7284 fprintf (dump_file
, "\n");
7287 /* Print the IN and OUT sets for each basic block to dump file. */
7290 dump_dataflow_sets (void)
7294 FOR_EACH_BB_FN (bb
, cfun
)
7296 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
7297 fprintf (dump_file
, "IN:\n");
7298 dump_dataflow_set (&VTI (bb
)->in
);
7299 fprintf (dump_file
, "OUT:\n");
7300 dump_dataflow_set (&VTI (bb
)->out
);
7304 /* Return the variable for DV in dropped_values, inserting one if
7305 requested with INSERT. */
/* NOTE(review): incomplete extraction -- the early return for an
   existing slot and the final store/return of the new variable are
   missing from this listing.  */
7307 static inline variable
7308 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7310 variable_def
**slot
;
7312 onepart_enum_t onepart
;
7314 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
/* Reaching here means a new entry must be created, which is only
   valid when the caller asked for insertion.  */
7322 gcc_checking_assert (insert
== INSERT
);
7324 onepart
= dv_onepart_p (dv
);
7326 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
/* Build an empty one-part variable to represent the dropped value.  */
7328 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7330 empty_var
->refcount
= 1;
7331 empty_var
->n_var_parts
= 0;
7332 empty_var
->onepart
= onepart
;
7333 empty_var
->in_changed_variables
= false;
7334 empty_var
->var_part
[0].loc_chain
= NULL
;
7335 empty_var
->var_part
[0].cur_loc
= NULL
;
7336 VAR_LOC_1PAUX (empty_var
) = NULL
;
7337 set_dv_changed (dv
, true);
7344 /* Recover the one-part aux from dropped_values. */
7346 static struct onepart_aux
*
7347 recover_dropped_1paux (variable var
)
7351 gcc_checking_assert (var
->onepart
);
/* If VAR already carries its aux data, nothing to recover.  */
7353 if (VAR_LOC_1PAUX (var
))
7354 return VAR_LOC_1PAUX (var
);
/* Only VALUEs and debug exprs are kept in dropped_values; plain
   one-part decls have nothing to recover from.  */
7356 if (var
->onepart
== ONEPART_VDECL
)
7359 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
/* Steal the aux data back from the dropped-values copy.  */
7364 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7365 VAR_LOC_1PAUX (dvar
) = NULL
;
7367 return VAR_LOC_1PAUX (var
);
7370 /* Add variable VAR to the hash table of changed variables and
7371 if it has no locations delete it from SET's hash table. */
/* NOTE(review): incomplete extraction -- some branches (e.g. the
   emit_notes test and several refcount stores) are missing here.  */
7374 variable_was_changed (variable var
, dataflow_set
*set
)
7376 hashval_t hash
= dv_htab_hash (var
->dv
);
7380 variable_def
**slot
;
7382 /* Remember this decl or VALUE has been added to changed_variables. */
7383 set_dv_changed (var
->dv
, true);
7385 slot
= changed_variables
->find_slot_with_hash (var
->dv
, hash
, INSERT
);
/* A previous entry for the same dv exists: transfer its one-part aux
   data to VAR and release it.  */
7389 variable old_var
= *slot
;
7390 gcc_assert (old_var
->in_changed_variables
);
7391 old_var
->in_changed_variables
= false;
7392 if (var
!= old_var
&& var
->onepart
)
7394 /* Restore the auxiliary info from an empty variable
7395 previously created for changed_variables, so it is
7397 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7398 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7399 VAR_LOC_1PAUX (old_var
) = NULL
;
7401 variable_htab_free (*slot
);
/* VAR has no locations left: record an empty variable in
   changed_variables instead of VAR itself.  */
7404 if (set
&& var
->n_var_parts
== 0)
7406 onepart_enum_t onepart
= var
->onepart
;
7407 variable empty_var
= NULL
;
7408 variable_def
**dslot
= NULL
;
7410 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7412 dslot
= dropped_values
->find_slot_with_hash (var
->dv
,
7413 dv_htab_hash (var
->dv
),
7419 gcc_checking_assert (!empty_var
->in_changed_variables
);
7420 if (!VAR_LOC_1PAUX (var
))
7422 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7423 VAR_LOC_1PAUX (empty_var
) = NULL
;
7426 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7432 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7433 empty_var
->dv
= var
->dv
;
7434 empty_var
->refcount
= 1;
7435 empty_var
->n_var_parts
= 0;
7436 empty_var
->onepart
= onepart
;
7439 empty_var
->refcount
++;
7444 empty_var
->refcount
++;
7445 empty_var
->in_changed_variables
= true;
7449 empty_var
->var_part
[0].loc_chain
= NULL
;
7450 empty_var
->var_part
[0].cur_loc
= NULL
;
7451 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7452 VAR_LOC_1PAUX (var
) = NULL
;
/* VAR still has locations: enter VAR itself, first recovering any
   one-part aux data stashed in dropped_values.  */
7458 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7459 recover_dropped_1paux (var
);
7461 var
->in_changed_variables
= true;
/* Finally, drop a location-less VAR from SET's own table.  */
7468 if (var
->n_var_parts
== 0)
7470 variable_def
**slot
;
7473 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7476 if (shared_hash_shared (set
->vars
))
7477 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7479 shared_hash_htab (set
->vars
)->clear_slot (slot
);
7485 /* Look for the index in VAR->var_part corresponding to OFFSET.
7486 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7487 referenced int will be set to the index that the part has or should
7488 have, if it should be inserted. */
/* NOTE(review): incomplete extraction -- the one-part early case, the
   binary-search bound updates and the return statements are missing
   from this listing.  */
7491 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
7492 int *insertion_point
)
/* One-part variables keep everything in part 0.  */
7501 if (insertion_point
)
7502 *insertion_point
= 0;
7504 return var
->n_var_parts
- 1;
7507 /* Find the location part. */
/* Binary search over the parts, which are sorted by offset.  */
7509 high
= var
->n_var_parts
;
7512 pos
= (low
+ high
) / 2;
7513 if (VAR_PART_OFFSET (var
, pos
) < offset
)
7520 if (insertion_point
)
7521 *insertion_point
= pos
;
7523 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
/* NOTE(review): incomplete extraction -- the function comment, several
   declarations (pos, inspos, r, c, node2), braces and a number of
   branch bodies are missing from this listing.  */
7529 static variable_def
**
7530 set_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7531 decl_or_value dv
, HOST_WIDE_INT offset
,
7532 enum var_init_status initialized
, rtx set_src
)
7535 location_chain node
, next
;
7536 location_chain
*nextp
;
7538 onepart_enum_t onepart
;
7543 onepart
= var
->onepart
;
7545 onepart
= dv_onepart_p (dv
);
7547 gcc_checking_assert (offset
== 0 || !onepart
);
7548 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7550 if (! flag_var_tracking_uninit
)
7551 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7555 /* Create new variable information. */
7556 var
= (variable
) pool_alloc (onepart_pool (onepart
));
7559 var
->n_var_parts
= 1;
7560 var
->onepart
= onepart
;
7561 var
->in_changed_variables
= false;
7563 VAR_LOC_1PAUX (var
) = NULL
;
7565 VAR_PART_OFFSET (var
, 0) = offset
;
7566 var
->var_part
[0].loc_chain
= NULL
;
7567 var
->var_part
[0].cur_loc
= NULL
;
7570 nextp
= &var
->var_part
[0].loc_chain
;
7576 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
/* One-part variable: locations are kept in canonical order.  Find
   the insertion point depending on LOC's kind.  */
7580 if (GET_CODE (loc
) == VALUE
)
7582 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7583 nextp
= &node
->next
)
7584 if (GET_CODE (node
->loc
) == VALUE
)
7586 if (node
->loc
== loc
)
7591 if (canon_value_cmp (node
->loc
, loc
))
7599 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
/* Registers sort before MEMs, ordered by register number.  */
7607 else if (REG_P (loc
))
7609 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7610 nextp
= &node
->next
)
7611 if (REG_P (node
->loc
))
7613 if (REGNO (node
->loc
) < REGNO (loc
))
7617 if (REGNO (node
->loc
) == REGNO (loc
))
/* MEMs sort after registers, ordered by address via loc_cmp.  */
7630 else if (MEM_P (loc
))
7632 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7633 nextp
= &node
->next
)
7634 if (REG_P (node
->loc
))
7636 else if (MEM_P (node
->loc
))
7638 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
/* Anything else sorts by loc_cmp over the whole rtx.  */
7650 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7651 nextp
= &node
->next
)
7652 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
/* Must not modify a chain shared with other sets.  */
7660 if (shared_var_p (var
, set
->vars
))
7662 slot
= unshare_variable (set
, slot
, var
, initialized
);
/* Re-find the insertion point in the unshared copy.  */
7664 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7665 nextp
= &(*nextp
)->next
)
7667 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
/* Multi-part variable: locate (or create) the part for OFFSET.  */
7674 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7676 pos
= find_variable_location_part (var
, offset
, &inspos
);
7680 node
= var
->var_part
[pos
].loc_chain
;
/* If LOC already heads the chain, just refresh its status.  */
7683 && ((REG_P (node
->loc
) && REG_P (loc
)
7684 && REGNO (node
->loc
) == REGNO (loc
))
7685 || rtx_equal_p (node
->loc
, loc
)))
7687 /* LOC is in the beginning of the chain so we have nothing
7689 if (node
->init
< initialized
)
7690 node
->init
= initialized
;
7691 if (set_src
!= NULL
)
7692 node
->set_src
= set_src
;
7698 /* We have to make a copy of a shared variable. */
7699 if (shared_var_p (var
, set
->vars
))
7701 slot
= unshare_variable (set
, slot
, var
, initialized
);
7708 /* We have not found the location part, new one will be created. */
7710 /* We have to make a copy of the shared variable. */
7711 if (shared_var_p (var
, set
->vars
))
7713 slot
= unshare_variable (set
, slot
, var
, initialized
);
7717 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7718 thus there are at most MAX_VAR_PARTS different offsets. */
7719 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7720 && (!var
->n_var_parts
|| !onepart
));
7722 /* We have to move the elements of array starting at index
7723 inspos to the next position. */
7724 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7725 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7728 gcc_checking_assert (!onepart
);
7729 VAR_PART_OFFSET (var
, pos
) = offset
;
7730 var
->var_part
[pos
].loc_chain
= NULL
;
7731 var
->var_part
[pos
].cur_loc
= NULL
;
7734 /* Delete the location from the list. */
7735 nextp
= &var
->var_part
[pos
].loc_chain
;
7736 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7739 if ((REG_P (node
->loc
) && REG_P (loc
)
7740 && REGNO (node
->loc
) == REGNO (loc
))
7741 || rtx_equal_p (node
->loc
, loc
))
7743 /* Save these values, to assign to the new node, before
7744 deleting this one. */
7745 if (node
->init
> initialized
)
7746 initialized
= node
->init
;
7747 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7748 set_src
= node
->set_src
;
7749 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7750 var
->var_part
[pos
].cur_loc
= NULL
;
7751 pool_free (loc_chain_pool
, node
);
7756 nextp
= &node
->next
;
7759 nextp
= &var
->var_part
[pos
].loc_chain
;
7762 /* Add the location to the beginning. */
7763 node
= (location_chain
) pool_alloc (loc_chain_pool
);
7765 node
->init
= initialized
;
7766 node
->set_src
= set_src
;
7767 node
->next
= *nextp
;
7770 /* If no location was emitted do so. */
7771 if (var
->var_part
[pos
].cur_loc
== NULL
)
7772 variable_was_changed (var
, set
);
7777 /* Set the part of variable's location in the dataflow set SET. The
7778 variable part is specified by variable's declaration in DV and
7779 offset OFFSET and the part's location by LOC. IOPT should be
7780 NO_INSERT if the variable is known to be in SET already and the
7781 variable hash table must not be resized, and INSERT otherwise. */
7784 set_variable_part (dataflow_set
*set
, rtx loc
,
7785 decl_or_value dv
, HOST_WIDE_INT offset
,
7786 enum var_init_status initialized
, rtx set_src
,
7787 enum insert_option iopt
)
7789 variable_def
**slot
;
7791 if (iopt
== NO_INSERT
)
7792 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7795 slot
= shared_hash_find_slot (set
->vars
, dv
);
7797 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7799 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7802 /* Remove all recorded register locations for the given variable part
7803 from dataflow set SET, except for those that are identical to loc.
7804 The variable part is specified by variable's declaration or value
7805 DV and offset OFFSET. */
/* NOTE(review): incomplete extraction -- the declarations of the attrs
   iterators (anode, anext, anextp), some braces and the return
   statement are missing from this listing.  */
7807 static variable_def
**
7808 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7809 HOST_WIDE_INT offset
, rtx set_src
)
7811 variable var
= *slot
;
7812 int pos
= find_variable_location_part (var
, offset
, NULL
);
7816 location_chain node
, next
;
7818 /* Remove the register locations from the dataflow set. */
7819 next
= var
->var_part
[pos
].loc_chain
;
7820 for (node
= next
; node
; node
= next
)
/* Keep LOC itself; with -fvar-tracking-uninit also keep nodes whose
   set_src matches SET_SRC.  */
7823 if (node
->loc
!= loc
7824 && (!flag_var_tracking_uninit
7827 || !rtx_equal_p (set_src
, node
->set_src
)))
7829 if (REG_P (node
->loc
))
7834 /* Remove the variable part from the register's
7835 list, but preserve any other variable parts
7836 that might be regarded as live in that same
7838 anextp
= &set
->regs
[REGNO (node
->loc
)];
7839 for (anode
= *anextp
; anode
; anode
= anext
)
7841 anext
= anode
->next
;
7842 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7843 && anode
->offset
== offset
)
7845 pool_free (attrs_pool
, anode
);
7849 anextp
= &anode
->next
;
7853 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7861 /* Remove all recorded register locations for the given variable part
7862 from dataflow set SET, except for those that are identical to loc.
7863 The variable part is specified by variable's declaration or value
7864 DV and offset OFFSET. */
7867 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7868 HOST_WIDE_INT offset
, rtx set_src
)
7870 variable_def
**slot
;
7872 if (!dv_as_opaque (dv
)
7873 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7876 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7880 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7883 /* Delete the part of variable's location from dataflow set SET. The
7884 variable part is specified by its SET->vars slot SLOT and offset
7885 OFFSET and the part's location by LOC. */
/* NOTE(review): incomplete extraction -- the CHANGED flag, some braces,
   the part-compaction bookkeeping and the return statement are missing
   from this listing.  */
7887 static variable_def
**
7888 delete_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7889 HOST_WIDE_INT offset
)
7891 variable var
= *slot
;
7892 int pos
= find_variable_location_part (var
, offset
, NULL
);
7896 location_chain node
, next
;
7897 location_chain
*nextp
;
7901 if (shared_var_p (var
, set
->vars
))
7903 /* If the variable contains the location part we have to
7904 make a copy of the variable. */
7905 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7908 if ((REG_P (node
->loc
) && REG_P (loc
)
7909 && REGNO (node
->loc
) == REGNO (loc
))
7910 || rtx_equal_p (node
->loc
, loc
))
7912 slot
= unshare_variable (set
, slot
, var
,
7913 VAR_INIT_STATUS_UNKNOWN
);
/* For one-part variables part 0's current location lives in the
   1PAUX structure rather than in cur_loc.  */
7920 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7921 cur_loc
= VAR_LOC_FROM (var
);
7923 cur_loc
= var
->var_part
[pos
].cur_loc
;
7925 /* Delete the location part. */
7927 nextp
= &var
->var_part
[pos
].loc_chain
;
7928 for (node
= *nextp
; node
; node
= next
)
7931 if ((REG_P (node
->loc
) && REG_P (loc
)
7932 && REGNO (node
->loc
) == REGNO (loc
))
7933 || rtx_equal_p (node
->loc
, loc
))
7935 /* If we have deleted the location which was last emitted
7936 we have to emit new location so add the variable to set
7937 of changed variables. */
7938 if (cur_loc
== node
->loc
)
7941 var
->var_part
[pos
].cur_loc
= NULL
;
7942 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7943 VAR_LOC_FROM (var
) = NULL
;
7945 pool_free (loc_chain_pool
, node
);
7950 nextp
= &node
->next
;
/* The part's chain became empty: drop the part entirely, shifting
   the following parts down.  */
7953 if (var
->var_part
[pos
].loc_chain
== NULL
)
7957 while (pos
< var
->n_var_parts
)
7959 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
7964 variable_was_changed (var
, set
);
7970 /* Delete the part of variable's location from dataflow set SET. The
7971 variable part is specified by variable's declaration or value DV
7972 and offset OFFSET and the part's location by LOC. */
7975 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7976 HOST_WIDE_INT offset
)
7978 variable_def
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7982 delete_slot_part (set
, loc
, slot
, offset
);
7986 /* Structure for passing some other parameters to function
7987 vt_expand_loc_callback. */
/* NOTE(review): incomplete extraction -- the trailing members of this
   struct (e.g. the depth field described by the last comment) are
   missing from this listing.  */
7988 struct expand_loc_callback_data
7990 /* The variables and values active at this point. */
7991 variable_table_type
*vars
;
7993 /* Stack of values and debug_exprs under expansion, and their
7995 auto_vec
<rtx
, 4> expanding
;
7997 /* Stack of values and debug_exprs whose expansion hit recursion
7998 cycles. They will have VALUE_RECURSED_INTO marked when added to
7999 this list. This flag will be cleared if any of its dependencies
8000 resolves to a valid location. So, if the flag remains set at the
8001 end of the search, we know no valid location for this one can
8003 auto_vec
<rtx
, 4> pending
;
8005 /* The maximum depth among the sub-expressions under expansion.
8006 Zero indicates no expansion so far. */
8010 /* Allocate the one-part auxiliary data structure for VAR, with enough
8011 room for COUNT dependencies. */
8014 loc_exp_dep_alloc (variable var
, int count
)
8018 gcc_checking_assert (var
->onepart
);
8020 /* We can be called with COUNT == 0 to allocate the data structure
8021 without any dependencies, e.g. for the backlinks only. However,
8022 if we are specifying a COUNT, then the dependency list must have
8023 been emptied before. It would be possible to adjust pointers or
8024 force it empty here, but this is better done at an earlier point
8025 in the algorithm, so we instead leave an assertion to catch
8027 gcc_checking_assert (!count
8028 || VAR_LOC_DEP_VEC (var
) == NULL
8029 || VAR_LOC_DEP_VEC (var
)->is_empty ());
/* Existing allocation already has room for COUNT entries: reuse it.  */
8031 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
/* Size of onepart_aux with an embedded vec of COUNT loc_exp_deps.  */
8034 allocsize
= offsetof (struct onepart_aux
, deps
)
8035 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
8037 if (VAR_LOC_1PAUX (var
))
8039 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
8040 VAR_LOC_1PAUX (var
), allocsize
);
8041 /* If the reallocation moves the onepaux structure, the
8042 back-pointer to BACKLINKS in the first list member will still
8043 point to its old location. Adjust it. */
8044 if (VAR_LOC_DEP_LST (var
))
8045 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
/* Fresh allocation: initialize all the aux fields.  */
8049 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
8050 *VAR_LOC_DEP_LSTP (var
) = NULL
;
8051 VAR_LOC_FROM (var
) = NULL
;
8052 VAR_LOC_DEPTH (var
).complexity
= 0;
8053 VAR_LOC_DEPTH (var
).entryvals
= 0;
8055 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
8058 /* Remove all entries from the vector of active dependencies of VAR,
8059 removing them from the back-links lists too. */
8062 loc_exp_dep_clear (variable var
)
8064 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
8066 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
8068 led
->next
->pprev
= led
->pprev
;
8070 *led
->pprev
= led
->next
;
8071 VAR_LOC_DEP_VEC (var
)->pop ();
8075 /* Insert an active dependency from VAR on X to the vector of
8076 dependencies, and add the corresponding back-link to X's list of
8077 back-links in VARS. */
8080 loc_exp_insert_dep (variable var
, rtx x
, variable_table_type
*vars
)
8086 dv
= dv_from_rtx (x
);
8088 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8089 an additional look up? */
8090 xvar
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
/* X not in VARS: fall back to the dropped_values table.  */
8094 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8095 gcc_checking_assert (xvar
);
8098 /* No point in adding the same backlink more than once. This may
8099 arise if say the same value appears in two complex expressions in
8100 the same loc_list, or even more than once in a single
8102 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
/* NOT_ONEPART vars keep their deps in a pool; one-part vars embed
   them in their dependency vector.  */
8105 if (var
->onepart
== NOT_ONEPART
)
8106 led
= (loc_exp_dep
*) pool_alloc (loc_exp_dep_pool
);
8110 memset (&empty
, 0, sizeof (empty
));
8111 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8112 led
= &VAR_LOC_DEP_VEC (var
)->last ();
/* Link LED at the head of XVAR's back-links list.  */
8117 loc_exp_dep_alloc (xvar
, 0);
8118 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8119 led
->next
= *led
->pprev
;
8121 led
->next
->pprev
= &led
->next
;
8125 /* Create active dependencies of VAR on COUNT values starting at
8126 VALUE, and corresponding back-links to the entries in VARS. Return
8127 true if we found any pending-recursion results. */
/* NOTE(review): incomplete extraction -- the loop header iterating over
   the COUNT values (binding each to X) is missing from this listing.  */
8130 loc_exp_dep_set (variable var
, rtx result
, rtx
*value
, int count
,
8131 variable_table_type
*vars
)
8133 bool pending_recursion
= false;
8135 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8136 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8138 /* Set up all dependencies from last_child (as set up at the end of
8139 the loop above) to the end. */
8140 loc_exp_dep_alloc (var
, count
);
/* A dependency that is still marked VALUE_RECURSED_INTO and produced
   no RESULT means recursion is pending on it.  */
8146 if (!pending_recursion
)
8147 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8149 loc_exp_insert_dep (var
, x
, vars
);
8152 return pending_recursion
;
8155 /* Notify the back-links of IVAR that are pending recursion that we
8156 have found a non-NIL value for it, so they are cleared for another
8157 attempt to compute a current location. */
8160 notify_dependents_of_resolved_value (variable ivar
, variable_table_type
*vars
)
8162 loc_exp_dep
*led
, *next
;
8164 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8166 decl_or_value dv
= led
->dv
;
/* Dependent VALUEs: clear the recursion markers and recurse into
   their own dependents.  */
8171 if (dv_is_value_p (dv
))
8173 rtx value
= dv_as_value (dv
);
8175 /* If we have already resolved it, leave it alone. */
8176 if (!VALUE_RECURSED_INTO (value
))
8179 /* Check that VALUE_RECURSED_INTO, true from the test above,
8180 implies NO_LOC_P. */
8181 gcc_checking_assert (NO_LOC_P (value
));
8183 /* We won't notify variables that are being expanded,
8184 because their dependency list is cleared before
8186 NO_LOC_P (value
) = false;
8187 VALUE_RECURSED_INTO (value
) = false;
8189 gcc_checking_assert (dv_changed_p (dv
));
/* Dependent one-part decls/debug exprs: only notify those already
   marked changed.  */
8193 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8194 if (!dv_changed_p (dv
))
8198 var
= vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8201 var
= variable_from_dropped (dv
, NO_INSERT
);
8204 notify_dependents_of_resolved_value (var
, vars
);
/* Unlink the notified back-link from the list.  */
8207 next
->pprev
= led
->pprev
;
8215 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8216 int max_depth
, void *data
);
8218 /* Return the combined depth, when one sub-expression evaluated to
8219 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8221 static inline expand_depth
8222 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8224 /* If we didn't find anything, stick with what we had. */
8225 if (!best_depth
.complexity
)
8228 /* If we found hadn't found anything, use the depth of the current
8229 expression. Do NOT add one extra level, we want to compute the
8230 maximum depth among sub-expressions. We'll increment it later,
8232 if (!saved_depth
.complexity
)
8235 /* Combine the entryval count so that regardless of which one we
8236 return, the entryval count is accurate. */
8237 best_depth
.entryvals
= saved_depth
.entryvals
8238 = best_depth
.entryvals
+ saved_depth
.entryvals
;
8240 if (saved_depth
.complexity
< best_depth
.complexity
)
8246 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8247 DATA for cselib expand callback. If PENDRECP is given, indicate in
8248 it whether any sub-expression couldn't be fully evaluated because
8249 it is pending recursion resolution. */
8252 vt_expand_var_loc_chain (variable var
, bitmap regs
, void *data
, bool *pendrecp
)
8254 struct expand_loc_callback_data
*elcd
8255 = (struct expand_loc_callback_data
*) data
;
8256 location_chain loc
, next
;
8258 int first_child
, result_first_child
, last_child
;
8259 bool pending_recursion
;
8260 rtx loc_from
= NULL
;
8261 struct elt_loc_list
*cloc
= NULL
;
8262 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8263 int wanted_entryvals
, found_entryvals
= 0;
8265 /* Clear all backlinks pointing at this, so that we're not notified
8266 while we're active. */
8267 loc_exp_dep_clear (var
);
8270 if (var
->onepart
== ONEPART_VALUE
)
8272 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8274 gcc_checking_assert (cselib_preserved_value_p (val
));
8279 first_child
= result_first_child
= last_child
8280 = elcd
->expanding
.length ();
8282 wanted_entryvals
= found_entryvals
;
8284 /* Attempt to expand each available location in turn. */
8285 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8286 loc
|| cloc
; loc
= next
)
8288 result_first_child
= last_child
;
8292 loc_from
= cloc
->loc
;
8295 if (unsuitable_loc (loc_from
))
8300 loc_from
= loc
->loc
;
8304 gcc_checking_assert (!unsuitable_loc (loc_from
));
8306 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8307 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8308 vt_expand_loc_callback
, data
);
8309 last_child
= elcd
->expanding
.length ();
8313 depth
= elcd
->depth
;
8315 gcc_checking_assert (depth
.complexity
8316 || result_first_child
== last_child
);
8318 if (last_child
- result_first_child
!= 1)
8320 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8325 if (depth
.complexity
<= EXPR_USE_DEPTH
)
8327 if (depth
.entryvals
<= wanted_entryvals
)
8329 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8330 found_entryvals
= depth
.entryvals
;
8336 /* Set it up in case we leave the loop. */
8337 depth
.complexity
= depth
.entryvals
= 0;
8339 result_first_child
= first_child
;
8342 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8344 /* We found entries with ENTRY_VALUEs and skipped them. Since
8345 we could not find any expansions without ENTRY_VALUEs, but we
8346 found at least one with them, go back and get an entry with
8347 the minimum number ENTRY_VALUE count that we found. We could
8348 avoid looping, but since each sub-loc is already resolved,
8349 the re-expansion should be trivial. ??? Should we record all
8350 attempted locs as dependencies, so that we retry the
8351 expansion should any of them change, in the hope it can give
8352 us a new entry without an ENTRY_VALUE? */
8353 elcd
->expanding
.truncate (first_child
);
8357 /* Register all encountered dependencies as active. */
8358 pending_recursion
= loc_exp_dep_set
8359 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8360 last_child
- result_first_child
, elcd
->vars
);
8362 elcd
->expanding
.truncate (first_child
);
8364 /* Record where the expansion came from. */
8365 gcc_checking_assert (!result
|| !pending_recursion
);
8366 VAR_LOC_FROM (var
) = loc_from
;
8367 VAR_LOC_DEPTH (var
) = depth
;
8369 gcc_checking_assert (!depth
.complexity
== !result
);
8371 elcd
->depth
= update_depth (saved_depth
, depth
);
8373 /* Indicate whether any of the dependencies are pending recursion
8376 *pendrecp
= pending_recursion
;
8378 if (!pendrecp
|| !pending_recursion
)
8379 var
->var_part
[0].cur_loc
= result
;
8384 /* Callback for cselib_expand_value, that looks for expressions
8385 holding the value in the var-tracking hash tables. Return X for
8386 standard processing, anything else is to be used as-is. */
8389 vt_expand_loc_callback (rtx x
, bitmap regs
,
8390 int max_depth ATTRIBUTE_UNUSED
,
8393 struct expand_loc_callback_data
*elcd
8394 = (struct expand_loc_callback_data
*) data
;
8398 bool pending_recursion
= false;
8399 bool from_empty
= false;
8401 switch (GET_CODE (x
))
8404 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8406 vt_expand_loc_callback
, data
);
8411 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8412 GET_MODE (SUBREG_REG (x
)),
8415 /* Invalid SUBREGs are ok in debug info. ??? We could try
8416 alternate expansions for the VALUE as well. */
8418 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
8424 dv
= dv_from_rtx (x
);
8431 elcd
->expanding
.safe_push (x
);
8433 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8434 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8438 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8442 var
= elcd
->vars
->find_with_hash (dv
, dv_htab_hash (dv
));
8447 var
= variable_from_dropped (dv
, INSERT
);
8450 gcc_checking_assert (var
);
8452 if (!dv_changed_p (dv
))
8454 gcc_checking_assert (!NO_LOC_P (x
));
8455 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8456 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8457 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8459 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8461 return var
->var_part
[0].cur_loc
;
8464 VALUE_RECURSED_INTO (x
) = true;
8465 /* This is tentative, but it makes some tests simpler. */
8466 NO_LOC_P (x
) = true;
8468 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8470 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8472 if (pending_recursion
)
8474 gcc_checking_assert (!result
);
8475 elcd
->pending
.safe_push (x
);
8479 NO_LOC_P (x
) = !result
;
8480 VALUE_RECURSED_INTO (x
) = false;
8481 set_dv_changed (dv
, false);
8484 notify_dependents_of_resolved_value (var
, elcd
->vars
);
8490 /* While expanding variables, we may encounter recursion cycles
8491 because of mutual (possibly indirect) dependencies between two
8492 particular variables (or values), say A and B. If we're trying to
8493 expand A when we get to B, which in turn attempts to expand A, if
8494 we can't find any other expansion for B, we'll add B to this
8495 pending-recursion stack, and tentatively return NULL for its
8496 location. This tentative value will be used for any other
8497 occurrences of B, unless A gets some other location, in which case
8498 it will notify B that it is worth another try at computing a
8499 location for it, and it will use the location computed for A then.
8500 At the end of the expansion, the tentative NULL locations become
8501 final for all members of PENDING that didn't get a notification.
8502 This function performs this finalization of NULL locations. */
8505 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8507 while (!pending
->is_empty ())
8509 rtx x
= pending
->pop ();
8512 if (!VALUE_RECURSED_INTO (x
))
8515 gcc_checking_assert (NO_LOC_P (x
));
8516 VALUE_RECURSED_INTO (x
) = false;
8517 dv
= dv_from_rtx (x
);
8518 gcc_checking_assert (dv_changed_p (dv
));
8519 set_dv_changed (dv
, false);
8523 /* Initialize expand_loc_callback_data D with variable hash table V.
8524 It must be a macro because of alloca (vec stack). */
8525 #define INIT_ELCD(d, v) \
8529 (d).depth.complexity = (d).depth.entryvals = 0; \
8532 /* Finalize expand_loc_callback_data D, resolved to location L. */
8533 #define FINI_ELCD(d, l) \
8536 resolve_expansions_pending_recursion (&(d).pending); \
8537 (d).pending.release (); \
8538 (d).expanding.release (); \
8540 if ((l) && MEM_P (l)) \
8541 (l) = targetm.delegitimize_address (l); \
8545 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8546 equivalences in VARS, updating their CUR_LOCs in the process. */
8549 vt_expand_loc (rtx loc
, variable_table_type
*vars
)
8551 struct expand_loc_callback_data data
;
8554 if (!MAY_HAVE_DEBUG_INSNS
)
8557 INIT_ELCD (data
, vars
);
8559 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8560 vt_expand_loc_callback
, &data
);
8562 FINI_ELCD (data
, result
);
8567 /* Expand the one-part VARiable to a location, using the equivalences
8568 in VARS, updating their CUR_LOCs in the process. */
8571 vt_expand_1pvar (variable var
, variable_table_type
*vars
)
8573 struct expand_loc_callback_data data
;
8576 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
8578 if (!dv_changed_p (var
->dv
))
8579 return var
->var_part
[0].cur_loc
;
8581 INIT_ELCD (data
, vars
);
8583 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8585 gcc_checking_assert (data
.expanding
.is_empty ());
8587 FINI_ELCD (data
, loc
);
8592 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8593 additional parameters: WHERE specifies whether the note shall be emitted
8594 before or after instruction INSN. */
8597 emit_note_insn_var_location (variable_def
**varp
, emit_note_data
*data
)
8599 variable var
= *varp
;
8600 rtx_insn
*insn
= data
->insn
;
8601 enum emit_note_where where
= data
->where
;
8602 variable_table_type
*vars
= data
->vars
;
8605 int i
, j
, n_var_parts
;
8607 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8608 HOST_WIDE_INT last_limit
;
8609 tree type_size_unit
;
8610 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8611 rtx loc
[MAX_VAR_PARTS
];
8615 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8616 || var
->onepart
== ONEPART_VDECL
);
8618 decl
= dv_as_decl (var
->dv
);
8624 for (i
= 0; i
< var
->n_var_parts
; i
++)
8625 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8626 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
8627 for (i
= 0; i
< var
->n_var_parts
; i
++)
8629 enum machine_mode mode
, wider_mode
;
8631 HOST_WIDE_INT offset
;
8633 if (i
== 0 && var
->onepart
)
8635 gcc_checking_assert (var
->n_var_parts
== 1);
8637 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8638 loc2
= vt_expand_1pvar (var
, vars
);
8642 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8647 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8649 offset
= VAR_PART_OFFSET (var
, i
);
8650 loc2
= var
->var_part
[i
].cur_loc
;
8651 if (loc2
&& GET_CODE (loc2
) == MEM
8652 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8654 rtx depval
= XEXP (loc2
, 0);
8656 loc2
= vt_expand_loc (loc2
, vars
);
8659 loc_exp_insert_dep (var
, depval
, vars
);
8666 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
8667 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8668 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8670 initialized
= lc
->init
;
8676 offsets
[n_var_parts
] = offset
;
8682 loc
[n_var_parts
] = loc2
;
8683 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8684 if (mode
== VOIDmode
&& var
->onepart
)
8685 mode
= DECL_MODE (decl
);
8686 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8688 /* Attempt to merge adjacent registers or memory. */
8689 wider_mode
= GET_MODE_WIDER_MODE (mode
);
8690 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8691 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8693 if (j
< var
->n_var_parts
8694 && wider_mode
!= VOIDmode
8695 && var
->var_part
[j
].cur_loc
8696 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8697 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8698 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8699 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8700 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
8704 if (REG_P (loc
[n_var_parts
])
8705 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
8706 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
8707 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8710 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8711 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8713 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8714 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8717 if (!REG_P (new_loc
)
8718 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8721 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
8724 else if (MEM_P (loc
[n_var_parts
])
8725 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8726 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8727 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8729 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8730 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8731 XEXP (XEXP (loc2
, 0), 0))
8732 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8733 == GET_MODE_SIZE (mode
))
8734 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8735 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8736 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8737 XEXP (XEXP (loc2
, 0), 0))
8738 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8739 + GET_MODE_SIZE (mode
)
8740 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8741 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8747 loc
[n_var_parts
] = new_loc
;
8749 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8755 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8756 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8759 if (! flag_var_tracking_uninit
)
8760 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8764 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
, initialized
);
8765 else if (n_var_parts
== 1)
8769 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8770 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8774 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
, initialized
);
8776 else if (n_var_parts
)
8780 for (i
= 0; i
< n_var_parts
; i
++)
8782 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8784 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8785 gen_rtvec_v (n_var_parts
, loc
));
8786 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8787 parallel
, initialized
);
8790 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8792 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8793 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8794 NOTE_DURING_CALL_P (note
) = true;
8798 /* Make sure that the call related notes come first. */
8799 while (NEXT_INSN (insn
)
8801 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8802 && NOTE_DURING_CALL_P (insn
))
8803 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8804 insn
= NEXT_INSN (insn
);
8806 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8807 && NOTE_DURING_CALL_P (insn
))
8808 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8809 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8811 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8813 NOTE_VAR_LOCATION (note
) = note_vl
;
8815 set_dv_changed (var
->dv
, false);
8816 gcc_assert (var
->in_changed_variables
);
8817 var
->in_changed_variables
= false;
8818 changed_variables
->clear_slot (varp
);
8820 /* Continue traversing the hash table. */
8824 /* While traversing changed_variables, push onto DATA (a stack of RTX
8825 values) entries that aren't user variables. */
8828 var_track_values_to_stack (variable_def
**slot
,
8829 vec
<rtx
, va_heap
> *changed_values_stack
)
8831 variable var
= *slot
;
8833 if (var
->onepart
== ONEPART_VALUE
)
8834 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8835 else if (var
->onepart
== ONEPART_DEXPR
)
8836 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
8841 /* Remove from changed_variables the entry whose DV corresponds to
8842 value or debug_expr VAL. */
8844 remove_value_from_changed_variables (rtx val
)
8846 decl_or_value dv
= dv_from_rtx (val
);
8847 variable_def
**slot
;
8850 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8853 var
->in_changed_variables
= false;
8854 changed_variables
->clear_slot (slot
);
8857 /* If VAL (a value or debug_expr) has backlinks to variables actively
8858 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8859 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8860 have dependencies of their own to notify. */
8863 notify_dependents_of_changed_value (rtx val
, variable_table_type
*htab
,
8864 vec
<rtx
, va_heap
> *changed_values_stack
)
8866 variable_def
**slot
;
8869 decl_or_value dv
= dv_from_rtx (val
);
8871 slot
= changed_variables
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8874 slot
= htab
->find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8876 slot
= dropped_values
->find_slot_with_hash (dv
, dv_htab_hash (dv
),
8880 while ((led
= VAR_LOC_DEP_LST (var
)))
8882 decl_or_value ldv
= led
->dv
;
8885 /* Deactivate and remove the backlink, as it was “used up”. It
8886 makes no sense to attempt to notify the same entity again:
8887 either it will be recomputed and re-register an active
8888 dependency, or it will still have the changed mark. */
8890 led
->next
->pprev
= led
->pprev
;
8892 *led
->pprev
= led
->next
;
8896 if (dv_changed_p (ldv
))
8899 switch (dv_onepart_p (ldv
))
8903 set_dv_changed (ldv
, true);
8904 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8908 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8909 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8910 variable_was_changed (ivar
, NULL
);
8914 pool_free (loc_exp_dep_pool
, led
);
8915 ivar
= htab
->find_with_hash (ldv
, dv_htab_hash (ldv
));
8918 int i
= ivar
->n_var_parts
;
8921 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8923 if (loc
&& GET_CODE (loc
) == MEM
8924 && XEXP (loc
, 0) == val
)
8926 variable_was_changed (ivar
, NULL
);
8939 /* Take out of changed_variables any entries that don't refer to use
8940 variables. Back-propagate change notifications from values and
8941 debug_exprs to their active dependencies in HTAB or in
8942 CHANGED_VARIABLES. */
8945 process_changed_values (variable_table_type
*htab
)
8949 auto_vec
<rtx
, 20> changed_values_stack
;
8951 /* Move values from changed_variables to changed_values_stack. */
8953 ->traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
8954 (&changed_values_stack
);
8956 /* Back-propagate change notifications in values while popping
8957 them from the stack. */
8958 for (n
= i
= changed_values_stack
.length ();
8959 i
> 0; i
= changed_values_stack
.length ())
8961 val
= changed_values_stack
.pop ();
8962 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
8964 /* This condition will hold when visiting each of the entries
8965 originally in changed_variables. We can't remove them
8966 earlier because this could drop the backlinks before we got a
8967 chance to use them. */
8970 remove_value_from_changed_variables (val
);
8976 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8977 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8978 the notes shall be emitted before of after instruction INSN. */
8981 emit_notes_for_changes (rtx_insn
*insn
, enum emit_note_where where
,
8984 emit_note_data data
;
8985 variable_table_type
*htab
= shared_hash_htab (vars
);
8987 if (!changed_variables
->elements ())
8990 if (MAY_HAVE_DEBUG_INSNS
)
8991 process_changed_values (htab
);
8998 ->traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
9001 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9002 same variable in hash table DATA or is not there at all. */
9005 emit_notes_for_differences_1 (variable_def
**slot
, variable_table_type
*new_vars
)
9007 variable old_var
, new_var
;
9010 new_var
= new_vars
->find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
9014 /* Variable has disappeared. */
9015 variable empty_var
= NULL
;
9017 if (old_var
->onepart
== ONEPART_VALUE
9018 || old_var
->onepart
== ONEPART_DEXPR
)
9020 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
9023 gcc_checking_assert (!empty_var
->in_changed_variables
);
9024 if (!VAR_LOC_1PAUX (old_var
))
9026 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
9027 VAR_LOC_1PAUX (empty_var
) = NULL
;
9030 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
9036 empty_var
= (variable
) pool_alloc (onepart_pool (old_var
->onepart
));
9037 empty_var
->dv
= old_var
->dv
;
9038 empty_var
->refcount
= 0;
9039 empty_var
->n_var_parts
= 0;
9040 empty_var
->onepart
= old_var
->onepart
;
9041 empty_var
->in_changed_variables
= false;
9044 if (empty_var
->onepart
)
9046 /* Propagate the auxiliary data to (ultimately)
9047 changed_variables. */
9048 empty_var
->var_part
[0].loc_chain
= NULL
;
9049 empty_var
->var_part
[0].cur_loc
= NULL
;
9050 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
9051 VAR_LOC_1PAUX (old_var
) = NULL
;
9053 variable_was_changed (empty_var
, NULL
);
9054 /* Continue traversing the hash table. */
9057 /* Update cur_loc and one-part auxiliary data, before new_var goes
9058 through variable_was_changed. */
9059 if (old_var
!= new_var
&& new_var
->onepart
)
9061 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
9062 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
9063 VAR_LOC_1PAUX (old_var
) = NULL
;
9064 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
9066 if (variable_different_p (old_var
, new_var
))
9067 variable_was_changed (new_var
, NULL
);
9069 /* Continue traversing the hash table. */
9073 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9077 emit_notes_for_differences_2 (variable_def
**slot
, variable_table_type
*old_vars
)
9079 variable old_var
, new_var
;
9082 old_var
= old_vars
->find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
9086 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9087 new_var
->var_part
[i
].cur_loc
= NULL
;
9088 variable_was_changed (new_var
, NULL
);
9091 /* Continue traversing the hash table. */
9095 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9099 emit_notes_for_differences (rtx_insn
*insn
, dataflow_set
*old_set
,
9100 dataflow_set
*new_set
)
9102 shared_hash_htab (old_set
->vars
)
9103 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9104 (shared_hash_htab (new_set
->vars
));
9105 shared_hash_htab (new_set
->vars
)
9106 ->traverse
<variable_table_type
*, emit_notes_for_differences_2
>
9107 (shared_hash_htab (old_set
->vars
));
9108 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
9111 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9114 next_non_note_insn_var_location (rtx_insn
*insn
)
9118 insn
= NEXT_INSN (insn
);
9121 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
9128 /* Emit the notes for changes of location parts in the basic block BB. */
9131 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9134 micro_operation
*mo
;
9136 dataflow_set_clear (set
);
9137 dataflow_set_copy (set
, &VTI (bb
)->in
);
9139 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9141 rtx_insn
*insn
= mo
->insn
;
9142 rtx_insn
*next_insn
= next_non_note_insn_var_location (insn
);
9147 dataflow_set_clear_at_call (set
);
9148 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9150 rtx arguments
= mo
->u
.loc
, *p
= &arguments
;
9154 XEXP (XEXP (*p
, 0), 1)
9155 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9156 shared_hash_htab (set
->vars
));
9157 /* If expansion is successful, keep it in the list. */
9158 if (XEXP (XEXP (*p
, 0), 1))
9160 /* Otherwise, if the following item is data_value for it,
9162 else if (XEXP (*p
, 1)
9163 && REG_P (XEXP (XEXP (*p
, 0), 0))
9164 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9165 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9167 && REGNO (XEXP (XEXP (*p
, 0), 0))
9168 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9170 *p
= XEXP (XEXP (*p
, 1), 1);
9171 /* Just drop this item. */
9175 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9176 NOTE_VAR_LOCATION (note
) = arguments
;
9182 rtx loc
= mo
->u
.loc
;
9185 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9187 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9189 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9195 rtx loc
= mo
->u
.loc
;
9199 if (GET_CODE (loc
) == CONCAT
)
9201 val
= XEXP (loc
, 0);
9202 vloc
= XEXP (loc
, 1);
9210 var
= PAT_VAR_LOCATION_DECL (vloc
);
9212 clobber_variable_part (set
, NULL_RTX
,
9213 dv_from_decl (var
), 0, NULL_RTX
);
9216 if (VAL_NEEDS_RESOLUTION (loc
))
9217 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9218 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9219 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9222 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9223 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9224 dv_from_decl (var
), 0,
9225 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9228 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9234 rtx loc
= mo
->u
.loc
;
9235 rtx val
, vloc
, uloc
;
9237 vloc
= uloc
= XEXP (loc
, 1);
9238 val
= XEXP (loc
, 0);
9240 if (GET_CODE (val
) == CONCAT
)
9242 uloc
= XEXP (val
, 1);
9243 val
= XEXP (val
, 0);
9246 if (VAL_NEEDS_RESOLUTION (loc
))
9247 val_resolve (set
, val
, vloc
, insn
);
9249 val_store (set
, val
, uloc
, insn
, false);
9251 if (VAL_HOLDS_TRACK_EXPR (loc
))
9253 if (GET_CODE (uloc
) == REG
)
9254 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9256 else if (GET_CODE (uloc
) == MEM
)
9257 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9261 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9267 rtx loc
= mo
->u
.loc
;
9268 rtx val
, vloc
, uloc
;
9272 uloc
= XEXP (vloc
, 1);
9273 val
= XEXP (vloc
, 0);
9276 if (GET_CODE (uloc
) == SET
)
9278 dstv
= SET_DEST (uloc
);
9279 srcv
= SET_SRC (uloc
);
9287 if (GET_CODE (val
) == CONCAT
)
9289 dstv
= vloc
= XEXP (val
, 1);
9290 val
= XEXP (val
, 0);
9293 if (GET_CODE (vloc
) == SET
)
9295 srcv
= SET_SRC (vloc
);
9297 gcc_assert (val
!= srcv
);
9298 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9300 dstv
= vloc
= SET_DEST (vloc
);
9302 if (VAL_NEEDS_RESOLUTION (loc
))
9303 val_resolve (set
, val
, srcv
, insn
);
9305 else if (VAL_NEEDS_RESOLUTION (loc
))
9307 gcc_assert (GET_CODE (uloc
) == SET
9308 && GET_CODE (SET_SRC (uloc
)) == REG
);
9309 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9312 if (VAL_HOLDS_TRACK_EXPR (loc
))
9314 if (VAL_EXPR_IS_CLOBBERED (loc
))
9317 var_reg_delete (set
, uloc
, true);
9318 else if (MEM_P (uloc
))
9320 gcc_assert (MEM_P (dstv
));
9321 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9322 var_mem_delete (set
, dstv
, true);
9327 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9328 rtx src
= NULL
, dst
= uloc
;
9329 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9331 if (GET_CODE (uloc
) == SET
)
9333 src
= SET_SRC (uloc
);
9334 dst
= SET_DEST (uloc
);
9339 status
= find_src_status (set
, src
);
9341 src
= find_src_set_src (set
, src
);
9345 var_reg_delete_and_set (set
, dst
, !copied_p
,
9347 else if (MEM_P (dst
))
9349 gcc_assert (MEM_P (dstv
));
9350 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9351 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9356 else if (REG_P (uloc
))
9357 var_regno_delete (set
, REGNO (uloc
));
9358 else if (MEM_P (uloc
))
9360 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9361 gcc_checking_assert (vloc
== dstv
);
9363 clobber_overlapping_mems (set
, vloc
);
9366 val_store (set
, val
, dstv
, insn
, true);
9368 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9375 rtx loc
= mo
->u
.loc
;
9378 if (GET_CODE (loc
) == SET
)
9380 set_src
= SET_SRC (loc
);
9381 loc
= SET_DEST (loc
);
9385 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9388 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9391 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9398 rtx loc
= mo
->u
.loc
;
9399 enum var_init_status src_status
;
9402 if (GET_CODE (loc
) == SET
)
9404 set_src
= SET_SRC (loc
);
9405 loc
= SET_DEST (loc
);
9408 src_status
= find_src_status (set
, set_src
);
9409 set_src
= find_src_set_src (set
, set_src
);
9412 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9414 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9416 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9423 rtx loc
= mo
->u
.loc
;
9426 var_reg_delete (set
, loc
, false);
9428 var_mem_delete (set
, loc
, false);
9430 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9436 rtx loc
= mo
->u
.loc
;
9439 var_reg_delete (set
, loc
, true);
9441 var_mem_delete (set
, loc
, true);
9443 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9449 set
->stack_adjust
+= mo
->u
.adjust
;
9455 /* Emit notes for the whole function. */
9458 vt_emit_notes (void)
9463 gcc_assert (!changed_variables
->elements ());
9465 /* Free memory occupied by the out hash tables, as they aren't used
9467 FOR_EACH_BB_FN (bb
, cfun
)
9468 dataflow_set_clear (&VTI (bb
)->out
);
9470 /* Enable emitting notes by functions (mainly by set_variable_part and
9471 delete_variable_part). */
9474 if (MAY_HAVE_DEBUG_INSNS
)
9476 dropped_values
= new variable_table_type (cselib_get_next_uid () * 2);
9477 loc_exp_dep_pool
= create_alloc_pool ("loc_exp_dep pool",
9478 sizeof (loc_exp_dep
), 64);
9481 dataflow_set_init (&cur
);
9483 FOR_EACH_BB_FN (bb
, cfun
)
9485 /* Emit the notes for changes of variable locations between two
9486 subsequent basic blocks. */
9487 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
9489 if (MAY_HAVE_DEBUG_INSNS
)
9490 local_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9492 /* Emit the notes for the changes in the basic block itself. */
9493 emit_notes_in_bb (bb
, &cur
);
9495 if (MAY_HAVE_DEBUG_INSNS
)
9496 delete local_get_addr_cache
;
9497 local_get_addr_cache
= NULL
;
9499 /* Free memory occupied by the in hash table, we won't need it
9501 dataflow_set_clear (&VTI (bb
)->in
);
9503 #ifdef ENABLE_CHECKING
9504 shared_hash_htab (cur
.vars
)
9505 ->traverse
<variable_table_type
*, emit_notes_for_differences_1
>
9506 (shared_hash_htab (empty_shared_hash
));
9508 dataflow_set_destroy (&cur
);
9510 if (MAY_HAVE_DEBUG_INSNS
)
9511 delete dropped_values
;
9512 dropped_values
= NULL
;
/* NOTE(review): this excerpt is garbled -- statements are split across
   physical lines and structural lines (the `static bool' return type,
   braces and the `return true/false' statements) are elided.  Code is
   left byte-identical; only comments are added.  Verify any conclusion
   against the complete var-tracking.c.  */
9517 /* If there is a declaration and offset associated with register/memory RTL
9518 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
/* Out-parameters: *DECLP receives the associated decl, *OFFSETP the
   offset of this RTL within that decl.  */
9521 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
/* REG case (the enclosing REG_P test is elided here): take decl/offset
   straight from the register's attached attributes, if any.  */
9525 if (REG_ATTRS (rtl
))
9527 *declp
= REG_EXPR (rtl
);
9528 *offsetp
= REG_OFFSET (rtl
);
/* PARALLEL case: walk the member REGs tracking a single common decl and
   the minimum REG_OFFSET seen.  The loop-exit and return lines are
   elided in this excerpt -- presumably it bails out when a member is
   not a REG, lacks attributes, or disagrees on the decl; confirm
   against the unelided source.  */
9532 else if (GET_CODE (rtl
) == PARALLEL
)
9534 tree decl
= NULL_TREE
;
/* MAX_VAR_PARTS acts as the "no offset seen yet" sentinel for the
   running minimum below.  */
9535 HOST_WIDE_INT offset
= MAX_VAR_PARTS
;
9536 int len
= XVECLEN (rtl
, 0), i
;
9538 for (i
= 0; i
< len
; i
++)
9540 rtx reg
= XEXP (XVECEXP (rtl
, 0, i
), 0);
9541 if (!REG_P (reg
) || !REG_ATTRS (reg
))
9544 decl
= REG_EXPR (reg
);
9545 if (REG_EXPR (reg
) != decl
)
9547 if (REG_OFFSET (reg
) < offset
)
9548 offset
= REG_OFFSET (reg
);
/* MEM case: take decl/offset from the MEM's attributes, if present.  */
9558 else if (MEM_P (rtl
))
9560 if (MEM_ATTRS (rtl
))
9562 *declp
= MEM_EXPR (rtl
);
9563 *offsetp
= INT_MEM_OFFSET (rtl
);
/* Builds an ENTRY_VALUE rtx in RTL's mode, points its ENTRY_VALUE_EXP at
   RTL, and registers it with cselib as a permanent equivalence of VAL,
   so debug info can refer to the value RTL had on function entry.
   NOTE(review): the closing `*'/' of the original leading comment (and
   possibly guard code around it) is elided in this excerpt, so no
   comments are inserted past this point; code kept byte-identical.  */
9570 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9574 record_entry_value (cselib_val
*val
, rtx rtl
)
9576 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9578 ENTRY_VALUE_EXP (ev
) = rtl
;
9580 cselib_add_permanent_equiv (val
, ev
, get_insns ());
/* vt_add_function_parameter: seed the ENTRY_BLOCK's dataflow OUT set with
   the incoming location of one PARM_DECL, so debug info can show where a
   parameter arrived (register, stack slot, or PARALLEL of pieces).
   NOTE(review): this excerpt is heavily elided -- braces, early `return'
   statements, and the declarations of `decl', `dv', `out', `val', `p',
   `outgoing', `i', `lowpart' etc. are missing.  Code kept byte-identical;
   comments are hedged accordingly.  A comment opened at original line
   9746 never closes in this excerpt, so nothing is inserted after it.  */
9583 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9586 vt_add_function_parameter (tree parm
)
9588 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9589 rtx incoming
= DECL_INCOMING_RTL (parm
);
9591 enum machine_mode mode
;
9592 HOST_WIDE_INT offset
;
/* Guards: only genuine PARM_DECLs with both a DECL_RTL and an incoming
   RTL, neither in BLKmode, are tracked.  The elided lines presumably
   `return' early on each failed test -- confirm upstream.  */
9596 if (TREE_CODE (parm
) != PARM_DECL
)
9599 if (!decl_rtl
|| !incoming
)
9602 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9605 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9606 rewrite the incoming location of parameters passed on the stack
9607 into MEMs based on the argument pointer, so that incoming doesn't
9608 depend on a pseudo. */
9609 if (MEM_P (incoming
)
9610 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9611 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9612 && XEXP (XEXP (incoming
, 0), 0)
9613 == crtl
->args
.internal_arg_pointer
9614 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9616 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9617 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9618 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
/* The assignment target (presumably `incoming') on the next line is
   elided in this excerpt.  */
9620 = replace_equiv_address_nv (incoming
,
9621 plus_constant (Pmode
,
9622 arg_pointer_rtx
, off
));
9625 #ifdef HAVE_window_save
9626 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9627 If the target machine has an explicit window save instruction, the
9628 actual entry value is the corresponding OUTGOING_REGNO instead. */
9629 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
/* Three shapes are rewritten: a single hard REG, a PARALLEL of REGs,
   and a MEM whose address is a hard REG.  Each pushes a mapping onto
   windowed_parm_regs so notes can be fixed up later.  */
9631 if (REG_P (incoming
)
9632 && HARD_REGISTER_P (incoming
)
9633 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9636 p
.incoming
= incoming
;
9638 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9639 OUTGOING_REGNO (REGNO (incoming
)), 0);
9640 p
.outgoing
= incoming
;
9641 vec_safe_push (windowed_parm_regs
, p
);
9643 else if (GET_CODE (incoming
) == PARALLEL
)
9646 = gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (XVECLEN (incoming
, 0)));
9649 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9651 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9654 reg
= gen_rtx_REG_offset (reg
, GET_MODE (reg
),
9655 OUTGOING_REGNO (REGNO (reg
)), 0);
9657 XVECEXP (outgoing
, 0, i
)
9658 = gen_rtx_EXPR_LIST (VOIDmode
, reg
,
9659 XEXP (XVECEXP (incoming
, 0, i
), 1));
9660 vec_safe_push (windowed_parm_regs
, p
);
9663 incoming
= outgoing
;
9665 else if (MEM_P (incoming
)
9666 && REG_P (XEXP (incoming
, 0))
9667 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9669 rtx reg
= XEXP (incoming
, 0);
9670 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9674 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9676 vec_safe_push (windowed_parm_regs
, p
);
9677 incoming
= replace_equiv_address_nv (incoming
, reg
);
/* Recover the decl and offset the incoming RTL refers to; on failure,
   fall back to DECL_RTL for by-invisible-reference / spill cases.  */
9683 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9685 if (MEM_P (incoming
))
9687 /* This means argument is passed by invisible reference. */
9693 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9695 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9696 GET_MODE (decl_rtl
));
9705 /* If that DECL_RTL wasn't a pseudo that got spilled to
9706 memory, bail out. Otherwise, the spill slot sharing code
9707 will force the memory to reference spill_slot_decl (%sfp),
9708 so we don't match above. That's ok, the pseudo must have
9709 referenced the entire parameter, so just reset OFFSET. */
9710 if (decl
!= get_spill_slot_decl (false))
/* track_loc_p canonicalizes MODE/OFFSET and decides whether this
   location is worth tracking at all (elided early return on false).  */
9715 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
9718 out
= &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
;
9720 dv
= dv_from_decl (parm
);
9722 if (target_for_debug_bind (parm
)
9723 /* We can't deal with these right now, because this kind of
9724 variable is single-part. ??? We could handle parallels
9725 that describe multiple locations for the same single
9726 value, but ATM we don't. */
9727 && GET_CODE (incoming
) != PARALLEL
)
9732 /* ??? We shouldn't ever hit this, but it may happen because
9733 arguments passed by invisible reference aren't dealt with
9734 above: incoming-rtl will have Pmode rather than the
9735 expected mode for the type. */
9739 lowpart
= var_lowpart (mode
, incoming
)
;
9743 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9744 VOIDmode
, get_insns ());
9746 /* ??? Float-typed values in memory are not handled by
9750 preserve_value (val
);
9751 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
9752 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9753 dv
= dv_from_value (val
->val_rtx
);
9756 if (MEM_P (incoming
))
9758 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9759 VOIDmode
, get_insns ());
9762 preserve_value (val
);
9763 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
9768 if (REG_P (incoming
))
9770 incoming
= var_lowpart (mode
, incoming
);
9771 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9772 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
9774 set_variable_part (out
, incoming
, dv
, offset
,
9775 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9776 if (dv_is_value_p (dv
))
9778 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
9779 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9780 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9782 enum machine_mode indmode
9783 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9784 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9785 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9790 preserve_value (val
);
9791 record_entry_value (val
, mem
);
9792 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9793 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9798 else if (GET_CODE (incoming
) == PARALLEL
&& !dv_onepart_p (dv
))
9802 for (i
= 0; i
< XVECLEN (incoming
, 0); i
++)
9804 rtx reg
= XEXP (XVECEXP (incoming
, 0, i
), 0);
9805 offset
= REG_OFFSET (reg
);
9806 gcc_assert (REGNO (reg
) < FIRST_PSEUDO_REGISTER
);
9807 attrs_list_insert (&out
->regs
[REGNO (reg
)], dv
, offset
, reg
);
9808 set_variable_part (out
, reg
, dv
, offset
,
9809 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9812 else if (MEM_P (incoming
))
9814 incoming
= var_lowpart (mode
, incoming
);
9815 set_variable_part (out
, incoming
, dv
, offset
,
9816 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
/* Walks DECL_ARGUMENTS calling vt_add_function_parameter on each, then
   also handles a result passed by hidden reference: if DECL_RESULT has a
   DECL_VALUE_EXPR that (possibly through an INDIRECT_REF) resolves to an
   artificial, nameless, non-ignored PARM_DECL, that synthetic parameter
   is registered too.  NOTE(review): braces/`static void' elided in this
   excerpt; code kept byte-identical.  */
9820 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9823 vt_add_function_parameters (void)
9827 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9828 parm
; parm
= DECL_CHAIN (parm
))
9829 vt_add_function_parameter (parm
);
/* The "result pointer" case: the return value's DECL_VALUE_EXPR may hide
   an artificial PARM_DECL that carries the actual incoming pointer.  */
9831 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9833 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
9835 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9836 vexpr
= TREE_OPERAND (vexpr
, 0);
9838 if (TREE_CODE (vexpr
) == PARM_DECL
9839 && DECL_ARTIFICIAL (vexpr
)
9840 && !DECL_IGNORED_P (vexpr
)
9841 && DECL_NAMELESS (vexpr
))
9842 vt_add_function_parameter (vexpr
);
/* NOTE(review): the `#else' and `#endif' between the two assignment pairs
   (original lines ~9859/9862) are elided in this excerpt: 9857/9858 and
   9860/9861 are the two arms of the FRAME_POINTER_CFA_OFFSET conditional.
   Braces and early `return's are likewise elided; code byte-identical.  */
9846 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9847 ensure it isn't flushed during cselib_reset_table.
9848 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9849 has been eliminated. */
9852 vt_init_cfa_base (void)
9856 #ifdef FRAME_POINTER_CFA_OFFSET
9857 cfa_base_rtx
= frame_pointer_rtx
;
9858 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9860 cfa_base_rtx
= arg_pointer_rtx
;
9861 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
/* Refuse a CFA base that is the hard frame pointer or is not a fixed
   register -- it could be clobbered, so don't use it as a base.  */
9863 if (cfa_base_rtx
== hard_frame_pointer_rtx
9864 || !fixed_regs
[REGNO (cfa_base_rtx
)])
9866 cfa_base_rtx
= NULL_RTX
;
9869 if (!MAY_HAVE_DEBUG_INSNS
)
9872 /* Tell alias analysis that cfa_base_rtx should share
9873 find_base_term value with stack pointer or hard frame pointer. */
9874 if (!frame_pointer_needed
)
9875 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
9876 else if (!crtl
->stack_realign_tried
)
9877 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
/* Create and pin a cselib VALUE for the CFA base register so it
   survives cselib_reset_table between basic blocks.  */
9879 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
9880 VOIDmode
, get_insns ());
9881 preserve_value (val
);
9882 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
/* vt_initialize: allocates the allocation pools and hash tables, resets
   every basic block's IN/OUT dataflow sets, seeds cselib with CFA/SP
   equivalences, decides whether MEMs can be rebased on the CFA
   (vt_init_cfa_base), then scans every insn of every block building the
   per-block micro-operation vectors (VTI (bb)->mos).
   NOTE(review): this excerpt is heavily elided -- `#else'/`#endif' lines,
   braces, `return' statements, and the declarations of `bb', `reg',
   `ofst', `elim', `val', `expr', `insn', `e', `v' etc. are missing.
   Code kept byte-identical; comments hedged accordingly.  */
9885 /* Allocate and initialize the data structures for variable tracking
9886 and parse the RTL to get the micro operations. */
9889 vt_initialize (void)
9892 HOST_WIDE_INT fp_cfa_offset
= -1;
/* Per-BB auxiliary info (accessed through the VTI macro below).  */
9894 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
9896 attrs_pool
= create_alloc_pool ("attrs_def pool",
9897 sizeof (struct attrs_def
), 1024);
/* variable_def is allocated with MAX_VAR_PARTS-1 extra var_part slots
   tacked on (the struct declares one).  */
9898 var_pool
= create_alloc_pool ("variable_def pool",
9899 sizeof (struct variable_def
)
9900 + (MAX_VAR_PARTS
- 1)
9901 * sizeof (((variable
)NULL
)->var_part
[0]), 64);
9902 loc_chain_pool
= create_alloc_pool ("location_chain_def pool",
9903 sizeof (struct location_chain_def
),
9905 shared_hash_pool
= create_alloc_pool ("shared_hash_def pool",
9906 sizeof (struct shared_hash_def
), 256);
9907 empty_shared_hash
= (shared_hash
) pool_alloc (shared_hash_pool
);
9908 empty_shared_hash
->refcount
= 1;
9909 empty_shared_hash
->htab
= new variable_table_type (1);
9910 changed_variables
= new variable_table_type (10);
9912 /* Init the IN and OUT sets. */
9913 FOR_ALL_BB_FN (bb
, cfun
)
9915 VTI (bb
)->visited
= false;
9916 VTI (bb
)->flooded
= false;
9917 dataflow_set_init (&VTI (bb
)->in
);
9918 dataflow_set_init (&VTI (bb
)->out
);
9919 VTI (bb
)->permp
= NULL
;
/* Debug-insn mode: bring up cselib and the VALUE-tracking side tables.
   The assignments at 9933/9935 below are the elided else-arm, which
   nulls them out instead.  */
9922 if (MAY_HAVE_DEBUG_INSNS
)
9924 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
9925 scratch_regs
= BITMAP_ALLOC (NULL
);
9926 valvar_pool
= create_alloc_pool ("small variable_def pool",
9927 sizeof (struct variable_def
), 256);
9928 preserved_values
.create (256);
9929 global_get_addr_cache
= new hash_map
<rtx
, rtx
>;
9933 scratch_regs
= NULL
;
9935 global_get_addr_cache
= NULL
;
/* Seed cselib: make the CFA register and the stack pointer permanently
   equivalent (offset by `ofst'), so SP-based and CFA-based addresses
   unify.  The two reg/ofst pairs are the arms of the elided
   FRAME_POINTER_CFA_OFFSET #ifdef/#else.  */
9938 if (MAY_HAVE_DEBUG_INSNS
)
9944 #ifdef FRAME_POINTER_CFA_OFFSET
9945 reg
= frame_pointer_rtx
;
9946 ofst
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9948 reg
= arg_pointer_rtx
;
9949 ofst
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
9952 ofst
-= INCOMING_FRAME_SP_OFFSET
;
9954 val
= cselib_lookup_from_insn (reg
, GET_MODE (reg
), 1,
9955 VOIDmode
, get_insns ());
9956 preserve_value (val
);
9957 if (reg
!= hard_frame_pointer_rtx
&& fixed_regs
[REGNO (reg
)])
9958 cselib_preserve_cfa_base_value (val
, REGNO (reg
));
9959 expr
= plus_constant (GET_MODE (stack_pointer_rtx
),
9960 stack_pointer_rtx
, -ofst
);
9961 cselib_add_permanent_equiv (val
, expr
, get_insns ());
/* And the symmetric equivalence: SP == CFA reg + ofst.  */
9965 val
= cselib_lookup_from_insn (stack_pointer_rtx
,
9966 GET_MODE (stack_pointer_rtx
), 1,
9967 VOIDmode
, get_insns ());
9968 preserve_value (val
);
9969 expr
= plus_constant (GET_MODE (reg
), reg
, ofst
);
9970 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9974 /* In order to factor out the adjustments made to the stack pointer or to
9975 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9976 instead of individual location lists, we're going to rewrite MEMs based
9977 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9978 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9979 resp. arg_pointer_rtx. We can do this either when there is no frame
9980 pointer in the function and stack adjustments are consistent for all
9981 basic blocks or when there is a frame pointer and no stack realignment.
9982 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9983 has been eliminated. */
9984 if (!frame_pointer_needed
)
9988 if (!vt_stack_adjustments ())
9991 #ifdef FRAME_POINTER_CFA_OFFSET
9992 reg
= frame_pointer_rtx
;
9994 reg
= arg_pointer_rtx
;
9996 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
9999 if (GET_CODE (elim
) == PLUS
)
10000 elim
= XEXP (elim
, 0);
10001 if (elim
== stack_pointer_rtx
)
10002 vt_init_cfa_base ();
/* Frame pointer present, no realignment: remember the CFA offset the
   frame pointer will have once it is set up (fp_cfa_offset), checked
   against fp_setter_insn in the scan loop below.  */
10005 else if (!crtl
->stack_realign_tried
)
10009 #ifdef FRAME_POINTER_CFA_OFFSET
10010 reg
= frame_pointer_rtx
;
10011 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
10013 reg
= arg_pointer_rtx
;
10014 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
10016 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10019 if (GET_CODE (elim
) == PLUS
)
10021 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
10022 elim
= XEXP (elim
, 0);
10024 if (elim
!= hard_frame_pointer_rtx
)
10025 fp_cfa_offset
= -1;
10028 fp_cfa_offset
= -1;
10031 /* If the stack is realigned and a DRAP register is used, we're going to
10032 rewrite MEMs based on it representing incoming locations of parameters
10033 passed on the stack into MEMs based on the argument pointer. Although
10034 we aren't going to rewrite other MEMs, we still need to initialize the
10035 virtual CFA pointer in order to ensure that the argument pointer will
10036 be seen as a constant throughout the function.
10038 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10039 else if (stack_realign_drap
)
10043 #ifdef FRAME_POINTER_CFA_OFFSET
10044 reg
= frame_pointer_rtx
;
10046 reg
= arg_pointer_rtx
;
10048 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
10051 if (GET_CODE (elim
) == PLUS
)
10052 elim
= XEXP (elim
, 0);
10053 if (elim
== hard_frame_pointer_rtx
)
10054 vt_init_cfa_base ();
10058 hard_frame_pointer_adjustment
= -1;
10060 vt_add_function_parameters ();
/* Main scan: for each group of fallthrough-connected blocks, walk every
   insn building micro-operations (MO_ADJUST entries bracket each insn's
   pre/post stack adjustments).  The grouping logic (first_bb/last_bb) is
   partially elided in this excerpt.  */
10062 FOR_EACH_BB_FN (bb
, cfun
)
10065 HOST_WIDE_INT pre
, post
= 0;
10066 basic_block first_bb
, last_bb
;
10068 if (MAY_HAVE_DEBUG_INSNS
)
10070 cselib_record_sets_hook
= add_with_sets
;
10071 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10072 fprintf (dump_file
, "first value: %i\n",
10073 cselib_get_next_uid ());
10080 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
10081 || ! single_pred_p (bb
->next_bb
))
10083 e
= find_edge (bb
, bb
->next_bb
);
10084 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
10090 /* Add the micro-operations to the vector. */
10091 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
/* `offset' snapshots the pre-computed OUT adjustment so it can be
   cross-checked by the gcc_assert after the insn walk.  */
10093 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
10094 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
10095 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
10096 insn
= NEXT_INSN (insn
))
10100 if (!frame_pointer_needed
)
10102 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
/* Pre-adjustment micro-op (emitted before the insn's own ops).  */
10105 micro_operation mo
;
10106 mo
.type
= MO_ADJUST
;
10109 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10110 log_op_type (PATTERN (insn
), bb
, insn
,
10111 MO_ADJUST
, dump_file
);
10112 VTI (bb
)->mos
.safe_push (mo
);
10113 VTI (bb
)->out
.stack_adjust
+= pre
;
/* Process the insn itself; if cselib's hook did not fire, fall back
   to recording its effects directly via add_with_sets.  */
10117 cselib_hook_called
= false;
10118 adjust_insn (bb
, insn
);
10119 if (MAY_HAVE_DEBUG_INSNS
)
10122 prepare_call_arguments (bb
, insn
);
10123 cselib_process_insn (insn
);
10124 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10126 print_rtl_single (dump_file
, insn
);
10127 dump_cselib_table (dump_file
);
10130 if (!cselib_hook_called
)
10131 add_with_sets (insn
, 0, 0);
10132 cancel_changes (0);
/* Post-adjustment micro-op, mirroring the pre-adjustment above.  */
10134 if (!frame_pointer_needed
&& post
)
10136 micro_operation mo
;
10137 mo
.type
= MO_ADJUST
;
10138 mo
.u
.adjust
= post
;
10140 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10141 log_op_type (PATTERN (insn
), bb
, insn
,
10142 MO_ADJUST
, dump_file
);
10143 VTI (bb
)->mos
.safe_push (mo
);
10144 VTI (bb
)->out
.stack_adjust
+= post
;
/* First insn that establishes the frame pointer: switch to the CFA
   base from here on and record the adjustment.  */
10147 if (fp_cfa_offset
!= -1
10148 && hard_frame_pointer_adjustment
== -1
10149 && fp_setter_insn (insn
))
10151 vt_init_cfa_base ();
10152 hard_frame_pointer_adjustment
= fp_cfa_offset
;
10153 /* Disassociate sp from fp now. */
10154 if (MAY_HAVE_DEBUG_INSNS
)
10157 cselib_invalidate_rtx (stack_pointer_rtx
);
10158 v
= cselib_lookup (stack_pointer_rtx
, Pmode
, 1,
10160 if (v
&& !cselib_preserved_value_p (v
))
10162 cselib_set_value_sp_based (v
);
10163 preserve_value (v
);
/* Sanity check: the incremental adjustments must reproduce the value
   vt_stack_adjustments computed up front.  */
10169 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
10174 if (MAY_HAVE_DEBUG_INSNS
)
10176 cselib_preserve_only_values ();
10177 cselib_reset_table (cselib_get_next_uid ());
10178 cselib_record_sets_hook
= NULL
;
10182 hard_frame_pointer_adjustment
= -1;
10183 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->flooded
= true;
10184 cfa_base_rtx
= NULL_RTX
;
/* delete_debug_insns removes every debug insn from the function; debug
   bind insns for a named LABEL_DECL without RTL are not deleted but
   converted in place into NOTE_INSN_DELETED_DEBUG_LABEL notes, so the
   label's name (and a fresh compilation-wide label number) survive for
   debug output.  NOTE(review): `static void', braces and the `continue'
   after the label case are elided in this excerpt.  */
10188 /* This is *not* reset after each function. It gives each
10189 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10190 a unique label number. */
10192 static int debug_label_num
= 1;
10194 /* Get rid of all debug insns from the insn stream. */
10197 delete_debug_insns (void)
10200 rtx_insn
*insn
, *next
;
/* Nothing to do when the function cannot contain debug insns.  */
10202 if (!MAY_HAVE_DEBUG_INSNS
)
10205 FOR_EACH_BB_FN (bb
, cfun
)
10207 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
10208 if (DEBUG_INSN_P (insn
))
10210 tree decl
= INSN_VAR_LOCATION_DECL (insn
);
/* Named label whose RTL was never emitted: keep it as a
   "deleted debug label" note instead of dropping it.  */
10211 if (TREE_CODE (decl
) == LABEL_DECL
10212 && DECL_NAME (decl
)
10213 && !DECL_RTL_SET_P (decl
))
10215 PUT_CODE (insn
, NOTE
);
10216 NOTE_KIND (insn
) = NOTE_INSN_DELETED_DEBUG_LABEL
;
10217 NOTE_DELETED_LABEL_NAME (insn
)
10218 = IDENTIFIER_POINTER (DECL_NAME (decl
));
10219 SET_DECL_RTL (decl
, insn
);
10220 CODE_LABEL_NUMBER (insn
) = debug_label_num
++;
/* All other debug insns are simply removed.  */
10223 delete_insn (insn
);
/* NOTE(review): `static void' and braces elided in this excerpt.  As the
   body's own comment says, the "local" analysis is unimplemented: the
   function currently just discards all debug insns.  */
10228 /* Run a fast, BB-local only version of var tracking, to take care of
10229 information that we don't do global analysis on, such that not all
10230 information is lost. If SKIPPED holds, we're skipping the global
10231 pass entirely, so we should try to use information it would have
10232 handled as well.. */
10235 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
10237 /* ??? Just skip it all for now. */
10238 delete_debug_insns ();
/* Presumably vt_finalize (the signature lines 10242-10247 are elided in
   this excerpt): releases everything vt_initialize allocated -- per-BB
   micro-op vectors and dataflow sets, the aux-block info, the hash
   tables, every allocation pool, and (in debug-insn mode) the cselib
   side tables.  Code kept byte-identical.  */
10241 /* Free the data structures needed for variable tracking. */
10248 FOR_EACH_BB_FN (bb
, cfun
)
10250 VTI (bb
)->mos
.release ();
10253 FOR_ALL_BB_FN (bb
, cfun
)
10255 dataflow_set_destroy (&VTI (bb
)->in
);
10256 dataflow_set_destroy (&VTI (bb
)->out
);
10257 if (VTI (bb
)->permp
)
10259 dataflow_set_destroy (VTI (bb
)->permp
);
10260 XDELETE (VTI (bb
)->permp
);
10263 free_aux_for_blocks ();
10264 delete empty_shared_hash
->htab
;
10265 empty_shared_hash
->htab
= NULL
;
10266 delete changed_variables
;
10267 changed_variables
= NULL
;
10268 free_alloc_pool (attrs_pool
);
10269 free_alloc_pool (var_pool
);
10270 free_alloc_pool (loc_chain_pool
);
10271 free_alloc_pool (shared_hash_pool
);
/* Debug-insn-mode state set up by vt_initialize / vt_emit_notes;
   loc_exp_dep_pool and dropped_values may or may not exist depending
   on whether notes were emitted, hence the null checks.  */
10273 if (MAY_HAVE_DEBUG_INSNS
)
10275 if (global_get_addr_cache
)
10276 delete global_get_addr_cache
;
10277 global_get_addr_cache
= NULL
;
10278 if (loc_exp_dep_pool
)
10279 free_alloc_pool (loc_exp_dep_pool
);
10280 loc_exp_dep_pool
= NULL
;
10281 free_alloc_pool (valvar_pool
);
10282 preserved_values
.release ();
10284 BITMAP_FREE (scratch_regs
);
10285 scratch_regs
= NULL
;
10288 #ifdef HAVE_window_save
10289 vec_free (windowed_parm_regs
);
10293 XDELETEVEC (vui_vec
);
/* Worker for variable_tracking_main.  Sequence: bail out (deleting debug
   insns) when var-tracking-assignments is force-disabled, when the CFG
   is too large/dense, or when vt_initialize fails; otherwise run the
   dataflow (vt_find_locations), retrying once without VTA if it fails;
   finally emit notes and tear down.  NOTE(review): several `return'
   statements, braces, the `success' declaration, vt_finalize calls and
   the vt_emit_notes call between the timevar push/pop are elided in
   this excerpt; code kept byte-identical.  */
10298 /* The entry point to variable tracking pass. */
10300 static inline unsigned int
10301 variable_tracking_main_1 (void)
10305 if (flag_var_tracking_assignments
< 0)
10307 delete_debug_insns ();
/* Size heuristic: skip the (quadratic-prone) global analysis on huge,
   very dense CFGs.  */
10311 if (n_basic_blocks_for_fn (cfun
) > 500 &&
10312 n_edges_for_fn (cfun
) / n_basic_blocks_for_fn (cfun
) >= 20)
10314 vt_debug_insns_local (true);
10318 mark_dfs_back_edges ();
10319 if (!vt_initialize ())
10322 vt_debug_insns_local (true);
10326 success
= vt_find_locations ();
/* Dataflow failed with VTA enabled: drop the debug insns and retry the
   whole analysis once with VTA turned off.  */
10328 if (!success
&& flag_var_tracking_assignments
> 0)
10332 delete_debug_insns ();
10334 /* This is later restored by our caller. */
10335 flag_var_tracking_assignments
= 0;
10337 success
= vt_initialize ();
10338 gcc_assert (success
);
10340 success
= vt_find_locations ();
10346 vt_debug_insns_local (false);
10350 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10352 dump_dataflow_sets ();
10353 dump_reg_info (dump_file
);
10354 dump_flow_info (dump_file
, dump_flags
);
/* Note emission is timed separately (the call between push and pop is
   elided in this excerpt).  */
10357 timevar_push (TV_VAR_TRACKING_EMIT
);
10359 timevar_pop (TV_VAR_TRACKING_EMIT
);
10362 vt_debug_insns_local (false);
/* Thin wrapper around variable_tracking_main_1 that saves and restores
   flag_var_tracking_assignments, since the worker may zero it for its
   no-VTA retry.  NOTE(review): return type, braces and the final
   `return ret;' are elided in this excerpt.  */
10367 variable_tracking_main (void)
10370 int save
= flag_var_tracking_assignments
;
10372 ret
= variable_tracking_main_1 ();
10374 flag_var_tracking_assignments
= save
;
/* Pass registration: static descriptor plus the rtl_opt_pass subclass.
   The pass runs when -fvar-tracking is enabled and the target does not
   ask for var-tracking to be delayed (targetm.delay_vartrack).
   NOTE(review): the opening `namespace {' and several braces are elided
   in this excerpt (the trailing `}' closes that anonymous namespace);
   code kept byte-identical.  */
10381 const pass_data pass_data_variable_tracking
=
10383 RTL_PASS
, /* type */
10384 "vartrack", /* name */
10385 OPTGROUP_NONE
, /* optinfo_flags */
10386 TV_VAR_TRACKING
, /* tv_id */
10387 0, /* properties_required */
10388 0, /* properties_provided */
10389 0, /* properties_destroyed */
10390 0, /* todo_flags_start */
10391 0, /* todo_flags_finish */
10394 class pass_variable_tracking
: public rtl_opt_pass
10397 pass_variable_tracking (gcc::context
*ctxt
)
10398 : rtl_opt_pass (pass_data_variable_tracking
, ctxt
)
10401 /* opt_pass methods: */
10402 virtual bool gate (function
*)
10404 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
10407 virtual unsigned int execute (function
*)
10409 return variable_tracking_main ();
10412 }; // class pass_variable_tracking
10414 } // anon namespace
10417 make_pass_variable_tracking (gcc::context
*ctxt
)
10419 return new pass_variable_tracking (ctxt
);