1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and for each physical register a linked list of attributes.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effective deleting appropriate variable parts when we set or clobber the
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
95 #include "stor-layout.h"
98 #include "hard-reg-set.h"
99 #include "basic-block.h"
101 #include "insn-config.h"
104 #include "alloc-pool.h"
106 #include "hash-table.h"
109 #include "tree-pass.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "pointer-set.h"
123 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
124 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE
, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE
, /* Use location which is associated with a value. */
137 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
138 MO_VAL_SET
, /* Set location associated with a value. */
139 MO_SET
, /* Set location. */
140 MO_COPY
, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER
, /* Clobber location. */
143 MO_CALL
, /* Call insn. */
144 MO_ADJUST
/* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name
[] = {
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
167 EMIT_NOTE_BEFORE_INSN
,
168 EMIT_NOTE_AFTER_INSN
,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type
;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust
;
200 /* A declaration of a variable, or an RTL value being handled like a
202 typedef void *decl_or_value
;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
206 dv_is_decl_p (decl_or_value dv
)
208 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
211 /* Return true if a decl_or_value is a VALUE rtl. */
213 dv_is_value_p (decl_or_value dv
)
215 return dv
&& !dv_is_decl_p (dv
);
218 /* Return the decl in the decl_or_value. */
220 dv_as_decl (decl_or_value dv
)
222 gcc_checking_assert (dv_is_decl_p (dv
));
226 /* Return the value in the decl_or_value. */
228 dv_as_value (decl_or_value dv
)
230 gcc_checking_assert (dv_is_value_p (dv
));
234 /* Return the opaque pointer in the decl_or_value. */
236 dv_as_opaque (decl_or_value dv
)
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 chain is the best data structure. */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def
*next
;
251 /* The rtx of register. */
254 /* The declaration corresponding to LOC. */
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset
;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def
*next
;
267 /* The location (REG, MEM or VALUE). */
270 /* The "value" stored in this location. */
274 enum var_init_status init
;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE' s linked-list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
285 /* The dependency VALUE or DECL_DEBUG. */
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s
*next
;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s
**pprev
;
295 /* This data structure holds information about the depth of a variable
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep
*backlinks
;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
325 /* The depth of the cur_loc expression. */
327 /* Dependencies actively used when expand FROM into cur_loc. */
328 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain
;
337 /* Location which was last emitted to location list. */
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset
;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux
*onepaux
;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
361 /* A DEBUG_EXPR_DECL. */
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
374 /* Reference count. */
377 /* Number of variable parts. */
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables
;
387 /* The variable parts. */
388 variable_part var_part
[1];
390 typedef const struct variable_def
*const_variable
;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
436 typedef unsigned int dvuid
;
438 /* Return the uid of DV. */
441 dv_uid (decl_or_value dv
)
443 if (dv_is_value_p (dv
))
444 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
446 return DECL_UID (dv_as_decl (dv
));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid
)
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv
)
462 return dv_uid2hash (dv_uid (dv
));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type
;
472 typedef void compare_type
;
473 static inline hashval_t
hash (const value_type
*);
474 static inline bool equal (const value_type
*, const compare_type
*);
475 static inline void remove (value_type
*);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
482 variable_hasher::hash (const value_type
*v
)
484 return dv_htab_hash (v
->dv
);
487 /* Compare the declaration of variable X with declaration Y. */
490 variable_hasher::equal (const value_type
*v
, const compare_type
*y
)
492 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
494 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
500 variable_hasher::remove (value_type
*var
)
502 variable_htab_free (var
);
505 typedef hash_table
<variable_hasher
> variable_table_type
;
506 typedef variable_table_type::iterator variable_iterator_type
;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where
;
518 /* The variables and values active at this point. */
519 variable_table_type vars
;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
529 /* Actual hash table. */
530 variable_table_type htab
;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust
;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs
[FIRST_PSEUDO_REGISTER
];
542 /* Variable locations. */
545 /* Vars that is being traversed. */
546 shared_hash traversed_vars
;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec
<micro_operation
> mos
;
556 /* The IN and OUT set for dataflow analysis. */
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
566 /* Has the block been visited in DFS? */
569 /* Has the block been flooded in VTA? */
572 } *variable_tracking_info
;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool
;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool
;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool
;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool
;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool
;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool
;
592 /* Changed variables, notes will be emitted for them. */
593 static variable_table_type changed_variables
;
595 /* Shall notes be emitted? */
596 static bool emit_notes
;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type dropped_values
;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash
;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs
= NULL
;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg
{
616 /* Vector of windowed parameter registers, if any. */
617 static vec
<parm_reg_t
, va_gc
> *windowed_parm_regs
= NULL
;
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called
;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
626 static void insn_stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs
*);
631 static void attrs_list_clear (attrs
*);
632 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
633 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
634 static void attrs_list_copy (attrs
*, attrs
);
635 static void attrs_list_union (attrs
*, attrs
);
637 static variable_def
**unshare_variable (dataflow_set
*set
, variable_def
**slot
,
638 variable var
, enum var_init_status
);
639 static void vars_copy (variable_table_type
, variable_table_type
);
640 static tree
var_debug_decl (tree
);
641 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
642 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
643 enum var_init_status
, rtx
);
644 static void var_reg_delete (dataflow_set
*, rtx
, bool);
645 static void var_regno_delete (dataflow_set
*, int);
646 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
647 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
648 enum var_init_status
, rtx
);
649 static void var_mem_delete (dataflow_set
*, rtx
, bool);
651 static void dataflow_set_init (dataflow_set
*);
652 static void dataflow_set_clear (dataflow_set
*);
653 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
656 static location_chain
find_loc_in_1pdv (rtx
, variable
, variable_table_type
);
657 static bool canon_value_cmp (rtx
, rtx
);
658 static int loc_cmp (rtx
, rtx
);
659 static bool variable_part_different_p (variable_part
*, variable_part
*);
660 static bool onepart_variable_different_p (variable
, variable
);
661 static bool variable_different_p (variable
, variable
);
662 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
663 static void dataflow_set_destroy (dataflow_set
*);
665 static bool contains_symbol_ref (rtx
);
666 static bool track_expr_p (tree
, bool);
667 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
668 static int add_uses (rtx
*, void *);
669 static void add_uses_1 (rtx
*, void *);
670 static void add_stores (rtx
, const_rtx
, void *);
671 static bool compute_bb_dataflow (basic_block
);
672 static bool vt_find_locations (void);
674 static void dump_attrs_list (attrs
);
675 static void dump_var (variable
);
676 static void dump_vars (variable_table_type
);
677 static void dump_dataflow_set (dataflow_set
*);
678 static void dump_dataflow_sets (void);
680 static void set_dv_changed (decl_or_value
, bool);
681 static void variable_was_changed (variable
, dataflow_set
*);
682 static variable_def
**set_slot_part (dataflow_set
*, rtx
, variable_def
**,
683 decl_or_value
, HOST_WIDE_INT
,
684 enum var_init_status
, rtx
);
685 static void set_variable_part (dataflow_set
*, rtx
,
686 decl_or_value
, HOST_WIDE_INT
,
687 enum var_init_status
, rtx
, enum insert_option
);
688 static variable_def
**clobber_slot_part (dataflow_set
*, rtx
,
689 variable_def
**, HOST_WIDE_INT
, rtx
);
690 static void clobber_variable_part (dataflow_set
*, rtx
,
691 decl_or_value
, HOST_WIDE_INT
, rtx
);
692 static variable_def
**delete_slot_part (dataflow_set
*, rtx
, variable_def
**,
694 static void delete_variable_part (dataflow_set
*, rtx
,
695 decl_or_value
, HOST_WIDE_INT
);
696 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
697 static void vt_emit_notes (void);
699 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
700 static void vt_add_function_parameters (void);
701 static bool vt_initialize (void);
702 static void vt_finalize (void);
704 /* Given a SET, calculate the amount of stack adjustment it contains
705 PRE- and POST-modifying stack pointer.
706 This function is similar to stack_adjust_offset. */
709 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
712 rtx src
= SET_SRC (pattern
);
713 rtx dest
= SET_DEST (pattern
);
716 if (dest
== stack_pointer_rtx
)
718 /* (set (reg sp) (plus (reg sp) (const_int))) */
719 code
= GET_CODE (src
);
720 if (! (code
== PLUS
|| code
== MINUS
)
721 || XEXP (src
, 0) != stack_pointer_rtx
722 || !CONST_INT_P (XEXP (src
, 1)))
726 *post
+= INTVAL (XEXP (src
, 1));
728 *post
-= INTVAL (XEXP (src
, 1));
730 else if (MEM_P (dest
))
732 /* (set (mem (pre_dec (reg sp))) (foo)) */
733 src
= XEXP (dest
, 0);
734 code
= GET_CODE (src
);
740 if (XEXP (src
, 0) == stack_pointer_rtx
)
742 rtx val
= XEXP (XEXP (src
, 1), 1);
743 /* We handle only adjustments by constant amount. */
744 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
747 if (code
== PRE_MODIFY
)
748 *pre
-= INTVAL (val
);
750 *post
-= INTVAL (val
);
756 if (XEXP (src
, 0) == stack_pointer_rtx
)
758 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
764 if (XEXP (src
, 0) == stack_pointer_rtx
)
766 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
772 if (XEXP (src
, 0) == stack_pointer_rtx
)
774 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
780 if (XEXP (src
, 0) == stack_pointer_rtx
)
782 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
793 /* Given an INSN, calculate the amount of stack adjustment it contains
794 PRE- and POST-modifying stack pointer. */
797 insn_stack_adjust_offset_pre_post (rtx insn
, HOST_WIDE_INT
*pre
,
805 pattern
= PATTERN (insn
);
806 if (RTX_FRAME_RELATED_P (insn
))
808 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
810 pattern
= XEXP (expr
, 0);
813 if (GET_CODE (pattern
) == SET
)
814 stack_adjust_offset_pre_post (pattern
, pre
, post
);
815 else if (GET_CODE (pattern
) == PARALLEL
816 || GET_CODE (pattern
) == SEQUENCE
)
820 /* There may be stack adjustments inside compound insns. Search
822 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
823 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
824 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
828 /* Compute stack adjustments for all blocks by traversing DFS tree.
829 Return true when the adjustments on all incoming edges are consistent.
830 Heavily borrowed from pre_and_rev_post_order_compute. */
833 vt_stack_adjustments (void)
835 edge_iterator
*stack
;
838 /* Initialize entry block. */
839 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->visited
= true;
840 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->in
.stack_adjust
=
841 INCOMING_FRAME_SP_OFFSET
;
842 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
.stack_adjust
=
843 INCOMING_FRAME_SP_OFFSET
;
845 /* Allocate stack for back-tracking up CFG. */
846 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
849 /* Push the first edge on to the stack. */
850 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
);
858 /* Look at the edge on the top of the stack. */
860 src
= ei_edge (ei
)->src
;
861 dest
= ei_edge (ei
)->dest
;
863 /* Check if the edge destination has been visited yet. */
864 if (!VTI (dest
)->visited
)
867 HOST_WIDE_INT pre
, post
, offset
;
868 VTI (dest
)->visited
= true;
869 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
871 if (dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
872 for (insn
= BB_HEAD (dest
);
873 insn
!= NEXT_INSN (BB_END (dest
));
874 insn
= NEXT_INSN (insn
))
877 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
878 offset
+= pre
+ post
;
881 VTI (dest
)->out
.stack_adjust
= offset
;
883 if (EDGE_COUNT (dest
->succs
) > 0)
884 /* Since the DEST node has been visited for the first
885 time, check its successors. */
886 stack
[sp
++] = ei_start (dest
->succs
);
890 /* Check whether the adjustments on the edges are the same. */
891 if (VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
897 if (! ei_one_before_end_p (ei
))
898 /* Go to the next edge. */
899 ei_next (&stack
[sp
- 1]);
901 /* Return to previous level if there are no more edges. */
910 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
911 hard_frame_pointer_rtx is being mapped to it and offset for it. */
912 static rtx cfa_base_rtx
;
913 static HOST_WIDE_INT cfa_base_offset
;
915 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
916 or hard_frame_pointer_rtx. */
919 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
921 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
924 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
925 or -1 if the replacement shouldn't be done. */
926 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
928 /* Data for adjust_mems callback. */
930 struct adjust_mem_data
933 enum machine_mode mem_mode
;
934 HOST_WIDE_INT stack_adjust
;
938 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
939 transformation of wider mode arithmetics to narrower mode,
940 -1 if it is suitable and subexpressions shouldn't be
941 traversed and 0 if it is suitable and subexpressions should
942 be traversed. Called through for_each_rtx. */
945 use_narrower_mode_test (rtx
*loc
, void *data
)
947 rtx subreg
= (rtx
) data
;
949 if (CONSTANT_P (*loc
))
951 switch (GET_CODE (*loc
))
954 if (cselib_lookup (*loc
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
956 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (*loc
),
957 *loc
, subreg_lowpart_offset (GET_MODE (subreg
),
966 if (for_each_rtx (&XEXP (*loc
, 0), use_narrower_mode_test
, data
))
975 /* Transform X into narrower mode MODE from wider mode WMODE. */
978 use_narrower_mode (rtx x
, enum machine_mode mode
, enum machine_mode wmode
)
982 return lowpart_subreg (mode
, x
, wmode
);
983 switch (GET_CODE (x
))
986 return lowpart_subreg (mode
, x
, wmode
);
990 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
991 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
992 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
994 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
995 return simplify_gen_binary (ASHIFT
, mode
, op0
, XEXP (x
, 1));
1001 /* Helper function for adjusting used MEMs. */
1004 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1006 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1007 rtx mem
, addr
= loc
, tem
;
1008 enum machine_mode mem_mode_save
;
1010 switch (GET_CODE (loc
))
1013 /* Don't do any sp or fp replacements outside of MEM addresses
1015 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1017 if (loc
== stack_pointer_rtx
1018 && !frame_pointer_needed
1020 return compute_cfa_pointer (amd
->stack_adjust
);
1021 else if (loc
== hard_frame_pointer_rtx
1022 && frame_pointer_needed
1023 && hard_frame_pointer_adjustment
!= -1
1025 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1026 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1032 mem
= targetm
.delegitimize_address (mem
);
1033 if (mem
!= loc
&& !MEM_P (mem
))
1034 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1037 addr
= XEXP (mem
, 0);
1038 mem_mode_save
= amd
->mem_mode
;
1039 amd
->mem_mode
= GET_MODE (mem
);
1040 store_save
= amd
->store
;
1042 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1043 amd
->store
= store_save
;
1044 amd
->mem_mode
= mem_mode_save
;
1046 addr
= targetm
.delegitimize_address (addr
);
1047 if (addr
!= XEXP (mem
, 0))
1048 mem
= replace_equiv_address_nv (mem
, addr
);
1050 mem
= avoid_constant_pool_reference (mem
);
1054 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1055 gen_int_mode (GET_CODE (loc
) == PRE_INC
1056 ? GET_MODE_SIZE (amd
->mem_mode
)
1057 : -GET_MODE_SIZE (amd
->mem_mode
),
1062 addr
= XEXP (loc
, 0);
1063 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1064 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1065 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1066 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1067 || GET_CODE (loc
) == POST_INC
)
1068 ? GET_MODE_SIZE (amd
->mem_mode
)
1069 : -GET_MODE_SIZE (amd
->mem_mode
),
1071 amd
->side_effects
= alloc_EXPR_LIST (0,
1072 gen_rtx_SET (VOIDmode
,
1078 addr
= XEXP (loc
, 1);
1081 addr
= XEXP (loc
, 0);
1082 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1083 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1084 amd
->side_effects
= alloc_EXPR_LIST (0,
1085 gen_rtx_SET (VOIDmode
,
1091 /* First try without delegitimization of whole MEMs and
1092 avoid_constant_pool_reference, which is more likely to succeed. */
1093 store_save
= amd
->store
;
1095 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1097 amd
->store
= store_save
;
1098 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1099 if (mem
== SUBREG_REG (loc
))
1104 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1105 GET_MODE (SUBREG_REG (loc
)),
1109 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1110 GET_MODE (SUBREG_REG (loc
)),
1112 if (tem
== NULL_RTX
)
1113 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1115 if (MAY_HAVE_DEBUG_INSNS
1116 && GET_CODE (tem
) == SUBREG
1117 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1118 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1119 || GET_CODE (SUBREG_REG (tem
)) == MULT
1120 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1121 && GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
1122 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
1123 && GET_MODE_SIZE (GET_MODE (tem
))
1124 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem
)))
1125 && subreg_lowpart_p (tem
)
1126 && !for_each_rtx (&SUBREG_REG (tem
), use_narrower_mode_test
, tem
))
1127 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
1128 GET_MODE (SUBREG_REG (tem
)));
1131 /* Don't do any replacements in second and following
1132 ASM_OPERANDS of inline-asm with multiple sets.
1133 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1134 and ASM_OPERANDS_LABEL_VEC need to be equal between
1135 all the ASM_OPERANDs in the insn and adjust_insn will
1137 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1146 /* Helper function for replacement of uses. */
1149 adjust_mem_uses (rtx
*x
, void *data
)
1151 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1153 validate_change (NULL_RTX
, x
, new_x
, true);
1156 /* Helper function for replacement of stores. */
1159 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1163 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1165 if (new_dest
!= SET_DEST (expr
))
1167 rtx xexpr
= CONST_CAST_RTX (expr
);
1168 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1173 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1174 replace them with their value in the insn and add the side-effects
1175 as other sets to the insn. */
1178 adjust_insn (basic_block bb
, rtx insn
)
1180 struct adjust_mem_data amd
;
1183 #ifdef HAVE_window_save
1184 /* If the target machine has an explicit window save instruction, the
1185 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1186 if (RTX_FRAME_RELATED_P (insn
)
1187 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1189 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1190 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1193 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1195 XVECEXP (rtl
, 0, i
* 2)
1196 = gen_rtx_SET (VOIDmode
, p
->incoming
, p
->outgoing
);
1197 /* Do not clobber the attached DECL, but only the REG. */
1198 XVECEXP (rtl
, 0, i
* 2 + 1)
1199 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1200 gen_raw_REG (GET_MODE (p
->outgoing
),
1201 REGNO (p
->outgoing
)));
1204 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1209 amd
.mem_mode
= VOIDmode
;
1210 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1211 amd
.side_effects
= NULL_RTX
;
1214 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1217 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1218 && asm_noperands (PATTERN (insn
)) > 0
1219 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1224 /* inline-asm with multiple sets is tiny bit more complicated,
1225 because the 3 vectors in ASM_OPERANDS need to be shared between
1226 all ASM_OPERANDS in the instruction. adjust_mems will
1227 not touch ASM_OPERANDS other than the first one, asm_noperands
1228 test above needs to be called before that (otherwise it would fail)
1229 and afterwards this code fixes it up. */
1230 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1231 body
= PATTERN (insn
);
1232 set0
= XVECEXP (body
, 0, 0);
1233 gcc_checking_assert (GET_CODE (set0
) == SET
1234 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1235 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1236 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1237 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1241 set
= XVECEXP (body
, 0, i
);
1242 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1243 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1245 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1246 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1247 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1248 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1249 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1250 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1252 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1253 ASM_OPERANDS_INPUT_VEC (newsrc
)
1254 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1255 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1256 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1257 ASM_OPERANDS_LABEL_VEC (newsrc
)
1258 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1259 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1264 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1266 /* For read-only MEMs containing some constant, prefer those
1268 set
= single_set (insn
);
1269 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1271 rtx note
= find_reg_equal_equiv_note (insn
);
1273 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1274 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1277 if (amd
.side_effects
)
1279 rtx
*pat
, new_pat
, s
;
1282 pat
= &PATTERN (insn
);
1283 if (GET_CODE (*pat
) == COND_EXEC
)
1284 pat
= &COND_EXEC_CODE (*pat
);
1285 if (GET_CODE (*pat
) == PARALLEL
)
1286 oldn
= XVECLEN (*pat
, 0);
1289 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1291 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1292 if (GET_CODE (*pat
) == PARALLEL
)
1293 for (i
= 0; i
< oldn
; i
++)
1294 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1296 XVECEXP (new_pat
, 0, 0) = *pat
;
1297 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1298 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1299 free_EXPR_LIST_list (&amd
.side_effects
);
1300 validate_change (NULL_RTX
, pat
, new_pat
, true);
1304 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1306 dv_as_rtx (decl_or_value dv
)
1310 if (dv_is_value_p (dv
))
1311 return dv_as_value (dv
);
1313 decl
= dv_as_decl (dv
);
1315 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1316 return DECL_RTL_KNOWN_SET (decl
);
1319 /* Return nonzero if a decl_or_value must not have more than one
1320 variable part. The returned value discriminates among various
1321 kinds of one-part DVs ccording to enum onepart_enum. */
1322 static inline onepart_enum_t
1323 dv_onepart_p (decl_or_value dv
)
1327 if (!MAY_HAVE_DEBUG_INSNS
)
1330 if (dv_is_value_p (dv
))
1331 return ONEPART_VALUE
;
1333 decl
= dv_as_decl (dv
);
1335 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1336 return ONEPART_DEXPR
;
1338 if (target_for_debug_bind (decl
) != NULL_TREE
)
1339 return ONEPART_VDECL
;
1344 /* Return the variable pool to be used for a dv of type ONEPART. */
1345 static inline alloc_pool
1346 onepart_pool (onepart_enum_t onepart
)
1348 return onepart
? valvar_pool
: var_pool
;
1351 /* Build a decl_or_value out of a decl. */
1352 static inline decl_or_value
1353 dv_from_decl (tree decl
)
1357 gcc_checking_assert (dv_is_decl_p (dv
));
1361 /* Build a decl_or_value out of a value. */
1362 static inline decl_or_value
1363 dv_from_value (rtx value
)
1367 gcc_checking_assert (dv_is_value_p (dv
));
1371 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1372 static inline decl_or_value
1377 switch (GET_CODE (x
))
1380 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1381 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1385 dv
= dv_from_value (x
);
1395 extern void debug_dv (decl_or_value dv
);
1398 debug_dv (decl_or_value dv
)
1400 if (dv_is_value_p (dv
))
1401 debug_rtx (dv_as_value (dv
));
1403 debug_generic_stmt (dv_as_decl (dv
));
1406 static void loc_exp_dep_clear (variable var
);
1408 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1411 variable_htab_free (void *elem
)
1414 variable var
= (variable
) elem
;
1415 location_chain node
, next
;
1417 gcc_checking_assert (var
->refcount
> 0);
1420 if (var
->refcount
> 0)
1423 for (i
= 0; i
< var
->n_var_parts
; i
++)
1425 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1428 pool_free (loc_chain_pool
, node
);
1430 var
->var_part
[i
].loc_chain
= NULL
;
1432 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1434 loc_exp_dep_clear (var
);
1435 if (VAR_LOC_DEP_LST (var
))
1436 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1437 XDELETE (VAR_LOC_1PAUX (var
));
1438 /* These may be reused across functions, so reset
1440 if (var
->onepart
== ONEPART_DEXPR
)
1441 set_dv_changed (var
->dv
, true);
1443 pool_free (onepart_pool (var
->onepart
), var
);
1446 /* Initialize the set (array) SET of attrs to empty lists. */
1449 init_attrs_list_set (attrs
*set
)
1453 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1457 /* Make the list *LISTP empty. */
1460 attrs_list_clear (attrs
*listp
)
1464 for (list
= *listp
; list
; list
= next
)
1467 pool_free (attrs_pool
, list
);
1472 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1475 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1477 for (; list
; list
= list
->next
)
1478 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1483 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1486 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1487 HOST_WIDE_INT offset
, rtx loc
)
1491 list
= (attrs
) pool_alloc (attrs_pool
);
1494 list
->offset
= offset
;
1495 list
->next
= *listp
;
1499 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1502 attrs_list_copy (attrs
*dstp
, attrs src
)
1506 attrs_list_clear (dstp
);
1507 for (; src
; src
= src
->next
)
1509 n
= (attrs
) pool_alloc (attrs_pool
);
1512 n
->offset
= src
->offset
;
1518 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1521 attrs_list_union (attrs
*dstp
, attrs src
)
1523 for (; src
; src
= src
->next
)
1525 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1526 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1530 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1534 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1536 gcc_assert (!*dstp
);
1537 for (; src
; src
= src
->next
)
1539 if (!dv_onepart_p (src
->dv
))
1540 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1542 for (src
= src2
; src
; src
= src
->next
)
1544 if (!dv_onepart_p (src
->dv
)
1545 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1546 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1550 /* Shared hashtable support. */
1552 /* Return true if VARS is shared. */
1555 shared_hash_shared (shared_hash vars
)
1557 return vars
->refcount
> 1;
1560 /* Return the hash table for VARS. */
1562 static inline variable_table_type
1563 shared_hash_htab (shared_hash vars
)
1568 /* Return true if VAR is shared, or maybe because VARS is shared. */
1571 shared_var_p (variable var
, shared_hash vars
)
1573 /* Don't count an entry in the changed_variables table as a duplicate. */
1574 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1575 || shared_hash_shared (vars
));
1578 /* Copy variables into a new hash table. */
1581 shared_hash_unshare (shared_hash vars
)
1583 shared_hash new_vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
1584 gcc_assert (vars
->refcount
> 1);
1585 new_vars
->refcount
= 1;
1586 new_vars
->htab
.create (vars
->htab
.elements () + 3);
1587 vars_copy (new_vars
->htab
, vars
->htab
);
1592 /* Increment reference counter on VARS and return it. */
1594 static inline shared_hash
1595 shared_hash_copy (shared_hash vars
)
1601 /* Decrement reference counter and destroy hash table if not shared
1605 shared_hash_destroy (shared_hash vars
)
1607 gcc_checking_assert (vars
->refcount
> 0);
1608 if (--vars
->refcount
== 0)
1610 vars
->htab
.dispose ();
1611 pool_free (shared_hash_pool
, vars
);
1615 /* Unshare *PVARS if shared and return slot for DV. If INS is
1616 INSERT, insert it if not already present. */
1618 static inline variable_def
**
1619 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1620 hashval_t dvhash
, enum insert_option ins
)
1622 if (shared_hash_shared (*pvars
))
1623 *pvars
= shared_hash_unshare (*pvars
);
1624 return shared_hash_htab (*pvars
).find_slot_with_hash (dv
, dvhash
, ins
);
1627 static inline variable_def
**
1628 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1629 enum insert_option ins
)
1631 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1634 /* Return slot for DV, if it is already present in the hash table.
1635 If it is not present, insert it only VARS is not shared, otherwise
1638 static inline variable_def
**
1639 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1641 return shared_hash_htab (vars
).find_slot_with_hash (dv
, dvhash
,
1642 shared_hash_shared (vars
)
1643 ? NO_INSERT
: INSERT
);
1646 static inline variable_def
**
1647 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1649 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1652 /* Return slot for DV only if it is already present in the hash table. */
1654 static inline variable_def
**
1655 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1658 return shared_hash_htab (vars
).find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1661 static inline variable_def
**
1662 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1664 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1667 /* Return variable for DV or NULL if not already present in the hash
1670 static inline variable
1671 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1673 return shared_hash_htab (vars
).find_with_hash (dv
, dvhash
);
1676 static inline variable
1677 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1679 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1682 /* Return true if TVAL is better than CVAL as a canonival value. We
1683 choose lowest-numbered VALUEs, using the RTX address as a
1684 tie-breaker. The idea is to arrange them into a star topology,
1685 such that all of them are at most one step away from the canonical
1686 value, and the canonical value has backlinks to all of them, in
1687 addition to all the actual locations. We don't enforce this
1688 topology throughout the entire dataflow analysis, though.
1692 canon_value_cmp (rtx tval
, rtx cval
)
1695 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1698 static bool dst_can_be_shared
;
1700 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1702 static variable_def
**
1703 unshare_variable (dataflow_set
*set
, variable_def
**slot
, variable var
,
1704 enum var_init_status initialized
)
1709 new_var
= (variable
) pool_alloc (onepart_pool (var
->onepart
));
1710 new_var
->dv
= var
->dv
;
1711 new_var
->refcount
= 1;
1713 new_var
->n_var_parts
= var
->n_var_parts
;
1714 new_var
->onepart
= var
->onepart
;
1715 new_var
->in_changed_variables
= false;
1717 if (! flag_var_tracking_uninit
)
1718 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1720 for (i
= 0; i
< var
->n_var_parts
; i
++)
1722 location_chain node
;
1723 location_chain
*nextp
;
1725 if (i
== 0 && var
->onepart
)
1727 /* One-part auxiliary data is only used while emitting
1728 notes, so propagate it to the new variable in the active
1729 dataflow set. If we're not emitting notes, this will be
1731 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1732 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1733 VAR_LOC_1PAUX (var
) = NULL
;
1736 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1737 nextp
= &new_var
->var_part
[i
].loc_chain
;
1738 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1740 location_chain new_lc
;
1742 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1743 new_lc
->next
= NULL
;
1744 if (node
->init
> initialized
)
1745 new_lc
->init
= node
->init
;
1747 new_lc
->init
= initialized
;
1748 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1749 new_lc
->set_src
= node
->set_src
;
1751 new_lc
->set_src
= NULL
;
1752 new_lc
->loc
= node
->loc
;
1755 nextp
= &new_lc
->next
;
1758 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1761 dst_can_be_shared
= false;
1762 if (shared_hash_shared (set
->vars
))
1763 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1764 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1765 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1767 if (var
->in_changed_variables
)
1769 variable_def
**cslot
1770 = changed_variables
.find_slot_with_hash (var
->dv
,
1771 dv_htab_hash (var
->dv
), NO_INSERT
);
1772 gcc_assert (*cslot
== (void *) var
);
1773 var
->in_changed_variables
= false;
1774 variable_htab_free (var
);
1776 new_var
->in_changed_variables
= true;
1781 /* Copy all variables from hash table SRC to hash table DST. */
1784 vars_copy (variable_table_type dst
, variable_table_type src
)
1786 variable_iterator_type hi
;
1789 FOR_EACH_HASH_TABLE_ELEMENT (src
, var
, variable
, hi
)
1791 variable_def
**dstp
;
1793 dstp
= dst
.find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
), INSERT
);
1798 /* Map a decl to its main debug decl. */
1801 var_debug_decl (tree decl
)
1803 if (decl
&& TREE_CODE (decl
) == VAR_DECL
1804 && DECL_HAS_DEBUG_EXPR_P (decl
))
1806 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1807 if (DECL_P (debugdecl
))
1814 /* Set the register LOC to contain DV, OFFSET. */
1817 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1818 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1819 enum insert_option iopt
)
1822 bool decl_p
= dv_is_decl_p (dv
);
1825 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1827 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1828 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1829 && node
->offset
== offset
)
1832 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1833 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1836 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1839 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1842 tree decl
= REG_EXPR (loc
);
1843 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1845 var_reg_decl_set (set
, loc
, initialized
,
1846 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1849 static enum var_init_status
1850 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1854 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1856 if (! flag_var_tracking_uninit
)
1857 return VAR_INIT_STATUS_INITIALIZED
;
1859 var
= shared_hash_find (set
->vars
, dv
);
1862 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1864 location_chain nextp
;
1865 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1866 if (rtx_equal_p (nextp
->loc
, loc
))
1868 ret_val
= nextp
->init
;
1877 /* Delete current content of register LOC in dataflow set SET and set
1878 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1879 MODIFY is true, any other live copies of the same variable part are
1880 also deleted from the dataflow set, otherwise the variable part is
1881 assumed to be copied from another location holding the same
1885 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1886 enum var_init_status initialized
, rtx set_src
)
1888 tree decl
= REG_EXPR (loc
);
1889 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1893 decl
= var_debug_decl (decl
);
1895 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1896 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1898 nextp
= &set
->regs
[REGNO (loc
)];
1899 for (node
= *nextp
; node
; node
= next
)
1902 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1904 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1905 pool_free (attrs_pool
, node
);
1911 nextp
= &node
->next
;
1915 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1916 var_reg_set (set
, loc
, initialized
, set_src
);
1919 /* Delete the association of register LOC in dataflow set SET with any
1920 variables that aren't onepart. If CLOBBER is true, also delete any
1921 other live copies of the same variable part, and delete the
1922 association with onepart dvs too. */
1925 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1927 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1932 tree decl
= REG_EXPR (loc
);
1933 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1935 decl
= var_debug_decl (decl
);
1937 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1940 for (node
= *nextp
; node
; node
= next
)
1943 if (clobber
|| !dv_onepart_p (node
->dv
))
1945 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1946 pool_free (attrs_pool
, node
);
1950 nextp
= &node
->next
;
1954 /* Delete content of register with number REGNO in dataflow set SET. */
1957 var_regno_delete (dataflow_set
*set
, int regno
)
1959 attrs
*reg
= &set
->regs
[regno
];
1962 for (node
= *reg
; node
; node
= next
)
1965 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1966 pool_free (attrs_pool
, node
);
1971 /* Return true if I is the negated value of a power of two. */
1973 negative_power_of_two_p (HOST_WIDE_INT i
)
1975 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
1976 return x
== (x
& -x
);
1979 /* Strip constant offsets and alignments off of LOC. Return the base
1983 vt_get_canonicalize_base (rtx loc
)
1985 while ((GET_CODE (loc
) == PLUS
1986 || GET_CODE (loc
) == AND
)
1987 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
1988 && (GET_CODE (loc
) != AND
1989 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
1990 loc
= XEXP (loc
, 0);
1995 /* This caches canonicalized addresses for VALUEs, computed using
1996 information in the global cselib table. */
1997 static struct pointer_map_t
*global_get_addr_cache
;
1999 /* This caches canonicalized addresses for VALUEs, computed using
2000 information from the global cache and information pertaining to a
2001 basic block being analyzed. */
2002 static struct pointer_map_t
*local_get_addr_cache
;
2004 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2006 /* Return the canonical address for LOC, that must be a VALUE, using a
2007 cached global equivalence or computing it and storing it in the
2011 get_addr_from_global_cache (rtx
const loc
)
2016 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2018 slot
= pointer_map_insert (global_get_addr_cache
, loc
);
2022 x
= canon_rtx (get_addr (loc
));
2024 /* Tentative, avoiding infinite recursion. */
2029 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2032 /* The table may have moved during recursion, recompute
2034 slot
= pointer_map_contains (global_get_addr_cache
, loc
);
2042 /* Return the canonical address for LOC, that must be a VALUE, using a
2043 cached local equivalence or computing it and storing it in the
2047 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2055 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2057 slot
= pointer_map_insert (local_get_addr_cache
, loc
);
2061 x
= get_addr_from_global_cache (loc
);
2063 /* Tentative, avoiding infinite recursion. */
2066 /* Recurse to cache local expansion of X, or if we need to search
2067 for a VALUE in the expansion. */
2070 rtx nx
= vt_canonicalize_addr (set
, x
);
2073 slot
= pointer_map_contains (local_get_addr_cache
, loc
);
2079 dv
= dv_from_rtx (x
);
2080 var
= shared_hash_find (set
->vars
, dv
);
2084 /* Look for an improved equivalent expression. */
2085 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2087 rtx base
= vt_get_canonicalize_base (l
->loc
);
2088 if (GET_CODE (base
) == VALUE
2089 && canon_value_cmp (base
, loc
))
2091 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2094 slot
= pointer_map_contains (local_get_addr_cache
, loc
);
2104 /* Canonicalize LOC using equivalences from SET in addition to those
2105 in the cselib static table. It expects a VALUE-based expression,
2106 and it will only substitute VALUEs with other VALUEs or
2107 function-global equivalences, so that, if two addresses have base
2108 VALUEs that are locally or globally related in ways that
2109 memrefs_conflict_p cares about, they will both canonicalize to
2110 expressions that have the same base VALUE.
2112 The use of VALUEs as canonical base addresses enables the canonical
2113 RTXs to remain unchanged globally, if they resolve to a constant,
2114 or throughout a basic block otherwise, so that they can be cached
2115 and the cache needs not be invalidated when REGs, MEMs or such
2119 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2121 HOST_WIDE_INT ofst
= 0;
2122 enum machine_mode mode
= GET_MODE (oloc
);
2129 while (GET_CODE (loc
) == PLUS
2130 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2132 ofst
+= INTVAL (XEXP (loc
, 1));
2133 loc
= XEXP (loc
, 0);
2136 /* Alignment operations can't normally be combined, so just
2137 canonicalize the base and we're done. We'll normally have
2138 only one stack alignment anyway. */
2139 if (GET_CODE (loc
) == AND
2140 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2141 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2143 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2144 if (x
!= XEXP (loc
, 0))
2145 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2149 if (GET_CODE (loc
) == VALUE
)
2152 loc
= get_addr_from_local_cache (set
, loc
);
2154 loc
= get_addr_from_global_cache (loc
);
2156 /* Consolidate plus_constants. */
2157 while (ofst
&& GET_CODE (loc
) == PLUS
2158 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2160 ofst
+= INTVAL (XEXP (loc
, 1));
2161 loc
= XEXP (loc
, 0);
2168 x
= canon_rtx (loc
);
2175 /* Add OFST back in. */
2178 /* Don't build new RTL if we can help it. */
2179 if (GET_CODE (oloc
) == PLUS
2180 && XEXP (oloc
, 0) == loc
2181 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2184 loc
= plus_constant (mode
, loc
, ofst
);
2190 /* Return true iff there's a true dependence between MLOC and LOC.
2191 MADDR must be a canonicalized version of MLOC's address. */
2194 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2196 if (GET_CODE (loc
) != MEM
)
2199 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2200 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2206 /* Hold parameters for the hashtab traversal function
2207 drop_overlapping_mem_locs, see below. */
2209 struct overlapping_mems
2215 /* Remove all MEMs that overlap with COMS->LOC from the location list
2216 of a hash table entry for a value. COMS->ADDR must be a
2217 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2218 canonicalized itself. */
2221 drop_overlapping_mem_locs (variable_def
**slot
, overlapping_mems
*coms
)
2223 dataflow_set
*set
= coms
->set
;
2224 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2225 variable var
= *slot
;
2227 if (var
->onepart
== ONEPART_VALUE
)
2229 location_chain loc
, *locp
;
2230 bool changed
= false;
2233 gcc_assert (var
->n_var_parts
== 1);
2235 if (shared_var_p (var
, set
->vars
))
2237 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2238 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2244 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2246 gcc_assert (var
->n_var_parts
== 1);
2249 if (VAR_LOC_1PAUX (var
))
2250 cur_loc
= VAR_LOC_FROM (var
);
2252 cur_loc
= var
->var_part
[0].cur_loc
;
2254 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2257 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2264 /* If we have deleted the location which was last emitted
2265 we have to emit new location so add the variable to set
2266 of changed variables. */
2267 if (cur_loc
== loc
->loc
)
2270 var
->var_part
[0].cur_loc
= NULL
;
2271 if (VAR_LOC_1PAUX (var
))
2272 VAR_LOC_FROM (var
) = NULL
;
2274 pool_free (loc_chain_pool
, loc
);
2277 if (!var
->var_part
[0].loc_chain
)
2283 variable_was_changed (var
, set
);
2289 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2292 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2294 struct overlapping_mems coms
;
2296 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2299 coms
.loc
= canon_rtx (loc
);
2300 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2302 set
->traversed_vars
= set
->vars
;
2303 shared_hash_htab (set
->vars
)
2304 .traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2305 set
->traversed_vars
= NULL
;
2308 /* Set the location of DV, OFFSET as the MEM LOC. */
2311 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2312 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2313 enum insert_option iopt
)
2315 if (dv_is_decl_p (dv
))
2316 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2318 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2321 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2323 Adjust the address first if it is stack pointer based. */
2326 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2329 tree decl
= MEM_EXPR (loc
);
2330 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2332 var_mem_decl_set (set
, loc
, initialized
,
2333 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2336 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2337 dataflow set SET to LOC. If MODIFY is true, any other live copies
2338 of the same variable part are also deleted from the dataflow set,
2339 otherwise the variable part is assumed to be copied from another
2340 location holding the same part.
2341 Adjust the address first if it is stack pointer based. */
2344 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2345 enum var_init_status initialized
, rtx set_src
)
2347 tree decl
= MEM_EXPR (loc
);
2348 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2350 clobber_overlapping_mems (set
, loc
);
2351 decl
= var_debug_decl (decl
);
2353 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2354 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2357 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2358 var_mem_set (set
, loc
, initialized
, set_src
);
2361 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2362 true, also delete any other live copies of the same variable part.
2363 Adjust the address first if it is stack pointer based. */
2366 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2368 tree decl
= MEM_EXPR (loc
);
2369 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2371 clobber_overlapping_mems (set
, loc
);
2372 decl
= var_debug_decl (decl
);
2374 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2375 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2378 /* Return true if LOC should not be expanded for location expressions,
2382 unsuitable_loc (rtx loc
)
2384 switch (GET_CODE (loc
))
2398 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2402 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2407 var_regno_delete (set
, REGNO (loc
));
2408 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2409 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2411 else if (MEM_P (loc
))
2413 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2416 clobber_overlapping_mems (set
, loc
);
2418 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2419 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2421 /* If this MEM is a global constant, we don't need it in the
2422 dynamic tables. ??? We should test this before emitting the
2423 micro-op in the first place. */
2425 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2431 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2432 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2436 /* Other kinds of equivalences are necessarily static, at least
2437 so long as we do not perform substitutions while merging
2440 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2441 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2445 /* Bind a value to a location it was just stored in. If MODIFIED
2446 holds, assume the location was modified, detaching it from any
2447 values bound to it. */
2450 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
, bool modified
)
2452 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2454 gcc_assert (cselib_preserved_value_p (v
));
2458 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2459 print_inline_rtx (dump_file
, loc
, 0);
2460 fprintf (dump_file
, " evaluates to ");
2461 print_inline_rtx (dump_file
, val
, 0);
2464 struct elt_loc_list
*l
;
2465 for (l
= v
->locs
; l
; l
= l
->next
)
2467 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2468 print_inline_rtx (dump_file
, l
->loc
, 0);
2471 fprintf (dump_file
, "\n");
2474 gcc_checking_assert (!unsuitable_loc (loc
));
2476 val_bind (set
, val
, loc
, modified
);
2479 /* Clear (canonical address) slots that reference X. */
2482 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED
,
2483 void **slot
, void *x
)
2485 if (vt_get_canonicalize_base ((rtx
)*slot
) == x
)
2490 /* Reset this node, detaching all its equivalences. Return the slot
2491 in the variable hash table that holds dv, if there is one. */
2494 val_reset (dataflow_set
*set
, decl_or_value dv
)
2496 variable var
= shared_hash_find (set
->vars
, dv
) ;
2497 location_chain node
;
2500 if (!var
|| !var
->n_var_parts
)
2503 gcc_assert (var
->n_var_parts
== 1);
2505 if (var
->onepart
== ONEPART_VALUE
)
2507 rtx x
= dv_as_value (dv
);
2510 /* Relationships in the global cache don't change, so reset the
2511 local cache entry only. */
2512 slot
= pointer_map_contains (local_get_addr_cache
, x
);
2515 /* If the value resolved back to itself, odds are that other
2516 values may have cached it too. These entries now refer
2517 to the old X, so detach them too. Entries that used the
2518 old X but resolved to something else remain ok as long as
2519 that something else isn't also reset. */
2521 pointer_map_traverse (local_get_addr_cache
,
2522 local_get_addr_clear_given_value
, x
);
2528 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2529 if (GET_CODE (node
->loc
) == VALUE
2530 && canon_value_cmp (node
->loc
, cval
))
2533 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2534 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2536 /* Redirect the equivalence link to the new canonical
2537 value, or simply remove it if it would point at
2540 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2541 0, node
->init
, node
->set_src
, NO_INSERT
);
2542 delete_variable_part (set
, dv_as_value (dv
),
2543 dv_from_value (node
->loc
), 0);
2548 decl_or_value cdv
= dv_from_value (cval
);
2550 /* Keep the remaining values connected, accummulating links
2551 in the canonical value. */
2552 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2554 if (node
->loc
== cval
)
2556 else if (GET_CODE (node
->loc
) == REG
)
2557 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2558 node
->set_src
, NO_INSERT
);
2559 else if (GET_CODE (node
->loc
) == MEM
)
2560 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2561 node
->set_src
, NO_INSERT
);
2563 set_variable_part (set
, node
->loc
, cdv
, 0,
2564 node
->init
, node
->set_src
, NO_INSERT
);
2568 /* We remove this last, to make sure that the canonical value is not
2569 removed to the point of requiring reinsertion. */
2571 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
2573 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2576 /* Find the values in a given location and map the val to another
2577 value, if it is unique, or add the location as one holding the
2581 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
)
2583 decl_or_value dv
= dv_from_value (val
);
2585 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2588 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2590 fprintf (dump_file
, "head: ");
2591 print_inline_rtx (dump_file
, val
, 0);
2592 fputs (" is at ", dump_file
);
2593 print_inline_rtx (dump_file
, loc
, 0);
2594 fputc ('\n', dump_file
);
2597 val_reset (set
, dv
);
2599 gcc_checking_assert (!unsuitable_loc (loc
));
2603 attrs node
, found
= NULL
;
2605 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2606 if (dv_is_value_p (node
->dv
)
2607 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2611 /* Map incoming equivalences. ??? Wouldn't it be nice if
2612 we just started sharing the location lists? Maybe a
2613 circular list ending at the value itself or some
2615 set_variable_part (set
, dv_as_value (node
->dv
),
2616 dv_from_value (val
), node
->offset
,
2617 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2618 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2619 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2622 /* If we didn't find any equivalence, we need to remember that
2623 this value is held in the named register. */
2627 /* ??? Attempt to find and merge equivalent MEMs or other
2630 val_bind (set
, val
, loc
, false);
2633 /* Initialize dataflow set SET to be empty.
2634 VARS_SIZE is the initial size of hash table VARS. */
2637 dataflow_set_init (dataflow_set
*set
)
2639 init_attrs_list_set (set
->regs
);
2640 set
->vars
= shared_hash_copy (empty_shared_hash
);
2641 set
->stack_adjust
= 0;
2642 set
->traversed_vars
= NULL
;
2645 /* Delete the contents of dataflow set SET. */
2648 dataflow_set_clear (dataflow_set
*set
)
2652 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2653 attrs_list_clear (&set
->regs
[i
]);
2655 shared_hash_destroy (set
->vars
);
2656 set
->vars
= shared_hash_copy (empty_shared_hash
);
2659 /* Copy the contents of dataflow set SRC to DST. */
2662 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2666 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2667 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2669 shared_hash_destroy (dst
->vars
);
2670 dst
->vars
= shared_hash_copy (src
->vars
);
2671 dst
->stack_adjust
= src
->stack_adjust
;
2674 /* Information for merging lists of locations for a given offset of variable.
2676 struct variable_union_info
2678 /* Node of the location chain. */
2681 /* The sum of positions in the input chains. */
2684 /* The position in the chain of DST dataflow set. */
2688 /* Buffer for location list sorting and its allocated size. */
2689 static struct variable_union_info
*vui_vec
;
2690 static int vui_allocated
;
2692 /* Compare function for qsort, order the structures by POS element. */
2695 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2697 const struct variable_union_info
*const i1
=
2698 (const struct variable_union_info
*) n1
;
2699 const struct variable_union_info
*const i2
=
2700 ( const struct variable_union_info
*) n2
;
2702 if (i1
->pos
!= i2
->pos
)
2703 return i1
->pos
- i2
->pos
;
2705 return (i1
->pos_dst
- i2
->pos_dst
);
2708 /* Compute union of location parts of variable *SLOT and the same variable
2709 from hash table DATA. Compute "sorted" union of the location chains
2710 for common offsets, i.e. the locations of a variable part are sorted by
2711 a priority where the priority is the sum of the positions in the 2 chains
2712 (if a location is only in one list the position in the second list is
2713 defined to be larger than the length of the chains).
2714 When we are updating the location parts the newest location is in the
2715 beginning of the chain, so when we do the described "sorted" union
2716 we keep the newest locations in the beginning. */
2719 variable_union (variable src
, dataflow_set
*set
)
2722 variable_def
**dstp
;
2725 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2726 if (!dstp
|| !*dstp
)
2730 dst_can_be_shared
= false;
2732 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2736 /* Continue traversing the hash table. */
2742 gcc_assert (src
->n_var_parts
);
2743 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2745 /* We can combine one-part variables very efficiently, because their
2746 entries are in canonical order. */
2749 location_chain
*nodep
, dnode
, snode
;
2751 gcc_assert (src
->n_var_parts
== 1
2752 && dst
->n_var_parts
== 1);
2754 snode
= src
->var_part
[0].loc_chain
;
2757 restart_onepart_unshared
:
2758 nodep
= &dst
->var_part
[0].loc_chain
;
2764 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2768 location_chain nnode
;
2770 if (shared_var_p (dst
, set
->vars
))
2772 dstp
= unshare_variable (set
, dstp
, dst
,
2773 VAR_INIT_STATUS_INITIALIZED
);
2775 goto restart_onepart_unshared
;
2778 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2779 nnode
->loc
= snode
->loc
;
2780 nnode
->init
= snode
->init
;
2781 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2782 nnode
->set_src
= NULL
;
2784 nnode
->set_src
= snode
->set_src
;
2785 nnode
->next
= dnode
;
2789 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2792 snode
= snode
->next
;
2794 nodep
= &dnode
->next
;
2801 gcc_checking_assert (!src
->onepart
);
2803 /* Count the number of location parts, result is K. */
2804 for (i
= 0, j
= 0, k
= 0;
2805 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2807 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2812 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2817 k
+= src
->n_var_parts
- i
;
2818 k
+= dst
->n_var_parts
- j
;
2820 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2821 thus there are at most MAX_VAR_PARTS different offsets. */
2822 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2824 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2826 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2830 i
= src
->n_var_parts
- 1;
2831 j
= dst
->n_var_parts
- 1;
2832 dst
->n_var_parts
= k
;
2834 for (k
--; k
>= 0; k
--)
2836 location_chain node
, node2
;
2838 if (i
>= 0 && j
>= 0
2839 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2841 /* Compute the "sorted" union of the chains, i.e. the locations which
2842 are in both chains go first, they are sorted by the sum of
2843 positions in the chains. */
2846 struct variable_union_info
*vui
;
2848 /* If DST is shared compare the location chains.
2849 If they are different we will modify the chain in DST with
2850 high probability so make a copy of DST. */
2851 if (shared_var_p (dst
, set
->vars
))
2853 for (node
= src
->var_part
[i
].loc_chain
,
2854 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2855 node
= node
->next
, node2
= node2
->next
)
2857 if (!((REG_P (node2
->loc
)
2858 && REG_P (node
->loc
)
2859 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2860 || rtx_equal_p (node2
->loc
, node
->loc
)))
2862 if (node2
->init
< node
->init
)
2863 node2
->init
= node
->init
;
2869 dstp
= unshare_variable (set
, dstp
, dst
,
2870 VAR_INIT_STATUS_UNKNOWN
);
2871 dst
= (variable
)*dstp
;
2876 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2879 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2884 /* The most common case, much simpler, no qsort is needed. */
2885 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2886 dst
->var_part
[k
].loc_chain
= dstnode
;
2887 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2889 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2890 if (!((REG_P (dstnode
->loc
)
2891 && REG_P (node
->loc
)
2892 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2893 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2895 location_chain new_node
;
2897 /* Copy the location from SRC. */
2898 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2899 new_node
->loc
= node
->loc
;
2900 new_node
->init
= node
->init
;
2901 if (!node
->set_src
|| MEM_P (node
->set_src
))
2902 new_node
->set_src
= NULL
;
2904 new_node
->set_src
= node
->set_src
;
2905 node2
->next
= new_node
;
2912 if (src_l
+ dst_l
> vui_allocated
)
2914 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2915 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2920 /* Fill in the locations from DST. */
2921 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2922 node
= node
->next
, jj
++)
2925 vui
[jj
].pos_dst
= jj
;
2927 /* Pos plus value larger than a sum of 2 valid positions. */
2928 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2931 /* Fill in the locations from SRC. */
2933 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2934 node
= node
->next
, ii
++)
2936 /* Find location from NODE. */
2937 for (jj
= 0; jj
< dst_l
; jj
++)
2939 if ((REG_P (vui
[jj
].lc
->loc
)
2940 && REG_P (node
->loc
)
2941 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2942 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2944 vui
[jj
].pos
= jj
+ ii
;
2948 if (jj
>= dst_l
) /* The location has not been found. */
2950 location_chain new_node
;
2952 /* Copy the location from SRC. */
2953 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2954 new_node
->loc
= node
->loc
;
2955 new_node
->init
= node
->init
;
2956 if (!node
->set_src
|| MEM_P (node
->set_src
))
2957 new_node
->set_src
= NULL
;
2959 new_node
->set_src
= node
->set_src
;
2960 vui
[n
].lc
= new_node
;
2961 vui
[n
].pos_dst
= src_l
+ dst_l
;
2962 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
2969 /* Special case still very common case. For dst_l == 2
2970 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2971 vui[i].pos == i + src_l + dst_l. */
2972 if (vui
[0].pos
> vui
[1].pos
)
2974 /* Order should be 1, 0, 2... */
2975 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
2976 vui
[1].lc
->next
= vui
[0].lc
;
2979 vui
[0].lc
->next
= vui
[2].lc
;
2980 vui
[n
- 1].lc
->next
= NULL
;
2983 vui
[0].lc
->next
= NULL
;
2988 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2989 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
2991 /* Order should be 0, 2, 1, 3... */
2992 vui
[0].lc
->next
= vui
[2].lc
;
2993 vui
[2].lc
->next
= vui
[1].lc
;
2996 vui
[1].lc
->next
= vui
[3].lc
;
2997 vui
[n
- 1].lc
->next
= NULL
;
3000 vui
[1].lc
->next
= NULL
;
3005 /* Order should be 0, 1, 2... */
3007 vui
[n
- 1].lc
->next
= NULL
;
3010 for (; ii
< n
; ii
++)
3011 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3015 qsort (vui
, n
, sizeof (struct variable_union_info
),
3016 variable_union_info_cmp_pos
);
3018 /* Reconnect the nodes in sorted order. */
3019 for (ii
= 1; ii
< n
; ii
++)
3020 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3021 vui
[n
- 1].lc
->next
= NULL
;
3022 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3025 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3030 else if ((i
>= 0 && j
>= 0
3031 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3034 dst
->var_part
[k
] = dst
->var_part
[j
];
3037 else if ((i
>= 0 && j
>= 0
3038 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3041 location_chain
*nextp
;
3043 /* Copy the chain from SRC. */
3044 nextp
= &dst
->var_part
[k
].loc_chain
;
3045 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3047 location_chain new_lc
;
3049 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
3050 new_lc
->next
= NULL
;
3051 new_lc
->init
= node
->init
;
3052 if (!node
->set_src
|| MEM_P (node
->set_src
))
3053 new_lc
->set_src
= NULL
;
3055 new_lc
->set_src
= node
->set_src
;
3056 new_lc
->loc
= node
->loc
;
3059 nextp
= &new_lc
->next
;
3062 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3065 dst
->var_part
[k
].cur_loc
= NULL
;
3068 if (flag_var_tracking_uninit
)
3069 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3071 location_chain node
, node2
;
3072 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3073 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3074 if (rtx_equal_p (node
->loc
, node2
->loc
))
3076 if (node
->init
> node2
->init
)
3077 node2
->init
= node
->init
;
3081 /* Continue traversing the hash table. */
3085 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3088 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
3092 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3093 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
3095 if (dst
->vars
== empty_shared_hash
)
3097 shared_hash_destroy (dst
->vars
);
3098 dst
->vars
= shared_hash_copy (src
->vars
);
3102 variable_iterator_type hi
;
3105 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src
->vars
),
3107 variable_union (var
, dst
);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3126 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3127 user DECLs, this means they're in changed_variables. Values and
3128 debug exprs may be left with this flag set if no user variable
3129 requires them to be evaluated. */
3132 set_dv_changed (decl_or_value dv
, bool newv
)
3134 switch (dv_onepart_p (dv
))
3138 NO_LOC_P (dv_as_value (dv
)) = false;
3139 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
3144 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3145 /* Fall through... */
3148 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3153 /* Return true if DV needs to have its cur_loc recomputed. */
3156 dv_changed_p (decl_or_value dv
)
3158 return (dv_is_value_p (dv
)
3159 ? VALUE_CHANGED (dv_as_value (dv
))
3160 : DECL_CHANGED (dv_as_decl (dv
)));
3163 /* Return a location list node whose loc is rtx_equal to LOC, in the
3164 location list of a one-part variable or value VAR, or in that of
3165 any values recursively mentioned in the location lists. VARS must
3166 be in star-canonical form. */
3168 static location_chain
3169 find_loc_in_1pdv (rtx loc
, variable var
, variable_table_type vars
)
3171 location_chain node
;
3172 enum rtx_code loc_code
;
3177 gcc_checking_assert (var
->onepart
);
3179 if (!var
->n_var_parts
)
3182 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3184 loc_code
= GET_CODE (loc
);
3185 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3190 if (GET_CODE (node
->loc
) != loc_code
)
3192 if (GET_CODE (node
->loc
) != VALUE
)
3195 else if (loc
== node
->loc
)
3197 else if (loc_code
!= VALUE
)
3199 if (rtx_equal_p (loc
, node
->loc
))
3204 /* Since we're in star-canonical form, we don't need to visit
3205 non-canonical nodes: one-part variables and non-canonical
3206 values would only point back to the canonical node. */
3207 if (dv_is_value_p (var
->dv
)
3208 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3210 /* Skip all subsequent VALUEs. */
3211 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3214 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3215 dv_as_value (var
->dv
)));
3216 if (loc
== node
->loc
)
3222 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3223 gcc_checking_assert (!node
->next
);
3225 dv
= dv_from_value (node
->loc
);
3226 rvar
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
3227 return find_loc_in_1pdv (loc
, rvar
, vars
);
3230 /* ??? Gotta look in cselib_val locations too. */
3235 /* Hash table iteration argument passed to variable_merge. */
3238 /* The set in which the merge is to be inserted. */
3240 /* The set that we're iterating in. */
3242 /* The set that may contain the other dv we are to merge with. */
3244 /* Number of onepart dvs in src. */
3245 int src_onepart_cnt
;
3248 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3249 loc_cmp order, and it is maintained as such. */
3252 insert_into_intersection (location_chain
*nodep
, rtx loc
,
3253 enum var_init_status status
)
3255 location_chain node
;
3258 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3259 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
3261 node
->init
= MIN (node
->init
, status
);
3267 node
= (location_chain
) pool_alloc (loc_chain_pool
);
3270 node
->set_src
= NULL
;
3271 node
->init
= status
;
3272 node
->next
= *nodep
;
3276 /* Insert in DEST the intersection of the locations present in both
3277 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3278 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3282 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
3283 location_chain s1node
, variable s2var
)
3285 dataflow_set
*s1set
= dsm
->cur
;
3286 dataflow_set
*s2set
= dsm
->src
;
3287 location_chain found
;
3291 location_chain s2node
;
3293 gcc_checking_assert (s2var
->onepart
);
3295 if (s2var
->n_var_parts
)
3297 s2node
= s2var
->var_part
[0].loc_chain
;
3299 for (; s1node
&& s2node
;
3300 s1node
= s1node
->next
, s2node
= s2node
->next
)
3301 if (s1node
->loc
!= s2node
->loc
)
3303 else if (s1node
->loc
== val
)
3306 insert_into_intersection (dest
, s1node
->loc
,
3307 MIN (s1node
->init
, s2node
->init
));
3311 for (; s1node
; s1node
= s1node
->next
)
3313 if (s1node
->loc
== val
)
3316 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3317 shared_hash_htab (s2set
->vars
))))
3319 insert_into_intersection (dest
, s1node
->loc
,
3320 MIN (s1node
->init
, found
->init
));
3324 if (GET_CODE (s1node
->loc
) == VALUE
3325 && !VALUE_RECURSED_INTO (s1node
->loc
))
3327 decl_or_value dv
= dv_from_value (s1node
->loc
);
3328 variable svar
= shared_hash_find (s1set
->vars
, dv
);
3331 if (svar
->n_var_parts
== 1)
3333 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3334 intersect_loc_chains (val
, dest
, dsm
,
3335 svar
->var_part
[0].loc_chain
,
3337 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3342 /* ??? gotta look in cselib_val locations too. */
3344 /* ??? if the location is equivalent to any location in src,
3345 searched recursively
3347 add to dst the values needed to represent the equivalence
3349 telling whether locations S is equivalent to another dv's
3352 for each location D in the list
3354 if S and D satisfy rtx_equal_p, then it is present
3356 else if D is a value, recurse without cycles
3358 else if S and D have the same CODE and MODE
3360 for each operand oS and the corresponding oD
3362 if oS and oD are not equivalent, then S an D are not equivalent
3364 else if they are RTX vectors
3366 if any vector oS element is not equivalent to its respective oD,
3367 then S and D are not equivalent
3375 /* Return -1 if X should be before Y in a location list for a 1-part
3376 variable, 1 if Y should be before X, and 0 if they're equivalent
3377 and should not appear in the list. */
3380 loc_cmp (rtx x
, rtx y
)
3383 RTX_CODE code
= GET_CODE (x
);
3393 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3394 if (REGNO (x
) == REGNO (y
))
3396 else if (REGNO (x
) < REGNO (y
))
3409 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3410 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
3416 if (GET_CODE (x
) == VALUE
)
3418 if (GET_CODE (y
) != VALUE
)
3420 /* Don't assert the modes are the same, that is true only
3421 when not recursing. (subreg:QI (value:SI 1:1) 0)
3422 and (subreg:QI (value:DI 2:2) 0) can be compared,
3423 even when the modes are different. */
3424 if (canon_value_cmp (x
, y
))
3430 if (GET_CODE (y
) == VALUE
)
3433 /* Entry value is the least preferable kind of expression. */
3434 if (GET_CODE (x
) == ENTRY_VALUE
)
3436 if (GET_CODE (y
) != ENTRY_VALUE
)
3438 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3439 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3442 if (GET_CODE (y
) == ENTRY_VALUE
)
3445 if (GET_CODE (x
) == GET_CODE (y
))
3446 /* Compare operands below. */;
3447 else if (GET_CODE (x
) < GET_CODE (y
))
3452 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3454 if (GET_CODE (x
) == DEBUG_EXPR
)
3456 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3457 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3459 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3460 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
3464 fmt
= GET_RTX_FORMAT (code
);
3465 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3469 if (XWINT (x
, i
) == XWINT (y
, i
))
3471 else if (XWINT (x
, i
) < XWINT (y
, i
))
3478 if (XINT (x
, i
) == XINT (y
, i
))
3480 else if (XINT (x
, i
) < XINT (y
, i
))
3487 /* Compare the vector length first. */
3488 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3489 /* Compare the vectors elements. */;
3490 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3495 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3496 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3497 XVECEXP (y
, i
, j
))))
3502 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3508 if (XSTR (x
, i
) == XSTR (y
, i
))
3514 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3522 /* These are just backpointers, so they don't matter. */
3529 /* It is believed that rtx's at this level will never
3530 contain anything but integers and other rtx's,
3531 except for within LABEL_REFs and SYMBOL_REFs. */
3540 /* Check the order of entries in one-part variables. */
3543 canonicalize_loc_order_check (variable_def
**slot
,
3544 dataflow_set
*data ATTRIBUTE_UNUSED
)
3546 variable var
= *slot
;
3547 location_chain node
, next
;
3549 #ifdef ENABLE_RTL_CHECKING
3551 for (i
= 0; i
< var
->n_var_parts
; i
++)
3552 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3553 gcc_assert (!var
->in_changed_variables
);
3559 gcc_assert (var
->n_var_parts
== 1);
3560 node
= var
->var_part
[0].loc_chain
;
3563 while ((next
= node
->next
))
3565 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3573 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3574 more likely to be chosen as canonical for an equivalence set.
3575 Ensure less likely values can reach more likely neighbors, making
3576 the connections bidirectional. */
3579 canonicalize_values_mark (variable_def
**slot
, dataflow_set
*set
)
3581 variable var
= *slot
;
3582 decl_or_value dv
= var
->dv
;
3584 location_chain node
;
3586 if (!dv_is_value_p (dv
))
3589 gcc_checking_assert (var
->n_var_parts
== 1);
3591 val
= dv_as_value (dv
);
3593 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3594 if (GET_CODE (node
->loc
) == VALUE
)
3596 if (canon_value_cmp (node
->loc
, val
))
3597 VALUE_RECURSED_INTO (val
) = true;
3600 decl_or_value odv
= dv_from_value (node
->loc
);
3601 variable_def
**oslot
;
3602 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3604 set_slot_part (set
, val
, oslot
, odv
, 0,
3605 node
->init
, NULL_RTX
);
3607 VALUE_RECURSED_INTO (node
->loc
) = true;
3614 /* Remove redundant entries from equivalence lists in onepart
3615 variables, canonicalizing equivalence sets into star shapes. */
3618 canonicalize_values_star (variable_def
**slot
, dataflow_set
*set
)
3620 variable var
= *slot
;
3621 decl_or_value dv
= var
->dv
;
3622 location_chain node
;
3625 variable_def
**cslot
;
3632 gcc_checking_assert (var
->n_var_parts
== 1);
3634 if (dv_is_value_p (dv
))
3636 cval
= dv_as_value (dv
);
3637 if (!VALUE_RECURSED_INTO (cval
))
3639 VALUE_RECURSED_INTO (cval
) = false;
3649 gcc_assert (var
->n_var_parts
== 1);
3651 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3652 if (GET_CODE (node
->loc
) == VALUE
)
3655 if (VALUE_RECURSED_INTO (node
->loc
))
3657 if (canon_value_cmp (node
->loc
, cval
))
3666 if (!has_marks
|| dv_is_decl_p (dv
))
3669 /* Keep it marked so that we revisit it, either after visiting a
3670 child node, or after visiting a new parent that might be
3672 VALUE_RECURSED_INTO (val
) = true;
3674 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3675 if (GET_CODE (node
->loc
) == VALUE
3676 && VALUE_RECURSED_INTO (node
->loc
))
3680 VALUE_RECURSED_INTO (cval
) = false;
3681 dv
= dv_from_value (cval
);
3682 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3685 gcc_assert (dv_is_decl_p (var
->dv
));
3686 /* The canonical value was reset and dropped.
3688 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3692 gcc_assert (dv_is_value_p (var
->dv
));
3693 if (var
->n_var_parts
== 0)
3695 gcc_assert (var
->n_var_parts
== 1);
3699 VALUE_RECURSED_INTO (val
) = false;
3704 /* Push values to the canonical one. */
3705 cdv
= dv_from_value (cval
);
3706 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3708 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3709 if (node
->loc
!= cval
)
3711 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3712 node
->init
, NULL_RTX
);
3713 if (GET_CODE (node
->loc
) == VALUE
)
3715 decl_or_value ndv
= dv_from_value (node
->loc
);
3717 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3720 if (canon_value_cmp (node
->loc
, val
))
3722 /* If it could have been a local minimum, it's not any more,
3723 since it's now neighbor to cval, so it may have to push
3724 to it. Conversely, if it wouldn't have prevailed over
3725 val, then whatever mark it has is fine: if it was to
3726 push, it will now push to a more canonical node, but if
3727 it wasn't, then it has already pushed any values it might
3729 VALUE_RECURSED_INTO (node
->loc
) = true;
3730 /* Make sure we visit node->loc by ensuring we cval is
3732 VALUE_RECURSED_INTO (cval
) = true;
3734 else if (!VALUE_RECURSED_INTO (node
->loc
))
3735 /* If we have no need to "recurse" into this node, it's
3736 already "canonicalized", so drop the link to the old
3738 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3740 else if (GET_CODE (node
->loc
) == REG
)
3742 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3744 /* Change an existing attribute referring to dv so that it
3745 refers to cdv, removing any duplicate this might
3746 introduce, and checking that no previous duplicates
3747 existed, all in a single pass. */
3751 if (list
->offset
== 0
3752 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3753 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3760 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3763 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3768 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3770 *listp
= list
->next
;
3771 pool_free (attrs_pool
, list
);
3776 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3779 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3781 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3786 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3788 *listp
= list
->next
;
3789 pool_free (attrs_pool
, list
);
3794 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3803 if (list
->offset
== 0
3804 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3805 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3815 set_slot_part (set
, val
, cslot
, cdv
, 0,
3816 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3818 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3820 /* Variable may have been unshared. */
3822 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3823 && var
->var_part
[0].loc_chain
->next
== NULL
);
3825 if (VALUE_RECURSED_INTO (cval
))
3826 goto restart_with_cval
;
3831 /* Bind one-part variables to the canonical value in an equivalence
3832 set. Not doing this causes dataflow convergence failure in rare
3833 circumstances, see PR42873. Unfortunately we can't do this
3834 efficiently as part of canonicalize_values_star, since we may not
3835 have determined or even seen the canonical value of a set when we
3836 get to a variable that references another member of the set. */
3839 canonicalize_vars_star (variable_def
**slot
, dataflow_set
*set
)
3841 variable var
= *slot
;
3842 decl_or_value dv
= var
->dv
;
3843 location_chain node
;
3846 variable_def
**cslot
;
3848 location_chain cnode
;
3850 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3853 gcc_assert (var
->n_var_parts
== 1);
3855 node
= var
->var_part
[0].loc_chain
;
3857 if (GET_CODE (node
->loc
) != VALUE
)
3860 gcc_assert (!node
->next
);
3863 /* Push values to the canonical one. */
3864 cdv
= dv_from_value (cval
);
3865 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3869 gcc_assert (cvar
->n_var_parts
== 1);
3871 cnode
= cvar
->var_part
[0].loc_chain
;
3873 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3874 that are not “more canonical” than it. */
3875 if (GET_CODE (cnode
->loc
) != VALUE
3876 || !canon_value_cmp (cnode
->loc
, cval
))
3879 /* CVAL was found to be non-canonical. Change the variable to point
3880 to the canonical VALUE. */
3881 gcc_assert (!cnode
->next
);
3884 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3885 node
->init
, node
->set_src
);
3886 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3891 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3892 corresponding entry in DSM->src. Multi-part variables are combined
3893 with variable_union, whereas onepart dvs are combined with
3897 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
3899 dataflow_set
*dst
= dsm
->dst
;
3900 variable_def
**dstslot
;
3901 variable s2var
, dvar
= NULL
;
3902 decl_or_value dv
= s1var
->dv
;
3903 onepart_enum_t onepart
= s1var
->onepart
;
3906 location_chain node
, *nodep
;
3908 /* If the incoming onepart variable has an empty location list, then
3909 the intersection will be just as empty. For other variables,
3910 it's always union. */
3911 gcc_checking_assert (s1var
->n_var_parts
3912 && s1var
->var_part
[0].loc_chain
);
3915 return variable_union (s1var
, dst
);
3917 gcc_checking_assert (s1var
->n_var_parts
== 1);
3919 dvhash
= dv_htab_hash (dv
);
3920 if (dv_is_value_p (dv
))
3921 val
= dv_as_value (dv
);
3925 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3928 dst_can_be_shared
= false;
3932 dsm
->src_onepart_cnt
--;
3933 gcc_assert (s2var
->var_part
[0].loc_chain
3934 && s2var
->onepart
== onepart
3935 && s2var
->n_var_parts
== 1);
3937 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3941 gcc_assert (dvar
->refcount
== 1
3942 && dvar
->onepart
== onepart
3943 && dvar
->n_var_parts
== 1);
3944 nodep
= &dvar
->var_part
[0].loc_chain
;
3952 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3954 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3956 *dstslot
= dvar
= s2var
;
3961 dst_can_be_shared
= false;
3963 intersect_loc_chains (val
, nodep
, dsm
,
3964 s1var
->var_part
[0].loc_chain
, s2var
);
3970 dvar
= (variable
) pool_alloc (onepart_pool (onepart
));
3973 dvar
->n_var_parts
= 1;
3974 dvar
->onepart
= onepart
;
3975 dvar
->in_changed_variables
= false;
3976 dvar
->var_part
[0].loc_chain
= node
;
3977 dvar
->var_part
[0].cur_loc
= NULL
;
3979 VAR_LOC_1PAUX (dvar
) = NULL
;
3981 VAR_PART_OFFSET (dvar
, 0) = 0;
3984 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
3986 gcc_assert (!*dstslot
);
3994 nodep
= &dvar
->var_part
[0].loc_chain
;
3995 while ((node
= *nodep
))
3997 location_chain
*nextp
= &node
->next
;
3999 if (GET_CODE (node
->loc
) == REG
)
4003 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4004 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4005 && dv_is_value_p (list
->dv
))
4009 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4011 /* If this value became canonical for another value that had
4012 this register, we want to leave it alone. */
4013 else if (dv_as_value (list
->dv
) != val
)
4015 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4017 node
->init
, NULL_RTX
);
4018 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4020 /* Since nextp points into the removed node, we can't
4021 use it. The pointer to the next node moved to nodep.
4022 However, if the variable we're walking is unshared
4023 during our walk, we'll keep walking the location list
4024 of the previously-shared variable, in which case the
4025 node won't have been removed, and we'll want to skip
4026 it. That's why we test *nodep here. */
4032 /* Canonicalization puts registers first, so we don't have to
4038 if (dvar
!= *dstslot
)
4040 nodep
= &dvar
->var_part
[0].loc_chain
;
4044 /* Mark all referenced nodes for canonicalization, and make sure
4045 we have mutual equivalence links. */
4046 VALUE_RECURSED_INTO (val
) = true;
4047 for (node
= *nodep
; node
; node
= node
->next
)
4048 if (GET_CODE (node
->loc
) == VALUE
)
4050 VALUE_RECURSED_INTO (node
->loc
) = true;
4051 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4052 node
->init
, NULL
, INSERT
);
4055 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4056 gcc_assert (*dstslot
== dvar
);
4057 canonicalize_values_star (dstslot
, dst
);
4058 gcc_checking_assert (dstslot
4059 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4065 bool has_value
= false, has_other
= false;
4067 /* If we have one value and anything else, we're going to
4068 canonicalize this, so make sure all values have an entry in
4069 the table and are marked for canonicalization. */
4070 for (node
= *nodep
; node
; node
= node
->next
)
4072 if (GET_CODE (node
->loc
) == VALUE
)
4074 /* If this was marked during register canonicalization,
4075 we know we have to canonicalize values. */
4090 if (has_value
&& has_other
)
4092 for (node
= *nodep
; node
; node
= node
->next
)
4094 if (GET_CODE (node
->loc
) == VALUE
)
4096 decl_or_value dv
= dv_from_value (node
->loc
);
4097 variable_def
**slot
= NULL
;
4099 if (shared_hash_shared (dst
->vars
))
4100 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4102 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4106 variable var
= (variable
) pool_alloc (onepart_pool
4110 var
->n_var_parts
= 1;
4111 var
->onepart
= ONEPART_VALUE
;
4112 var
->in_changed_variables
= false;
4113 var
->var_part
[0].loc_chain
= NULL
;
4114 var
->var_part
[0].cur_loc
= NULL
;
4115 VAR_LOC_1PAUX (var
) = NULL
;
4119 VALUE_RECURSED_INTO (node
->loc
) = true;
4123 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4124 gcc_assert (*dstslot
== dvar
);
4125 canonicalize_values_star (dstslot
, dst
);
4126 gcc_checking_assert (dstslot
4127 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4133 if (!onepart_variable_different_p (dvar
, s2var
))
4135 variable_htab_free (dvar
);
4136 *dstslot
= dvar
= s2var
;
4139 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4141 variable_htab_free (dvar
);
4142 *dstslot
= dvar
= s1var
;
4144 dst_can_be_shared
= false;
4147 dst_can_be_shared
= false;
4152 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4153 multi-part variable. Unions of multi-part variables and
4154 intersections of one-part ones will be handled in
4155 variable_merge_over_cur(). */
4158 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
4160 dataflow_set
*dst
= dsm
->dst
;
4161 decl_or_value dv
= s2var
->dv
;
4163 if (!s2var
->onepart
)
4165 variable_def
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4171 dsm
->src_onepart_cnt
++;
4175 /* Combine dataflow set information from SRC2 into DST, using PDST
4176 to carry over information across passes. */
4179 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4181 dataflow_set cur
= *dst
;
4182 dataflow_set
*src1
= &cur
;
4183 struct dfset_merge dsm
;
4185 size_t src1_elems
, src2_elems
;
4186 variable_iterator_type hi
;
4189 src1_elems
= shared_hash_htab (src1
->vars
).elements ();
4190 src2_elems
= shared_hash_htab (src2
->vars
).elements ();
4191 dataflow_set_init (dst
);
4192 dst
->stack_adjust
= cur
.stack_adjust
;
4193 shared_hash_destroy (dst
->vars
);
4194 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
4195 dst
->vars
->refcount
= 1;
4196 dst
->vars
->htab
.create (MAX (src1_elems
, src2_elems
));
4198 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4199 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4204 dsm
.src_onepart_cnt
= 0;
4206 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm
.src
->vars
),
4208 variable_merge_over_src (var
, &dsm
);
4209 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm
.cur
->vars
),
4211 variable_merge_over_cur (var
, &dsm
);
4213 if (dsm
.src_onepart_cnt
)
4214 dst_can_be_shared
= false;
4216 dataflow_set_destroy (src1
);
4219 /* Mark register equivalences. */
4222 dataflow_set_equiv_regs (dataflow_set
*set
)
4227 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4229 rtx canon
[NUM_MACHINE_MODES
];
4231 /* If the list is empty or one entry, no need to canonicalize
4233 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4236 memset (canon
, 0, sizeof (canon
));
4238 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4239 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4241 rtx val
= dv_as_value (list
->dv
);
4242 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4245 if (canon_value_cmp (val
, cval
))
4249 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4250 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4252 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4257 if (dv_is_value_p (list
->dv
))
4259 rtx val
= dv_as_value (list
->dv
);
4264 VALUE_RECURSED_INTO (val
) = true;
4265 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4266 VAR_INIT_STATUS_INITIALIZED
,
4270 VALUE_RECURSED_INTO (cval
) = true;
4271 set_variable_part (set
, cval
, list
->dv
, 0,
4272 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4275 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4276 listp
= list
? &list
->next
: listp
)
4277 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4279 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4280 variable_def
**slot
;
4285 if (dv_is_value_p (list
->dv
))
4287 rtx val
= dv_as_value (list
->dv
);
4288 if (!VALUE_RECURSED_INTO (val
))
4292 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4293 canonicalize_values_star (slot
, set
);
4300 /* Remove any redundant values in the location list of VAR, which must
4301 be unshared and 1-part. */
4304 remove_duplicate_values (variable var
)
4306 location_chain node
, *nodep
;
4308 gcc_assert (var
->onepart
);
4309 gcc_assert (var
->n_var_parts
== 1);
4310 gcc_assert (var
->refcount
== 1);
4312 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4314 if (GET_CODE (node
->loc
) == VALUE
)
4316 if (VALUE_RECURSED_INTO (node
->loc
))
4318 /* Remove duplicate value node. */
4319 *nodep
= node
->next
;
4320 pool_free (loc_chain_pool
, node
);
4324 VALUE_RECURSED_INTO (node
->loc
) = true;
4326 nodep
= &node
->next
;
4329 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4330 if (GET_CODE (node
->loc
) == VALUE
)
4332 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4333 VALUE_RECURSED_INTO (node
->loc
) = false;
4338 /* Hash table iteration argument passed to variable_post_merge. */
4339 struct dfset_post_merge
4341 /* The new input set for the current block. */
4343 /* Pointer to the permanent input set for the current block, or
4345 dataflow_set
**permp
;
4348 /* Create values for incoming expressions associated with one-part
4349 variables that don't have value numbers for them. */
4352 variable_post_merge_new_vals (variable_def
**slot
, dfset_post_merge
*dfpm
)
4354 dataflow_set
*set
= dfpm
->set
;
4355 variable var
= *slot
;
4356 location_chain node
;
4358 if (!var
->onepart
|| !var
->n_var_parts
)
4361 gcc_assert (var
->n_var_parts
== 1);
4363 if (dv_is_decl_p (var
->dv
))
4365 bool check_dupes
= false;
4368 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4370 if (GET_CODE (node
->loc
) == VALUE
)
4371 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4372 else if (GET_CODE (node
->loc
) == REG
)
4374 attrs att
, *attp
, *curp
= NULL
;
4376 if (var
->refcount
!= 1)
4378 slot
= unshare_variable (set
, slot
, var
,
4379 VAR_INIT_STATUS_INITIALIZED
);
4384 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4386 if (att
->offset
== 0
4387 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4389 if (dv_is_value_p (att
->dv
))
4391 rtx cval
= dv_as_value (att
->dv
);
4396 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4404 if ((*curp
)->offset
== 0
4405 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4406 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4409 curp
= &(*curp
)->next
;
4420 *dfpm
->permp
= XNEW (dataflow_set
);
4421 dataflow_set_init (*dfpm
->permp
);
4424 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4425 att
; att
= att
->next
)
4426 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4428 gcc_assert (att
->offset
== 0
4429 && dv_is_value_p (att
->dv
));
4430 val_reset (set
, att
->dv
);
4437 cval
= dv_as_value (cdv
);
4441 /* Create a unique value to hold this register,
4442 that ought to be found and reused in
4443 subsequent rounds. */
4445 gcc_assert (!cselib_lookup (node
->loc
,
4446 GET_MODE (node
->loc
), 0,
4448 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4450 cselib_preserve_value (v
);
4451 cselib_invalidate_rtx (node
->loc
);
4453 cdv
= dv_from_value (cval
);
4456 "Created new value %u:%u for reg %i\n",
4457 v
->uid
, v
->hash
, REGNO (node
->loc
));
4460 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4461 VAR_INIT_STATUS_INITIALIZED
,
4462 cdv
, 0, NULL
, INSERT
);
4468 /* Remove attribute referring to the decl, which now
4469 uses the value for the register, already existing or
4470 to be added when we bring perm in. */
4473 pool_free (attrs_pool
, att
);
4478 remove_duplicate_values (var
);
4484 /* Reset values in the permanent set that are not associated with the
4485 chosen expression. */
4488 variable_post_merge_perm_vals (variable_def
**pslot
, dfset_post_merge
*dfpm
)
4490 dataflow_set
*set
= dfpm
->set
;
4491 variable pvar
= *pslot
, var
;
4492 location_chain pnode
;
4496 gcc_assert (dv_is_value_p (pvar
->dv
)
4497 && pvar
->n_var_parts
== 1);
4498 pnode
= pvar
->var_part
[0].loc_chain
;
4501 && REG_P (pnode
->loc
));
4505 var
= shared_hash_find (set
->vars
, dv
);
4508 /* Although variable_post_merge_new_vals may have made decls
4509 non-star-canonical, values that pre-existed in canonical form
4510 remain canonical, and newly-created values reference a single
4511 REG, so they are canonical as well. Since VAR has the
4512 location list for a VALUE, using find_loc_in_1pdv for it is
4513 fine, since VALUEs don't map back to DECLs. */
4514 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4516 val_reset (set
, dv
);
4519 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4520 if (att
->offset
== 0
4521 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4522 && dv_is_value_p (att
->dv
))
4525 /* If there is a value associated with this register already, create
4527 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4529 rtx cval
= dv_as_value (att
->dv
);
4530 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4531 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4536 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4538 variable_union (pvar
, set
);
4544 /* Just checking stuff and registering register attributes for
4548 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4550 struct dfset_post_merge dfpm
;
4555 shared_hash_htab (set
->vars
)
4556 .traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4558 shared_hash_htab ((*permp
)->vars
)
4559 .traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4560 shared_hash_htab (set
->vars
)
4561 .traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4562 shared_hash_htab (set
->vars
)
4563 .traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4566 /* Return a node whose loc is a MEM that refers to EXPR in the
4567 location list of a one-part variable or value VAR, or in that of
4568 any values recursively mentioned in the location lists. */
4570 static location_chain
4571 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type vars
)
4573 location_chain node
;
4576 location_chain where
= NULL
;
4581 gcc_assert (GET_CODE (val
) == VALUE
4582 && !VALUE_RECURSED_INTO (val
));
4584 dv
= dv_from_value (val
);
4585 var
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
4590 gcc_assert (var
->onepart
);
4592 if (!var
->n_var_parts
)
4595 VALUE_RECURSED_INTO (val
) = true;
4597 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4598 if (MEM_P (node
->loc
)
4599 && MEM_EXPR (node
->loc
) == expr
4600 && INT_MEM_OFFSET (node
->loc
) == 0)
4605 else if (GET_CODE (node
->loc
) == VALUE
4606 && !VALUE_RECURSED_INTO (node
->loc
)
4607 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4610 VALUE_RECURSED_INTO (val
) = false;
4615 /* Return TRUE if the value of MEM may vary across a call. */
4618 mem_dies_at_call (rtx mem
)
4620 tree expr
= MEM_EXPR (mem
);
4626 decl
= get_base_address (expr
);
4634 return (may_be_aliased (decl
)
4635 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4638 /* Remove all MEMs from the location list of a hash table entry for a
4639 one-part variable, except those whose MEM attributes map back to
4640 the variable itself, directly or within a VALUE. */
4643 dataflow_set_preserve_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4645 variable var
= *slot
;
4647 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4649 tree decl
= dv_as_decl (var
->dv
);
4650 location_chain loc
, *locp
;
4651 bool changed
= false;
4653 if (!var
->n_var_parts
)
4656 gcc_assert (var
->n_var_parts
== 1);
4658 if (shared_var_p (var
, set
->vars
))
4660 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4662 /* We want to remove dying MEMs that doesn't refer to DECL. */
4663 if (GET_CODE (loc
->loc
) == MEM
4664 && (MEM_EXPR (loc
->loc
) != decl
4665 || INT_MEM_OFFSET (loc
->loc
) != 0)
4666 && !mem_dies_at_call (loc
->loc
))
4668 /* We want to move here MEMs that do refer to DECL. */
4669 else if (GET_CODE (loc
->loc
) == VALUE
4670 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4671 shared_hash_htab (set
->vars
)))
4678 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4680 gcc_assert (var
->n_var_parts
== 1);
4683 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4686 rtx old_loc
= loc
->loc
;
4687 if (GET_CODE (old_loc
) == VALUE
)
4689 location_chain mem_node
4690 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4691 shared_hash_htab (set
->vars
));
4693 /* ??? This picks up only one out of multiple MEMs that
4694 refer to the same variable. Do we ever need to be
4695 concerned about dealing with more than one, or, given
4696 that they should all map to the same variable
4697 location, their addresses will have been merged and
4698 they will be regarded as equivalent? */
4701 loc
->loc
= mem_node
->loc
;
4702 loc
->set_src
= mem_node
->set_src
;
4703 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4707 if (GET_CODE (loc
->loc
) != MEM
4708 || (MEM_EXPR (loc
->loc
) == decl
4709 && INT_MEM_OFFSET (loc
->loc
) == 0)
4710 || !mem_dies_at_call (loc
->loc
))
4712 if (old_loc
!= loc
->loc
&& emit_notes
)
4714 if (old_loc
== var
->var_part
[0].cur_loc
)
4717 var
->var_part
[0].cur_loc
= NULL
;
4726 if (old_loc
== var
->var_part
[0].cur_loc
)
4729 var
->var_part
[0].cur_loc
= NULL
;
4733 pool_free (loc_chain_pool
, loc
);
4736 if (!var
->var_part
[0].loc_chain
)
4742 variable_was_changed (var
, set
);
4748 /* Remove all MEMs from the location list of a hash table entry for a
4752 dataflow_set_remove_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4754 variable var
= *slot
;
4756 if (var
->onepart
== ONEPART_VALUE
)
4758 location_chain loc
, *locp
;
4759 bool changed
= false;
4762 gcc_assert (var
->n_var_parts
== 1);
4764 if (shared_var_p (var
, set
->vars
))
4766 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4767 if (GET_CODE (loc
->loc
) == MEM
4768 && mem_dies_at_call (loc
->loc
))
4774 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4776 gcc_assert (var
->n_var_parts
== 1);
4779 if (VAR_LOC_1PAUX (var
))
4780 cur_loc
= VAR_LOC_FROM (var
);
4782 cur_loc
= var
->var_part
[0].cur_loc
;
4784 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4787 if (GET_CODE (loc
->loc
) != MEM
4788 || !mem_dies_at_call (loc
->loc
))
4795 /* If we have deleted the location which was last emitted
4796 we have to emit new location so add the variable to set
4797 of changed variables. */
4798 if (cur_loc
== loc
->loc
)
4801 var
->var_part
[0].cur_loc
= NULL
;
4802 if (VAR_LOC_1PAUX (var
))
4803 VAR_LOC_FROM (var
) = NULL
;
4805 pool_free (loc_chain_pool
, loc
);
4808 if (!var
->var_part
[0].loc_chain
)
4814 variable_was_changed (var
, set
);
4820 /* Remove all variable-location information about call-clobbered
4821 registers, as well as associations between MEMs and VALUEs. */
4824 dataflow_set_clear_at_call (dataflow_set
*set
)
4827 hard_reg_set_iterator hrsi
;
4829 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call
, 0, r
, hrsi
)
4830 var_regno_delete (set
, r
);
4832 if (MAY_HAVE_DEBUG_INSNS
)
4834 set
->traversed_vars
= set
->vars
;
4835 shared_hash_htab (set
->vars
)
4836 .traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4837 set
->traversed_vars
= set
->vars
;
4838 shared_hash_htab (set
->vars
)
4839 .traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4840 set
->traversed_vars
= NULL
;
4845 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4847 location_chain lc1
, lc2
;
4849 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4851 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4853 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4855 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4858 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4867 /* Return true if one-part variables VAR1 and VAR2 are different.
4868 They must be in canonical order. */
4871 onepart_variable_different_p (variable var1
, variable var2
)
4873 location_chain lc1
, lc2
;
4878 gcc_assert (var1
->n_var_parts
== 1
4879 && var2
->n_var_parts
== 1);
4881 lc1
= var1
->var_part
[0].loc_chain
;
4882 lc2
= var2
->var_part
[0].loc_chain
;
4884 gcc_assert (lc1
&& lc2
);
4888 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4897 /* Return true if variables VAR1 and VAR2 are different. */
4900 variable_different_p (variable var1
, variable var2
)
4907 if (var1
->onepart
!= var2
->onepart
)
4910 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4913 if (var1
->onepart
&& var1
->n_var_parts
)
4915 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
4916 && var1
->n_var_parts
== 1);
4917 /* One-part values have locations in a canonical order. */
4918 return onepart_variable_different_p (var1
, var2
);
4921 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4923 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
4925 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4927 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
4933 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4936 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
4938 variable_iterator_type hi
;
4941 if (old_set
->vars
== new_set
->vars
)
4944 if (shared_hash_htab (old_set
->vars
).elements ()
4945 != shared_hash_htab (new_set
->vars
).elements ())
4948 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set
->vars
),
4951 variable_table_type htab
= shared_hash_htab (new_set
->vars
);
4952 variable var2
= htab
.find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
4955 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4957 fprintf (dump_file
, "dataflow difference found: removal of:\n");
4963 if (variable_different_p (var1
, var2
))
4965 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4967 fprintf (dump_file
, "dataflow difference found: "
4968 "old and new follow:\n");
4976 /* No need to traverse the second hashtab, if both have the same number
4977 of elements and the second one had all entries found in the first one,
4978 then it can't have any extra entries. */
4982 /* Free the contents of dataflow set SET. */
4985 dataflow_set_destroy (dataflow_set
*set
)
4989 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4990 attrs_list_clear (&set
->regs
[i
]);
4992 shared_hash_destroy (set
->vars
);
4996 /* Return true if RTL X contains a SYMBOL_REF. */
4999 contains_symbol_ref (rtx x
)
5008 code
= GET_CODE (x
);
5009 if (code
== SYMBOL_REF
)
5012 fmt
= GET_RTX_FORMAT (code
);
5013 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5017 if (contains_symbol_ref (XEXP (x
, i
)))
5020 else if (fmt
[i
] == 'E')
5023 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5024 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
5032 /* Shall EXPR be tracked? */
5035 track_expr_p (tree expr
, bool need_rtl
)
5040 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5041 return DECL_RTL_SET_P (expr
);
5043 /* If EXPR is not a parameter or a variable do not track it. */
5044 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
5047 /* It also must have a name... */
5048 if (!DECL_NAME (expr
) && need_rtl
)
5051 /* ... and a RTL assigned to it. */
5052 decl_rtl
= DECL_RTL_IF_SET (expr
);
5053 if (!decl_rtl
&& need_rtl
)
5056 /* If this expression is really a debug alias of some other declaration, we
5057 don't need to track this expression if the ultimate declaration is
5060 if (TREE_CODE (realdecl
) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (realdecl
))
5062 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5063 if (!DECL_P (realdecl
))
5065 if (handled_component_p (realdecl
)
5066 || (TREE_CODE (realdecl
) == MEM_REF
5067 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5069 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5071 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5073 if (!DECL_P (innerdecl
)
5074 || DECL_IGNORED_P (innerdecl
)
5075 || TREE_STATIC (innerdecl
)
5077 || bitpos
+ bitsize
> 256
5078 || bitsize
!= maxsize
)
5088 /* Do not track EXPR if REALDECL it should be ignored for debugging
5090 if (DECL_IGNORED_P (realdecl
))
5093 /* Do not track global variables until we are able to emit correct location
5095 if (TREE_STATIC (realdecl
))
5098 /* When the EXPR is a DECL for alias of some variable (see example)
5099 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5100 DECL_RTL contains SYMBOL_REF.
5103 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5106 if (decl_rtl
&& MEM_P (decl_rtl
)
5107 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
5110 /* If RTX is a memory it should not be very large (because it would be
5111 an array or struct). */
5112 if (decl_rtl
&& MEM_P (decl_rtl
))
5114 /* Do not track structures and arrays. */
5115 if (GET_MODE (decl_rtl
) == BLKmode
5116 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5118 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5119 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5123 DECL_CHANGED (expr
) = 0;
5124 DECL_CHANGED (realdecl
) = 0;
5128 /* Determine whether a given LOC refers to the same variable part as
5132 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
5135 HOST_WIDE_INT offset2
;
5137 if (! DECL_P (expr
))
5142 expr2
= REG_EXPR (loc
);
5143 offset2
= REG_OFFSET (loc
);
5145 else if (MEM_P (loc
))
5147 expr2
= MEM_EXPR (loc
);
5148 offset2
= INT_MEM_OFFSET (loc
);
5153 if (! expr2
|| ! DECL_P (expr2
))
5156 expr
= var_debug_decl (expr
);
5157 expr2
= var_debug_decl (expr2
);
5159 return (expr
== expr2
&& offset
== offset2
);
5162 /* LOC is a REG or MEM that we would like to track if possible.
5163 If EXPR is null, we don't know what expression LOC refers to,
5164 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5165 LOC is an lvalue register.
5167 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5168 is something we can track. When returning true, store the mode of
5169 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5170 from EXPR in *OFFSET_OUT (if nonnull). */
5173 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
5174 enum machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5176 enum machine_mode mode
;
5178 if (expr
== NULL
|| !track_expr_p (expr
, true))
5181 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5182 whole subreg, but only the old inner part is really relevant. */
5183 mode
= GET_MODE (loc
);
5184 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5186 enum machine_mode pseudo_mode
;
5188 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5189 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
5191 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5196 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5197 Do the same if we are storing to a register and EXPR occupies
5198 the whole of register LOC; in that case, the whole of EXPR is
5199 being changed. We exclude complex modes from the second case
5200 because the real and imaginary parts are represented as separate
5201 pseudo registers, even if the whole complex value fits into one
5203 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
5205 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5206 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
5207 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
5209 mode
= DECL_MODE (expr
);
5213 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
5219 *offset_out
= offset
;
5223 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5224 want to track. When returning nonnull, make sure that the attributes
5225 on the returned value are updated. */
5228 var_lowpart (enum machine_mode mode
, rtx loc
)
5230 unsigned int offset
, reg_offset
, regno
;
5232 if (GET_MODE (loc
) == mode
)
5235 if (!REG_P (loc
) && !MEM_P (loc
))
5238 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5241 return adjust_address_nv (loc
, mode
, offset
);
5243 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5244 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5246 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5249 /* Carry information about uses and stores while walking rtx. */
5251 struct count_use_info
5253 /* The insn where the RTX is. */
5256 /* The basic block where insn is. */
5259 /* The array of n_sets sets in the insn, as determined by cselib. */
5260 struct cselib_set
*sets
;
5263 /* True if we're counting stores, false otherwise. */
5267 /* Find a VALUE corresponding to X. */
5269 static inline cselib_val
*
5270 find_use_val (rtx x
, enum machine_mode mode
, struct count_use_info
*cui
)
5276 /* This is called after uses are set up and before stores are
5277 processed by cselib, so it's safe to look up srcs, but not
5278 dsts. So we look up expressions that appear in srcs or in
5279 dest expressions, but we search the sets array for dests of
5283 /* Some targets represent memset and memcpy patterns
5284 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5285 (set (mem:BLK ...) (const_int ...)) or
5286 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5287 in that case, otherwise we end up with mode mismatches. */
5288 if (mode
== BLKmode
&& MEM_P (x
))
5290 for (i
= 0; i
< cui
->n_sets
; i
++)
5291 if (cui
->sets
[i
].dest
== x
)
5292 return cui
->sets
[i
].src_elt
;
5295 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5301 /* Replace all registers and addresses in an expression with VALUE
5302 expressions that map back to them, unless the expression is a
5303 register. If no mapping is or can be performed, returns NULL. */
5306 replace_expr_with_values (rtx loc
)
5308 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5310 else if (MEM_P (loc
))
5312 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5313 get_address_mode (loc
), 0,
5316 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5321 return cselib_subst_to_values (loc
, VOIDmode
);
5324 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5325 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5329 rtx_debug_expr_p (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
5333 return GET_CODE (loc
) == DEBUG_EXPR
;
5336 /* Determine what kind of micro operation to choose for a USE. Return
5337 MO_CLOBBER if no micro operation is to be generated. */
5339 static enum micro_operation_type
5340 use_type (rtx loc
, struct count_use_info
*cui
, enum machine_mode
*modep
)
5344 if (cui
&& cui
->sets
)
5346 if (GET_CODE (loc
) == VAR_LOCATION
)
5348 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5350 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5351 if (! VAR_LOC_UNKNOWN_P (ploc
))
5353 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5356 /* ??? flag_float_store and volatile mems are never
5357 given values, but we could in theory use them for
5359 gcc_assert (val
|| 1);
5367 if (REG_P (loc
) || MEM_P (loc
))
5370 *modep
= GET_MODE (loc
);
5374 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5375 && cselib_lookup (XEXP (loc
, 0),
5376 get_address_mode (loc
), 0,
5382 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5384 if (val
&& !cselib_preserved_value_p (val
))
5392 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5394 if (loc
== cfa_base_rtx
)
5396 expr
= REG_EXPR (loc
);
5399 return MO_USE_NO_VAR
;
5400 else if (target_for_debug_bind (var_debug_decl (expr
)))
5402 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5403 false, modep
, NULL
))
5406 return MO_USE_NO_VAR
;
5408 else if (MEM_P (loc
))
5410 expr
= MEM_EXPR (loc
);
5414 else if (target_for_debug_bind (var_debug_decl (expr
)))
5416 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
5418 /* Multi-part variables shouldn't refer to one-part
5419 variable names such as VALUEs (never happens) or
5420 DEBUG_EXPRs (only happens in the presence of debug
5422 && (!MAY_HAVE_DEBUG_INSNS
5423 || !for_each_rtx (&XEXP (loc
, 0), rtx_debug_expr_p
, NULL
)))
5432 /* Log to OUT information about micro-operation MOPT involving X in
5436 log_op_type (rtx x
, basic_block bb
, rtx insn
,
5437 enum micro_operation_type mopt
, FILE *out
)
5439 fprintf (out
, "bb %i op %i insn %i %s ",
5440 bb
->index
, VTI (bb
)->mos
.length (),
5441 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5442 print_inline_rtx (out
, x
, 2);
5446 /* Tell whether the CONCAT used to holds a VALUE and its location
5447 needs value resolution, i.e., an attempt of mapping the location
5448 back to other incoming values. */
5449 #define VAL_NEEDS_RESOLUTION(x) \
5450 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5451 /* Whether the location in the CONCAT is a tracked expression, that
5452 should also be handled like a MO_USE. */
5453 #define VAL_HOLDS_TRACK_EXPR(x) \
5454 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5455 /* Whether the location in the CONCAT should be handled like a MO_COPY
5457 #define VAL_EXPR_IS_COPIED(x) \
5458 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5459 /* Whether the location in the CONCAT should be handled like a
5460 MO_CLOBBER as well. */
5461 #define VAL_EXPR_IS_CLOBBERED(x) \
5462 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5464 /* All preserved VALUEs. */
5465 static vec
<rtx
> preserved_values
;
5467 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5470 preserve_value (cselib_val
*val
)
5472 cselib_preserve_value (val
);
5473 preserved_values
.safe_push (val
->val_rtx
);
5476 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5477 any rtxes not suitable for CONST use not replaced by VALUEs
5481 non_suitable_const (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
5486 switch (GET_CODE (*x
))
5497 return !MEM_READONLY_P (*x
);
5503 /* Add uses (register and memory references) LOC which will be tracked
5504 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5507 add_uses (rtx
*ploc
, void *data
)
5510 enum machine_mode mode
= VOIDmode
;
5511 struct count_use_info
*cui
= (struct count_use_info
*)data
;
5512 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5514 if (type
!= MO_CLOBBER
)
5516 basic_block bb
= cui
->bb
;
5520 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5521 mo
.insn
= cui
->insn
;
5523 if (type
== MO_VAL_LOC
)
5526 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5529 gcc_assert (cui
->sets
);
5532 && !REG_P (XEXP (vloc
, 0))
5533 && !MEM_P (XEXP (vloc
, 0)))
5536 enum machine_mode address_mode
= get_address_mode (mloc
);
5538 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5541 if (val
&& !cselib_preserved_value_p (val
))
5542 preserve_value (val
);
5545 if (CONSTANT_P (vloc
)
5546 && (GET_CODE (vloc
) != CONST
5547 || for_each_rtx (&vloc
, non_suitable_const
, NULL
)))
5548 /* For constants don't look up any value. */;
5549 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5550 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5552 enum machine_mode mode2
;
5553 enum micro_operation_type type2
;
5555 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5558 nloc
= replace_expr_with_values (vloc
);
5562 oloc
= shallow_copy_rtx (oloc
);
5563 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5566 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5568 type2
= use_type (vloc
, 0, &mode2
);
5570 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5571 || type2
== MO_CLOBBER
);
5573 if (type2
== MO_CLOBBER
5574 && !cselib_preserved_value_p (val
))
5576 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5577 preserve_value (val
);
5580 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5582 oloc
= shallow_copy_rtx (oloc
);
5583 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5588 else if (type
== MO_VAL_USE
)
5590 enum machine_mode mode2
= VOIDmode
;
5591 enum micro_operation_type type2
;
5592 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5593 rtx vloc
, oloc
= loc
, nloc
;
5595 gcc_assert (cui
->sets
);
5598 && !REG_P (XEXP (oloc
, 0))
5599 && !MEM_P (XEXP (oloc
, 0)))
5602 enum machine_mode address_mode
= get_address_mode (mloc
);
5604 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5607 if (val
&& !cselib_preserved_value_p (val
))
5608 preserve_value (val
);
5611 type2
= use_type (loc
, 0, &mode2
);
5613 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5614 || type2
== MO_CLOBBER
);
5616 if (type2
== MO_USE
)
5617 vloc
= var_lowpart (mode2
, loc
);
5621 /* The loc of a MO_VAL_USE may have two forms:
5623 (concat val src): val is at src, a value-based
5626 (concat (concat val use) src): same as above, with use as
5627 the MO_USE tracked value, if it differs from src.
5631 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5632 nloc
= replace_expr_with_values (loc
);
5637 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5639 oloc
= val
->val_rtx
;
5641 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5643 if (type2
== MO_USE
)
5644 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5645 if (!cselib_preserved_value_p (val
))
5647 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5648 preserve_value (val
);
5652 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5654 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5655 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5656 VTI (bb
)->mos
.safe_push (mo
);
5662 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5665 add_uses_1 (rtx
*x
, void *cui
)
5667 for_each_rtx (x
, add_uses
, cui
);
5670 /* This is the value used during expansion of locations. We want it
5671 to be unbounded, so that variables expanded deep in a recursion
5672 nest are fully evaluated, so that their values are cached
5673 correctly. We avoid recursion cycles through other means, and we
5674 don't unshare RTL, so excess complexity is not a problem. */
5675 #define EXPR_DEPTH (INT_MAX)
5676 /* We use this to keep too-complex expressions from being emitted as
5677 location notes, and then to debug information. Users can trade
5678 compile time for ridiculously complex expressions, although they're
5679 seldom useful, and they may often have to be discarded as not
5680 representable anyway. */
5681 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5683 /* Attempt to reverse the EXPR operation in the debug info and record
5684 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5685 no longer live we can express its value as VAL - 6. */
5688 reverse_op (rtx val
, const_rtx expr
, rtx insn
)
5692 struct elt_loc_list
*l
;
5696 if (GET_CODE (expr
) != SET
)
5699 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5702 src
= SET_SRC (expr
);
5703 switch (GET_CODE (src
))
5710 if (!REG_P (XEXP (src
, 0)))
5715 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5722 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
5725 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5726 if (!v
|| !cselib_preserved_value_p (v
))
5729 /* Use canonical V to avoid creating multiple redundant expressions
5730 for different VALUES equivalent to V. */
5731 v
= canonical_cselib_val (v
);
5733 /* Adding a reverse op isn't useful if V already has an always valid
5734 location. Ignore ENTRY_VALUE, while it is always constant, we should
5735 prefer non-ENTRY_VALUE locations whenever possible. */
5736 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5737 if (CONSTANT_P (l
->loc
)
5738 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5740 /* Avoid creating too large locs lists. */
5741 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
5744 switch (GET_CODE (src
))
5748 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5750 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5754 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5766 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5768 arg
= XEXP (src
, 1);
5769 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5771 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5772 if (arg
== NULL_RTX
)
5774 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5777 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5779 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5780 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5781 breaks a lot of routines during var-tracking. */
5782 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5788 cselib_add_permanent_equiv (v
, ret
, insn
);
5791 /* Add stores (register and memory references) LOC which will be tracked
5792 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5793 CUIP->insn is instruction which the LOC is part of. */
5796 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5798 enum machine_mode mode
= VOIDmode
, mode2
;
5799 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5800 basic_block bb
= cui
->bb
;
5802 rtx oloc
= loc
, nloc
, src
= NULL
;
5803 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5804 bool track_p
= false;
5806 bool resolve
, preserve
;
5808 if (type
== MO_CLOBBER
)
5815 gcc_assert (loc
!= cfa_base_rtx
);
5816 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5817 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5818 || GET_CODE (expr
) == CLOBBER
)
5820 mo
.type
= MO_CLOBBER
;
5822 if (GET_CODE (expr
) == SET
5823 && SET_DEST (expr
) == loc
5824 && !unsuitable_loc (SET_SRC (expr
))
5825 && find_use_val (loc
, mode
, cui
))
5827 gcc_checking_assert (type
== MO_VAL_SET
);
5828 mo
.u
.loc
= gen_rtx_SET (VOIDmode
, loc
, SET_SRC (expr
));
5833 if (GET_CODE (expr
) == SET
5834 && SET_DEST (expr
) == loc
5835 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5836 src
= var_lowpart (mode2
, SET_SRC (expr
));
5837 loc
= var_lowpart (mode2
, loc
);
5846 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5847 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5849 /* If this is an instruction copying (part of) a parameter
5850 passed by invisible reference to its register location,
5851 pretend it's a SET so that the initial memory location
5852 is discarded, as the parameter register can be reused
5853 for other purposes and we do not track locations based
5854 on generic registers. */
5857 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5858 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5859 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5860 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
5871 mo
.insn
= cui
->insn
;
5873 else if (MEM_P (loc
)
5874 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5877 if (MEM_P (loc
) && type
== MO_VAL_SET
5878 && !REG_P (XEXP (loc
, 0))
5879 && !MEM_P (XEXP (loc
, 0)))
5882 enum machine_mode address_mode
= get_address_mode (mloc
);
5883 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5887 if (val
&& !cselib_preserved_value_p (val
))
5888 preserve_value (val
);
5891 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5893 mo
.type
= MO_CLOBBER
;
5894 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5898 if (GET_CODE (expr
) == SET
5899 && SET_DEST (expr
) == loc
5900 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5901 src
= var_lowpart (mode2
, SET_SRC (expr
));
5902 loc
= var_lowpart (mode2
, loc
);
5911 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5912 if (same_variable_part_p (SET_SRC (xexpr
),
5914 INT_MEM_OFFSET (loc
)))
5921 mo
.insn
= cui
->insn
;
5926 if (type
!= MO_VAL_SET
)
5927 goto log_and_return
;
5929 v
= find_use_val (oloc
, mode
, cui
);
5932 goto log_and_return
;
5934 resolve
= preserve
= !cselib_preserved_value_p (v
);
5936 if (loc
== stack_pointer_rtx
5937 && hard_frame_pointer_adjustment
!= -1
5939 cselib_set_value_sp_based (v
);
5941 nloc
= replace_expr_with_values (oloc
);
5945 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
5947 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
5949 gcc_assert (oval
!= v
);
5950 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
5952 if (oval
&& !cselib_preserved_value_p (oval
))
5954 micro_operation moa
;
5956 preserve_value (oval
);
5958 moa
.type
= MO_VAL_USE
;
5959 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
5960 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
5961 moa
.insn
= cui
->insn
;
5963 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5964 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5965 moa
.type
, dump_file
);
5966 VTI (bb
)->mos
.safe_push (moa
);
5971 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
5973 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
5974 nloc
= replace_expr_with_values (SET_SRC (expr
));
5978 /* Avoid the mode mismatch between oexpr and expr. */
5979 if (!nloc
&& mode
!= mode2
)
5981 nloc
= SET_SRC (expr
);
5982 gcc_assert (oloc
== SET_DEST (expr
));
5985 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
5986 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
5989 if (oloc
== SET_DEST (mo
.u
.loc
))
5990 /* No point in duplicating. */
5992 if (!REG_P (SET_SRC (mo
.u
.loc
)))
5998 if (GET_CODE (mo
.u
.loc
) == SET
5999 && oloc
== SET_DEST (mo
.u
.loc
))
6000 /* No point in duplicating. */
6006 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6008 if (mo
.u
.loc
!= oloc
)
6009 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6011 /* The loc of a MO_VAL_SET may have various forms:
6013 (concat val dst): dst now holds val
6015 (concat val (set dst src)): dst now holds val, copied from src
6017 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6018 after replacing mems and non-top-level regs with values.
6020 (concat (concat val dstv) (set dst src)): dst now holds val,
6021 copied from src. dstv is a value-based representation of dst, if
6022 it differs from dst. If resolution is needed, src is a REG, and
6023 its mode is the same as that of val.
6025 (concat (concat val (set dstv srcv)) (set dst src)): src
6026 copied to dst, holding val. dstv and srcv are value-based
6027 representations of dst and src, respectively.
6031 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6032 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6037 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6040 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6043 if (mo
.type
== MO_CLOBBER
)
6044 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6045 if (mo
.type
== MO_COPY
)
6046 VAL_EXPR_IS_COPIED (loc
) = 1;
6048 mo
.type
= MO_VAL_SET
;
6051 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6052 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6053 VTI (bb
)->mos
.safe_push (mo
);
6056 /* Arguments to the call. */
6057 static rtx call_arguments
;
6059 /* Compute call_arguments. */
6062 prepare_call_arguments (basic_block bb
, rtx insn
)
6065 rtx prev
, cur
, next
;
6066 rtx this_arg
= NULL_RTX
;
6067 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6068 tree obj_type_ref
= NULL_TREE
;
6069 CUMULATIVE_ARGS args_so_far_v
;
6070 cumulative_args_t args_so_far
;
6072 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6073 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6074 call
= get_call_rtx_from (insn
);
6077 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6079 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6080 if (SYMBOL_REF_DECL (symbol
))
6081 fndecl
= SYMBOL_REF_DECL (symbol
);
6083 if (fndecl
== NULL_TREE
)
6084 fndecl
= MEM_EXPR (XEXP (call
, 0));
6086 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6087 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6089 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6090 type
= TREE_TYPE (fndecl
);
6091 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6093 if (TREE_CODE (fndecl
) == INDIRECT_REF
6094 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6095 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6100 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6102 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6103 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6105 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6109 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6110 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6111 #ifndef PCC_STATIC_STRUCT_RETURN
6112 if (aggregate_value_p (TREE_TYPE (type
), type
)
6113 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6115 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6116 enum machine_mode mode
= TYPE_MODE (struct_addr
);
6118 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6120 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6122 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6124 if (reg
== NULL_RTX
)
6126 for (; link
; link
= XEXP (link
, 1))
6127 if (GET_CODE (XEXP (link
, 0)) == USE
6128 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6130 link
= XEXP (link
, 1);
6137 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6139 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6141 enum machine_mode mode
;
6142 t
= TYPE_ARG_TYPES (type
);
6143 mode
= TYPE_MODE (TREE_VALUE (t
));
6144 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6145 TREE_VALUE (t
), true);
6146 if (this_arg
&& !REG_P (this_arg
))
6147 this_arg
= NULL_RTX
;
6148 else if (this_arg
== NULL_RTX
)
6150 for (; link
; link
= XEXP (link
, 1))
6151 if (GET_CODE (XEXP (link
, 0)) == USE
6152 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6154 this_arg
= XEXP (XEXP (link
, 0), 0);
6162 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6164 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6165 if (GET_CODE (XEXP (link
, 0)) == USE
)
6167 rtx item
= NULL_RTX
;
6168 x
= XEXP (XEXP (link
, 0), 0);
6169 if (GET_MODE (link
) == VOIDmode
6170 || GET_MODE (link
) == BLKmode
6171 || (GET_MODE (link
) != GET_MODE (x
)
6172 && (GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6173 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
)))
6174 /* Can't do anything for these, if the original type mode
6175 isn't known or can't be converted. */;
6178 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6179 if (val
&& cselib_preserved_value_p (val
))
6180 item
= val
->val_rtx
;
6181 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
6183 enum machine_mode mode
= GET_MODE (x
);
6185 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
6186 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
6188 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6190 if (reg
== NULL_RTX
|| !REG_P (reg
))
6192 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6193 if (val
&& cselib_preserved_value_p (val
))
6195 item
= val
->val_rtx
;
6206 if (!frame_pointer_needed
)
6208 struct adjust_mem_data amd
;
6209 amd
.mem_mode
= VOIDmode
;
6210 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6211 amd
.side_effects
= NULL_RTX
;
6213 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6215 gcc_assert (amd
.side_effects
== NULL_RTX
);
6217 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6218 if (val
&& cselib_preserved_value_p (val
))
6219 item
= val
->val_rtx
;
6220 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
)
6222 /* For non-integer stack argument see also if they weren't
6223 initialized by integers. */
6224 enum machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
6225 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
6227 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6228 imode
, 0, VOIDmode
);
6229 if (val
&& cselib_preserved_value_p (val
))
6230 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6238 if (GET_MODE (item
) != GET_MODE (link
))
6239 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6240 if (GET_MODE (x2
) != GET_MODE (link
))
6241 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6242 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6244 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6246 if (t
&& t
!= void_list_node
)
6248 tree argtype
= TREE_VALUE (t
);
6249 enum machine_mode mode
= TYPE_MODE (argtype
);
6251 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6253 argtype
= build_pointer_type (argtype
);
6254 mode
= TYPE_MODE (argtype
);
6256 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6258 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6259 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6262 && GET_MODE (reg
) == mode
6263 && GET_MODE_CLASS (mode
) == MODE_INT
6265 && REGNO (x
) == REGNO (reg
)
6266 && GET_MODE (x
) == mode
6269 enum machine_mode indmode
6270 = TYPE_MODE (TREE_TYPE (argtype
));
6271 rtx mem
= gen_rtx_MEM (indmode
, x
);
6272 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6273 if (val
&& cselib_preserved_value_p (val
))
6275 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6276 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6281 struct elt_loc_list
*l
;
6284 /* Try harder, when passing address of a constant
6285 pool integer it can be easily read back. */
6286 item
= XEXP (item
, 1);
6287 if (GET_CODE (item
) == SUBREG
)
6288 item
= SUBREG_REG (item
);
6289 gcc_assert (GET_CODE (item
) == VALUE
);
6290 val
= CSELIB_VAL_PTR (item
);
6291 for (l
= val
->locs
; l
; l
= l
->next
)
6292 if (GET_CODE (l
->loc
) == SYMBOL_REF
6293 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6294 && SYMBOL_REF_DECL (l
->loc
)
6295 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6297 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6298 if (tree_fits_shwi_p (initial
))
6300 item
= GEN_INT (tree_to_shwi (initial
));
6301 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6303 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6310 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6316 /* Add debug arguments. */
6318 && TREE_CODE (fndecl
) == FUNCTION_DECL
6319 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6321 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6326 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6329 tree dtemp
= (**debug_args
)[ix
+ 1];
6330 enum machine_mode mode
= DECL_MODE (dtemp
);
6331 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6332 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6333 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6339 /* Reverse call_arguments chain. */
6341 for (cur
= call_arguments
; cur
; cur
= next
)
6343 next
= XEXP (cur
, 1);
6344 XEXP (cur
, 1) = prev
;
6347 call_arguments
= prev
;
6349 x
= get_call_rtx_from (insn
);
6352 x
= XEXP (XEXP (x
, 0), 0);
6353 if (GET_CODE (x
) == SYMBOL_REF
)
6354 /* Don't record anything. */;
6355 else if (CONSTANT_P (x
))
6357 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6360 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6364 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6365 if (val
&& cselib_preserved_value_p (val
))
6367 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6369 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6375 enum machine_mode mode
6376 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6377 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6379 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6381 clobbered
= plus_constant (mode
, clobbered
,
6382 token
* GET_MODE_SIZE (mode
));
6383 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6384 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6386 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6390 /* Callback for cselib_record_sets_hook, that records as micro
6391 operations uses and stores in an insn after cselib_record_sets has
6392 analyzed the sets in an insn, but before it modifies the stored
6393 values in the internal tables, unless cselib_record_sets doesn't
6394 call it directly (perhaps because we're not doing cselib in the
6395 first place, in which case sets and n_sets will be 0). */
6398 add_with_sets (rtx insn
, struct cselib_set
*sets
, int n_sets
)
6400 basic_block bb
= BLOCK_FOR_INSN (insn
);
6402 struct count_use_info cui
;
6403 micro_operation
*mos
;
6405 cselib_hook_called
= true;
6410 cui
.n_sets
= n_sets
;
6412 n1
= VTI (bb
)->mos
.length ();
6413 cui
.store_p
= false;
6414 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6415 n2
= VTI (bb
)->mos
.length () - 1;
6416 mos
= VTI (bb
)->mos
.address ();
6418 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6422 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6424 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6436 n2
= VTI (bb
)->mos
.length () - 1;
6439 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6441 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6459 mo
.u
.loc
= call_arguments
;
6460 call_arguments
= NULL_RTX
;
6462 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6463 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6464 VTI (bb
)->mos
.safe_push (mo
);
6467 n1
= VTI (bb
)->mos
.length ();
6468 /* This will record NEXT_INSN (insn), such that we can
6469 insert notes before it without worrying about any
6470 notes that MO_USEs might emit after the insn. */
6472 note_stores (PATTERN (insn
), add_stores
, &cui
);
6473 n2
= VTI (bb
)->mos
.length () - 1;
6474 mos
= VTI (bb
)->mos
.address ();
6476 /* Order the MO_VAL_USEs first (note_stores does nothing
6477 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6478 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6481 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6483 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6495 n2
= VTI (bb
)->mos
.length () - 1;
6498 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6500 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6513 static enum var_init_status
6514 find_src_status (dataflow_set
*in
, rtx src
)
6516 tree decl
= NULL_TREE
;
6517 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6519 if (! flag_var_tracking_uninit
)
6520 status
= VAR_INIT_STATUS_INITIALIZED
;
6522 if (src
&& REG_P (src
))
6523 decl
= var_debug_decl (REG_EXPR (src
));
6524 else if (src
&& MEM_P (src
))
6525 decl
= var_debug_decl (MEM_EXPR (src
));
6528 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6533 /* SRC is the source of an assignment. Use SET to try to find what
6534 was ultimately assigned to SRC. Return that value if known,
6535 otherwise return SRC itself. */
6538 find_src_set_src (dataflow_set
*set
, rtx src
)
6540 tree decl
= NULL_TREE
; /* The variable being copied around. */
6541 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6543 location_chain nextp
;
6547 if (src
&& REG_P (src
))
6548 decl
= var_debug_decl (REG_EXPR (src
));
6549 else if (src
&& MEM_P (src
))
6550 decl
= var_debug_decl (MEM_EXPR (src
));
6554 decl_or_value dv
= dv_from_decl (decl
);
6556 var
= shared_hash_find (set
->vars
, dv
);
6560 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6561 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6562 nextp
= nextp
->next
)
6563 if (rtx_equal_p (nextp
->loc
, src
))
6565 set_src
= nextp
->set_src
;
6575 /* Compute the changes of variable locations in the basic block BB. */
6578 compute_bb_dataflow (basic_block bb
)
6581 micro_operation
*mo
;
6583 dataflow_set old_out
;
6584 dataflow_set
*in
= &VTI (bb
)->in
;
6585 dataflow_set
*out
= &VTI (bb
)->out
;
6587 dataflow_set_init (&old_out
);
6588 dataflow_set_copy (&old_out
, out
);
6589 dataflow_set_copy (out
, in
);
6591 if (MAY_HAVE_DEBUG_INSNS
)
6592 local_get_addr_cache
= pointer_map_create ();
6594 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6596 rtx insn
= mo
->insn
;
6601 dataflow_set_clear_at_call (out
);
6606 rtx loc
= mo
->u
.loc
;
6609 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6610 else if (MEM_P (loc
))
6611 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6617 rtx loc
= mo
->u
.loc
;
6621 if (GET_CODE (loc
) == CONCAT
)
6623 val
= XEXP (loc
, 0);
6624 vloc
= XEXP (loc
, 1);
6632 var
= PAT_VAR_LOCATION_DECL (vloc
);
6634 clobber_variable_part (out
, NULL_RTX
,
6635 dv_from_decl (var
), 0, NULL_RTX
);
6638 if (VAL_NEEDS_RESOLUTION (loc
))
6639 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6640 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6641 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6644 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6645 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6646 dv_from_decl (var
), 0,
6647 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6654 rtx loc
= mo
->u
.loc
;
6655 rtx val
, vloc
, uloc
;
6657 vloc
= uloc
= XEXP (loc
, 1);
6658 val
= XEXP (loc
, 0);
6660 if (GET_CODE (val
) == CONCAT
)
6662 uloc
= XEXP (val
, 1);
6663 val
= XEXP (val
, 0);
6666 if (VAL_NEEDS_RESOLUTION (loc
))
6667 val_resolve (out
, val
, vloc
, insn
);
6669 val_store (out
, val
, uloc
, insn
, false);
6671 if (VAL_HOLDS_TRACK_EXPR (loc
))
6673 if (GET_CODE (uloc
) == REG
)
6674 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6676 else if (GET_CODE (uloc
) == MEM
)
6677 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6685 rtx loc
= mo
->u
.loc
;
6686 rtx val
, vloc
, uloc
;
6690 uloc
= XEXP (vloc
, 1);
6691 val
= XEXP (vloc
, 0);
6694 if (GET_CODE (uloc
) == SET
)
6696 dstv
= SET_DEST (uloc
);
6697 srcv
= SET_SRC (uloc
);
6705 if (GET_CODE (val
) == CONCAT
)
6707 dstv
= vloc
= XEXP (val
, 1);
6708 val
= XEXP (val
, 0);
6711 if (GET_CODE (vloc
) == SET
)
6713 srcv
= SET_SRC (vloc
);
6715 gcc_assert (val
!= srcv
);
6716 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6718 dstv
= vloc
= SET_DEST (vloc
);
6720 if (VAL_NEEDS_RESOLUTION (loc
))
6721 val_resolve (out
, val
, srcv
, insn
);
6723 else if (VAL_NEEDS_RESOLUTION (loc
))
6725 gcc_assert (GET_CODE (uloc
) == SET
6726 && GET_CODE (SET_SRC (uloc
)) == REG
);
6727 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6730 if (VAL_HOLDS_TRACK_EXPR (loc
))
6732 if (VAL_EXPR_IS_CLOBBERED (loc
))
6735 var_reg_delete (out
, uloc
, true);
6736 else if (MEM_P (uloc
))
6738 gcc_assert (MEM_P (dstv
));
6739 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6740 var_mem_delete (out
, dstv
, true);
6745 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6746 rtx src
= NULL
, dst
= uloc
;
6747 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6749 if (GET_CODE (uloc
) == SET
)
6751 src
= SET_SRC (uloc
);
6752 dst
= SET_DEST (uloc
);
6757 if (flag_var_tracking_uninit
)
6759 status
= find_src_status (in
, src
);
6761 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6762 status
= find_src_status (out
, src
);
6765 src
= find_src_set_src (in
, src
);
6769 var_reg_delete_and_set (out
, dst
, !copied_p
,
6771 else if (MEM_P (dst
))
6773 gcc_assert (MEM_P (dstv
));
6774 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6775 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6780 else if (REG_P (uloc
))
6781 var_regno_delete (out
, REGNO (uloc
));
6782 else if (MEM_P (uloc
))
6784 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6785 gcc_checking_assert (dstv
== vloc
);
6787 clobber_overlapping_mems (out
, vloc
);
6790 val_store (out
, val
, dstv
, insn
, true);
6796 rtx loc
= mo
->u
.loc
;
6799 if (GET_CODE (loc
) == SET
)
6801 set_src
= SET_SRC (loc
);
6802 loc
= SET_DEST (loc
);
6806 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6808 else if (MEM_P (loc
))
6809 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6816 rtx loc
= mo
->u
.loc
;
6817 enum var_init_status src_status
;
6820 if (GET_CODE (loc
) == SET
)
6822 set_src
= SET_SRC (loc
);
6823 loc
= SET_DEST (loc
);
6826 if (! flag_var_tracking_uninit
)
6827 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6830 src_status
= find_src_status (in
, set_src
);
6832 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6833 src_status
= find_src_status (out
, set_src
);
6836 set_src
= find_src_set_src (in
, set_src
);
6839 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6840 else if (MEM_P (loc
))
6841 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6847 rtx loc
= mo
->u
.loc
;
6850 var_reg_delete (out
, loc
, false);
6851 else if (MEM_P (loc
))
6852 var_mem_delete (out
, loc
, false);
6858 rtx loc
= mo
->u
.loc
;
6861 var_reg_delete (out
, loc
, true);
6862 else if (MEM_P (loc
))
6863 var_mem_delete (out
, loc
, true);
6868 out
->stack_adjust
+= mo
->u
.adjust
;
6873 if (MAY_HAVE_DEBUG_INSNS
)
6875 pointer_map_destroy (local_get_addr_cache
);
6876 local_get_addr_cache
= NULL
;
6878 dataflow_set_equiv_regs (out
);
6879 shared_hash_htab (out
->vars
)
6880 .traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
6881 shared_hash_htab (out
->vars
)
6882 .traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
6884 shared_hash_htab (out
->vars
)
6885 .traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
6888 changed
= dataflow_set_different (&old_out
, out
);
6889 dataflow_set_destroy (&old_out
);
6893 /* Find the locations of variables in the whole function. */
6896 vt_find_locations (void)
6898 fibheap_t worklist
, pending
, fibheap_swap
;
6899 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
6906 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
6907 bool success
= true;
6909 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
6910 /* Compute reverse completion order of depth first search of the CFG
6911 so that the data-flow runs faster. */
6912 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
6913 bb_order
= XNEWVEC (int, last_basic_block
);
6914 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
6915 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
6916 bb_order
[rc_order
[i
]] = i
;
6919 worklist
= fibheap_new ();
6920 pending
= fibheap_new ();
6921 visited
= sbitmap_alloc (last_basic_block
);
6922 in_worklist
= sbitmap_alloc (last_basic_block
);
6923 in_pending
= sbitmap_alloc (last_basic_block
);
6924 bitmap_clear (in_worklist
);
6927 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
6928 bitmap_ones (in_pending
);
6930 while (success
&& !fibheap_empty (pending
))
6932 fibheap_swap
= pending
;
6934 worklist
= fibheap_swap
;
6935 sbitmap_swap
= in_pending
;
6936 in_pending
= in_worklist
;
6937 in_worklist
= sbitmap_swap
;
6939 bitmap_clear (visited
);
6941 while (!fibheap_empty (worklist
))
6943 bb
= (basic_block
) fibheap_extract_min (worklist
);
6944 bitmap_clear_bit (in_worklist
, bb
->index
);
6945 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
6946 if (!bitmap_bit_p (visited
, bb
->index
))
6950 int oldinsz
, oldoutsz
;
6952 bitmap_set_bit (visited
, bb
->index
);
6954 if (VTI (bb
)->in
.vars
)
6957 -= shared_hash_htab (VTI (bb
)->in
.vars
).size ()
6958 + shared_hash_htab (VTI (bb
)->out
.vars
).size ();
6959 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
).elements ();
6960 oldoutsz
= shared_hash_htab (VTI (bb
)->out
.vars
).elements ();
6963 oldinsz
= oldoutsz
= 0;
6965 if (MAY_HAVE_DEBUG_INSNS
)
6967 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
6968 bool first
= true, adjust
= false;
6970 /* Calculate the IN set as the intersection of
6971 predecessor OUT sets. */
6973 dataflow_set_clear (in
);
6974 dst_can_be_shared
= true;
6976 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6977 if (!VTI (e
->src
)->flooded
)
6978 gcc_assert (bb_order
[bb
->index
]
6979 <= bb_order
[e
->src
->index
]);
6982 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
6983 first_out
= &VTI (e
->src
)->out
;
6988 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
6994 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
6996 /* Merge and merge_adjust should keep entries in
6998 shared_hash_htab (in
->vars
)
6999 .traverse
<dataflow_set
*,
7000 canonicalize_loc_order_check
> (in
);
7002 if (dst_can_be_shared
)
7004 shared_hash_destroy (in
->vars
);
7005 in
->vars
= shared_hash_copy (first_out
->vars
);
7009 VTI (bb
)->flooded
= true;
7013 /* Calculate the IN set as union of predecessor OUT sets. */
7014 dataflow_set_clear (&VTI (bb
)->in
);
7015 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7016 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7019 changed
= compute_bb_dataflow (bb
);
7020 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
).size ()
7021 + shared_hash_htab (VTI (bb
)->out
.vars
).size ();
7023 if (htabmax
&& htabsz
> htabmax
)
7025 if (MAY_HAVE_DEBUG_INSNS
)
7026 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7027 "variable tracking size limit exceeded with "
7028 "-fvar-tracking-assignments, retrying without");
7030 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7031 "variable tracking size limit exceeded");
7038 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7040 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7043 if (bitmap_bit_p (visited
, e
->dest
->index
))
7045 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7047 /* Send E->DEST to next round. */
7048 bitmap_set_bit (in_pending
, e
->dest
->index
);
7049 fibheap_insert (pending
,
7050 bb_order
[e
->dest
->index
],
7054 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7056 /* Add E->DEST to current round. */
7057 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7058 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
7066 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7068 (int)shared_hash_htab (VTI (bb
)->in
.vars
).size (),
7070 (int)shared_hash_htab (VTI (bb
)->out
.vars
).size (),
7072 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
7074 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7076 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7077 dump_dataflow_set (&VTI (bb
)->in
);
7078 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7079 dump_dataflow_set (&VTI (bb
)->out
);
7085 if (success
&& MAY_HAVE_DEBUG_INSNS
)
7087 gcc_assert (VTI (bb
)->flooded
);
7090 fibheap_delete (worklist
);
7091 fibheap_delete (pending
);
7092 sbitmap_free (visited
);
7093 sbitmap_free (in_worklist
);
7094 sbitmap_free (in_pending
);
7096 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7100 /* Print the content of the LIST to dump file. */
7103 dump_attrs_list (attrs list
)
7105 for (; list
; list
= list
->next
)
7107 if (dv_is_decl_p (list
->dv
))
7108 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
7110 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
7111 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7113 fprintf (dump_file
, "\n");
7116 /* Print the information about variable *SLOT to dump file. */
7119 dump_var_tracking_slot (variable_def
**slot
, void *data ATTRIBUTE_UNUSED
)
7121 variable var
= *slot
;
7125 /* Continue traversing the hash table. */
7129 /* Print the information about variable VAR to dump file. */
7132 dump_var (variable var
)
7135 location_chain node
;
7137 if (dv_is_decl_p (var
->dv
))
7139 const_tree decl
= dv_as_decl (var
->dv
);
7141 if (DECL_NAME (decl
))
7143 fprintf (dump_file
, " name: %s",
7144 IDENTIFIER_POINTER (DECL_NAME (decl
)));
7145 if (dump_flags
& TDF_UID
)
7146 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
7148 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7149 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7151 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7152 fprintf (dump_file
, "\n");
7156 fputc (' ', dump_file
);
7157 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
7160 for (i
= 0; i
< var
->n_var_parts
; i
++)
7162 fprintf (dump_file
, " offset %ld\n",
7163 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7164 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7166 fprintf (dump_file
, " ");
7167 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7168 fprintf (dump_file
, "[uninit]");
7169 print_rtl_single (dump_file
, node
->loc
);
7174 /* Print the information about variables from hash table VARS to dump file. */
7177 dump_vars (variable_table_type vars
)
7179 if (vars
.elements () > 0)
7181 fprintf (dump_file
, "Variables:\n");
7182 vars
.traverse
<void *, dump_var_tracking_slot
> (NULL
);
7186 /* Print the dataflow set SET to dump file. */
7189 dump_dataflow_set (dataflow_set
*set
)
7193 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
7195 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7199 fprintf (dump_file
, "Reg %d:", i
);
7200 dump_attrs_list (set
->regs
[i
]);
7203 dump_vars (shared_hash_htab (set
->vars
));
7204 fprintf (dump_file
, "\n");
7207 /* Print the IN and OUT sets for each basic block to dump file. */
7210 dump_dataflow_sets (void)
7216 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
7217 fprintf (dump_file
, "IN:\n");
7218 dump_dataflow_set (&VTI (bb
)->in
);
7219 fprintf (dump_file
, "OUT:\n");
7220 dump_dataflow_set (&VTI (bb
)->out
);
7224 /* Return the variable for DV in dropped_values, inserting one if
7225 requested with INSERT. */
7227 static inline variable
7228 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7230 variable_def
**slot
;
7232 onepart_enum_t onepart
;
7234 slot
= dropped_values
.find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
7242 gcc_checking_assert (insert
== INSERT
);
7244 onepart
= dv_onepart_p (dv
);
7246 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
7248 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7250 empty_var
->refcount
= 1;
7251 empty_var
->n_var_parts
= 0;
7252 empty_var
->onepart
= onepart
;
7253 empty_var
->in_changed_variables
= false;
7254 empty_var
->var_part
[0].loc_chain
= NULL
;
7255 empty_var
->var_part
[0].cur_loc
= NULL
;
7256 VAR_LOC_1PAUX (empty_var
) = NULL
;
7257 set_dv_changed (dv
, true);
7264 /* Recover the one-part aux from dropped_values. */
7266 static struct onepart_aux
*
7267 recover_dropped_1paux (variable var
)
7271 gcc_checking_assert (var
->onepart
);
7273 if (VAR_LOC_1PAUX (var
))
7274 return VAR_LOC_1PAUX (var
);
7276 if (var
->onepart
== ONEPART_VDECL
)
7279 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
7284 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7285 VAR_LOC_1PAUX (dvar
) = NULL
;
7287 return VAR_LOC_1PAUX (var
);
7290 /* Add variable VAR to the hash table of changed variables and
7291 if it has no locations delete it from SET's hash table. */
7294 variable_was_changed (variable var
, dataflow_set
*set
)
7296 hashval_t hash
= dv_htab_hash (var
->dv
);
7300 variable_def
**slot
;
7302 /* Remember this decl or VALUE has been added to changed_variables. */
7303 set_dv_changed (var
->dv
, true);
7305 slot
= changed_variables
.find_slot_with_hash (var
->dv
, hash
, INSERT
);
7309 variable old_var
= *slot
;
7310 gcc_assert (old_var
->in_changed_variables
);
7311 old_var
->in_changed_variables
= false;
7312 if (var
!= old_var
&& var
->onepart
)
7314 /* Restore the auxiliary info from an empty variable
7315 previously created for changed_variables, so it is
7317 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7318 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7319 VAR_LOC_1PAUX (old_var
) = NULL
;
7321 variable_htab_free (*slot
);
7324 if (set
&& var
->n_var_parts
== 0)
7326 onepart_enum_t onepart
= var
->onepart
;
7327 variable empty_var
= NULL
;
7328 variable_def
**dslot
= NULL
;
7330 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7332 dslot
= dropped_values
.find_slot_with_hash (var
->dv
,
7333 dv_htab_hash (var
->dv
),
7339 gcc_checking_assert (!empty_var
->in_changed_variables
);
7340 if (!VAR_LOC_1PAUX (var
))
7342 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7343 VAR_LOC_1PAUX (empty_var
) = NULL
;
7346 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7352 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7353 empty_var
->dv
= var
->dv
;
7354 empty_var
->refcount
= 1;
7355 empty_var
->n_var_parts
= 0;
7356 empty_var
->onepart
= onepart
;
7359 empty_var
->refcount
++;
7364 empty_var
->refcount
++;
7365 empty_var
->in_changed_variables
= true;
7369 empty_var
->var_part
[0].loc_chain
= NULL
;
7370 empty_var
->var_part
[0].cur_loc
= NULL
;
7371 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7372 VAR_LOC_1PAUX (var
) = NULL
;
7378 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7379 recover_dropped_1paux (var
);
7381 var
->in_changed_variables
= true;
7388 if (var
->n_var_parts
== 0)
7390 variable_def
**slot
;
7393 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7396 if (shared_hash_shared (set
->vars
))
7397 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7399 shared_hash_htab (set
->vars
).clear_slot (slot
);
7405 /* Look for the index in VAR->var_part corresponding to OFFSET.
7406 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7407 referenced int will be set to the index that the part has or should
7408 have, if it should be inserted. */
7411 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
7412 int *insertion_point
)
7421 if (insertion_point
)
7422 *insertion_point
= 0;
7424 return var
->n_var_parts
- 1;
7427 /* Find the location part. */
7429 high
= var
->n_var_parts
;
7432 pos
= (low
+ high
) / 2;
7433 if (VAR_PART_OFFSET (var
, pos
) < offset
)
7440 if (insertion_point
)
7441 *insertion_point
= pos
;
7443 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
7449 static variable_def
**
7450 set_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7451 decl_or_value dv
, HOST_WIDE_INT offset
,
7452 enum var_init_status initialized
, rtx set_src
)
7455 location_chain node
, next
;
7456 location_chain
*nextp
;
7458 onepart_enum_t onepart
;
7463 onepart
= var
->onepart
;
7465 onepart
= dv_onepart_p (dv
);
7467 gcc_checking_assert (offset
== 0 || !onepart
);
7468 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7470 if (! flag_var_tracking_uninit
)
7471 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7475 /* Create new variable information. */
7476 var
= (variable
) pool_alloc (onepart_pool (onepart
));
7479 var
->n_var_parts
= 1;
7480 var
->onepart
= onepart
;
7481 var
->in_changed_variables
= false;
7483 VAR_LOC_1PAUX (var
) = NULL
;
7485 VAR_PART_OFFSET (var
, 0) = offset
;
7486 var
->var_part
[0].loc_chain
= NULL
;
7487 var
->var_part
[0].cur_loc
= NULL
;
7490 nextp
= &var
->var_part
[0].loc_chain
;
7496 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
7500 if (GET_CODE (loc
) == VALUE
)
7502 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7503 nextp
= &node
->next
)
7504 if (GET_CODE (node
->loc
) == VALUE
)
7506 if (node
->loc
== loc
)
7511 if (canon_value_cmp (node
->loc
, loc
))
7519 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
7527 else if (REG_P (loc
))
7529 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7530 nextp
= &node
->next
)
7531 if (REG_P (node
->loc
))
7533 if (REGNO (node
->loc
) < REGNO (loc
))
7537 if (REGNO (node
->loc
) == REGNO (loc
))
7550 else if (MEM_P (loc
))
7552 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7553 nextp
= &node
->next
)
7554 if (REG_P (node
->loc
))
7556 else if (MEM_P (node
->loc
))
7558 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
7570 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7571 nextp
= &node
->next
)
7572 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
7580 if (shared_var_p (var
, set
->vars
))
7582 slot
= unshare_variable (set
, slot
, var
, initialized
);
7584 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7585 nextp
= &(*nextp
)->next
)
7587 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7594 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7596 pos
= find_variable_location_part (var
, offset
, &inspos
);
7600 node
= var
->var_part
[pos
].loc_chain
;
7603 && ((REG_P (node
->loc
) && REG_P (loc
)
7604 && REGNO (node
->loc
) == REGNO (loc
))
7605 || rtx_equal_p (node
->loc
, loc
)))
7607 /* LOC is in the beginning of the chain so we have nothing
7609 if (node
->init
< initialized
)
7610 node
->init
= initialized
;
7611 if (set_src
!= NULL
)
7612 node
->set_src
= set_src
;
7618 /* We have to make a copy of a shared variable. */
7619 if (shared_var_p (var
, set
->vars
))
7621 slot
= unshare_variable (set
, slot
, var
, initialized
);
7628 /* We have not found the location part, new one will be created. */
7630 /* We have to make a copy of the shared variable. */
7631 if (shared_var_p (var
, set
->vars
))
7633 slot
= unshare_variable (set
, slot
, var
, initialized
);
7637 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7638 thus there are at most MAX_VAR_PARTS different offsets. */
7639 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7640 && (!var
->n_var_parts
|| !onepart
));
7642 /* We have to move the elements of array starting at index
7643 inspos to the next position. */
7644 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7645 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7648 gcc_checking_assert (!onepart
);
7649 VAR_PART_OFFSET (var
, pos
) = offset
;
7650 var
->var_part
[pos
].loc_chain
= NULL
;
7651 var
->var_part
[pos
].cur_loc
= NULL
;
7654 /* Delete the location from the list. */
7655 nextp
= &var
->var_part
[pos
].loc_chain
;
7656 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7659 if ((REG_P (node
->loc
) && REG_P (loc
)
7660 && REGNO (node
->loc
) == REGNO (loc
))
7661 || rtx_equal_p (node
->loc
, loc
))
7663 /* Save these values, to assign to the new node, before
7664 deleting this one. */
7665 if (node
->init
> initialized
)
7666 initialized
= node
->init
;
7667 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7668 set_src
= node
->set_src
;
7669 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7670 var
->var_part
[pos
].cur_loc
= NULL
;
7671 pool_free (loc_chain_pool
, node
);
7676 nextp
= &node
->next
;
7679 nextp
= &var
->var_part
[pos
].loc_chain
;
7682 /* Add the location to the beginning. */
7683 node
= (location_chain
) pool_alloc (loc_chain_pool
);
7685 node
->init
= initialized
;
7686 node
->set_src
= set_src
;
7687 node
->next
= *nextp
;
7690 /* If no location was emitted do so. */
7691 if (var
->var_part
[pos
].cur_loc
== NULL
)
7692 variable_was_changed (var
, set
);
7697 /* Set the part of variable's location in the dataflow set SET. The
7698 variable part is specified by variable's declaration in DV and
7699 offset OFFSET and the part's location by LOC. IOPT should be
7700 NO_INSERT if the variable is known to be in SET already and the
7701 variable hash table must not be resized, and INSERT otherwise. */
7704 set_variable_part (dataflow_set
*set
, rtx loc
,
7705 decl_or_value dv
, HOST_WIDE_INT offset
,
7706 enum var_init_status initialized
, rtx set_src
,
7707 enum insert_option iopt
)
7709 variable_def
**slot
;
7711 if (iopt
== NO_INSERT
)
7712 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7715 slot
= shared_hash_find_slot (set
->vars
, dv
);
7717 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7719 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7722 /* Remove all recorded register locations for the given variable part
7723 from dataflow set SET, except for those that are identical to loc.
7724 The variable part is specified by variable's declaration or value
7725 DV and offset OFFSET. */
7727 static variable_def
**
7728 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7729 HOST_WIDE_INT offset
, rtx set_src
)
7731 variable var
= *slot
;
7732 int pos
= find_variable_location_part (var
, offset
, NULL
);
7736 location_chain node
, next
;
7738 /* Remove the register locations from the dataflow set. */
7739 next
= var
->var_part
[pos
].loc_chain
;
7740 for (node
= next
; node
; node
= next
)
7743 if (node
->loc
!= loc
7744 && (!flag_var_tracking_uninit
7747 || !rtx_equal_p (set_src
, node
->set_src
)))
7749 if (REG_P (node
->loc
))
7754 /* Remove the variable part from the register's
7755 list, but preserve any other variable parts
7756 that might be regarded as live in that same
7758 anextp
= &set
->regs
[REGNO (node
->loc
)];
7759 for (anode
= *anextp
; anode
; anode
= anext
)
7761 anext
= anode
->next
;
7762 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7763 && anode
->offset
== offset
)
7765 pool_free (attrs_pool
, anode
);
7769 anextp
= &anode
->next
;
7773 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7781 /* Remove all recorded register locations for the given variable part
7782 from dataflow set SET, except for those that are identical to loc.
7783 The variable part is specified by variable's declaration or value
7784 DV and offset OFFSET. */
7787 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7788 HOST_WIDE_INT offset
, rtx set_src
)
7790 variable_def
**slot
;
7792 if (!dv_as_opaque (dv
)
7793 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7796 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7800 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7803 /* Delete the part of variable's location from dataflow set SET. The
7804 variable part is specified by its SET->vars slot SLOT and offset
7805 OFFSET and the part's location by LOC. */
7807 static variable_def
**
7808 delete_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7809 HOST_WIDE_INT offset
)
7811 variable var
= *slot
;
7812 int pos
= find_variable_location_part (var
, offset
, NULL
);
7816 location_chain node
, next
;
7817 location_chain
*nextp
;
7821 if (shared_var_p (var
, set
->vars
))
7823 /* If the variable contains the location part we have to
7824 make a copy of the variable. */
7825 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7828 if ((REG_P (node
->loc
) && REG_P (loc
)
7829 && REGNO (node
->loc
) == REGNO (loc
))
7830 || rtx_equal_p (node
->loc
, loc
))
7832 slot
= unshare_variable (set
, slot
, var
,
7833 VAR_INIT_STATUS_UNKNOWN
);
7840 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7841 cur_loc
= VAR_LOC_FROM (var
);
7843 cur_loc
= var
->var_part
[pos
].cur_loc
;
7845 /* Delete the location part. */
7847 nextp
= &var
->var_part
[pos
].loc_chain
;
7848 for (node
= *nextp
; node
; node
= next
)
7851 if ((REG_P (node
->loc
) && REG_P (loc
)
7852 && REGNO (node
->loc
) == REGNO (loc
))
7853 || rtx_equal_p (node
->loc
, loc
))
7855 /* If we have deleted the location which was last emitted
7856 we have to emit new location so add the variable to set
7857 of changed variables. */
7858 if (cur_loc
== node
->loc
)
7861 var
->var_part
[pos
].cur_loc
= NULL
;
7862 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7863 VAR_LOC_FROM (var
) = NULL
;
7865 pool_free (loc_chain_pool
, node
);
7870 nextp
= &node
->next
;
7873 if (var
->var_part
[pos
].loc_chain
== NULL
)
7877 while (pos
< var
->n_var_parts
)
7879 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
7884 variable_was_changed (var
, set
);
7890 /* Delete the part of variable's location from dataflow set SET. The
7891 variable part is specified by variable's declaration or value DV
7892 and offset OFFSET and the part's location by LOC. */
7895 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7896 HOST_WIDE_INT offset
)
7898 variable_def
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7902 delete_slot_part (set
, loc
, slot
, offset
);
7906 /* Structure for passing some other parameters to function
7907 vt_expand_loc_callback. */
7908 struct expand_loc_callback_data
7910 /* The variables and values active at this point. */
7911 variable_table_type vars
;
7913 /* Stack of values and debug_exprs under expansion, and their
7915 stack_vec
<rtx
, 4> expanding
;
7917 /* Stack of values and debug_exprs whose expansion hit recursion
7918 cycles. They will have VALUE_RECURSED_INTO marked when added to
7919 this list. This flag will be cleared if any of its dependencies
7920 resolves to a valid location. So, if the flag remains set at the
7921 end of the search, we know no valid location for this one can
7923 stack_vec
<rtx
, 4> pending
;
7925 /* The maximum depth among the sub-expressions under expansion.
7926 Zero indicates no expansion so far. */
7930 /* Allocate the one-part auxiliary data structure for VAR, with enough
7931 room for COUNT dependencies. */
7934 loc_exp_dep_alloc (variable var
, int count
)
7938 gcc_checking_assert (var
->onepart
);
7940 /* We can be called with COUNT == 0 to allocate the data structure
7941 without any dependencies, e.g. for the backlinks only. However,
7942 if we are specifying a COUNT, then the dependency list must have
7943 been emptied before. It would be possible to adjust pointers or
7944 force it empty here, but this is better done at an earlier point
7945 in the algorithm, so we instead leave an assertion to catch
7947 gcc_checking_assert (!count
7948 || VAR_LOC_DEP_VEC (var
) == NULL
7949 || VAR_LOC_DEP_VEC (var
)->is_empty ());
7951 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
7954 allocsize
= offsetof (struct onepart_aux
, deps
)
7955 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
7957 if (VAR_LOC_1PAUX (var
))
7959 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
7960 VAR_LOC_1PAUX (var
), allocsize
);
7961 /* If the reallocation moves the onepaux structure, the
7962 back-pointer to BACKLINKS in the first list member will still
7963 point to its old location. Adjust it. */
7964 if (VAR_LOC_DEP_LST (var
))
7965 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
7969 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
7970 *VAR_LOC_DEP_LSTP (var
) = NULL
;
7971 VAR_LOC_FROM (var
) = NULL
;
7972 VAR_LOC_DEPTH (var
).complexity
= 0;
7973 VAR_LOC_DEPTH (var
).entryvals
= 0;
7975 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
7978 /* Remove all entries from the vector of active dependencies of VAR,
7979 removing them from the back-links lists too. */
7982 loc_exp_dep_clear (variable var
)
7984 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
7986 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
7988 led
->next
->pprev
= led
->pprev
;
7990 *led
->pprev
= led
->next
;
7991 VAR_LOC_DEP_VEC (var
)->pop ();
7995 /* Insert an active dependency from VAR on X to the vector of
7996 dependencies, and add the corresponding back-link to X's list of
7997 back-links in VARS. */
8000 loc_exp_insert_dep (variable var
, rtx x
, variable_table_type vars
)
8006 dv
= dv_from_rtx (x
);
8008 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8009 an additional look up? */
8010 xvar
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8014 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8015 gcc_checking_assert (xvar
);
8018 /* No point in adding the same backlink more than once. This may
8019 arise if say the same value appears in two complex expressions in
8020 the same loc_list, or even more than once in a single
8022 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
8025 if (var
->onepart
== NOT_ONEPART
)
8026 led
= (loc_exp_dep
*) pool_alloc (loc_exp_dep_pool
);
8030 memset (&empty
, 0, sizeof (empty
));
8031 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8032 led
= &VAR_LOC_DEP_VEC (var
)->last ();
8037 loc_exp_dep_alloc (xvar
, 0);
8038 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8039 led
->next
= *led
->pprev
;
8041 led
->next
->pprev
= &led
->next
;
8045 /* Create active dependencies of VAR on COUNT values starting at
8046 VALUE, and corresponding back-links to the entries in VARS. Return
8047 true if we found any pending-recursion results. */
8050 loc_exp_dep_set (variable var
, rtx result
, rtx
*value
, int count
,
8051 variable_table_type vars
)
8053 bool pending_recursion
= false;
8055 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8056 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8058 /* Set up all dependencies from last_child (as set up at the end of
8059 the loop above) to the end. */
8060 loc_exp_dep_alloc (var
, count
);
8066 if (!pending_recursion
)
8067 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8069 loc_exp_insert_dep (var
, x
, vars
);
8072 return pending_recursion
;
8075 /* Notify the back-links of IVAR that are pending recursion that we
8076 have found a non-NIL value for it, so they are cleared for another
8077 attempt to compute a current location. */
8080 notify_dependents_of_resolved_value (variable ivar
, variable_table_type vars
)
8082 loc_exp_dep
*led
, *next
;
8084 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8086 decl_or_value dv
= led
->dv
;
8091 if (dv_is_value_p (dv
))
8093 rtx value
= dv_as_value (dv
);
8095 /* If we have already resolved it, leave it alone. */
8096 if (!VALUE_RECURSED_INTO (value
))
8099 /* Check that VALUE_RECURSED_INTO, true from the test above,
8100 implies NO_LOC_P. */
8101 gcc_checking_assert (NO_LOC_P (value
));
8103 /* We won't notify variables that are being expanded,
8104 because their dependency list is cleared before
8106 NO_LOC_P (value
) = false;
8107 VALUE_RECURSED_INTO (value
) = false;
8109 gcc_checking_assert (dv_changed_p (dv
));
8113 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8114 if (!dv_changed_p (dv
))
8118 var
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8121 var
= variable_from_dropped (dv
, NO_INSERT
);
8124 notify_dependents_of_resolved_value (var
, vars
);
8127 next
->pprev
= led
->pprev
;
8135 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8136 int max_depth
, void *data
);
8138 /* Return the combined depth, when one sub-expression evaluated to
8139 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8141 static inline expand_depth
8142 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8144 /* If we didn't find anything, stick with what we had. */
8145 if (!best_depth
.complexity
)
8148 /* If we found hadn't found anything, use the depth of the current
8149 expression. Do NOT add one extra level, we want to compute the
8150 maximum depth among sub-expressions. We'll increment it later,
8152 if (!saved_depth
.complexity
)
8155 /* Combine the entryval count so that regardless of which one we
8156 return, the entryval count is accurate. */
8157 best_depth
.entryvals
= saved_depth
.entryvals
8158 = best_depth
.entryvals
+ saved_depth
.entryvals
;
8160 if (saved_depth
.complexity
< best_depth
.complexity
)
8166 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8167 DATA for cselib expand callback. If PENDRECP is given, indicate in
8168 it whether any sub-expression couldn't be fully evaluated because
8169 it is pending recursion resolution. */
8172 vt_expand_var_loc_chain (variable var
, bitmap regs
, void *data
, bool *pendrecp
)
8174 struct expand_loc_callback_data
*elcd
8175 = (struct expand_loc_callback_data
*) data
;
8176 location_chain loc
, next
;
8178 int first_child
, result_first_child
, last_child
;
8179 bool pending_recursion
;
8180 rtx loc_from
= NULL
;
8181 struct elt_loc_list
*cloc
= NULL
;
8182 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8183 int wanted_entryvals
, found_entryvals
= 0;
8185 /* Clear all backlinks pointing at this, so that we're not notified
8186 while we're active. */
8187 loc_exp_dep_clear (var
);
8190 if (var
->onepart
== ONEPART_VALUE
)
8192 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8194 gcc_checking_assert (cselib_preserved_value_p (val
));
8199 first_child
= result_first_child
= last_child
8200 = elcd
->expanding
.length ();
8202 wanted_entryvals
= found_entryvals
;
8204 /* Attempt to expand each available location in turn. */
8205 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8206 loc
|| cloc
; loc
= next
)
8208 result_first_child
= last_child
;
8212 loc_from
= cloc
->loc
;
8215 if (unsuitable_loc (loc_from
))
8220 loc_from
= loc
->loc
;
8224 gcc_checking_assert (!unsuitable_loc (loc_from
));
8226 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8227 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8228 vt_expand_loc_callback
, data
);
8229 last_child
= elcd
->expanding
.length ();
8233 depth
= elcd
->depth
;
8235 gcc_checking_assert (depth
.complexity
8236 || result_first_child
== last_child
);
8238 if (last_child
- result_first_child
!= 1)
8240 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8245 if (depth
.complexity
<= EXPR_USE_DEPTH
)
8247 if (depth
.entryvals
<= wanted_entryvals
)
8249 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8250 found_entryvals
= depth
.entryvals
;
8256 /* Set it up in case we leave the loop. */
8257 depth
.complexity
= depth
.entryvals
= 0;
8259 result_first_child
= first_child
;
8262 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8264 /* We found entries with ENTRY_VALUEs and skipped them. Since
8265 we could not find any expansions without ENTRY_VALUEs, but we
8266 found at least one with them, go back and get an entry with
8267 the minimum number ENTRY_VALUE count that we found. We could
8268 avoid looping, but since each sub-loc is already resolved,
8269 the re-expansion should be trivial. ??? Should we record all
8270 attempted locs as dependencies, so that we retry the
8271 expansion should any of them change, in the hope it can give
8272 us a new entry without an ENTRY_VALUE? */
8273 elcd
->expanding
.truncate (first_child
);
8277 /* Register all encountered dependencies as active. */
8278 pending_recursion
= loc_exp_dep_set
8279 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8280 last_child
- result_first_child
, elcd
->vars
);
8282 elcd
->expanding
.truncate (first_child
);
8284 /* Record where the expansion came from. */
8285 gcc_checking_assert (!result
|| !pending_recursion
);
8286 VAR_LOC_FROM (var
) = loc_from
;
8287 VAR_LOC_DEPTH (var
) = depth
;
8289 gcc_checking_assert (!depth
.complexity
== !result
);
8291 elcd
->depth
= update_depth (saved_depth
, depth
);
8293 /* Indicate whether any of the dependencies are pending recursion
8296 *pendrecp
= pending_recursion
;
8298 if (!pendrecp
|| !pending_recursion
)
8299 var
->var_part
[0].cur_loc
= result
;
8304 /* Callback for cselib_expand_value, that looks for expressions
8305 holding the value in the var-tracking hash tables. Return X for
8306 standard processing, anything else is to be used as-is. */
8309 vt_expand_loc_callback (rtx x
, bitmap regs
,
8310 int max_depth ATTRIBUTE_UNUSED
,
8313 struct expand_loc_callback_data
*elcd
8314 = (struct expand_loc_callback_data
*) data
;
8318 bool pending_recursion
= false;
8319 bool from_empty
= false;
8321 switch (GET_CODE (x
))
8324 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8326 vt_expand_loc_callback
, data
);
8331 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8332 GET_MODE (SUBREG_REG (x
)),
8335 /* Invalid SUBREGs are ok in debug info. ??? We could try
8336 alternate expansions for the VALUE as well. */
8338 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
8344 dv
= dv_from_rtx (x
);
8351 elcd
->expanding
.safe_push (x
);
8353 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8354 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8358 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8362 var
= elcd
->vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8367 var
= variable_from_dropped (dv
, INSERT
);
8370 gcc_checking_assert (var
);
8372 if (!dv_changed_p (dv
))
8374 gcc_checking_assert (!NO_LOC_P (x
));
8375 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8376 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8377 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8379 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8381 return var
->var_part
[0].cur_loc
;
8384 VALUE_RECURSED_INTO (x
) = true;
8385 /* This is tentative, but it makes some tests simpler. */
8386 NO_LOC_P (x
) = true;
8388 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8390 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8392 if (pending_recursion
)
8394 gcc_checking_assert (!result
);
8395 elcd
->pending
.safe_push (x
);
8399 NO_LOC_P (x
) = !result
;
8400 VALUE_RECURSED_INTO (x
) = false;
8401 set_dv_changed (dv
, false);
8404 notify_dependents_of_resolved_value (var
, elcd
->vars
);
8410 /* While expanding variables, we may encounter recursion cycles
8411 because of mutual (possibly indirect) dependencies between two
8412 particular variables (or values), say A and B. If we're trying to
8413 expand A when we get to B, which in turn attempts to expand A, if
8414 we can't find any other expansion for B, we'll add B to this
8415 pending-recursion stack, and tentatively return NULL for its
8416 location. This tentative value will be used for any other
8417 occurrences of B, unless A gets some other location, in which case
8418 it will notify B that it is worth another try at computing a
8419 location for it, and it will use the location computed for A then.
8420 At the end of the expansion, the tentative NULL locations become
8421 final for all members of PENDING that didn't get a notification.
8422 This function performs this finalization of NULL locations. */
8425 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8427 while (!pending
->is_empty ())
8429 rtx x
= pending
->pop ();
8432 if (!VALUE_RECURSED_INTO (x
))
8435 gcc_checking_assert (NO_LOC_P (x
));
8436 VALUE_RECURSED_INTO (x
) = false;
8437 dv
= dv_from_rtx (x
);
8438 gcc_checking_assert (dv_changed_p (dv
));
8439 set_dv_changed (dv
, false);
8443 /* Initialize expand_loc_callback_data D with variable hash table V.
8444 It must be a macro because of alloca (vec stack). */
8445 #define INIT_ELCD(d, v) \
8449 (d).depth.complexity = (d).depth.entryvals = 0; \
8452 /* Finalize expand_loc_callback_data D, resolved to location L. */
8453 #define FINI_ELCD(d, l) \
8456 resolve_expansions_pending_recursion (&(d).pending); \
8457 (d).pending.release (); \
8458 (d).expanding.release (); \
8460 if ((l) && MEM_P (l)) \
8461 (l) = targetm.delegitimize_address (l); \
8465 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8466 equivalences in VARS, updating their CUR_LOCs in the process. */
8469 vt_expand_loc (rtx loc
, variable_table_type vars
)
8471 struct expand_loc_callback_data data
;
8474 if (!MAY_HAVE_DEBUG_INSNS
)
8477 INIT_ELCD (data
, vars
);
8479 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8480 vt_expand_loc_callback
, &data
);
8482 FINI_ELCD (data
, result
);
8487 /* Expand the one-part VARiable to a location, using the equivalences
8488 in VARS, updating their CUR_LOCs in the process. */
8491 vt_expand_1pvar (variable var
, variable_table_type vars
)
8493 struct expand_loc_callback_data data
;
8496 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
8498 if (!dv_changed_p (var
->dv
))
8499 return var
->var_part
[0].cur_loc
;
8501 INIT_ELCD (data
, vars
);
8503 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8505 gcc_checking_assert (data
.expanding
.is_empty ());
8507 FINI_ELCD (data
, loc
);
8512 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8513 additional parameters: WHERE specifies whether the note shall be emitted
8514 before or after instruction INSN. */
8517 emit_note_insn_var_location (variable_def
**varp
, emit_note_data
*data
)
8519 variable var
= *varp
;
8520 rtx insn
= data
->insn
;
8521 enum emit_note_where where
= data
->where
;
8522 variable_table_type vars
= data
->vars
;
8524 int i
, j
, n_var_parts
;
8526 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8527 HOST_WIDE_INT last_limit
;
8528 tree type_size_unit
;
8529 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8530 rtx loc
[MAX_VAR_PARTS
];
8534 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8535 || var
->onepart
== ONEPART_VDECL
);
8537 decl
= dv_as_decl (var
->dv
);
8543 for (i
= 0; i
< var
->n_var_parts
; i
++)
8544 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8545 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
8546 for (i
= 0; i
< var
->n_var_parts
; i
++)
8548 enum machine_mode mode
, wider_mode
;
8550 HOST_WIDE_INT offset
;
8552 if (i
== 0 && var
->onepart
)
8554 gcc_checking_assert (var
->n_var_parts
== 1);
8556 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8557 loc2
= vt_expand_1pvar (var
, vars
);
8561 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8566 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8568 offset
= VAR_PART_OFFSET (var
, i
);
8569 loc2
= var
->var_part
[i
].cur_loc
;
8570 if (loc2
&& GET_CODE (loc2
) == MEM
8571 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8573 rtx depval
= XEXP (loc2
, 0);
8575 loc2
= vt_expand_loc (loc2
, vars
);
8578 loc_exp_insert_dep (var
, depval
, vars
);
8585 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
8586 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8587 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8589 initialized
= lc
->init
;
8595 offsets
[n_var_parts
] = offset
;
8601 loc
[n_var_parts
] = loc2
;
8602 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8603 if (mode
== VOIDmode
&& var
->onepart
)
8604 mode
= DECL_MODE (decl
);
8605 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8607 /* Attempt to merge adjacent registers or memory. */
8608 wider_mode
= GET_MODE_WIDER_MODE (mode
);
8609 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8610 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8612 if (j
< var
->n_var_parts
8613 && wider_mode
!= VOIDmode
8614 && var
->var_part
[j
].cur_loc
8615 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8616 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8617 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8618 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8619 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
8623 if (REG_P (loc
[n_var_parts
])
8624 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
8625 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
8626 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8629 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8630 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8632 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8633 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8636 if (!REG_P (new_loc
)
8637 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8640 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
8643 else if (MEM_P (loc
[n_var_parts
])
8644 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8645 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8646 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8648 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8649 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8650 XEXP (XEXP (loc2
, 0), 0))
8651 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8652 == GET_MODE_SIZE (mode
))
8653 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8654 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8655 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8656 XEXP (XEXP (loc2
, 0), 0))
8657 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8658 + GET_MODE_SIZE (mode
)
8659 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8660 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8666 loc
[n_var_parts
] = new_loc
;
8668 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8674 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8675 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8678 if (! flag_var_tracking_uninit
)
8679 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8683 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
,
8685 else if (n_var_parts
== 1)
8689 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8690 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8694 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
,
8697 else if (n_var_parts
)
8701 for (i
= 0; i
< n_var_parts
; i
++)
8703 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8705 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8706 gen_rtvec_v (n_var_parts
, loc
));
8707 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8708 parallel
, (int) initialized
);
8711 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8713 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8714 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8715 NOTE_DURING_CALL_P (note
) = true;
8719 /* Make sure that the call related notes come first. */
8720 while (NEXT_INSN (insn
)
8722 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8723 && NOTE_DURING_CALL_P (insn
))
8724 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8725 insn
= NEXT_INSN (insn
);
8727 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8728 && NOTE_DURING_CALL_P (insn
))
8729 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8730 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8732 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8734 NOTE_VAR_LOCATION (note
) = note_vl
;
8736 set_dv_changed (var
->dv
, false);
8737 gcc_assert (var
->in_changed_variables
);
8738 var
->in_changed_variables
= false;
8739 changed_variables
.clear_slot (varp
);
8741 /* Continue traversing the hash table. */
8745 /* While traversing changed_variables, push onto DATA (a stack of RTX
8746 values) entries that aren't user variables. */
8749 var_track_values_to_stack (variable_def
**slot
,
8750 vec
<rtx
, va_heap
> *changed_values_stack
)
8752 variable var
= *slot
;
8754 if (var
->onepart
== ONEPART_VALUE
)
8755 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8756 else if (var
->onepart
== ONEPART_DEXPR
)
8757 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
8762 /* Remove from changed_variables the entry whose DV corresponds to
8763 value or debug_expr VAL. */
8765 remove_value_from_changed_variables (rtx val
)
8767 decl_or_value dv
= dv_from_rtx (val
);
8768 variable_def
**slot
;
8771 slot
= changed_variables
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8774 var
->in_changed_variables
= false;
8775 changed_variables
.clear_slot (slot
);
8778 /* If VAL (a value or debug_expr) has backlinks to variables actively
8779 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8780 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8781 have dependencies of their own to notify. */
8784 notify_dependents_of_changed_value (rtx val
, variable_table_type htab
,
8785 vec
<rtx
, va_heap
> *changed_values_stack
)
8787 variable_def
**slot
;
8790 decl_or_value dv
= dv_from_rtx (val
);
8792 slot
= changed_variables
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8795 slot
= htab
.find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8797 slot
= dropped_values
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8801 while ((led
= VAR_LOC_DEP_LST (var
)))
8803 decl_or_value ldv
= led
->dv
;
8806 /* Deactivate and remove the backlink, as it was “used up”. It
8807 makes no sense to attempt to notify the same entity again:
8808 either it will be recomputed and re-register an active
8809 dependency, or it will still have the changed mark. */
8811 led
->next
->pprev
= led
->pprev
;
8813 *led
->pprev
= led
->next
;
8817 if (dv_changed_p (ldv
))
8820 switch (dv_onepart_p (ldv
))
8824 set_dv_changed (ldv
, true);
8825 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8829 ivar
= htab
.find_with_hash (ldv
, dv_htab_hash (ldv
));
8830 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8831 variable_was_changed (ivar
, NULL
);
8835 pool_free (loc_exp_dep_pool
, led
);
8836 ivar
= htab
.find_with_hash (ldv
, dv_htab_hash (ldv
));
8839 int i
= ivar
->n_var_parts
;
8842 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8844 if (loc
&& GET_CODE (loc
) == MEM
8845 && XEXP (loc
, 0) == val
)
8847 variable_was_changed (ivar
, NULL
);
8860 /* Take out of changed_variables any entries that don't refer to use
8861 variables. Back-propagate change notifications from values and
8862 debug_exprs to their active dependencies in HTAB or in
8863 CHANGED_VARIABLES. */
8866 process_changed_values (variable_table_type htab
)
8870 stack_vec
<rtx
, 20> changed_values_stack
;
8872 /* Move values from changed_variables to changed_values_stack. */
8874 .traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
8875 (&changed_values_stack
);
8877 /* Back-propagate change notifications in values while popping
8878 them from the stack. */
8879 for (n
= i
= changed_values_stack
.length ();
8880 i
> 0; i
= changed_values_stack
.length ())
8882 val
= changed_values_stack
.pop ();
8883 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
8885 /* This condition will hold when visiting each of the entries
8886 originally in changed_variables. We can't remove them
8887 earlier because this could drop the backlinks before we got a
8888 chance to use them. */
8891 remove_value_from_changed_variables (val
);
8897 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8898 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8899 the notes shall be emitted before of after instruction INSN. */
8902 emit_notes_for_changes (rtx insn
, enum emit_note_where where
,
8905 emit_note_data data
;
8906 variable_table_type htab
= shared_hash_htab (vars
);
8908 if (!changed_variables
.elements ())
8911 if (MAY_HAVE_DEBUG_INSNS
)
8912 process_changed_values (htab
);
8919 .traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
8922 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8923 same variable in hash table DATA or is not there at all. */
8926 emit_notes_for_differences_1 (variable_def
**slot
, variable_table_type new_vars
)
8928 variable old_var
, new_var
;
8931 new_var
= new_vars
.find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
8935 /* Variable has disappeared. */
8936 variable empty_var
= NULL
;
8938 if (old_var
->onepart
== ONEPART_VALUE
8939 || old_var
->onepart
== ONEPART_DEXPR
)
8941 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
8944 gcc_checking_assert (!empty_var
->in_changed_variables
);
8945 if (!VAR_LOC_1PAUX (old_var
))
8947 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
8948 VAR_LOC_1PAUX (empty_var
) = NULL
;
8951 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
8957 empty_var
= (variable
) pool_alloc (onepart_pool (old_var
->onepart
));
8958 empty_var
->dv
= old_var
->dv
;
8959 empty_var
->refcount
= 0;
8960 empty_var
->n_var_parts
= 0;
8961 empty_var
->onepart
= old_var
->onepart
;
8962 empty_var
->in_changed_variables
= false;
8965 if (empty_var
->onepart
)
8967 /* Propagate the auxiliary data to (ultimately)
8968 changed_variables. */
8969 empty_var
->var_part
[0].loc_chain
= NULL
;
8970 empty_var
->var_part
[0].cur_loc
= NULL
;
8971 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
8972 VAR_LOC_1PAUX (old_var
) = NULL
;
8974 variable_was_changed (empty_var
, NULL
);
8975 /* Continue traversing the hash table. */
8978 /* Update cur_loc and one-part auxiliary data, before new_var goes
8979 through variable_was_changed. */
8980 if (old_var
!= new_var
&& new_var
->onepart
)
8982 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
8983 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
8984 VAR_LOC_1PAUX (old_var
) = NULL
;
8985 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
8987 if (variable_different_p (old_var
, new_var
))
8988 variable_was_changed (new_var
, NULL
);
8990 /* Continue traversing the hash table. */
8994 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8998 emit_notes_for_differences_2 (variable_def
**slot
, variable_table_type old_vars
)
9000 variable old_var
, new_var
;
9003 old_var
= old_vars
.find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
9007 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9008 new_var
->var_part
[i
].cur_loc
= NULL
;
9009 variable_was_changed (new_var
, NULL
);
9012 /* Continue traversing the hash table. */
9016 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9020 emit_notes_for_differences (rtx insn
, dataflow_set
*old_set
,
9021 dataflow_set
*new_set
)
9023 shared_hash_htab (old_set
->vars
)
9024 .traverse
<variable_table_type
, emit_notes_for_differences_1
>
9025 (shared_hash_htab (new_set
->vars
));
9026 shared_hash_htab (new_set
->vars
)
9027 .traverse
<variable_table_type
, emit_notes_for_differences_2
>
9028 (shared_hash_htab (old_set
->vars
));
9029 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
9032 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9035 next_non_note_insn_var_location (rtx insn
)
9039 insn
= NEXT_INSN (insn
);
9042 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
9049 /* Emit the notes for changes of location parts in the basic block BB. */
9052 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9055 micro_operation
*mo
;
9057 dataflow_set_clear (set
);
9058 dataflow_set_copy (set
, &VTI (bb
)->in
);
9060 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9062 rtx insn
= mo
->insn
;
9063 rtx next_insn
= next_non_note_insn_var_location (insn
);
9068 dataflow_set_clear_at_call (set
);
9069 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9071 rtx arguments
= mo
->u
.loc
, *p
= &arguments
, note
;
9074 XEXP (XEXP (*p
, 0), 1)
9075 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9076 shared_hash_htab (set
->vars
));
9077 /* If expansion is successful, keep it in the list. */
9078 if (XEXP (XEXP (*p
, 0), 1))
9080 /* Otherwise, if the following item is data_value for it,
9082 else if (XEXP (*p
, 1)
9083 && REG_P (XEXP (XEXP (*p
, 0), 0))
9084 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9085 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9087 && REGNO (XEXP (XEXP (*p
, 0), 0))
9088 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9090 *p
= XEXP (XEXP (*p
, 1), 1);
9091 /* Just drop this item. */
9095 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9096 NOTE_VAR_LOCATION (note
) = arguments
;
9102 rtx loc
= mo
->u
.loc
;
9105 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9107 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9109 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9115 rtx loc
= mo
->u
.loc
;
9119 if (GET_CODE (loc
) == CONCAT
)
9121 val
= XEXP (loc
, 0);
9122 vloc
= XEXP (loc
, 1);
9130 var
= PAT_VAR_LOCATION_DECL (vloc
);
9132 clobber_variable_part (set
, NULL_RTX
,
9133 dv_from_decl (var
), 0, NULL_RTX
);
9136 if (VAL_NEEDS_RESOLUTION (loc
))
9137 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9138 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9139 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9142 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9143 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9144 dv_from_decl (var
), 0,
9145 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9148 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9154 rtx loc
= mo
->u
.loc
;
9155 rtx val
, vloc
, uloc
;
9157 vloc
= uloc
= XEXP (loc
, 1);
9158 val
= XEXP (loc
, 0);
9160 if (GET_CODE (val
) == CONCAT
)
9162 uloc
= XEXP (val
, 1);
9163 val
= XEXP (val
, 0);
9166 if (VAL_NEEDS_RESOLUTION (loc
))
9167 val_resolve (set
, val
, vloc
, insn
);
9169 val_store (set
, val
, uloc
, insn
, false);
9171 if (VAL_HOLDS_TRACK_EXPR (loc
))
9173 if (GET_CODE (uloc
) == REG
)
9174 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9176 else if (GET_CODE (uloc
) == MEM
)
9177 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9181 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9187 rtx loc
= mo
->u
.loc
;
9188 rtx val
, vloc
, uloc
;
9192 uloc
= XEXP (vloc
, 1);
9193 val
= XEXP (vloc
, 0);
9196 if (GET_CODE (uloc
) == SET
)
9198 dstv
= SET_DEST (uloc
);
9199 srcv
= SET_SRC (uloc
);
9207 if (GET_CODE (val
) == CONCAT
)
9209 dstv
= vloc
= XEXP (val
, 1);
9210 val
= XEXP (val
, 0);
9213 if (GET_CODE (vloc
) == SET
)
9215 srcv
= SET_SRC (vloc
);
9217 gcc_assert (val
!= srcv
);
9218 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9220 dstv
= vloc
= SET_DEST (vloc
);
9222 if (VAL_NEEDS_RESOLUTION (loc
))
9223 val_resolve (set
, val
, srcv
, insn
);
9225 else if (VAL_NEEDS_RESOLUTION (loc
))
9227 gcc_assert (GET_CODE (uloc
) == SET
9228 && GET_CODE (SET_SRC (uloc
)) == REG
);
9229 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9232 if (VAL_HOLDS_TRACK_EXPR (loc
))
9234 if (VAL_EXPR_IS_CLOBBERED (loc
))
9237 var_reg_delete (set
, uloc
, true);
9238 else if (MEM_P (uloc
))
9240 gcc_assert (MEM_P (dstv
));
9241 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9242 var_mem_delete (set
, dstv
, true);
9247 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9248 rtx src
= NULL
, dst
= uloc
;
9249 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9251 if (GET_CODE (uloc
) == SET
)
9253 src
= SET_SRC (uloc
);
9254 dst
= SET_DEST (uloc
);
9259 status
= find_src_status (set
, src
);
9261 src
= find_src_set_src (set
, src
);
9265 var_reg_delete_and_set (set
, dst
, !copied_p
,
9267 else if (MEM_P (dst
))
9269 gcc_assert (MEM_P (dstv
));
9270 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9271 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9276 else if (REG_P (uloc
))
9277 var_regno_delete (set
, REGNO (uloc
));
9278 else if (MEM_P (uloc
))
9280 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9281 gcc_checking_assert (vloc
== dstv
);
9283 clobber_overlapping_mems (set
, vloc
);
9286 val_store (set
, val
, dstv
, insn
, true);
9288 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9295 rtx loc
= mo
->u
.loc
;
9298 if (GET_CODE (loc
) == SET
)
9300 set_src
= SET_SRC (loc
);
9301 loc
= SET_DEST (loc
);
9305 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9308 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9311 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9318 rtx loc
= mo
->u
.loc
;
9319 enum var_init_status src_status
;
9322 if (GET_CODE (loc
) == SET
)
9324 set_src
= SET_SRC (loc
);
9325 loc
= SET_DEST (loc
);
9328 src_status
= find_src_status (set
, set_src
);
9329 set_src
= find_src_set_src (set
, set_src
);
9332 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9334 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9336 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9343 rtx loc
= mo
->u
.loc
;
9346 var_reg_delete (set
, loc
, false);
9348 var_mem_delete (set
, loc
, false);
9350 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9356 rtx loc
= mo
->u
.loc
;
9359 var_reg_delete (set
, loc
, true);
9361 var_mem_delete (set
, loc
, true);
9363 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9369 set
->stack_adjust
+= mo
->u
.adjust
;
9375 /* Emit notes for the whole function. */
9378 vt_emit_notes (void)
9383 gcc_assert (!changed_variables
.elements ());
9385 /* Free memory occupied by the out hash tables, as they aren't used
9388 dataflow_set_clear (&VTI (bb
)->out
);
9390 /* Enable emitting notes by functions (mainly by set_variable_part and
9391 delete_variable_part). */
9394 if (MAY_HAVE_DEBUG_INSNS
)
9396 dropped_values
.create (cselib_get_next_uid () * 2);
9397 loc_exp_dep_pool
= create_alloc_pool ("loc_exp_dep pool",
9398 sizeof (loc_exp_dep
), 64);
9401 dataflow_set_init (&cur
);
9405 /* Emit the notes for changes of variable locations between two
9406 subsequent basic blocks. */
9407 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
9409 if (MAY_HAVE_DEBUG_INSNS
)
9410 local_get_addr_cache
= pointer_map_create ();
9412 /* Emit the notes for the changes in the basic block itself. */
9413 emit_notes_in_bb (bb
, &cur
);
9415 if (MAY_HAVE_DEBUG_INSNS
)
9416 pointer_map_destroy (local_get_addr_cache
);
9417 local_get_addr_cache
= NULL
;
9419 /* Free memory occupied by the in hash table, we won't need it
9421 dataflow_set_clear (&VTI (bb
)->in
);
9423 #ifdef ENABLE_CHECKING
9424 shared_hash_htab (cur
.vars
)
9425 .traverse
<variable_table_type
, emit_notes_for_differences_1
>
9426 (shared_hash_htab (empty_shared_hash
));
9428 dataflow_set_destroy (&cur
);
9430 if (MAY_HAVE_DEBUG_INSNS
)
9431 dropped_values
.dispose ();
9436 /* If there is a declaration and offset associated with register/memory RTL
9437 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9440 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
9444 if (REG_ATTRS (rtl
))
9446 *declp
= REG_EXPR (rtl
);
9447 *offsetp
= REG_OFFSET (rtl
);
9451 else if (MEM_P (rtl
))
9453 if (MEM_ATTRS (rtl
))
9455 *declp
= MEM_EXPR (rtl
);
9456 *offsetp
= INT_MEM_OFFSET (rtl
);
9463 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9467 record_entry_value (cselib_val
*val
, rtx rtl
)
9469 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9471 ENTRY_VALUE_EXP (ev
) = rtl
;
9473 cselib_add_permanent_equiv (val
, ev
, get_insns ());
9476 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9479 vt_add_function_parameter (tree parm
)
9481 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9482 rtx incoming
= DECL_INCOMING_RTL (parm
);
9484 enum machine_mode mode
;
9485 HOST_WIDE_INT offset
;
9489 if (TREE_CODE (parm
) != PARM_DECL
)
9492 if (!decl_rtl
|| !incoming
)
9495 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9498 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9499 rewrite the incoming location of parameters passed on the stack
9500 into MEMs based on the argument pointer, so that incoming doesn't
9501 depend on a pseudo. */
9502 if (MEM_P (incoming
)
9503 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9504 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9505 && XEXP (XEXP (incoming
, 0), 0)
9506 == crtl
->args
.internal_arg_pointer
9507 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9509 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9510 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9511 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
9513 = replace_equiv_address_nv (incoming
,
9514 plus_constant (Pmode
,
9515 arg_pointer_rtx
, off
));
9518 #ifdef HAVE_window_save
9519 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9520 If the target machine has an explicit window save instruction, the
9521 actual entry value is the corresponding OUTGOING_REGNO instead. */
9522 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
9524 if (REG_P (incoming
)
9525 && HARD_REGISTER_P (incoming
)
9526 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9529 p
.incoming
= incoming
;
9531 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9532 OUTGOING_REGNO (REGNO (incoming
)), 0);
9533 p
.outgoing
= incoming
;
9534 vec_safe_push (windowed_parm_regs
, p
);
9536 else if (MEM_P (incoming
)
9537 && REG_P (XEXP (incoming
, 0))
9538 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9540 rtx reg
= XEXP (incoming
, 0);
9541 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9545 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9547 vec_safe_push (windowed_parm_regs
, p
);
9548 incoming
= replace_equiv_address_nv (incoming
, reg
);
9554 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9556 if (MEM_P (incoming
))
9558 /* This means argument is passed by invisible reference. */
9564 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9566 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9567 GET_MODE (decl_rtl
));
9576 /* If that DECL_RTL wasn't a pseudo that got spilled to
9577 memory, bail out. Otherwise, the spill slot sharing code
9578 will force the memory to reference spill_slot_decl (%sfp),
9579 so we don't match above. That's ok, the pseudo must have
9580 referenced the entire parameter, so just reset OFFSET. */
9581 if (decl
!= get_spill_slot_decl (false))
9586 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
9589 out
= &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->out
;
9591 dv
= dv_from_decl (parm
);
9593 if (target_for_debug_bind (parm
)
9594 /* We can't deal with these right now, because this kind of
9595 variable is single-part. ??? We could handle parallels
9596 that describe multiple locations for the same single
9597 value, but ATM we don't. */
9598 && GET_CODE (incoming
) != PARALLEL
)
9603 /* ??? We shouldn't ever hit this, but it may happen because
9604 arguments passed by invisible reference aren't dealt with
9605 above: incoming-rtl will have Pmode rather than the
9606 expected mode for the type. */
9610 lowpart
= var_lowpart (mode
, incoming
);
9614 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9615 VOIDmode
, get_insns ());
9617 /* ??? Float-typed values in memory are not handled by
9621 preserve_value (val
);
9622 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
9623 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9624 dv
= dv_from_value (val
->val_rtx
);
9627 if (MEM_P (incoming
))
9629 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9630 VOIDmode
, get_insns ());
9633 preserve_value (val
);
9634 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
9639 if (REG_P (incoming
))
9641 incoming
= var_lowpart (mode
, incoming
);
9642 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9643 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
9645 set_variable_part (out
, incoming
, dv
, offset
,
9646 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9647 if (dv_is_value_p (dv
))
9649 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
9650 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9651 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9653 enum machine_mode indmode
9654 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9655 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9656 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9661 preserve_value (val
);
9662 record_entry_value (val
, mem
);
9663 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9664 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9669 else if (MEM_P (incoming
))
9671 incoming
= var_lowpart (mode
, incoming
);
9672 set_variable_part (out
, incoming
, dv
, offset
,
9673 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9677 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9680 vt_add_function_parameters (void)
9684 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9685 parm
; parm
= DECL_CHAIN (parm
))
9686 vt_add_function_parameter (parm
);
9688 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9690 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
9692 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9693 vexpr
= TREE_OPERAND (vexpr
, 0);
9695 if (TREE_CODE (vexpr
) == PARM_DECL
9696 && DECL_ARTIFICIAL (vexpr
)
9697 && !DECL_IGNORED_P (vexpr
)
9698 && DECL_NAMELESS (vexpr
))
9699 vt_add_function_parameter (vexpr
);
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* The CFA base must be a fixed register distinct from the hard frame
     pointer; otherwise give up on using a CFA base at all.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create the VALUE for the CFA base and preserve it so that
     cselib_reset_table does not flush it.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
                                 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}
/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  /* CFA offset of the hard frame pointer once it is set up, or -1
     while unknown / not applicable.  */
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));

  /* Set up the allocation pools used throughout the pass.  */
  attrs_pool = create_alloc_pool ("attrs_def pool",
                                  sizeof (struct attrs_def), 1024);
  var_pool = create_alloc_pool ("variable_def pool",
                                sizeof (struct variable_def)
                                + (MAX_VAR_PARTS - 1)
                                * sizeof (((variable)NULL)->var_part[0]), 64);
  loc_chain_pool = create_alloc_pool ("location_chain_def pool",
                                      sizeof (struct location_chain_def),
                                      1024);
  shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
                                        sizeof (struct shared_hash_def), 256);
  empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab.create (1);
  changed_variables.create (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      valvar_pool = create_alloc_pool ("small variable_def pool",
                                       sizeof (struct variable_def), 256);
      preserved_values.create (256);
      global_get_addr_cache = pointer_map_create ();
    }
  else
    {
      scratch_regs = NULL;
      valvar_pool = NULL;
      global_get_addr_cache = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      /* Record a permanent equivalence between the CFA base register
         and the incoming stack pointer minus OFST ...  */
      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
                                     VOIDmode, get_insns ());
      preserve_value (val);
      cselib_preserve_cfa_base_value (val, REGNO (reg));
      expr = plus_constant (GET_MODE (stack_pointer_rtx),
                            stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());

      /* ... and conversely between the incoming stack pointer and the
         CFA base register plus OFST.  */
      val = cselib_lookup_from_insn (stack_pointer_rtx,
                                     GET_MODE (stack_pointer_rtx), 1,
                                     VOIDmode, get_insns ());
      preserve_value (val);
      expr = plus_constant (GET_MODE (reg), reg, ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx.  We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated.  */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
        return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            elim = XEXP (elim, 0);
          if (elim == stack_pointer_rtx)
            vt_init_cfa_base ();
        }
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            {
              fp_cfa_offset -= INTVAL (XEXP (elim, 1));
              elim = XEXP (elim, 0);
            }
          if (elim != hard_frame_pointer_rtx)
            fp_cfa_offset = -1;
        }
      else
        fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            elim = XEXP (elim, 0);
          if (elim == hard_frame_pointer_rtx)
            vt_init_cfa_base ();
        }
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_record_sets_hook = add_with_sets;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "first value: %i\n",
                     cselib_get_next_uid ());
        }

      /* Extend the region to a maximal run of blocks connected by
         single-predecessor fallthru edges, so cselib can work across
         the whole run before its table is reset.  */
      first_bb = bb;
      for (;;)
        {
          edge e;
          if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
              || ! single_pred_p (bb->next_bb))
            break;
          e = find_edge (bb, bb->next_bb);
          if (! e || (e->flags & EDGE_FALLTHRU) == 0)
            break;
          bb = bb->next_bb;
        }
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
        {
          HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
          VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
          for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
               insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                {
                  if (!frame_pointer_needed)
                    {
                      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
                      /* Pre-modifying stack adjustment is emitted before
                         the insn's other micro-operations.  */
                      if (pre)
                        {
                          micro_operation mo;
                          mo.type = MO_ADJUST;
                          mo.u.adjust = pre;
                          mo.insn = insn;
                          if (dump_file && (dump_flags & TDF_DETAILS))
                            log_op_type (PATTERN (insn), bb, insn,
                                         MO_ADJUST, dump_file);
                          VTI (bb)->mos.safe_push (mo);
                          VTI (bb)->out.stack_adjust += pre;
                        }
                    }

                  cselib_hook_called = false;
                  adjust_insn (bb, insn);
                  if (MAY_HAVE_DEBUG_INSNS)
                    {
                      if (CALL_P (insn))
                        prepare_call_arguments (bb, insn);
                      cselib_process_insn (insn);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          print_rtl_single (dump_file, insn);
                          dump_cselib_table (dump_file);
                        }
                    }
                  /* If cselib did not invoke our hook, record the insn's
                     micro-operations directly.  */
                  if (!cselib_hook_called)
                    add_with_sets (insn, 0, 0);
                  cancel_changes (0);

                  /* Post-modifying stack adjustment follows the insn.  */
                  if (!frame_pointer_needed && post)
                    {
                      micro_operation mo;
                      mo.type = MO_ADJUST;
                      mo.u.adjust = post;
                      mo.insn = insn;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        log_op_type (PATTERN (insn), bb, insn,
                                     MO_ADJUST, dump_file);
                      VTI (bb)->mos.safe_push (mo);
                      VTI (bb)->out.stack_adjust += post;
                    }

                  /* Once the insn establishing the frame pointer is seen,
                     switch over to the CFA base.  */
                  if (fp_cfa_offset != -1
                      && hard_frame_pointer_adjustment == -1
                      && fp_setter_insn (insn))
                    {
                      vt_init_cfa_base ();
                      hard_frame_pointer_adjustment = fp_cfa_offset;
                      /* Disassociate sp from fp now.  */
                      if (MAY_HAVE_DEBUG_INSNS)
                        {
                          cselib_val *v;
                          cselib_invalidate_rtx (stack_pointer_rtx);
                          v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
                                             VOIDmode);
                          if (v && !cselib_preserved_value_p (v))
                            {
                              cselib_set_value_sp_based (v);
                              preserve_value (v);
                            }
                        }
                    }
                }
            }
          /* The out stack adjustment recomputed from the micro-operations
             must match the one found by vt_stack_adjustments.  */
          gcc_assert (offset == VTI (bb)->out.stack_adjust);
        }

      bb = last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_preserve_only_values ();
          cselib_reset_table (cselib_get_next_uid ());
          cselib_record_sets_hook = NULL;
        }
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
/* Get rid of all debug insns from the insn stream.  */

static void
delete_debug_insns (void)
{
  basic_block bb;
  rtx insn, next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
        if (DEBUG_INSN_P (insn))
          {
            tree decl = INSN_VAR_LOCATION_DECL (insn);
            /* A named label decl with no RTL yet is turned into a
               NOTE_INSN_DELETED_DEBUG_LABEL note instead of being
               deleted, so its name survives for debug output.  */
            if (TREE_CODE (decl) == LABEL_DECL
                && DECL_NAME (decl)
                && !DECL_RTL_SET_P (decl))
              {
                PUT_CODE (insn, NOTE);
                NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
                NOTE_DELETED_LABEL_NAME (insn)
                  = IDENTIFIER_POINTER (DECL_NAME (decl));
                SET_DECL_RTL (decl, insn);
                CODE_LABEL_NUMBER (insn) = debug_label_num++;
              }
            else
              delete_insn (insn);
          }
    }
}
/* Run a fast, BB-local only version of var tracking, to take care of
   information that we don't do global analysis on, such that not all
   information is lost.  If SKIPPED holds, we're skipping the global
   pass entirely, so we should try to use information it would have
   handled as well..  */

static void
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
  /* ??? Just skip it all for now.  */
  delete_debug_insns ();
}
/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      VTI (bb)->mos.release ();
    }

  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
        {
          dataflow_set_destroy (VTI (bb)->permp);
          XDELETE (VTI (bb)->permp);
        }
    }
  free_aux_for_blocks ();
  empty_shared_hash->htab.dispose ();
  changed_variables.dispose ();
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      if (global_get_addr_cache)
        pointer_map_destroy (global_get_addr_cache);
      global_get_addr_cache = NULL;
      if (loc_exp_dep_pool)
        free_alloc_pool (loc_exp_dep_pool);
      loc_exp_dep_pool = NULL;
      free_alloc_pool (valvar_pool);
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
/* The entry point to variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* A negative flag_var_tracking_assignments means "disabled":
     drop all debug insns up front.  */
  if (flag_var_tracking_assignments < 0)
    {
      delete_debug_insns ();
      flag_var_tracking_assignments = 0;
    }

  /* Bail out on very large, edge-dense functions; the analysis would
     be too expensive.  */
  if (n_basic_blocks_for_fn (cfun) > 500 &&
      n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  /* If the first attempt failed, retry without debug assignments.  */
  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, dump_flags);
    }

  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 1;
}
10221 variable_tracking_main (void)
10224 int save
= flag_var_tracking_assignments
;
10226 ret
= variable_tracking_main_1 ();
10228 flag_var_tracking_assignments
= save
;
10234 gate_handle_var_tracking (void)
10236 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
/* Pass descriptor for the "vartrack" RTL pass.  */

const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
};
/* Pass class wrapping the gate and execute functions above.  */

class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_var_tracking (); }
  unsigned int execute () { return variable_tracking_main (); }

}; // class pass_variable_tracking
10271 } // anon namespace
10274 make_pass_variable_tracking (gcc::context
*ctxt
)
10276 return new pass_variable_tracking (ctxt
);