1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the variable tracking pass. It computes where
21 variables are located (which registers or where in memory) at each position
22 in instruction stream and emits notes describing the locations.
23 Debug information (DWARF2 location lists) is finally generated from
these notes.
25 With this debug information, it is possible to show variables
26 even when debugging optimized code.
28 How does the variable tracking pass work?
30 First, it scans RTL code for uses, stores and clobbers (register/memory
31 references in instructions), for call insns and for stack adjustments
32 separately for each basic block and saves them to an array of micro
operations.
34 The micro operations of one instruction are ordered so that
35 pre-modifying stack adjustment < use < use with no var < call insn <
36 < clobber < set < post-modifying stack adjustment
38 Then, a forward dataflow analysis is performed to find out how locations
39 of variables change through code and to propagate the variable locations
40 along control flow graph.
41 The IN set for basic block BB is computed as a union of OUT sets of BB's
42 predecessors, the OUT set for BB is copied from the IN set for BB and
43 is changed according to micro operations in BB.
45 The IN and OUT sets for basic blocks consist of a current stack adjustment
46 (used for adjusting offset of variables addressed using stack pointer),
47 the table of structures describing the locations of parts of a variable
48 and for each physical register a linked list for each physical register.
49 The linked list is a list of variable parts stored in the register,
50 i.e. it is a list of triplets (reg, decl, offset) where decl is
51 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
52 effectively deleting the appropriate variable parts when we set or clobber the
register.
55 There may be more than one variable part in a register. The linked lists
56 should be pretty short so it is a good data structure here.
57 For example in the following code, register allocator may assign same
58 register to variables A and B, and both of them are stored in the same
71 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
72 are emitted to appropriate positions in RTL code. Each such note describes
73 the location of one variable at the point in instruction stream where the
74 note is. There is no need to emit a note for each variable before each
75 instruction, we only emit these notes where the location of variable changes
76 (this means that we also emit notes for changes between the OUT set of the
77 previous block and the IN set of the current block).
79 The notes consist of two parts:
80 1. the declaration (from REG_EXPR or MEM_EXPR)
81 2. the location of a variable - it is either a simple register/memory
82 reference (for simple variables, for example int),
83 or a parallel of register/memory references (for a large variables
84 which consist of several parts, for example long long).
90 #include "coretypes.h"
95 #include "stor-layout.h"
98 #include "hard-reg-set.h"
99 #include "basic-block.h"
101 #include "insn-config.h"
104 #include "alloc-pool.h"
106 #include "hash-table.h"
109 #include "tree-pass.h"
111 #include "tree-dfa.h"
112 #include "tree-ssa.h"
116 #include "diagnostic.h"
117 #include "tree-pretty-print.h"
118 #include "pointer-set.h"
123 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
124 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
125 Currently the value is the same as IDENTIFIER_NODE, which has such
126 a property. If this compile time assertion ever fails, make sure that
127 the new tree code that equals (int) VALUE has the same property. */
128 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
130 /* Type of micro operation. */
131 enum micro_operation_type
133 MO_USE
, /* Use location (REG or MEM). */
134 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
135 or the variable is not trackable. */
136 MO_VAL_USE
, /* Use location which is associated with a value. */
137 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
138 MO_VAL_SET
, /* Set location associated with a value. */
139 MO_SET
, /* Set location. */
140 MO_COPY
, /* Copy the same portion of a variable from one
141 location to another. */
142 MO_CLOBBER
, /* Clobber location. */
143 MO_CALL
, /* Call insn. */
144 MO_ADJUST
/* Adjust stack pointer. */
148 static const char * const ATTRIBUTE_UNUSED
149 micro_operation_type_name
[] = {
162 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
163 Notes emitted as AFTER_CALL are to take effect during the call,
164 rather than after the call. */
167 EMIT_NOTE_BEFORE_INSN
,
168 EMIT_NOTE_AFTER_INSN
,
169 EMIT_NOTE_AFTER_CALL_INSN
172 /* Structure holding information about micro operation. */
173 typedef struct micro_operation_def
175 /* Type of micro operation. */
176 enum micro_operation_type type
;
178 /* The instruction which the micro operation is in, for MO_USE,
179 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
180 instruction or note in the original flow (before any var-tracking
181 notes are inserted, to simplify emission of notes), for MO_SET
186 /* Location. For MO_SET and MO_COPY, this is the SET that
187 performs the assignment, if known, otherwise it is the target
188 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
189 CONCAT of the VALUE and the LOC associated with it. For
190 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
191 associated with it. */
194 /* Stack adjustment. */
195 HOST_WIDE_INT adjust
;
200 /* A declaration of a variable, or an RTL value being handled like a
202 typedef void *decl_or_value
;
204 /* Return true if a decl_or_value DV is a DECL or NULL. */
206 dv_is_decl_p (decl_or_value dv
)
208 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
211 /* Return true if a decl_or_value is a VALUE rtl. */
213 dv_is_value_p (decl_or_value dv
)
215 return dv
&& !dv_is_decl_p (dv
);
218 /* Return the decl in the decl_or_value. */
220 dv_as_decl (decl_or_value dv
)
222 gcc_checking_assert (dv_is_decl_p (dv
));
226 /* Return the value in the decl_or_value. */
228 dv_as_value (decl_or_value dv
)
230 gcc_checking_assert (dv_is_value_p (dv
));
234 /* Return the opaque pointer in the decl_or_value. */
236 dv_as_opaque (decl_or_value dv
)
242 /* Description of location of a part of a variable. The content of a physical
243 register is described by a chain of these structures.
244 The chains are pretty short (usually 1 or 2 elements) and thus
245 chain is the best data structure. */
246 typedef struct attrs_def
248 /* Pointer to next member of the list. */
249 struct attrs_def
*next
;
251 /* The rtx of register. */
254 /* The declaration corresponding to LOC. */
257 /* Offset from start of DECL. */
258 HOST_WIDE_INT offset
;
261 /* Structure for chaining the locations. */
262 typedef struct location_chain_def
264 /* Next element in the chain. */
265 struct location_chain_def
*next
;
267 /* The location (REG, MEM or VALUE). */
270 /* The "value" stored in this location. */
274 enum var_init_status init
;
277 /* A vector of loc_exp_dep holds the active dependencies of a one-part
278 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
279 location of DV. Each entry is also part of VALUE' s linked-list of
280 backlinks back to DV. */
281 typedef struct loc_exp_dep_s
283 /* The dependent DV. */
285 /* The dependency VALUE or DECL_DEBUG. */
287 /* The next entry in VALUE's backlinks list. */
288 struct loc_exp_dep_s
*next
;
289 /* A pointer to the pointer to this entry (head or prev's next) in
290 the doubly-linked list. */
291 struct loc_exp_dep_s
**pprev
;
295 /* This data structure holds information about the depth of a variable
297 typedef struct expand_depth_struct
299 /* This measures the complexity of the expanded expression. It
300 grows by one for each level of expansion that adds more than one
303 /* This counts the number of ENTRY_VALUE expressions in an
304 expansion. We want to minimize their use. */
308 /* This data structure is allocated for one-part variables at the time
309 of emitting notes. */
312 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
313 computation used the expansion of this variable, and that ought
314 to be notified should this variable change. If the DV's cur_loc
315 expanded to NULL, all components of the loc list are regarded as
316 active, so that any changes in them give us a chance to get a
317 location. Otherwise, only components of the loc that expanded to
318 non-NULL are regarded as active dependencies. */
319 loc_exp_dep
*backlinks
;
320 /* This holds the LOC that was expanded into cur_loc. We need only
321 mark a one-part variable as changed if the FROM loc is removed,
322 or if it has no known location and a loc is added, or if it gets
323 a change notification from any of its active dependencies. */
325 /* The depth of the cur_loc expression. */
327 /* Dependencies actively used when expand FROM into cur_loc. */
328 vec
<loc_exp_dep
, va_heap
, vl_embed
> deps
;
331 /* Structure describing one part of variable. */
332 typedef struct variable_part_def
334 /* Chain of locations of the part. */
335 location_chain loc_chain
;
337 /* Location which was last emitted to location list. */
342 /* The offset in the variable, if !var->onepart. */
343 HOST_WIDE_INT offset
;
345 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
346 struct onepart_aux
*onepaux
;
350 /* Maximum number of location parts. */
351 #define MAX_VAR_PARTS 16
353 /* Enumeration type used to discriminate various types of one-part
355 typedef enum onepart_enum
357 /* Not a one-part variable. */
359 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */
361 /* A DEBUG_EXPR_DECL. */
367 /* Structure describing where the variable is located. */
368 typedef struct variable_def
370 /* The declaration of the variable, or an RTL value being handled
371 like a declaration. */
374 /* Reference count. */
377 /* Number of variable parts. */
380 /* What type of DV this is, according to enum onepart_enum. */
381 ENUM_BITFIELD (onepart_enum
) onepart
: CHAR_BIT
;
383 /* True if this variable_def struct is currently in the
384 changed_variables hash table. */
385 bool in_changed_variables
;
387 /* The variable parts. */
388 variable_part var_part
[1];
390 typedef const struct variable_def
*const_variable
;
392 /* Pointer to the BB's information specific to variable tracking pass. */
393 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
395 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
396 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
398 #if ENABLE_CHECKING && (GCC_VERSION >= 2007)
400 /* Access VAR's Ith part's offset, checking that it's not a one-part
402 #define VAR_PART_OFFSET(var, i) __extension__ \
403 (*({ variable const __v = (var); \
404 gcc_checking_assert (!__v->onepart); \
405 &__v->var_part[(i)].aux.offset; }))
407 /* Access VAR's one-part auxiliary data, checking that it is a
408 one-part variable. */
409 #define VAR_LOC_1PAUX(var) __extension__ \
410 (*({ variable const __v = (var); \
411 gcc_checking_assert (__v->onepart); \
412 &__v->var_part[0].aux.onepaux; }))
415 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
416 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
419 /* These are accessor macros for the one-part auxiliary data. When
420 convenient for users, they're guarded by tests that the data was
422 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \
423 ? VAR_LOC_1PAUX (var)->backlinks \
425 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \
426 ? &VAR_LOC_1PAUX (var)->backlinks \
428 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
429 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
430 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \
431 ? &VAR_LOC_1PAUX (var)->deps \
436 typedef unsigned int dvuid
;
438 /* Return the uid of DV. */
441 dv_uid (decl_or_value dv
)
443 if (dv_is_value_p (dv
))
444 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
446 return DECL_UID (dv_as_decl (dv
));
449 /* Compute the hash from the uid. */
451 static inline hashval_t
452 dv_uid2hash (dvuid uid
)
457 /* The hash function for a mask table in a shared_htab chain. */
459 static inline hashval_t
460 dv_htab_hash (decl_or_value dv
)
462 return dv_uid2hash (dv_uid (dv
));
465 static void variable_htab_free (void *);
467 /* Variable hashtable helpers. */
469 struct variable_hasher
471 typedef variable_def value_type
;
472 typedef void compare_type
;
473 static inline hashval_t
hash (const value_type
*);
474 static inline bool equal (const value_type
*, const compare_type
*);
475 static inline void remove (value_type
*);
478 /* The hash function for variable_htab, computes the hash value
479 from the declaration of variable X. */
482 variable_hasher::hash (const value_type
*v
)
484 return dv_htab_hash (v
->dv
);
487 /* Compare the declaration of variable X with declaration Y. */
490 variable_hasher::equal (const value_type
*v
, const compare_type
*y
)
492 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
494 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
497 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
500 variable_hasher::remove (value_type
*var
)
502 variable_htab_free (var
);
505 typedef hash_table
<variable_hasher
> variable_table_type
;
506 typedef variable_table_type::iterator variable_iterator_type
;
508 /* Structure for passing some other parameters to function
509 emit_note_insn_var_location. */
510 typedef struct emit_note_data_def
512 /* The instruction which the note will be emitted before/after. */
515 /* Where the note will be emitted (before/after insn)? */
516 enum emit_note_where where
;
518 /* The variables and values active at this point. */
519 variable_table_type vars
;
522 /* Structure holding a refcounted hash table. If refcount > 1,
523 it must be first unshared before modified. */
524 typedef struct shared_hash_def
526 /* Reference count. */
529 /* Actual hash table. */
530 variable_table_type htab
;
533 /* Structure holding the IN or OUT set for a basic block. */
534 typedef struct dataflow_set_def
536 /* Adjustment of stack offset. */
537 HOST_WIDE_INT stack_adjust
;
539 /* Attributes for registers (lists of attrs). */
540 attrs regs
[FIRST_PSEUDO_REGISTER
];
542 /* Variable locations. */
545 /* Vars that is being traversed. */
546 shared_hash traversed_vars
;
549 /* The structure (one for each basic block) containing the information
550 needed for variable tracking. */
551 typedef struct variable_tracking_info_def
553 /* The vector of micro operations. */
554 vec
<micro_operation
> mos
;
556 /* The IN and OUT set for dataflow analysis. */
560 /* The permanent-in dataflow set for this block. This is used to
561 hold values for which we had to compute entry values. ??? This
562 should probably be dynamically allocated, to avoid using more
563 memory in non-debug builds. */
566 /* Has the block been visited in DFS? */
569 /* Has the block been flooded in VTA? */
572 } *variable_tracking_info
;
574 /* Alloc pool for struct attrs_def. */
575 static alloc_pool attrs_pool
;
577 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
578 static alloc_pool var_pool
;
580 /* Alloc pool for struct variable_def with a single var_part entry. */
581 static alloc_pool valvar_pool
;
583 /* Alloc pool for struct location_chain_def. */
584 static alloc_pool loc_chain_pool
;
586 /* Alloc pool for struct shared_hash_def. */
587 static alloc_pool shared_hash_pool
;
589 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */
590 static alloc_pool loc_exp_dep_pool
;
592 /* Changed variables, notes will be emitted for them. */
593 static variable_table_type changed_variables
;
595 /* Shall notes be emitted? */
596 static bool emit_notes
;
598 /* Values whose dynamic location lists have gone empty, but whose
599 cselib location lists are still usable. Use this to hold the
600 current location, the backlinks, etc, during emit_notes. */
601 static variable_table_type dropped_values
;
603 /* Empty shared hashtable. */
604 static shared_hash empty_shared_hash
;
606 /* Scratch register bitmap used by cselib_expand_value_rtx. */
607 static bitmap scratch_regs
= NULL
;
609 #ifdef HAVE_window_save
610 typedef struct GTY(()) parm_reg
{
616 /* Vector of windowed parameter registers, if any. */
617 static vec
<parm_reg_t
, va_gc
> *windowed_parm_regs
= NULL
;
620 /* Variable used to tell whether cselib_process_insn called our hook. */
621 static bool cselib_hook_called
;
623 /* Local function prototypes. */
624 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
626 static void insn_stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
628 static bool vt_stack_adjustments (void);
630 static void init_attrs_list_set (attrs
*);
631 static void attrs_list_clear (attrs
*);
632 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
633 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
634 static void attrs_list_copy (attrs
*, attrs
);
635 static void attrs_list_union (attrs
*, attrs
);
637 static variable_def
**unshare_variable (dataflow_set
*set
, variable_def
**slot
,
638 variable var
, enum var_init_status
);
639 static void vars_copy (variable_table_type
, variable_table_type
);
640 static tree
var_debug_decl (tree
);
641 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
642 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
643 enum var_init_status
, rtx
);
644 static void var_reg_delete (dataflow_set
*, rtx
, bool);
645 static void var_regno_delete (dataflow_set
*, int);
646 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
647 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
648 enum var_init_status
, rtx
);
649 static void var_mem_delete (dataflow_set
*, rtx
, bool);
651 static void dataflow_set_init (dataflow_set
*);
652 static void dataflow_set_clear (dataflow_set
*);
653 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
654 static int variable_union_info_cmp_pos (const void *, const void *);
655 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
656 static location_chain
find_loc_in_1pdv (rtx
, variable
, variable_table_type
);
657 static bool canon_value_cmp (rtx
, rtx
);
658 static int loc_cmp (rtx
, rtx
);
659 static bool variable_part_different_p (variable_part
*, variable_part
*);
660 static bool onepart_variable_different_p (variable
, variable
);
661 static bool variable_different_p (variable
, variable
);
662 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
663 static void dataflow_set_destroy (dataflow_set
*);
665 static bool contains_symbol_ref (rtx
);
666 static bool track_expr_p (tree
, bool);
667 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
668 static int add_uses (rtx
*, void *);
669 static void add_uses_1 (rtx
*, void *);
670 static void add_stores (rtx
, const_rtx
, void *);
671 static bool compute_bb_dataflow (basic_block
);
672 static bool vt_find_locations (void);
674 static void dump_attrs_list (attrs
);
675 static void dump_var (variable
);
676 static void dump_vars (variable_table_type
);
677 static void dump_dataflow_set (dataflow_set
*);
678 static void dump_dataflow_sets (void);
680 static void set_dv_changed (decl_or_value
, bool);
681 static void variable_was_changed (variable
, dataflow_set
*);
682 static variable_def
**set_slot_part (dataflow_set
*, rtx
, variable_def
**,
683 decl_or_value
, HOST_WIDE_INT
,
684 enum var_init_status
, rtx
);
685 static void set_variable_part (dataflow_set
*, rtx
,
686 decl_or_value
, HOST_WIDE_INT
,
687 enum var_init_status
, rtx
, enum insert_option
);
688 static variable_def
**clobber_slot_part (dataflow_set
*, rtx
,
689 variable_def
**, HOST_WIDE_INT
, rtx
);
690 static void clobber_variable_part (dataflow_set
*, rtx
,
691 decl_or_value
, HOST_WIDE_INT
, rtx
);
692 static variable_def
**delete_slot_part (dataflow_set
*, rtx
, variable_def
**,
694 static void delete_variable_part (dataflow_set
*, rtx
,
695 decl_or_value
, HOST_WIDE_INT
);
696 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
697 static void vt_emit_notes (void);
699 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
700 static void vt_add_function_parameters (void);
701 static bool vt_initialize (void);
702 static void vt_finalize (void);
704 /* Given a SET, calculate the amount of stack adjustment it contains
705 PRE- and POST-modifying stack pointer.
706 This function is similar to stack_adjust_offset. */
709 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
712 rtx src
= SET_SRC (pattern
);
713 rtx dest
= SET_DEST (pattern
);
716 if (dest
== stack_pointer_rtx
)
718 /* (set (reg sp) (plus (reg sp) (const_int))) */
719 code
= GET_CODE (src
);
720 if (! (code
== PLUS
|| code
== MINUS
)
721 || XEXP (src
, 0) != stack_pointer_rtx
722 || !CONST_INT_P (XEXP (src
, 1)))
726 *post
+= INTVAL (XEXP (src
, 1));
728 *post
-= INTVAL (XEXP (src
, 1));
730 else if (MEM_P (dest
))
732 /* (set (mem (pre_dec (reg sp))) (foo)) */
733 src
= XEXP (dest
, 0);
734 code
= GET_CODE (src
);
740 if (XEXP (src
, 0) == stack_pointer_rtx
)
742 rtx val
= XEXP (XEXP (src
, 1), 1);
743 /* We handle only adjustments by constant amount. */
744 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
747 if (code
== PRE_MODIFY
)
748 *pre
-= INTVAL (val
);
750 *post
-= INTVAL (val
);
756 if (XEXP (src
, 0) == stack_pointer_rtx
)
758 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
764 if (XEXP (src
, 0) == stack_pointer_rtx
)
766 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
772 if (XEXP (src
, 0) == stack_pointer_rtx
)
774 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
780 if (XEXP (src
, 0) == stack_pointer_rtx
)
782 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
793 /* Given an INSN, calculate the amount of stack adjustment it contains
794 PRE- and POST-modifying stack pointer. */
797 insn_stack_adjust_offset_pre_post (rtx insn
, HOST_WIDE_INT
*pre
,
805 pattern
= PATTERN (insn
);
806 if (RTX_FRAME_RELATED_P (insn
))
808 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
810 pattern
= XEXP (expr
, 0);
813 if (GET_CODE (pattern
) == SET
)
814 stack_adjust_offset_pre_post (pattern
, pre
, post
);
815 else if (GET_CODE (pattern
) == PARALLEL
816 || GET_CODE (pattern
) == SEQUENCE
)
820 /* There may be stack adjustments inside compound insns. Search
822 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
823 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
824 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
828 /* Compute stack adjustments for all blocks by traversing DFS tree.
829 Return true when the adjustments on all incoming edges are consistent.
830 Heavily borrowed from pre_and_rev_post_order_compute. */
833 vt_stack_adjustments (void)
835 edge_iterator
*stack
;
838 /* Initialize entry block. */
839 VTI (ENTRY_BLOCK_PTR
)->visited
= true;
840 VTI (ENTRY_BLOCK_PTR
)->in
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
841 VTI (ENTRY_BLOCK_PTR
)->out
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
843 /* Allocate stack for back-tracking up CFG. */
844 stack
= XNEWVEC (edge_iterator
, n_basic_blocks_for_fn (cfun
) + 1);
847 /* Push the first edge on to the stack. */
848 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR
->succs
);
856 /* Look at the edge on the top of the stack. */
858 src
= ei_edge (ei
)->src
;
859 dest
= ei_edge (ei
)->dest
;
861 /* Check if the edge destination has been visited yet. */
862 if (!VTI (dest
)->visited
)
865 HOST_WIDE_INT pre
, post
, offset
;
866 VTI (dest
)->visited
= true;
867 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
869 if (dest
!= EXIT_BLOCK_PTR
)
870 for (insn
= BB_HEAD (dest
);
871 insn
!= NEXT_INSN (BB_END (dest
));
872 insn
= NEXT_INSN (insn
))
875 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
876 offset
+= pre
+ post
;
879 VTI (dest
)->out
.stack_adjust
= offset
;
881 if (EDGE_COUNT (dest
->succs
) > 0)
882 /* Since the DEST node has been visited for the first
883 time, check its successors. */
884 stack
[sp
++] = ei_start (dest
->succs
);
888 /* Check whether the adjustments on the edges are the same. */
889 if (VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
895 if (! ei_one_before_end_p (ei
))
896 /* Go to the next edge. */
897 ei_next (&stack
[sp
- 1]);
899 /* Return to previous level if there are no more edges. */
908 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
909 hard_frame_pointer_rtx is being mapped to it and offset for it. */
910 static rtx cfa_base_rtx
;
911 static HOST_WIDE_INT cfa_base_offset
;
913 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
914 or hard_frame_pointer_rtx. */
917 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
919 return plus_constant (Pmode
, cfa_base_rtx
, adjustment
+ cfa_base_offset
);
922 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
923 or -1 if the replacement shouldn't be done. */
924 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
926 /* Data for adjust_mems callback. */
928 struct adjust_mem_data
931 enum machine_mode mem_mode
;
932 HOST_WIDE_INT stack_adjust
;
936 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
937 transformation of wider mode arithmetics to narrower mode,
938 -1 if it is suitable and subexpressions shouldn't be
939 traversed and 0 if it is suitable and subexpressions should
940 be traversed. Called through for_each_rtx. */
943 use_narrower_mode_test (rtx
*loc
, void *data
)
945 rtx subreg
= (rtx
) data
;
947 if (CONSTANT_P (*loc
))
949 switch (GET_CODE (*loc
))
952 if (cselib_lookup (*loc
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
954 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (*loc
),
955 *loc
, subreg_lowpart_offset (GET_MODE (subreg
),
964 if (for_each_rtx (&XEXP (*loc
, 0), use_narrower_mode_test
, data
))
973 /* Transform X into narrower mode MODE from wider mode WMODE. */
976 use_narrower_mode (rtx x
, enum machine_mode mode
, enum machine_mode wmode
)
980 return lowpart_subreg (mode
, x
, wmode
);
981 switch (GET_CODE (x
))
984 return lowpart_subreg (mode
, x
, wmode
);
988 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
989 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
990 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
992 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
993 return simplify_gen_binary (ASHIFT
, mode
, op0
, XEXP (x
, 1));
999 /* Helper function for adjusting used MEMs. */
1002 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
1004 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
1005 rtx mem
, addr
= loc
, tem
;
1006 enum machine_mode mem_mode_save
;
1008 switch (GET_CODE (loc
))
1011 /* Don't do any sp or fp replacements outside of MEM addresses
1013 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
1015 if (loc
== stack_pointer_rtx
1016 && !frame_pointer_needed
1018 return compute_cfa_pointer (amd
->stack_adjust
);
1019 else if (loc
== hard_frame_pointer_rtx
1020 && frame_pointer_needed
1021 && hard_frame_pointer_adjustment
!= -1
1023 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
1024 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
1030 mem
= targetm
.delegitimize_address (mem
);
1031 if (mem
!= loc
&& !MEM_P (mem
))
1032 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
1035 addr
= XEXP (mem
, 0);
1036 mem_mode_save
= amd
->mem_mode
;
1037 amd
->mem_mode
= GET_MODE (mem
);
1038 store_save
= amd
->store
;
1040 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1041 amd
->store
= store_save
;
1042 amd
->mem_mode
= mem_mode_save
;
1044 addr
= targetm
.delegitimize_address (addr
);
1045 if (addr
!= XEXP (mem
, 0))
1046 mem
= replace_equiv_address_nv (mem
, addr
);
1048 mem
= avoid_constant_pool_reference (mem
);
1052 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1053 gen_int_mode (GET_CODE (loc
) == PRE_INC
1054 ? GET_MODE_SIZE (amd
->mem_mode
)
1055 : -GET_MODE_SIZE (amd
->mem_mode
),
1060 addr
= XEXP (loc
, 0);
1061 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
1062 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1063 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
1064 gen_int_mode ((GET_CODE (loc
) == PRE_INC
1065 || GET_CODE (loc
) == POST_INC
)
1066 ? GET_MODE_SIZE (amd
->mem_mode
)
1067 : -GET_MODE_SIZE (amd
->mem_mode
),
1069 amd
->side_effects
= alloc_EXPR_LIST (0,
1070 gen_rtx_SET (VOIDmode
,
1076 addr
= XEXP (loc
, 1);
1079 addr
= XEXP (loc
, 0);
1080 gcc_assert (amd
->mem_mode
!= VOIDmode
);
1081 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1082 amd
->side_effects
= alloc_EXPR_LIST (0,
1083 gen_rtx_SET (VOIDmode
,
1089 /* First try without delegitimization of whole MEMs and
1090 avoid_constant_pool_reference, which is more likely to succeed. */
1091 store_save
= amd
->store
;
1093 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
1095 amd
->store
= store_save
;
1096 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
1097 if (mem
== SUBREG_REG (loc
))
1102 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
1103 GET_MODE (SUBREG_REG (loc
)),
1107 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
1108 GET_MODE (SUBREG_REG (loc
)),
1110 if (tem
== NULL_RTX
)
1111 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
1113 if (MAY_HAVE_DEBUG_INSNS
1114 && GET_CODE (tem
) == SUBREG
1115 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
1116 || GET_CODE (SUBREG_REG (tem
)) == MINUS
1117 || GET_CODE (SUBREG_REG (tem
)) == MULT
1118 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
1119 && GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
1120 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
1121 && GET_MODE_SIZE (GET_MODE (tem
))
1122 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem
)))
1123 && subreg_lowpart_p (tem
)
1124 && !for_each_rtx (&SUBREG_REG (tem
), use_narrower_mode_test
, tem
))
1125 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
1126 GET_MODE (SUBREG_REG (tem
)));
1129 /* Don't do any replacements in second and following
1130 ASM_OPERANDS of inline-asm with multiple sets.
1131 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1132 and ASM_OPERANDS_LABEL_VEC need to be equal between
1133 all the ASM_OPERANDs in the insn and adjust_insn will
1135 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
1144 /* Helper function for replacement of uses. */
1147 adjust_mem_uses (rtx
*x
, void *data
)
1149 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
1151 validate_change (NULL_RTX
, x
, new_x
, true);
1154 /* Helper function for replacement of stores. */
1157 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
1161 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
1163 if (new_dest
!= SET_DEST (expr
))
1165 rtx xexpr
= CONST_CAST_RTX (expr
);
1166 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
1171 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1172 replace them with their value in the insn and add the side-effects
1173 as other sets to the insn. */
1176 adjust_insn (basic_block bb
, rtx insn
)
1178 struct adjust_mem_data amd
;
1181 #ifdef HAVE_window_save
1182 /* If the target machine has an explicit window save instruction, the
1183 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1184 if (RTX_FRAME_RELATED_P (insn
)
1185 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
1187 unsigned int i
, nregs
= vec_safe_length (windowed_parm_regs
);
1188 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
1191 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs
, i
, p
)
1193 XVECEXP (rtl
, 0, i
* 2)
1194 = gen_rtx_SET (VOIDmode
, p
->incoming
, p
->outgoing
);
1195 /* Do not clobber the attached DECL, but only the REG. */
1196 XVECEXP (rtl
, 0, i
* 2 + 1)
1197 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1198 gen_raw_REG (GET_MODE (p
->outgoing
),
1199 REGNO (p
->outgoing
)));
1202 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1207 amd
.mem_mode
= VOIDmode
;
1208 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1209 amd
.side_effects
= NULL_RTX
;
1212 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1215 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1216 && asm_noperands (PATTERN (insn
)) > 0
1217 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1222 /* inline-asm with multiple sets is tiny bit more complicated,
1223 because the 3 vectors in ASM_OPERANDS need to be shared between
1224 all ASM_OPERANDS in the instruction. adjust_mems will
1225 not touch ASM_OPERANDS other than the first one, asm_noperands
1226 test above needs to be called before that (otherwise it would fail)
1227 and afterwards this code fixes it up. */
1228 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1229 body
= PATTERN (insn
);
1230 set0
= XVECEXP (body
, 0, 0);
1231 gcc_checking_assert (GET_CODE (set0
) == SET
1232 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1233 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1234 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1235 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1239 set
= XVECEXP (body
, 0, i
);
1240 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1241 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1243 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1244 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1245 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1246 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1247 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1248 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1250 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1251 ASM_OPERANDS_INPUT_VEC (newsrc
)
1252 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1253 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1254 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1255 ASM_OPERANDS_LABEL_VEC (newsrc
)
1256 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1257 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1262 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1264 /* For read-only MEMs containing some constant, prefer those
1266 set
= single_set (insn
);
1267 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1269 rtx note
= find_reg_equal_equiv_note (insn
);
1271 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1272 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1275 if (amd
.side_effects
)
1277 rtx
*pat
, new_pat
, s
;
1280 pat
= &PATTERN (insn
);
1281 if (GET_CODE (*pat
) == COND_EXEC
)
1282 pat
= &COND_EXEC_CODE (*pat
);
1283 if (GET_CODE (*pat
) == PARALLEL
)
1284 oldn
= XVECLEN (*pat
, 0);
1287 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1289 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1290 if (GET_CODE (*pat
) == PARALLEL
)
1291 for (i
= 0; i
< oldn
; i
++)
1292 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1294 XVECEXP (new_pat
, 0, 0) = *pat
;
1295 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1296 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1297 free_EXPR_LIST_list (&amd
.side_effects
);
1298 validate_change (NULL_RTX
, pat
, new_pat
, true);
1302 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
1304 dv_as_rtx (decl_or_value dv
)
1308 if (dv_is_value_p (dv
))
1309 return dv_as_value (dv
);
1311 decl
= dv_as_decl (dv
);
1313 gcc_checking_assert (TREE_CODE (decl
) == DEBUG_EXPR_DECL
);
1314 return DECL_RTL_KNOWN_SET (decl
);
1317 /* Return nonzero if a decl_or_value must not have more than one
1318 variable part. The returned value discriminates among various
1319 kinds of one-part DVs ccording to enum onepart_enum. */
1320 static inline onepart_enum_t
1321 dv_onepart_p (decl_or_value dv
)
1325 if (!MAY_HAVE_DEBUG_INSNS
)
1328 if (dv_is_value_p (dv
))
1329 return ONEPART_VALUE
;
1331 decl
= dv_as_decl (dv
);
1333 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1334 return ONEPART_DEXPR
;
1336 if (target_for_debug_bind (decl
) != NULL_TREE
)
1337 return ONEPART_VDECL
;
1342 /* Return the variable pool to be used for a dv of type ONEPART. */
1343 static inline alloc_pool
1344 onepart_pool (onepart_enum_t onepart
)
1346 return onepart
? valvar_pool
: var_pool
;
1349 /* Build a decl_or_value out of a decl. */
1350 static inline decl_or_value
1351 dv_from_decl (tree decl
)
1355 gcc_checking_assert (dv_is_decl_p (dv
));
1359 /* Build a decl_or_value out of a value. */
1360 static inline decl_or_value
1361 dv_from_value (rtx value
)
1365 gcc_checking_assert (dv_is_value_p (dv
));
1369 /* Return a value or the decl of a debug_expr as a decl_or_value. */
1370 static inline decl_or_value
1375 switch (GET_CODE (x
))
1378 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
1379 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x
)) == x
);
1383 dv
= dv_from_value (x
);
1393 extern void debug_dv (decl_or_value dv
);
1396 debug_dv (decl_or_value dv
)
1398 if (dv_is_value_p (dv
))
1399 debug_rtx (dv_as_value (dv
));
1401 debug_generic_stmt (dv_as_decl (dv
));
1404 static void loc_exp_dep_clear (variable var
);
1406 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1409 variable_htab_free (void *elem
)
1412 variable var
= (variable
) elem
;
1413 location_chain node
, next
;
1415 gcc_checking_assert (var
->refcount
> 0);
1418 if (var
->refcount
> 0)
1421 for (i
= 0; i
< var
->n_var_parts
; i
++)
1423 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1426 pool_free (loc_chain_pool
, node
);
1428 var
->var_part
[i
].loc_chain
= NULL
;
1430 if (var
->onepart
&& VAR_LOC_1PAUX (var
))
1432 loc_exp_dep_clear (var
);
1433 if (VAR_LOC_DEP_LST (var
))
1434 VAR_LOC_DEP_LST (var
)->pprev
= NULL
;
1435 XDELETE (VAR_LOC_1PAUX (var
));
1436 /* These may be reused across functions, so reset
1438 if (var
->onepart
== ONEPART_DEXPR
)
1439 set_dv_changed (var
->dv
, true);
1441 pool_free (onepart_pool (var
->onepart
), var
);
1444 /* Initialize the set (array) SET of attrs to empty lists. */
1447 init_attrs_list_set (attrs
*set
)
1451 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1455 /* Make the list *LISTP empty. */
1458 attrs_list_clear (attrs
*listp
)
1462 for (list
= *listp
; list
; list
= next
)
1465 pool_free (attrs_pool
, list
);
1470 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1473 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1475 for (; list
; list
= list
->next
)
1476 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1481 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1484 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1485 HOST_WIDE_INT offset
, rtx loc
)
1489 list
= (attrs
) pool_alloc (attrs_pool
);
1492 list
->offset
= offset
;
1493 list
->next
= *listp
;
1497 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1500 attrs_list_copy (attrs
*dstp
, attrs src
)
1504 attrs_list_clear (dstp
);
1505 for (; src
; src
= src
->next
)
1507 n
= (attrs
) pool_alloc (attrs_pool
);
1510 n
->offset
= src
->offset
;
1516 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1519 attrs_list_union (attrs
*dstp
, attrs src
)
1521 for (; src
; src
= src
->next
)
1523 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1524 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1528 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1532 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1534 gcc_assert (!*dstp
);
1535 for (; src
; src
= src
->next
)
1537 if (!dv_onepart_p (src
->dv
))
1538 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1540 for (src
= src2
; src
; src
= src
->next
)
1542 if (!dv_onepart_p (src
->dv
)
1543 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1544 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1548 /* Shared hashtable support. */
1550 /* Return true if VARS is shared. */
1553 shared_hash_shared (shared_hash vars
)
1555 return vars
->refcount
> 1;
1558 /* Return the hash table for VARS. */
1560 static inline variable_table_type
1561 shared_hash_htab (shared_hash vars
)
1566 /* Return true if VAR is shared, or maybe because VARS is shared. */
1569 shared_var_p (variable var
, shared_hash vars
)
1571 /* Don't count an entry in the changed_variables table as a duplicate. */
1572 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1573 || shared_hash_shared (vars
));
1576 /* Copy variables into a new hash table. */
1579 shared_hash_unshare (shared_hash vars
)
1581 shared_hash new_vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
1582 gcc_assert (vars
->refcount
> 1);
1583 new_vars
->refcount
= 1;
1584 new_vars
->htab
.create (vars
->htab
.elements () + 3);
1585 vars_copy (new_vars
->htab
, vars
->htab
);
1590 /* Increment reference counter on VARS and return it. */
1592 static inline shared_hash
1593 shared_hash_copy (shared_hash vars
)
1599 /* Decrement reference counter and destroy hash table if not shared
1603 shared_hash_destroy (shared_hash vars
)
1605 gcc_checking_assert (vars
->refcount
> 0);
1606 if (--vars
->refcount
== 0)
1608 vars
->htab
.dispose ();
1609 pool_free (shared_hash_pool
, vars
);
1613 /* Unshare *PVARS if shared and return slot for DV. If INS is
1614 INSERT, insert it if not already present. */
1616 static inline variable_def
**
1617 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1618 hashval_t dvhash
, enum insert_option ins
)
1620 if (shared_hash_shared (*pvars
))
1621 *pvars
= shared_hash_unshare (*pvars
);
1622 return shared_hash_htab (*pvars
).find_slot_with_hash (dv
, dvhash
, ins
);
1625 static inline variable_def
**
1626 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1627 enum insert_option ins
)
1629 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1632 /* Return slot for DV, if it is already present in the hash table.
1633 If it is not present, insert it only VARS is not shared, otherwise
1636 static inline variable_def
**
1637 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1639 return shared_hash_htab (vars
).find_slot_with_hash (dv
, dvhash
,
1640 shared_hash_shared (vars
)
1641 ? NO_INSERT
: INSERT
);
1644 static inline variable_def
**
1645 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1647 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1650 /* Return slot for DV only if it is already present in the hash table. */
1652 static inline variable_def
**
1653 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1656 return shared_hash_htab (vars
).find_slot_with_hash (dv
, dvhash
, NO_INSERT
);
1659 static inline variable_def
**
1660 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1662 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1665 /* Return variable for DV or NULL if not already present in the hash
1668 static inline variable
1669 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1671 return shared_hash_htab (vars
).find_with_hash (dv
, dvhash
);
1674 static inline variable
1675 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1677 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1680 /* Return true if TVAL is better than CVAL as a canonival value. We
1681 choose lowest-numbered VALUEs, using the RTX address as a
1682 tie-breaker. The idea is to arrange them into a star topology,
1683 such that all of them are at most one step away from the canonical
1684 value, and the canonical value has backlinks to all of them, in
1685 addition to all the actual locations. We don't enforce this
1686 topology throughout the entire dataflow analysis, though.
1690 canon_value_cmp (rtx tval
, rtx cval
)
1693 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1696 static bool dst_can_be_shared
;
1698 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1700 static variable_def
**
1701 unshare_variable (dataflow_set
*set
, variable_def
**slot
, variable var
,
1702 enum var_init_status initialized
)
1707 new_var
= (variable
) pool_alloc (onepart_pool (var
->onepart
));
1708 new_var
->dv
= var
->dv
;
1709 new_var
->refcount
= 1;
1711 new_var
->n_var_parts
= var
->n_var_parts
;
1712 new_var
->onepart
= var
->onepart
;
1713 new_var
->in_changed_variables
= false;
1715 if (! flag_var_tracking_uninit
)
1716 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1718 for (i
= 0; i
< var
->n_var_parts
; i
++)
1720 location_chain node
;
1721 location_chain
*nextp
;
1723 if (i
== 0 && var
->onepart
)
1725 /* One-part auxiliary data is only used while emitting
1726 notes, so propagate it to the new variable in the active
1727 dataflow set. If we're not emitting notes, this will be
1729 gcc_checking_assert (!VAR_LOC_1PAUX (var
) || emit_notes
);
1730 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (var
);
1731 VAR_LOC_1PAUX (var
) = NULL
;
1734 VAR_PART_OFFSET (new_var
, i
) = VAR_PART_OFFSET (var
, i
);
1735 nextp
= &new_var
->var_part
[i
].loc_chain
;
1736 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1738 location_chain new_lc
;
1740 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1741 new_lc
->next
= NULL
;
1742 if (node
->init
> initialized
)
1743 new_lc
->init
= node
->init
;
1745 new_lc
->init
= initialized
;
1746 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1747 new_lc
->set_src
= node
->set_src
;
1749 new_lc
->set_src
= NULL
;
1750 new_lc
->loc
= node
->loc
;
1753 nextp
= &new_lc
->next
;
1756 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1759 dst_can_be_shared
= false;
1760 if (shared_hash_shared (set
->vars
))
1761 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1762 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1763 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1765 if (var
->in_changed_variables
)
1767 variable_def
**cslot
1768 = changed_variables
.find_slot_with_hash (var
->dv
,
1769 dv_htab_hash (var
->dv
), NO_INSERT
);
1770 gcc_assert (*cslot
== (void *) var
);
1771 var
->in_changed_variables
= false;
1772 variable_htab_free (var
);
1774 new_var
->in_changed_variables
= true;
1779 /* Copy all variables from hash table SRC to hash table DST. */
1782 vars_copy (variable_table_type dst
, variable_table_type src
)
1784 variable_iterator_type hi
;
1787 FOR_EACH_HASH_TABLE_ELEMENT (src
, var
, variable
, hi
)
1789 variable_def
**dstp
;
1791 dstp
= dst
.find_slot_with_hash (var
->dv
, dv_htab_hash (var
->dv
), INSERT
);
1796 /* Map a decl to its main debug decl. */
1799 var_debug_decl (tree decl
)
1801 if (decl
&& TREE_CODE (decl
) == VAR_DECL
1802 && DECL_HAS_DEBUG_EXPR_P (decl
))
1804 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1805 if (DECL_P (debugdecl
))
1812 /* Set the register LOC to contain DV, OFFSET. */
1815 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1816 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1817 enum insert_option iopt
)
1820 bool decl_p
= dv_is_decl_p (dv
);
1823 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1825 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1826 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1827 && node
->offset
== offset
)
1830 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1831 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1834 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1837 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1840 tree decl
= REG_EXPR (loc
);
1841 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1843 var_reg_decl_set (set
, loc
, initialized
,
1844 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1847 static enum var_init_status
1848 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1852 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1854 if (! flag_var_tracking_uninit
)
1855 return VAR_INIT_STATUS_INITIALIZED
;
1857 var
= shared_hash_find (set
->vars
, dv
);
1860 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1862 location_chain nextp
;
1863 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1864 if (rtx_equal_p (nextp
->loc
, loc
))
1866 ret_val
= nextp
->init
;
1875 /* Delete current content of register LOC in dataflow set SET and set
1876 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1877 MODIFY is true, any other live copies of the same variable part are
1878 also deleted from the dataflow set, otherwise the variable part is
1879 assumed to be copied from another location holding the same
1883 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1884 enum var_init_status initialized
, rtx set_src
)
1886 tree decl
= REG_EXPR (loc
);
1887 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1891 decl
= var_debug_decl (decl
);
1893 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1894 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1896 nextp
= &set
->regs
[REGNO (loc
)];
1897 for (node
= *nextp
; node
; node
= next
)
1900 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1902 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1903 pool_free (attrs_pool
, node
);
1909 nextp
= &node
->next
;
1913 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1914 var_reg_set (set
, loc
, initialized
, set_src
);
1917 /* Delete the association of register LOC in dataflow set SET with any
1918 variables that aren't onepart. If CLOBBER is true, also delete any
1919 other live copies of the same variable part, and delete the
1920 association with onepart dvs too. */
1923 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1925 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1930 tree decl
= REG_EXPR (loc
);
1931 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1933 decl
= var_debug_decl (decl
);
1935 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1938 for (node
= *nextp
; node
; node
= next
)
1941 if (clobber
|| !dv_onepart_p (node
->dv
))
1943 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1944 pool_free (attrs_pool
, node
);
1948 nextp
= &node
->next
;
1952 /* Delete content of register with number REGNO in dataflow set SET. */
1955 var_regno_delete (dataflow_set
*set
, int regno
)
1957 attrs
*reg
= &set
->regs
[regno
];
1960 for (node
= *reg
; node
; node
= next
)
1963 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1964 pool_free (attrs_pool
, node
);
1969 /* Return true if I is the negated value of a power of two. */
1971 negative_power_of_two_p (HOST_WIDE_INT i
)
1973 unsigned HOST_WIDE_INT x
= -(unsigned HOST_WIDE_INT
)i
;
1974 return x
== (x
& -x
);
1977 /* Strip constant offsets and alignments off of LOC. Return the base
1981 vt_get_canonicalize_base (rtx loc
)
1983 while ((GET_CODE (loc
) == PLUS
1984 || GET_CODE (loc
) == AND
)
1985 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
1986 && (GET_CODE (loc
) != AND
1987 || negative_power_of_two_p (INTVAL (XEXP (loc
, 1)))))
1988 loc
= XEXP (loc
, 0);
1993 /* This caches canonicalized addresses for VALUEs, computed using
1994 information in the global cselib table. */
1995 static struct pointer_map_t
*global_get_addr_cache
;
1997 /* This caches canonicalized addresses for VALUEs, computed using
1998 information from the global cache and information pertaining to a
1999 basic block being analyzed. */
2000 static struct pointer_map_t
*local_get_addr_cache
;
2002 static rtx
vt_canonicalize_addr (dataflow_set
*, rtx
);
2004 /* Return the canonical address for LOC, that must be a VALUE, using a
2005 cached global equivalence or computing it and storing it in the
2009 get_addr_from_global_cache (rtx
const loc
)
2014 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2016 slot
= pointer_map_insert (global_get_addr_cache
, loc
);
2020 x
= canon_rtx (get_addr (loc
));
2022 /* Tentative, avoiding infinite recursion. */
2027 rtx nx
= vt_canonicalize_addr (NULL
, x
);
2030 /* The table may have moved during recursion, recompute
2032 slot
= pointer_map_contains (global_get_addr_cache
, loc
);
2040 /* Return the canonical address for LOC, that must be a VALUE, using a
2041 cached local equivalence or computing it and storing it in the
2045 get_addr_from_local_cache (dataflow_set
*set
, rtx
const loc
)
2053 gcc_checking_assert (GET_CODE (loc
) == VALUE
);
2055 slot
= pointer_map_insert (local_get_addr_cache
, loc
);
2059 x
= get_addr_from_global_cache (loc
);
2061 /* Tentative, avoiding infinite recursion. */
2064 /* Recurse to cache local expansion of X, or if we need to search
2065 for a VALUE in the expansion. */
2068 rtx nx
= vt_canonicalize_addr (set
, x
);
2071 slot
= pointer_map_contains (local_get_addr_cache
, loc
);
2077 dv
= dv_from_rtx (x
);
2078 var
= shared_hash_find (set
->vars
, dv
);
2082 /* Look for an improved equivalent expression. */
2083 for (l
= var
->var_part
[0].loc_chain
; l
; l
= l
->next
)
2085 rtx base
= vt_get_canonicalize_base (l
->loc
);
2086 if (GET_CODE (base
) == VALUE
2087 && canon_value_cmp (base
, loc
))
2089 rtx nx
= vt_canonicalize_addr (set
, l
->loc
);
2092 slot
= pointer_map_contains (local_get_addr_cache
, loc
);
2102 /* Canonicalize LOC using equivalences from SET in addition to those
2103 in the cselib static table. It expects a VALUE-based expression,
2104 and it will only substitute VALUEs with other VALUEs or
2105 function-global equivalences, so that, if two addresses have base
2106 VALUEs that are locally or globally related in ways that
2107 memrefs_conflict_p cares about, they will both canonicalize to
2108 expressions that have the same base VALUE.
2110 The use of VALUEs as canonical base addresses enables the canonical
2111 RTXs to remain unchanged globally, if they resolve to a constant,
2112 or throughout a basic block otherwise, so that they can be cached
2113 and the cache needs not be invalidated when REGs, MEMs or such
2117 vt_canonicalize_addr (dataflow_set
*set
, rtx oloc
)
2119 HOST_WIDE_INT ofst
= 0;
2120 enum machine_mode mode
= GET_MODE (oloc
);
2127 while (GET_CODE (loc
) == PLUS
2128 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2130 ofst
+= INTVAL (XEXP (loc
, 1));
2131 loc
= XEXP (loc
, 0);
2134 /* Alignment operations can't normally be combined, so just
2135 canonicalize the base and we're done. We'll normally have
2136 only one stack alignment anyway. */
2137 if (GET_CODE (loc
) == AND
2138 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
2139 && negative_power_of_two_p (INTVAL (XEXP (loc
, 1))))
2141 x
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2142 if (x
!= XEXP (loc
, 0))
2143 loc
= gen_rtx_AND (mode
, x
, XEXP (loc
, 1));
2147 if (GET_CODE (loc
) == VALUE
)
2150 loc
= get_addr_from_local_cache (set
, loc
);
2152 loc
= get_addr_from_global_cache (loc
);
2154 /* Consolidate plus_constants. */
2155 while (ofst
&& GET_CODE (loc
) == PLUS
2156 && GET_CODE (XEXP (loc
, 1)) == CONST_INT
)
2158 ofst
+= INTVAL (XEXP (loc
, 1));
2159 loc
= XEXP (loc
, 0);
2166 x
= canon_rtx (loc
);
2173 /* Add OFST back in. */
2176 /* Don't build new RTL if we can help it. */
2177 if (GET_CODE (oloc
) == PLUS
2178 && XEXP (oloc
, 0) == loc
2179 && INTVAL (XEXP (oloc
, 1)) == ofst
)
2182 loc
= plus_constant (mode
, loc
, ofst
);
2188 /* Return true iff there's a true dependence between MLOC and LOC.
2189 MADDR must be a canonicalized version of MLOC's address. */
2192 vt_canon_true_dep (dataflow_set
*set
, rtx mloc
, rtx maddr
, rtx loc
)
2194 if (GET_CODE (loc
) != MEM
)
2197 rtx addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2198 if (!canon_true_dependence (mloc
, GET_MODE (mloc
), maddr
, loc
, addr
))
2204 /* Hold parameters for the hashtab traversal function
2205 drop_overlapping_mem_locs, see below. */
2207 struct overlapping_mems
2213 /* Remove all MEMs that overlap with COMS->LOC from the location list
2214 of a hash table entry for a value. COMS->ADDR must be a
2215 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2216 canonicalized itself. */
2219 drop_overlapping_mem_locs (variable_def
**slot
, overlapping_mems
*coms
)
2221 dataflow_set
*set
= coms
->set
;
2222 rtx mloc
= coms
->loc
, addr
= coms
->addr
;
2223 variable var
= *slot
;
2225 if (var
->onepart
== ONEPART_VALUE
)
2227 location_chain loc
, *locp
;
2228 bool changed
= false;
2231 gcc_assert (var
->n_var_parts
== 1);
2233 if (shared_var_p (var
, set
->vars
))
2235 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
2236 if (vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2242 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
2244 gcc_assert (var
->n_var_parts
== 1);
2247 if (VAR_LOC_1PAUX (var
))
2248 cur_loc
= VAR_LOC_FROM (var
);
2250 cur_loc
= var
->var_part
[0].cur_loc
;
2252 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
2255 if (!vt_canon_true_dep (set
, mloc
, addr
, loc
->loc
))
2262 /* If we have deleted the location which was last emitted
2263 we have to emit new location so add the variable to set
2264 of changed variables. */
2265 if (cur_loc
== loc
->loc
)
2268 var
->var_part
[0].cur_loc
= NULL
;
2269 if (VAR_LOC_1PAUX (var
))
2270 VAR_LOC_FROM (var
) = NULL
;
2272 pool_free (loc_chain_pool
, loc
);
2275 if (!var
->var_part
[0].loc_chain
)
2281 variable_was_changed (var
, set
);
2287 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2290 clobber_overlapping_mems (dataflow_set
*set
, rtx loc
)
2292 struct overlapping_mems coms
;
2294 gcc_checking_assert (GET_CODE (loc
) == MEM
);
2297 coms
.loc
= canon_rtx (loc
);
2298 coms
.addr
= vt_canonicalize_addr (set
, XEXP (loc
, 0));
2300 set
->traversed_vars
= set
->vars
;
2301 shared_hash_htab (set
->vars
)
2302 .traverse
<overlapping_mems
*, drop_overlapping_mem_locs
> (&coms
);
2303 set
->traversed_vars
= NULL
;
2306 /* Set the location of DV, OFFSET as the MEM LOC. */
2309 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2310 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
2311 enum insert_option iopt
)
2313 if (dv_is_decl_p (dv
))
2314 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
2316 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
2319 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2321 Adjust the address first if it is stack pointer based. */
2324 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
2327 tree decl
= MEM_EXPR (loc
);
2328 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2330 var_mem_decl_set (set
, loc
, initialized
,
2331 dv_from_decl (decl
), offset
, set_src
, INSERT
);
2334 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2335 dataflow set SET to LOC. If MODIFY is true, any other live copies
2336 of the same variable part are also deleted from the dataflow set,
2337 otherwise the variable part is assumed to be copied from another
2338 location holding the same part.
2339 Adjust the address first if it is stack pointer based. */
2342 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
2343 enum var_init_status initialized
, rtx set_src
)
2345 tree decl
= MEM_EXPR (loc
);
2346 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2348 clobber_overlapping_mems (set
, loc
);
2349 decl
= var_debug_decl (decl
);
2351 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
2352 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
2355 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
2356 var_mem_set (set
, loc
, initialized
, set_src
);
2359 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2360 true, also delete any other live copies of the same variable part.
2361 Adjust the address first if it is stack pointer based. */
2364 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
2366 tree decl
= MEM_EXPR (loc
);
2367 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
2369 clobber_overlapping_mems (set
, loc
);
2370 decl
= var_debug_decl (decl
);
2372 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
2373 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
2376 /* Return true if LOC should not be expanded for location expressions,
2380 unsuitable_loc (rtx loc
)
2382 switch (GET_CODE (loc
))
2396 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2400 val_bind (dataflow_set
*set
, rtx val
, rtx loc
, bool modified
)
2405 var_regno_delete (set
, REGNO (loc
));
2406 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2407 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2409 else if (MEM_P (loc
))
2411 struct elt_loc_list
*l
= CSELIB_VAL_PTR (val
)->locs
;
2414 clobber_overlapping_mems (set
, loc
);
2416 if (l
&& GET_CODE (l
->loc
) == VALUE
)
2417 l
= canonical_cselib_val (CSELIB_VAL_PTR (l
->loc
))->locs
;
2419 /* If this MEM is a global constant, we don't need it in the
2420 dynamic tables. ??? We should test this before emitting the
2421 micro-op in the first place. */
2423 if (GET_CODE (l
->loc
) == MEM
&& XEXP (l
->loc
, 0) == XEXP (loc
, 0))
2429 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2430 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2434 /* Other kinds of equivalences are necessarily static, at least
2435 so long as we do not perform substitutions while merging
2438 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2439 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2443 /* Bind a value to a location it was just stored in. If MODIFIED
2444 holds, assume the location was modified, detaching it from any
2445 values bound to it. */
2448 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
, bool modified
)
2450 cselib_val
*v
= CSELIB_VAL_PTR (val
);
2452 gcc_assert (cselib_preserved_value_p (v
));
2456 fprintf (dump_file
, "%i: ", insn
? INSN_UID (insn
) : 0);
2457 print_inline_rtx (dump_file
, loc
, 0);
2458 fprintf (dump_file
, " evaluates to ");
2459 print_inline_rtx (dump_file
, val
, 0);
2462 struct elt_loc_list
*l
;
2463 for (l
= v
->locs
; l
; l
= l
->next
)
2465 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
2466 print_inline_rtx (dump_file
, l
->loc
, 0);
2469 fprintf (dump_file
, "\n");
2472 gcc_checking_assert (!unsuitable_loc (loc
));
2474 val_bind (set
, val
, loc
, modified
);
2477 /* Clear (canonical address) slots that reference X. */
2480 local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED
,
2481 void **slot
, void *x
)
2483 if (vt_get_canonicalize_base ((rtx
)*slot
) == x
)
2488 /* Reset this node, detaching all its equivalences.  Return the slot
2489 in the variable hash table that holds dv, if there is one.  */
/* NOTE(review): lines are elided in this extract (braces, the cval
   declaration, and some control flow); fragments are kept verbatim.  */
2492 val_reset (dataflow_set
*set
, decl_or_value dv
)
2494 variable var
= shared_hash_find (set
->vars
, dv
) ;
2495 location_chain node
;
/* Nothing to do for an unknown or empty variable.  */
2498 if (!var
|| !var
->n_var_parts
)
2501 gcc_assert (var
->n_var_parts
== 1);
2503 if (var
->onepart
== ONEPART_VALUE
)
2505 rtx x
= dv_as_value (dv
);
2508 /* Relationships in the global cache don't change, so reset the
2509 local cache entry only.  */
2510 slot
= pointer_map_contains (local_get_addr_cache
, x
);
2513 /* If the value resolved back to itself, odds are that other
2514 values may have cached it too.  These entries now refer
2515 to the old X, so detach them too.  Entries that used the
2516 old X but resolved to something else remain ok as long as
2517 that something else isn't also reset.  */
2519 pointer_map_traverse (local_get_addr_cache
,
2520 local_get_addr_clear_given_value
, x
);
/* First pass: pick the most-canonical VALUE in the chain as cval
   (cval's declaration/initialization is elided in this extract).  */
2526 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2527 if (GET_CODE (node
->loc
) == VALUE
2528 && canon_value_cmp (node
->loc
, cval
))
/* Second pass: detach equivalences from DV, re-pointing them at the
   chosen canonical value.  */
2531 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2532 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
2534 /* Redirect the equivalence link to the new canonical
2535 value, or simply remove it if it would point at
2538 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
2539 0, node
->init
, node
->set_src
, NO_INSERT
);
2540 delete_variable_part (set
, dv_as_value (dv
),
2541 dv_from_value (node
->loc
), 0);
2546 decl_or_value cdv
= dv_from_value (cval
);
2548 /* Keep the remaining values connected, accumulating links
2549 in the canonical value.  */
2550 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2552 if (node
->loc
== cval
)
2554 else if (GET_CODE (node
->loc
) == REG
)
2555 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2556 node
->set_src
, NO_INSERT
);
2557 else if (GET_CODE (node
->loc
) == MEM
)
2558 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
2559 node
->set_src
, NO_INSERT
);
2561 set_variable_part (set
, node
->loc
, cdv
, 0,
2562 node
->init
, node
->set_src
, NO_INSERT
);
2566 /* We remove this last, to make sure that the canonical value is not
2567 removed to the point of requiring reinsertion.  */
2569 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
/* Finally drop DV's own location chain.  */
2571 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2574 /* Find the values in a given location and map the val to another
2575 value, if it is unique, or add the location as one holding the
/* NOTE(review): lines are elided in this extract; fragments kept
   verbatim.  Presumably the tail of the comment read "...holding the
   value" -- confirm against upstream.  */
2579 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
)
2581 decl_or_value dv
= dv_from_value (val
);
/* Detailed dumping of the insn, VAL and LOC.  */
2583 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2586 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2588 fprintf (dump_file
, "head: ");
2589 print_inline_rtx (dump_file
, val
, 0);
2590 fputs (" is at ", dump_file
);
2591 print_inline_rtx (dump_file
, loc
, 0);
2592 fputc ('\n', dump_file
);
/* Detach any stale equivalences of VAL before rebinding.  */
2595 val_reset (set
, dv
);
2597 gcc_checking_assert (!unsuitable_loc (loc
));
/* Register case (the REG_P guard is elided here): scan the register's
   attribute list for same-mode values to record equivalences with.  */
2601 attrs node
, found
= NULL
;
2603 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2604 if (dv_is_value_p (node
->dv
)
2605 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2609 /* Map incoming equivalences.  ??? Wouldn't it be nice if
2610 we just started sharing the location lists?  Maybe a
2611 circular list ending at the value itself or some
2613 set_variable_part (set
, dv_as_value (node
->dv
),
2614 dv_from_value (val
), node
->offset
,
2615 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2616 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2617 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2620 /* If we didn't find any equivalence, we need to remember that
2621 this value is held in the named register.  */
2625 /* ??? Attempt to find and merge equivalent MEMs or other
2628 val_bind (set
, val
, loc
, false);
2631 /* Initialize dataflow set SET to be empty.
2632 VARS_SIZE is the initial size of hash table VARS. */
2635 dataflow_set_init (dataflow_set
*set
)
2637 init_attrs_list_set (set
->regs
);
2638 set
->vars
= shared_hash_copy (empty_shared_hash
);
2639 set
->stack_adjust
= 0;
2640 set
->traversed_vars
= NULL
;
2643 /* Delete the contents of dataflow set SET. */
2646 dataflow_set_clear (dataflow_set
*set
)
2650 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2651 attrs_list_clear (&set
->regs
[i
]);
2653 shared_hash_destroy (set
->vars
);
2654 set
->vars
= shared_hash_copy (empty_shared_hash
);
2657 /* Copy the contents of dataflow set SRC to DST. */
2660 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2664 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2665 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2667 shared_hash_destroy (dst
->vars
);
2668 dst
->vars
= shared_hash_copy (src
->vars
);
2669 dst
->stack_adjust
= src
->stack_adjust
;
2672 /* Information for merging lists of locations for a given offset of variable.
/* NOTE(review): the struct's member declarations were dropped by the
   extraction; only the per-member comments survive.  The comparator
   below accesses members named `pos' and `pos_dst', and variable_union
   accesses a member named `lc'.  */
2674 struct variable_union_info
2676 /* Node of the location chain.  */
2679 /* The sum of positions in the input chains.  */
2682 /* The position in the chain of DST dataflow set.  */
2686 /* Buffer for location list sorting and its allocated size.  */
2687 static struct variable_union_info
*vui_vec
;
2688 static int vui_allocated
;
2690 /* Compare function for qsort, order the structures by POS element. */
2693 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2695 const struct variable_union_info
*const i1
=
2696 (const struct variable_union_info
*) n1
;
2697 const struct variable_union_info
*const i2
=
2698 ( const struct variable_union_info
*) n2
;
2700 if (i1
->pos
!= i2
->pos
)
2701 return i1
->pos
- i2
->pos
;
2703 return (i1
->pos_dst
- i2
->pos_dst
);
2706 /* Compute union of location parts of variable *SLOT and the same variable
2707 from hash table DATA.  Compute "sorted" union of the location chains
2708 for common offsets, i.e. the locations of a variable part are sorted by
2709 a priority where the priority is the sum of the positions in the 2 chains
2710 (if a location is only in one list the position in the second list is
2711 defined to be larger than the length of the chains).
2712 When we are updating the location parts the newest location is in the
2713 beginning of the chain, so when we do the described "sorted" union
2714 we keep the newest locations in the beginning.  */
/* NOTE(review): this extract has many lines elided (braces, several
   declarations such as dst/i/j/k/src_l/dst_l/ii/jj/n, returns, and
   some control flow); the surviving fragments are kept verbatim.  */
2717 variable_union (variable src
, dataflow_set
*set
)
2720 variable_def
**dstp
;
2723 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2724 if (!dstp
|| !*dstp
)
2728 dst_can_be_shared
= false;
2730 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2734 /* Continue traversing the hash table.  */
2740 gcc_assert (src
->n_var_parts
);
2741 gcc_checking_assert (src
->onepart
== dst
->onepart
);
2743 /* We can combine one-part variables very efficiently, because their
2744 entries are in canonical order.  */
2747 location_chain
*nodep
, dnode
, snode
;
2749 gcc_assert (src
->n_var_parts
== 1
2750 && dst
->n_var_parts
== 1);
2752 snode
= src
->var_part
[0].loc_chain
;
2755 restart_onepart_unshared
:
2756 nodep
= &dst
->var_part
[0].loc_chain
;
/* Merge the canonically-ordered chains; r < 0 means SRC's node must
   be inserted before the current DST node.  */
2762 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2766 location_chain nnode
;
2768 if (shared_var_p (dst
, set
->vars
))
2770 dstp
= unshare_variable (set
, dstp
, dst
,
2771 VAR_INIT_STATUS_INITIALIZED
);
2773 goto restart_onepart_unshared
;
2776 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2777 nnode
->loc
= snode
->loc
;
2778 nnode
->init
= snode
->init
;
2779 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2780 nnode
->set_src
= NULL
;
2782 nnode
->set_src
= snode
->set_src
;
2783 nnode
->next
= dnode
;
2787 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2790 snode
= snode
->next
;
2792 nodep
= &dnode
->next
;
2799 gcc_checking_assert (!src
->onepart
);
2801 /* Count the number of location parts, result is K.  */
2802 for (i
= 0, j
= 0, k
= 0;
2803 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2805 if (VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2810 else if (VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
2815 k
+= src
->n_var_parts
- i
;
2816 k
+= dst
->n_var_parts
- j
;
2818 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2819 thus there are at most MAX_VAR_PARTS different offsets.  */
2820 gcc_checking_assert (dst
->onepart
? k
== 1 : k
<= MAX_VAR_PARTS
);
2822 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2824 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
/* Walk the parts from the highest offsets downward, merging into
   slot K.  */
2828 i
= src
->n_var_parts
- 1;
2829 j
= dst
->n_var_parts
- 1;
2830 dst
->n_var_parts
= k
;
2832 for (k
--; k
>= 0; k
--)
2834 location_chain node
, node2
;
2836 if (i
>= 0 && j
>= 0
2837 && VAR_PART_OFFSET (src
, i
) == VAR_PART_OFFSET (dst
, j
))
2839 /* Compute the "sorted" union of the chains, i.e. the locations which
2840 are in both chains go first, they are sorted by the sum of
2841 positions in the chains.  */
2844 struct variable_union_info
*vui
;
2846 /* If DST is shared compare the location chains.
2847 If they are different we will modify the chain in DST with
2848 high probability so make a copy of DST.  */
2849 if (shared_var_p (dst
, set
->vars
))
2851 for (node
= src
->var_part
[i
].loc_chain
,
2852 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2853 node
= node
->next
, node2
= node2
->next
)
2855 if (!((REG_P (node2
->loc
)
2856 && REG_P (node
->loc
)
2857 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2858 || rtx_equal_p (node2
->loc
, node
->loc
)))
2860 if (node2
->init
< node
->init
)
2861 node2
->init
= node
->init
;
2867 dstp
= unshare_variable (set
, dstp
, dst
,
2868 VAR_INIT_STATUS_UNKNOWN
);
2869 dst
= (variable
)*dstp
;
/* Count chain lengths (bodies elided in this extract; presumably
   they computed src_l and dst_l -- confirm against upstream).  */
2874 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2877 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2882 /* The most common case, much simpler, no qsort is needed.  */
2883 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2884 dst
->var_part
[k
].loc_chain
= dstnode
;
2885 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
2887 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2888 if (!((REG_P (dstnode
->loc
)
2889 && REG_P (node
->loc
)
2890 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2891 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2893 location_chain new_node
;
2895 /* Copy the location from SRC.  */
2896 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2897 new_node
->loc
= node
->loc
;
2898 new_node
->init
= node
->init
;
2899 if (!node
->set_src
|| MEM_P (node
->set_src
))
2900 new_node
->set_src
= NULL
;
2902 new_node
->set_src
= node
->set_src
;
2903 node2
->next
= new_node
;
/* General case: build the VUI scratch array (grown geometrically)
   and sort both chains by summed positions.  */
2910 if (src_l
+ dst_l
> vui_allocated
)
2912 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2913 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2918 /* Fill in the locations from DST.  */
2919 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2920 node
= node
->next
, jj
++)
2923 vui
[jj
].pos_dst
= jj
;
2925 /* Pos plus value larger than a sum of 2 valid positions.  */
2926 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2929 /* Fill in the locations from SRC.  */
2931 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2932 node
= node
->next
, ii
++)
2934 /* Find location from NODE.  */
2935 for (jj
= 0; jj
< dst_l
; jj
++)
2937 if ((REG_P (vui
[jj
].lc
->loc
)
2938 && REG_P (node
->loc
)
2939 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2940 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2942 vui
[jj
].pos
= jj
+ ii
;
2946 if (jj
>= dst_l
) /* The location has not been found.  */
2948 location_chain new_node
;
2950 /* Copy the location from SRC.  */
2951 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2952 new_node
->loc
= node
->loc
;
2953 new_node
->init
= node
->init
;
2954 if (!node
->set_src
|| MEM_P (node
->set_src
))
2955 new_node
->set_src
= NULL
;
2957 new_node
->set_src
= node
->set_src
;
2958 vui
[n
].lc
= new_node
;
2959 vui
[n
].pos_dst
= src_l
+ dst_l
;
2960 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
2967 /* Special case still very common case.  For dst_l == 2
2968 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2969 vui[i].pos == i + src_l + dst_l.  */
2970 if (vui
[0].pos
> vui
[1].pos
)
2972 /* Order should be 1, 0, 2... */
2973 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
2974 vui
[1].lc
->next
= vui
[0].lc
;
2977 vui
[0].lc
->next
= vui
[2].lc
;
2978 vui
[n
- 1].lc
->next
= NULL
;
2981 vui
[0].lc
->next
= NULL
;
2986 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2987 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
2989 /* Order should be 0, 2, 1, 3... */
2990 vui
[0].lc
->next
= vui
[2].lc
;
2991 vui
[2].lc
->next
= vui
[1].lc
;
2994 vui
[1].lc
->next
= vui
[3].lc
;
2995 vui
[n
- 1].lc
->next
= NULL
;
2998 vui
[1].lc
->next
= NULL
;
3003 /* Order should be 0, 1, 2... */
3005 vui
[n
- 1].lc
->next
= NULL
;
3008 for (; ii
< n
; ii
++)
3009 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
/* Fallback: full sort with the comparator above.  */
3013 qsort (vui
, n
, sizeof (struct variable_union_info
),
3014 variable_union_info_cmp_pos
);
3016 /* Reconnect the nodes in sorted order.  */
3017 for (ii
= 1; ii
< n
; ii
++)
3018 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
3019 vui
[n
- 1].lc
->next
= NULL
;
3020 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
3023 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (dst
, j
);
3028 else if ((i
>= 0 && j
>= 0
3029 && VAR_PART_OFFSET (src
, i
) < VAR_PART_OFFSET (dst
, j
))
3032 dst
->var_part
[k
] = dst
->var_part
[j
];
3035 else if ((i
>= 0 && j
>= 0
3036 && VAR_PART_OFFSET (src
, i
) > VAR_PART_OFFSET (dst
, j
))
3039 location_chain
*nextp
;
3041 /* Copy the chain from SRC.  */
3042 nextp
= &dst
->var_part
[k
].loc_chain
;
3043 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3045 location_chain new_lc
;
3047 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
3048 new_lc
->next
= NULL
;
3049 new_lc
->init
= node
->init
;
3050 if (!node
->set_src
|| MEM_P (node
->set_src
))
3051 new_lc
->set_src
= NULL
;
3053 new_lc
->set_src
= node
->set_src
;
3054 new_lc
->loc
= node
->loc
;
3057 nextp
= &new_lc
->next
;
3060 VAR_PART_OFFSET (dst
, k
) = VAR_PART_OFFSET (src
, i
);
3063 dst
->var_part
[k
].cur_loc
= NULL
;
/* Merge init statuses when tracking uninitialized variables.  */
3066 if (flag_var_tracking_uninit
)
3067 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
3069 location_chain node
, node2
;
3070 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
3071 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
3072 if (rtx_equal_p (node
->loc
, node2
->loc
))
3074 if (node
->init
> node2
->init
)
3075 node2
->init
= node
->init
;
3079 /* Continue traversing the hash table.  */
3083 /* Compute union of dataflow sets SRC and DST and store it to DST.  */
/* NOTE(review): lines are elided in this extract (braces, the loop
   index declaration, and one FOR_EACH_HASH_TABLE_ELEMENT argument).  */
3086 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
/* Union the per-register attribute lists.  */
3090 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3091 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
/* An empty DST table can just take a shared copy of SRC's table.  */
3093 if (dst
->vars
== empty_shared_hash
)
3095 shared_hash_destroy (dst
->vars
);
3096 dst
->vars
= shared_hash_copy (src
->vars
);
/* Otherwise union every variable from SRC into DST.  */
3100 variable_iterator_type hi
;
3103 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src
->vars
),
3105 variable_union (var
, dst
);
/* NOTE(review): these flag macros reuse existing rtx flag bits (`used',
   `return_val', `frame_related') via the RTL_FLAG_CHECK accessors; the
   embedded numbers are original line numbers from the extraction.  */
3109 /* Whether the value is currently being expanded.  */
3110 #define VALUE_RECURSED_INTO(x) \
3111 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3113 /* Whether no expansion was found, saving useless lookups.
3114 It must only be set when VALUE_CHANGED is clear.  */
3115 #define NO_LOC_P(x) \
3116 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3118 /* Whether cur_loc in the value needs to be (re)computed.  */
3119 #define VALUE_CHANGED(x) \
3120 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3121 /* Whether cur_loc in the decl needs to be (re)computed.  */
3122 #define DECL_CHANGED(x) TREE_VISITED (x)
3124 /* Record (if NEWV) that DV needs to have its cur_loc recomputed.  For
3125 user DECLs, this means they're in changed_variables.  Values and
3126 debug exprs may be left with this flag set if no user variable
3127 requires them to be evaluated.  */
/* NOTE(review): the switch's case labels (onepart kinds) are elided in
   this extract; fragments are kept verbatim.  */
3130 set_dv_changed (decl_or_value dv
, bool newv
)
3132 switch (dv_onepart_p (dv
))
/* VALUE case: clear the no-location shortcut and set/clear CHANGED.  */
3136 NO_LOC_P (dv_as_value (dv
)) = false;
3137 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
/* Debug-expr decl case: also clear NO_LOC_P on its known DECL_RTL.  */
3142 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv
))) = false;
3143 /* Fall through... */
3146 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
3151 /* Return true if DV needs to have its cur_loc recomputed. */
3154 dv_changed_p (decl_or_value dv
)
3156 return (dv_is_value_p (dv
)
3157 ? VALUE_CHANGED (dv_as_value (dv
))
3158 : DECL_CHANGED (dv_as_decl (dv
)));
3161 /* Return a location list node whose loc is rtx_equal to LOC, in the
3162 location list of a one-part variable or value VAR, or in that of
3163 any values recursively mentioned in the location lists.  VARS must
3164 be in star-canonical form.  */
/* NOTE(review): lines elided in this extract (returns, the dv/rvar
   declarations, braces); fragments kept verbatim.  */
3166 static location_chain
3167 find_loc_in_1pdv (rtx loc
, variable var
, variable_table_type vars
)
3169 location_chain node
;
3170 enum rtx_code loc_code
;
3175 gcc_checking_assert (var
->onepart
);
3177 if (!var
->n_var_parts
)
3180 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
3182 loc_code
= GET_CODE (loc
);
3183 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
/* Fast filters: code mismatch, pointer identity, rtx equality.  */
3188 if (GET_CODE (node
->loc
) != loc_code
)
3190 if (GET_CODE (node
->loc
) != VALUE
)
3193 else if (loc
== node
->loc
)
3195 else if (loc_code
!= VALUE
)
3197 if (rtx_equal_p (loc
, node
->loc
))
3202 /* Since we're in star-canonical form, we don't need to visit
3203 non-canonical nodes: one-part variables and non-canonical
3204 values would only point back to the canonical node.  */
3205 if (dv_is_value_p (var
->dv
)
3206 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
3208 /* Skip all subsequent VALUEs.  */
3209 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
3212 gcc_checking_assert (!canon_value_cmp (node
->loc
,
3213 dv_as_value (var
->dv
)));
3214 if (loc
== node
->loc
)
/* Recurse through the single canonical VALUE at the chain head.  */
3220 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
3221 gcc_checking_assert (!node
->next
);
3223 dv
= dv_from_value (node
->loc
);
3224 rvar
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
3225 return find_loc_in_1pdv (loc
, rvar
, vars
);
3228 /* ??? Gotta look in cselib_val locations too.  */
3233 /* Hash table iteration argument passed to variable_merge.  */
/* NOTE(review): the struct keyword/name and the three dataflow_set
   member declarations were dropped by the extraction; only their
   comments and the last member survive.  */
3236 /* The set in which the merge is to be inserted.  */
3238 /* The set that we're iterating in.  */
3240 /* The set that may contain the other dv we are to merge with.  */
3242 /* Number of onepart dvs in src.  */
3243 int src_onepart_cnt
;
3246 /* Insert LOC in *DNODE, if it's not there yet.  The list must be in
3247 loc_cmp order, and it is maintained as such.  */
/* NOTE(review): lines elided in this extract (the r declaration, the
   `node->loc = loc' store, the early break/return, braces).  */
3250 insert_into_intersection (location_chain
*nodep
, rtx loc
,
3251 enum var_init_status status
)
3253 location_chain node
;
/* Scan in loc_cmp order to find LOC or its insertion point.  */
3256 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
3257 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
/* Already present: keep the weaker (smaller) init status.  */
3259 node
->init
= MIN (node
->init
, status
);
/* Not present: allocate and splice in a new node at *nodep.  */
3265 node
= (location_chain
) pool_alloc (loc_chain_pool
);
3268 node
->set_src
= NULL
;
3269 node
->init
= status
;
3270 node
->next
= *nodep
;
3274 /* Insert in DEST the intersection of the locations present in both
3275 S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
3276 variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
/* NOTE(review): lines elided in this extract (braces, the recursion
   guard's final argument, gotos); fragments kept verbatim.  */
3280 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
3281 location_chain s1node
, variable s2var
)
3283 dataflow_set
*s1set
= dsm
->cur
;
3284 dataflow_set
*s2set
= dsm
->src
;
3285 location_chain found
;
3289 location_chain s2node
;
3291 gcc_checking_assert (s2var
->onepart
);
/* Fast path: walk both chains in lock step while they agree.  */
3293 if (s2var
->n_var_parts
)
3295 s2node
= s2var
->var_part
[0].loc_chain
;
3297 for (; s1node
&& s2node
;
3298 s1node
= s1node
->next
, s2node
= s2node
->next
)
3299 if (s1node
->loc
!= s2node
->loc
)
3301 else if (s1node
->loc
== val
)
3304 insert_into_intersection (dest
, s1node
->loc
,
3305 MIN (s1node
->init
, s2node
->init
));
/* Slow path: look each remaining S1 location up in S2VAR (possibly
   through value equivalences).  */
3309 for (; s1node
; s1node
= s1node
->next
)
3311 if (s1node
->loc
== val
)
3314 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
3315 shared_hash_htab (s2set
->vars
))))
3317 insert_into_intersection (dest
, s1node
->loc
,
3318 MIN (s1node
->init
, found
->init
));
/* Recurse through VALUE equivalences, guarding against cycles with
   VALUE_RECURSED_INTO.  */
3322 if (GET_CODE (s1node
->loc
) == VALUE
3323 && !VALUE_RECURSED_INTO (s1node
->loc
))
3325 decl_or_value dv
= dv_from_value (s1node
->loc
);
3326 variable svar
= shared_hash_find (s1set
->vars
, dv
);
3329 if (svar
->n_var_parts
== 1)
3331 VALUE_RECURSED_INTO (s1node
->loc
) = true;
3332 intersect_loc_chains (val
, dest
, dsm
,
3333 svar
->var_part
[0].loc_chain
,
3335 VALUE_RECURSED_INTO (s1node
->loc
) = false;
3340 /* ??? gotta look in cselib_val locations too.  */
3342 /* ??? if the location is equivalent to any location in src,
3343 searched recursively
3345 add to dst the values needed to represent the equivalence
3347 telling whether locations S is equivalent to another dv's
3350 for each location D in the list
3352 if S and D satisfy rtx_equal_p, then it is present
3354 else if D is a value, recurse without cycles
3356 else if S and D have the same CODE and MODE
3358 for each operand oS and the corresponding oD
3360 if oS and oD are not equivalent, then S an D are not equivalent
3362 else if they are RTX vectors
3364 if any vector oS element is not equivalent to its respective oD,
3365 then S and D are not equivalent
3373 /* Return -1 if X should be before Y in a location list for a 1-part
3374 variable, 1 if Y should be before X, and 0 if they're equivalent
3375 and should not appear in the list.  */
/* NOTE(review): lines elided in this extract (the i/j/r/fmt
   declarations, many `return -1'/`return 1' statements, switch case
   labels on the format letters); fragments kept verbatim.  */
3378 loc_cmp (rtx x
, rtx y
)
3381 RTX_CODE code
= GET_CODE (x
);
/* Registers: order by register number.  */
3391 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3392 if (REGNO (x
) == REGNO (y
))
3394 else if (REGNO (x
) < REGNO (y
))
/* Memory references: compare the address operands recursively.  */
3407 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3408 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
/* VALUEs come first, ordered by canonicality.  */
3414 if (GET_CODE (x
) == VALUE
)
3416 if (GET_CODE (y
) != VALUE
)
3418 /* Don't assert the modes are the same, that is true only
3419 when not recursing.  (subreg:QI (value:SI 1:1) 0)
3420 and (subreg:QI (value:DI 2:2) 0) can be compared,
3421 even when the modes are different.  */
3422 if (canon_value_cmp (x
, y
))
3428 if (GET_CODE (y
) == VALUE
)
3431 /* Entry value is the least preferable kind of expression.  */
3432 if (GET_CODE (x
) == ENTRY_VALUE
)
3434 if (GET_CODE (y
) != ENTRY_VALUE
)
3436 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
3437 return loc_cmp (ENTRY_VALUE_EXP (x
), ENTRY_VALUE_EXP (y
));
3440 if (GET_CODE (y
) == ENTRY_VALUE
)
/* Otherwise order by rtx code, then by operands.  */
3443 if (GET_CODE (x
) == GET_CODE (y
))
3444 /* Compare operands below.  */;
3445 else if (GET_CODE (x
) < GET_CODE (y
))
3450 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
/* Debug exprs: order by their temp UIDs, which must differ.  */
3452 if (GET_CODE (x
) == DEBUG_EXPR
)
3454 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3455 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
3457 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
3458 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
/* Generic operand walk driven by the rtx format string.  */
3462 fmt
= GET_RTX_FORMAT (code
);
3463 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
3467 if (XWINT (x
, i
) == XWINT (y
, i
))
3469 else if (XWINT (x
, i
) < XWINT (y
, i
))
3476 if (XINT (x
, i
) == XINT (y
, i
))
3478 else if (XINT (x
, i
) < XINT (y
, i
))
3485 /* Compare the vector length first.  */
3486 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
3487 /* Compare the vectors elements.  */;
3488 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
3493 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3494 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
3495 XVECEXP (y
, i
, j
))))
3500 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
3506 if (XSTR (x
, i
) == XSTR (y
, i
))
3512 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
3520 /* These are just backpointers, so they don't matter.  */
3527 /* It is believed that rtx's at this level will never
3528 contain anything but integers and other rtx's,
3529 except for within LABEL_REFs and SYMBOL_REFs.  */
3538 /* Check the order of entries in one-part variables.   */
/* NOTE(review): a checking-only traversal callback; lines are elided
   in this extract (returns, the #else/#endif of the RTL-checking
   block, the early exit for non-onepart vars).  */
3541 canonicalize_loc_order_check (variable_def
**slot
,
3542 dataflow_set
*data ATTRIBUTE_UNUSED
)
3544 variable var
= *slot
;
3545 location_chain node
, next
;
3547 #ifdef ENABLE_RTL_CHECKING
/* NOTE(review): the loop runs over i but the assert indexes
   var_part[0], not var_part[i] -- looks suspicious; confirm against
   upstream before relying on it.  */
3549 for (i
= 0; i
< var
->n_var_parts
; i
++)
3550 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3551 gcc_assert (!var
->in_changed_variables
);
3557 gcc_assert (var
->n_var_parts
== 1);
3558 node
= var
->var_part
[0].loc_chain
;
/* Every adjacent pair must be strictly increasing under loc_cmp.  */
3561 while ((next
= node
->next
))
3563 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3571 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3572 more likely to be chosen as canonical for an equivalence set.
3573 Ensure less likely values can reach more likely neighbors, making
3574 the connections bidirectional.  */
/* NOTE(review): traversal callback; lines elided in this extract (the
   val declaration, returns, braces); fragments kept verbatim.  */
3577 canonicalize_values_mark (variable_def
**slot
, dataflow_set
*set
)
3579 variable var
= *slot
;
3580 decl_or_value dv
= var
->dv
;
3582 location_chain node
;
/* Only VALUE-keyed entries participate.  */
3584 if (!dv_is_value_p (dv
))
3587 gcc_checking_assert (var
->n_var_parts
== 1);
3589 val
= dv_as_value (dv
);
3591 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3592 if (GET_CODE (node
->loc
) == VALUE
)
3594 if (canon_value_cmp (node
->loc
, val
))
/* A more-canonical neighbor exists: mark VAL for revisiting.  */
3595 VALUE_RECURSED_INTO (val
) = true;
/* Otherwise make the link bidirectional: record VAL in the
   less-canonical neighbor and mark that neighbor.  */
3598 decl_or_value odv
= dv_from_value (node
->loc
);
3599 variable_def
**oslot
;
3600 oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3602 set_slot_part (set
, val
, oslot
, odv
, 0,
3603 node
->init
, NULL_RTX
);
3605 VALUE_RECURSED_INTO (node
->loc
) = true;
3612 /* Remove redundant entries from equivalence lists in onepart
3613 variables, canonicalizing equivalence sets into star shapes.  */
/* NOTE(review): many lines are elided in this extract (the val/cval/
   cdv/has_marks declarations, the restart_with_cval label, braces,
   returns); fragments kept verbatim.  */
3616 canonicalize_values_star (variable_def
**slot
, dataflow_set
*set
)
3618 variable var
= *slot
;
3619 decl_or_value dv
= var
->dv
;
3620 location_chain node
;
3623 variable_def
**cslot
;
3630 gcc_checking_assert (var
->n_var_parts
== 1);
/* Only process entries previously marked by canonicalize_values_mark.  */
3632 if (dv_is_value_p (dv
))
3634 cval
= dv_as_value (dv
);
3635 if (!VALUE_RECURSED_INTO (cval
))
3637 VALUE_RECURSED_INTO (cval
) = false;
3647 gcc_assert (var
->n_var_parts
== 1);
/* Pick the most canonical VALUE among the marked neighbors as cval.  */
3649 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3650 if (GET_CODE (node
->loc
) == VALUE
)
3653 if (VALUE_RECURSED_INTO (node
->loc
))
3655 if (canon_value_cmp (node
->loc
, cval
))
3664 if (!has_marks
|| dv_is_decl_p (dv
))
3667 /* Keep it marked so that we revisit it, either after visiting a
3668 child node, or after visiting a new parent that might be
3670 VALUE_RECURSED_INTO (val
) = true;
3672 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3673 if (GET_CODE (node
->loc
) == VALUE
3674 && VALUE_RECURSED_INTO (node
->loc
))
/* Switch to processing cval's own slot.  */
3678 VALUE_RECURSED_INTO (cval
) = false;
3679 dv
= dv_from_value (cval
);
3680 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3683 gcc_assert (dv_is_decl_p (var
->dv
));
3684 /* The canonical value was reset and dropped.
3686 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3690 gcc_assert (dv_is_value_p (var
->dv
));
3691 if (var
->n_var_parts
== 0)
3693 gcc_assert (var
->n_var_parts
== 1);
3697 VALUE_RECURSED_INTO (val
) = false;
3702 /* Push values to the canonical one.  */
3703 cdv
= dv_from_value (cval
);
3704 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3706 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3707 if (node
->loc
!= cval
)
3709 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3710 node
->init
, NULL_RTX
);
3711 if (GET_CODE (node
->loc
) == VALUE
)
3713 decl_or_value ndv
= dv_from_value (node
->loc
);
3715 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3718 if (canon_value_cmp (node
->loc
, val
))
3720 /* If it could have been a local minimum, it's not any more,
3721 since it's now neighbor to cval, so it may have to push
3722 to it.  Conversely, if it wouldn't have prevailed over
3723 val, then whatever mark it has is fine: if it was to
3724 push, it will now push to a more canonical node, but if
3725 it wasn't, then it has already pushed any values it might
3727 VALUE_RECURSED_INTO (node
->loc
) = true;
3728 /* Make sure we visit node->loc by ensuring we cval is
3730 VALUE_RECURSED_INTO (cval
) = true;
3732 else if (!VALUE_RECURSED_INTO (node
->loc
))
3733 /* If we have no need to "recurse" into this node, it's
3734 already "canonicalized", so drop the link to the old
3736 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3738 else if (GET_CODE (node
->loc
) == REG
)
3740 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3742 /* Change an existing attribute referring to dv so that it
3743 refers to cdv, removing any duplicate this might
3744 introduce, and checking that no previous duplicates
3745 existed, all in a single pass.  */
3749 if (list
->offset
== 0
3750 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3751 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
/* Found DV first: retarget it to CDV, then drop any later
   CDV duplicate.  */
3758 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3761 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3766 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3768 *listp
= list
->next
;
3769 pool_free (attrs_pool
, list
);
3774 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
/* Found CDV first: drop the later DV entry instead.  */
3777 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3779 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3784 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3786 *listp
= list
->next
;
3787 pool_free (attrs_pool
, list
);
3792 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3801 if (list
->offset
== 0
3802 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3803 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
/* Record VAL in the canonical value's chain, then reduce VAL's own
   chain to just cval.  */
3813 set_slot_part (set
, val
, cslot
, cdv
, 0,
3814 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3816 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3818 /* Variable may have been unshared.  */
3820 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3821 && var
->var_part
[0].loc_chain
->next
== NULL
);
3823 if (VALUE_RECURSED_INTO (cval
))
3824 goto restart_with_cval
;
3829 /* Bind one-part variables to the canonical value in an equivalence
3830 set.  Not doing this causes dataflow convergence failure in rare
3831 circumstances, see PR42873.  Unfortunately we can't do this
3832 efficiently as part of canonicalize_values_star, since we may not
3833 have determined or even seen the canonical value of a set when we
3834 get to a variable that references another member of the set.  */
/* NOTE(review): lines elided in this extract (the cval/cdv/cvar
   declarations, returns, braces); fragments kept verbatim.  */
3837 canonicalize_vars_star (variable_def
**slot
, dataflow_set
*set
)
3839 variable var
= *slot
;
3840 decl_or_value dv
= var
->dv
;
3841 location_chain node
;
3844 variable_def
**cslot
;
3846 location_chain cnode
;
/* Only decl-keyed one-part variables (not VALUE-keyed ones).  */
3848 if (!var
->onepart
|| var
->onepart
== ONEPART_VALUE
)
3851 gcc_assert (var
->n_var_parts
== 1);
3853 node
= var
->var_part
[0].loc_chain
;
3855 if (GET_CODE (node
->loc
) != VALUE
)
3858 gcc_assert (!node
->next
);
3861 /* Push values to the canonical one.  */
3862 cdv
= dv_from_value (cval
);
3863 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3867 gcc_assert (cvar
->n_var_parts
== 1);
3869 cnode
= cvar
->var_part
[0].loc_chain
;
3871 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3872 that are not "more canonical" than it.  */
3873 if (GET_CODE (cnode
->loc
) != VALUE
3874 || !canon_value_cmp (cnode
->loc
, cval
))
3877 /* CVAL was found to be non-canonical.  Change the variable to point
3878 to the canonical VALUE.  */
3879 gcc_assert (!cnode
->next
);
/* Rebind the variable to the canonical value and drop the old one.  */
3882 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3883 node
->init
, node
->set_src
);
3884 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3889 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3890 corresponding entry in DSM->src. Multi-part variables are combined
3891 with variable_union, whereas onepart dvs are combined with
3895 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
3897 dataflow_set
*dst
= dsm
->dst
;
3898 variable_def
**dstslot
;
3899 variable s2var
, dvar
= NULL
;
3900 decl_or_value dv
= s1var
->dv
;
3901 onepart_enum_t onepart
= s1var
->onepart
;
3904 location_chain node
, *nodep
;
3906 /* If the incoming onepart variable has an empty location list, then
3907 the intersection will be just as empty. For other variables,
3908 it's always union. */
3909 gcc_checking_assert (s1var
->n_var_parts
3910 && s1var
->var_part
[0].loc_chain
);
3913 return variable_union (s1var
, dst
);
3915 gcc_checking_assert (s1var
->n_var_parts
== 1);
3917 dvhash
= dv_htab_hash (dv
);
3918 if (dv_is_value_p (dv
))
3919 val
= dv_as_value (dv
);
3923 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3926 dst_can_be_shared
= false;
3930 dsm
->src_onepart_cnt
--;
3931 gcc_assert (s2var
->var_part
[0].loc_chain
3932 && s2var
->onepart
== onepart
3933 && s2var
->n_var_parts
== 1);
3935 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3939 gcc_assert (dvar
->refcount
== 1
3940 && dvar
->onepart
== onepart
3941 && dvar
->n_var_parts
== 1);
3942 nodep
= &dvar
->var_part
[0].loc_chain
;
3950 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3952 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3954 *dstslot
= dvar
= s2var
;
3959 dst_can_be_shared
= false;
3961 intersect_loc_chains (val
, nodep
, dsm
,
3962 s1var
->var_part
[0].loc_chain
, s2var
);
3968 dvar
= (variable
) pool_alloc (onepart_pool (onepart
));
3971 dvar
->n_var_parts
= 1;
3972 dvar
->onepart
= onepart
;
3973 dvar
->in_changed_variables
= false;
3974 dvar
->var_part
[0].loc_chain
= node
;
3975 dvar
->var_part
[0].cur_loc
= NULL
;
3977 VAR_LOC_1PAUX (dvar
) = NULL
;
3979 VAR_PART_OFFSET (dvar
, 0) = 0;
3982 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
3984 gcc_assert (!*dstslot
);
3992 nodep
= &dvar
->var_part
[0].loc_chain
;
3993 while ((node
= *nodep
))
3995 location_chain
*nextp
= &node
->next
;
3997 if (GET_CODE (node
->loc
) == REG
)
4001 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
4002 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
4003 && dv_is_value_p (list
->dv
))
4007 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
4009 /* If this value became canonical for another value that had
4010 this register, we want to leave it alone. */
4011 else if (dv_as_value (list
->dv
) != val
)
4013 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
4015 node
->init
, NULL_RTX
);
4016 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
4018 /* Since nextp points into the removed node, we can't
4019 use it. The pointer to the next node moved to nodep.
4020 However, if the variable we're walking is unshared
4021 during our walk, we'll keep walking the location list
4022 of the previously-shared variable, in which case the
4023 node won't have been removed, and we'll want to skip
4024 it. That's why we test *nodep here. */
4030 /* Canonicalization puts registers first, so we don't have to
4036 if (dvar
!= *dstslot
)
4038 nodep
= &dvar
->var_part
[0].loc_chain
;
4042 /* Mark all referenced nodes for canonicalization, and make sure
4043 we have mutual equivalence links. */
4044 VALUE_RECURSED_INTO (val
) = true;
4045 for (node
= *nodep
; node
; node
= node
->next
)
4046 if (GET_CODE (node
->loc
) == VALUE
)
4048 VALUE_RECURSED_INTO (node
->loc
) = true;
4049 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
4050 node
->init
, NULL
, INSERT
);
4053 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4054 gcc_assert (*dstslot
== dvar
);
4055 canonicalize_values_star (dstslot
, dst
);
4056 gcc_checking_assert (dstslot
4057 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4063 bool has_value
= false, has_other
= false;
4065 /* If we have one value and anything else, we're going to
4066 canonicalize this, so make sure all values have an entry in
4067 the table and are marked for canonicalization. */
4068 for (node
= *nodep
; node
; node
= node
->next
)
4070 if (GET_CODE (node
->loc
) == VALUE
)
4072 /* If this was marked during register canonicalization,
4073 we know we have to canonicalize values. */
4088 if (has_value
&& has_other
)
4090 for (node
= *nodep
; node
; node
= node
->next
)
4092 if (GET_CODE (node
->loc
) == VALUE
)
4094 decl_or_value dv
= dv_from_value (node
->loc
);
4095 variable_def
**slot
= NULL
;
4097 if (shared_hash_shared (dst
->vars
))
4098 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
4100 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
4104 variable var
= (variable
) pool_alloc (onepart_pool
4108 var
->n_var_parts
= 1;
4109 var
->onepart
= ONEPART_VALUE
;
4110 var
->in_changed_variables
= false;
4111 var
->var_part
[0].loc_chain
= NULL
;
4112 var
->var_part
[0].cur_loc
= NULL
;
4113 VAR_LOC_1PAUX (var
) = NULL
;
4117 VALUE_RECURSED_INTO (node
->loc
) = true;
4121 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
4122 gcc_assert (*dstslot
== dvar
);
4123 canonicalize_values_star (dstslot
, dst
);
4124 gcc_checking_assert (dstslot
4125 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
4131 if (!onepart_variable_different_p (dvar
, s2var
))
4133 variable_htab_free (dvar
);
4134 *dstslot
= dvar
= s2var
;
4137 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
4139 variable_htab_free (dvar
);
4140 *dstslot
= dvar
= s1var
;
4142 dst_can_be_shared
= false;
4145 dst_can_be_shared
= false;
4150 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4151 multi-part variable. Unions of multi-part variables and
4152 intersections of one-part ones will be handled in
4153 variable_merge_over_cur(). */
4156 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
4158 dataflow_set
*dst
= dsm
->dst
;
4159 decl_or_value dv
= s2var
->dv
;
4161 if (!s2var
->onepart
)
4163 variable_def
**dstp
= shared_hash_find_slot (dst
->vars
, dv
);
4169 dsm
->src_onepart_cnt
++;
4173 /* Combine dataflow set information from SRC2 into DST, using PDST
4174 to carry over information across passes. */
4177 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
4179 dataflow_set cur
= *dst
;
4180 dataflow_set
*src1
= &cur
;
4181 struct dfset_merge dsm
;
4183 size_t src1_elems
, src2_elems
;
4184 variable_iterator_type hi
;
4187 src1_elems
= shared_hash_htab (src1
->vars
).elements ();
4188 src2_elems
= shared_hash_htab (src2
->vars
).elements ();
4189 dataflow_set_init (dst
);
4190 dst
->stack_adjust
= cur
.stack_adjust
;
4191 shared_hash_destroy (dst
->vars
);
4192 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
4193 dst
->vars
->refcount
= 1;
4194 dst
->vars
->htab
.create (MAX (src1_elems
, src2_elems
));
4196 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4197 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
4202 dsm
.src_onepart_cnt
= 0;
4204 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm
.src
->vars
),
4206 variable_merge_over_src (var
, &dsm
);
4207 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm
.cur
->vars
),
4209 variable_merge_over_cur (var
, &dsm
);
4211 if (dsm
.src_onepart_cnt
)
4212 dst_can_be_shared
= false;
4214 dataflow_set_destroy (src1
);
4217 /* Mark register equivalences. */
4220 dataflow_set_equiv_regs (dataflow_set
*set
)
4225 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4227 rtx canon
[NUM_MACHINE_MODES
];
4229 /* If the list is empty or one entry, no need to canonicalize
4231 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
4234 memset (canon
, 0, sizeof (canon
));
4236 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4237 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
4239 rtx val
= dv_as_value (list
->dv
);
4240 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
4243 if (canon_value_cmp (val
, cval
))
4247 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
4248 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4250 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4255 if (dv_is_value_p (list
->dv
))
4257 rtx val
= dv_as_value (list
->dv
);
4262 VALUE_RECURSED_INTO (val
) = true;
4263 set_variable_part (set
, val
, dv_from_value (cval
), 0,
4264 VAR_INIT_STATUS_INITIALIZED
,
4268 VALUE_RECURSED_INTO (cval
) = true;
4269 set_variable_part (set
, cval
, list
->dv
, 0,
4270 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
4273 for (listp
= &set
->regs
[i
]; (list
= *listp
);
4274 listp
= list
? &list
->next
: listp
)
4275 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
4277 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
4278 variable_def
**slot
;
4283 if (dv_is_value_p (list
->dv
))
4285 rtx val
= dv_as_value (list
->dv
);
4286 if (!VALUE_RECURSED_INTO (val
))
4290 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
4291 canonicalize_values_star (slot
, set
);
4298 /* Remove any redundant values in the location list of VAR, which must
4299 be unshared and 1-part. */
4302 remove_duplicate_values (variable var
)
4304 location_chain node
, *nodep
;
4306 gcc_assert (var
->onepart
);
4307 gcc_assert (var
->n_var_parts
== 1);
4308 gcc_assert (var
->refcount
== 1);
4310 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
4312 if (GET_CODE (node
->loc
) == VALUE
)
4314 if (VALUE_RECURSED_INTO (node
->loc
))
4316 /* Remove duplicate value node. */
4317 *nodep
= node
->next
;
4318 pool_free (loc_chain_pool
, node
);
4322 VALUE_RECURSED_INTO (node
->loc
) = true;
4324 nodep
= &node
->next
;
4327 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4328 if (GET_CODE (node
->loc
) == VALUE
)
4330 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
4331 VALUE_RECURSED_INTO (node
->loc
) = false;
4336 /* Hash table iteration argument passed to variable_post_merge. */
4337 struct dfset_post_merge
4339 /* The new input set for the current block. */
4341 /* Pointer to the permanent input set for the current block, or
4343 dataflow_set
**permp
;
4346 /* Create values for incoming expressions associated with one-part
4347 variables that don't have value numbers for them. */
4350 variable_post_merge_new_vals (variable_def
**slot
, dfset_post_merge
*dfpm
)
4352 dataflow_set
*set
= dfpm
->set
;
4353 variable var
= *slot
;
4354 location_chain node
;
4356 if (!var
->onepart
|| !var
->n_var_parts
)
4359 gcc_assert (var
->n_var_parts
== 1);
4361 if (dv_is_decl_p (var
->dv
))
4363 bool check_dupes
= false;
4366 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4368 if (GET_CODE (node
->loc
) == VALUE
)
4369 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
4370 else if (GET_CODE (node
->loc
) == REG
)
4372 attrs att
, *attp
, *curp
= NULL
;
4374 if (var
->refcount
!= 1)
4376 slot
= unshare_variable (set
, slot
, var
,
4377 VAR_INIT_STATUS_INITIALIZED
);
4382 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
4384 if (att
->offset
== 0
4385 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4387 if (dv_is_value_p (att
->dv
))
4389 rtx cval
= dv_as_value (att
->dv
);
4394 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
4402 if ((*curp
)->offset
== 0
4403 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
4404 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
4407 curp
= &(*curp
)->next
;
4418 *dfpm
->permp
= XNEW (dataflow_set
);
4419 dataflow_set_init (*dfpm
->permp
);
4422 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
4423 att
; att
= att
->next
)
4424 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
4426 gcc_assert (att
->offset
== 0
4427 && dv_is_value_p (att
->dv
));
4428 val_reset (set
, att
->dv
);
4435 cval
= dv_as_value (cdv
);
4439 /* Create a unique value to hold this register,
4440 that ought to be found and reused in
4441 subsequent rounds. */
4443 gcc_assert (!cselib_lookup (node
->loc
,
4444 GET_MODE (node
->loc
), 0,
4446 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4448 cselib_preserve_value (v
);
4449 cselib_invalidate_rtx (node
->loc
);
4451 cdv
= dv_from_value (cval
);
4454 "Created new value %u:%u for reg %i\n",
4455 v
->uid
, v
->hash
, REGNO (node
->loc
));
4458 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4459 VAR_INIT_STATUS_INITIALIZED
,
4460 cdv
, 0, NULL
, INSERT
);
4466 /* Remove attribute referring to the decl, which now
4467 uses the value for the register, already existing or
4468 to be added when we bring perm in. */
4471 pool_free (attrs_pool
, att
);
4476 remove_duplicate_values (var
);
4482 /* Reset values in the permanent set that are not associated with the
4483 chosen expression. */
4486 variable_post_merge_perm_vals (variable_def
**pslot
, dfset_post_merge
*dfpm
)
4488 dataflow_set
*set
= dfpm
->set
;
4489 variable pvar
= *pslot
, var
;
4490 location_chain pnode
;
4494 gcc_assert (dv_is_value_p (pvar
->dv
)
4495 && pvar
->n_var_parts
== 1);
4496 pnode
= pvar
->var_part
[0].loc_chain
;
4499 && REG_P (pnode
->loc
));
4503 var
= shared_hash_find (set
->vars
, dv
);
4506 /* Although variable_post_merge_new_vals may have made decls
4507 non-star-canonical, values that pre-existed in canonical form
4508 remain canonical, and newly-created values reference a single
4509 REG, so they are canonical as well. Since VAR has the
4510 location list for a VALUE, using find_loc_in_1pdv for it is
4511 fine, since VALUEs don't map back to DECLs. */
4512 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4514 val_reset (set
, dv
);
4517 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4518 if (att
->offset
== 0
4519 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4520 && dv_is_value_p (att
->dv
))
4523 /* If there is a value associated with this register already, create
4525 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
4527 rtx cval
= dv_as_value (att
->dv
);
4528 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4529 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
4534 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4536 variable_union (pvar
, set
);
4542 /* Just checking stuff and registering register attributes for
4546 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4548 struct dfset_post_merge dfpm
;
4553 shared_hash_htab (set
->vars
)
4554 .traverse
<dfset_post_merge
*, variable_post_merge_new_vals
> (&dfpm
);
4556 shared_hash_htab ((*permp
)->vars
)
4557 .traverse
<dfset_post_merge
*, variable_post_merge_perm_vals
> (&dfpm
);
4558 shared_hash_htab (set
->vars
)
4559 .traverse
<dataflow_set
*, canonicalize_values_star
> (set
);
4560 shared_hash_htab (set
->vars
)
4561 .traverse
<dataflow_set
*, canonicalize_vars_star
> (set
);
4564 /* Return a node whose loc is a MEM that refers to EXPR in the
4565 location list of a one-part variable or value VAR, or in that of
4566 any values recursively mentioned in the location lists. */
4568 static location_chain
4569 find_mem_expr_in_1pdv (tree expr
, rtx val
, variable_table_type vars
)
4571 location_chain node
;
4574 location_chain where
= NULL
;
4579 gcc_assert (GET_CODE (val
) == VALUE
4580 && !VALUE_RECURSED_INTO (val
));
4582 dv
= dv_from_value (val
);
4583 var
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
4588 gcc_assert (var
->onepart
);
4590 if (!var
->n_var_parts
)
4593 VALUE_RECURSED_INTO (val
) = true;
4595 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4596 if (MEM_P (node
->loc
)
4597 && MEM_EXPR (node
->loc
) == expr
4598 && INT_MEM_OFFSET (node
->loc
) == 0)
4603 else if (GET_CODE (node
->loc
) == VALUE
4604 && !VALUE_RECURSED_INTO (node
->loc
)
4605 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4608 VALUE_RECURSED_INTO (val
) = false;
4613 /* Return TRUE if the value of MEM may vary across a call. */
4616 mem_dies_at_call (rtx mem
)
4618 tree expr
= MEM_EXPR (mem
);
4624 decl
= get_base_address (expr
);
4632 return (may_be_aliased (decl
)
4633 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4636 /* Remove all MEMs from the location list of a hash table entry for a
4637 one-part variable, except those whose MEM attributes map back to
4638 the variable itself, directly or within a VALUE. */
4641 dataflow_set_preserve_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4643 variable var
= *slot
;
4645 if (var
->onepart
== ONEPART_VDECL
|| var
->onepart
== ONEPART_DEXPR
)
4647 tree decl
= dv_as_decl (var
->dv
);
4648 location_chain loc
, *locp
;
4649 bool changed
= false;
4651 if (!var
->n_var_parts
)
4654 gcc_assert (var
->n_var_parts
== 1);
4656 if (shared_var_p (var
, set
->vars
))
4658 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4660 /* We want to remove dying MEMs that don't refer to DECL. */
4661 if (GET_CODE (loc
->loc
) == MEM
4662 && (MEM_EXPR (loc
->loc
) != decl
4663 || INT_MEM_OFFSET (loc
->loc
) != 0)
4664 && !mem_dies_at_call (loc
->loc
))
4666 /* We want to move here MEMs that do refer to DECL. */
4667 else if (GET_CODE (loc
->loc
) == VALUE
4668 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4669 shared_hash_htab (set
->vars
)))
4676 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4678 gcc_assert (var
->n_var_parts
== 1);
4681 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4684 rtx old_loc
= loc
->loc
;
4685 if (GET_CODE (old_loc
) == VALUE
)
4687 location_chain mem_node
4688 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4689 shared_hash_htab (set
->vars
));
4691 /* ??? This picks up only one out of multiple MEMs that
4692 refer to the same variable. Do we ever need to be
4693 concerned about dealing with more than one, or, given
4694 that they should all map to the same variable
4695 location, their addresses will have been merged and
4696 they will be regarded as equivalent? */
4699 loc
->loc
= mem_node
->loc
;
4700 loc
->set_src
= mem_node
->set_src
;
4701 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4705 if (GET_CODE (loc
->loc
) != MEM
4706 || (MEM_EXPR (loc
->loc
) == decl
4707 && INT_MEM_OFFSET (loc
->loc
) == 0)
4708 || !mem_dies_at_call (loc
->loc
))
4710 if (old_loc
!= loc
->loc
&& emit_notes
)
4712 if (old_loc
== var
->var_part
[0].cur_loc
)
4715 var
->var_part
[0].cur_loc
= NULL
;
4724 if (old_loc
== var
->var_part
[0].cur_loc
)
4727 var
->var_part
[0].cur_loc
= NULL
;
4731 pool_free (loc_chain_pool
, loc
);
4734 if (!var
->var_part
[0].loc_chain
)
4740 variable_was_changed (var
, set
);
4746 /* Remove all MEMs from the location list of a hash table entry for a
4750 dataflow_set_remove_mem_locs (variable_def
**slot
, dataflow_set
*set
)
4752 variable var
= *slot
;
4754 if (var
->onepart
== ONEPART_VALUE
)
4756 location_chain loc
, *locp
;
4757 bool changed
= false;
4760 gcc_assert (var
->n_var_parts
== 1);
4762 if (shared_var_p (var
, set
->vars
))
4764 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4765 if (GET_CODE (loc
->loc
) == MEM
4766 && mem_dies_at_call (loc
->loc
))
4772 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4774 gcc_assert (var
->n_var_parts
== 1);
4777 if (VAR_LOC_1PAUX (var
))
4778 cur_loc
= VAR_LOC_FROM (var
);
4780 cur_loc
= var
->var_part
[0].cur_loc
;
4782 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4785 if (GET_CODE (loc
->loc
) != MEM
4786 || !mem_dies_at_call (loc
->loc
))
4793 /* If we have deleted the location which was last emitted
4794 we have to emit new location so add the variable to set
4795 of changed variables. */
4796 if (cur_loc
== loc
->loc
)
4799 var
->var_part
[0].cur_loc
= NULL
;
4800 if (VAR_LOC_1PAUX (var
))
4801 VAR_LOC_FROM (var
) = NULL
;
4803 pool_free (loc_chain_pool
, loc
);
4806 if (!var
->var_part
[0].loc_chain
)
4812 variable_was_changed (var
, set
);
4818 /* Remove all variable-location information about call-clobbered
4819 registers, as well as associations between MEMs and VALUEs. */
4822 dataflow_set_clear_at_call (dataflow_set
*set
)
4825 hard_reg_set_iterator hrsi
;
4827 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call
, 0, r
, hrsi
)
4828 var_regno_delete (set
, r
);
4830 if (MAY_HAVE_DEBUG_INSNS
)
4832 set
->traversed_vars
= set
->vars
;
4833 shared_hash_htab (set
->vars
)
4834 .traverse
<dataflow_set
*, dataflow_set_preserve_mem_locs
> (set
);
4835 set
->traversed_vars
= set
->vars
;
4836 shared_hash_htab (set
->vars
)
4837 .traverse
<dataflow_set
*, dataflow_set_remove_mem_locs
> (set
);
4838 set
->traversed_vars
= NULL
;
4843 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4845 location_chain lc1
, lc2
;
4847 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4849 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4851 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4853 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4856 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4865 /* Return true if one-part variables VAR1 and VAR2 are different.
4866 They must be in canonical order. */
4869 onepart_variable_different_p (variable var1
, variable var2
)
4871 location_chain lc1
, lc2
;
4876 gcc_assert (var1
->n_var_parts
== 1
4877 && var2
->n_var_parts
== 1);
4879 lc1
= var1
->var_part
[0].loc_chain
;
4880 lc2
= var2
->var_part
[0].loc_chain
;
4882 gcc_assert (lc1
&& lc2
);
4886 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4895 /* Return true if variables VAR1 and VAR2 are different. */
4898 variable_different_p (variable var1
, variable var2
)
4905 if (var1
->onepart
!= var2
->onepart
)
4908 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4911 if (var1
->onepart
&& var1
->n_var_parts
)
4913 gcc_checking_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
)
4914 && var1
->n_var_parts
== 1);
4915 /* One-part values have locations in a canonical order. */
4916 return onepart_variable_different_p (var1
, var2
);
4919 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4921 if (VAR_PART_OFFSET (var1
, i
) != VAR_PART_OFFSET (var2
, i
))
4923 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4925 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
4931 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4934 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
4936 variable_iterator_type hi
;
4939 if (old_set
->vars
== new_set
->vars
)
4942 if (shared_hash_htab (old_set
->vars
).elements ()
4943 != shared_hash_htab (new_set
->vars
).elements ())
4946 FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set
->vars
),
4949 variable_table_type htab
= shared_hash_htab (new_set
->vars
);
4950 variable var2
= htab
.find_with_hash (var1
->dv
, dv_htab_hash (var1
->dv
));
4953 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4955 fprintf (dump_file
, "dataflow difference found: removal of:\n");
4961 if (variable_different_p (var1
, var2
))
4963 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4965 fprintf (dump_file
, "dataflow difference found: "
4966 "old and new follow:\n");
4974 /* No need to traverse the second hashtab, if both have the same number
4975 of elements and the second one had all entries found in the first one,
4976 then it can't have any extra entries. */
4980 /* Free the contents of dataflow set SET. */
4983 dataflow_set_destroy (dataflow_set
*set
)
4987 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4988 attrs_list_clear (&set
->regs
[i
]);
4990 shared_hash_destroy (set
->vars
);
4994 /* Return true if RTL X contains a SYMBOL_REF. */
4997 contains_symbol_ref (rtx x
)
5006 code
= GET_CODE (x
);
5007 if (code
== SYMBOL_REF
)
5010 fmt
= GET_RTX_FORMAT (code
);
5011 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5015 if (contains_symbol_ref (XEXP (x
, i
)))
5018 else if (fmt
[i
] == 'E')
5021 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5022 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
5030 /* Shall EXPR be tracked? */
5033 track_expr_p (tree expr
, bool need_rtl
)
5038 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
5039 return DECL_RTL_SET_P (expr
);
5041 /* If EXPR is not a parameter or a variable do not track it. */
5042 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
5045 /* It also must have a name... */
5046 if (!DECL_NAME (expr
) && need_rtl
)
5049 /* ... and a RTL assigned to it. */
5050 decl_rtl
= DECL_RTL_IF_SET (expr
);
5051 if (!decl_rtl
&& need_rtl
)
5054 /* If this expression is really a debug alias of some other declaration, we
5055 don't need to track this expression if the ultimate declaration is
5058 if (TREE_CODE (realdecl
) == VAR_DECL
&& DECL_HAS_DEBUG_EXPR_P (realdecl
))
5060 realdecl
= DECL_DEBUG_EXPR (realdecl
);
5061 if (!DECL_P (realdecl
))
5063 if (handled_component_p (realdecl
)
5064 || (TREE_CODE (realdecl
) == MEM_REF
5065 && TREE_CODE (TREE_OPERAND (realdecl
, 0)) == ADDR_EXPR
))
5067 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
5069 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
5071 if (!DECL_P (innerdecl
)
5072 || DECL_IGNORED_P (innerdecl
)
5073 || TREE_STATIC (innerdecl
)
5075 || bitpos
+ bitsize
> 256
5076 || bitsize
!= maxsize
)
5086 /* Do not track EXPR if REALDECL it should be ignored for debugging
5088 if (DECL_IGNORED_P (realdecl
))
5091 /* Do not track global variables until we are able to emit correct location
5093 if (TREE_STATIC (realdecl
))
5096 /* When the EXPR is a DECL for alias of some variable (see example)
5097 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
5098 DECL_RTL contains SYMBOL_REF.
5101 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5104 if (decl_rtl
&& MEM_P (decl_rtl
)
5105 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
5108 /* If RTX is a memory it should not be very large (because it would be
5109 an array or struct). */
5110 if (decl_rtl
&& MEM_P (decl_rtl
))
5112 /* Do not track structures and arrays. */
5113 if (GET_MODE (decl_rtl
) == BLKmode
5114 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
5116 if (MEM_SIZE_KNOWN_P (decl_rtl
)
5117 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
5121 DECL_CHANGED (expr
) = 0;
5122 DECL_CHANGED (realdecl
) = 0;
5126 /* Determine whether a given LOC refers to the same variable part as
5130 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
5133 HOST_WIDE_INT offset2
;
5135 if (! DECL_P (expr
))
5140 expr2
= REG_EXPR (loc
);
5141 offset2
= REG_OFFSET (loc
);
5143 else if (MEM_P (loc
))
5145 expr2
= MEM_EXPR (loc
);
5146 offset2
= INT_MEM_OFFSET (loc
);
5151 if (! expr2
|| ! DECL_P (expr2
))
5154 expr
= var_debug_decl (expr
);
5155 expr2
= var_debug_decl (expr2
);
5157 return (expr
== expr2
&& offset
== offset2
);
5160 /* LOC is a REG or MEM that we would like to track if possible.
5161 If EXPR is null, we don't know what expression LOC refers to,
5162 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5163 LOC is an lvalue register.
5165 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5166 is something we can track. When returning true, store the mode of
5167 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5168 from EXPR in *OFFSET_OUT (if nonnull). */
5171 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
5172 enum machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
5174 enum machine_mode mode
;
5176 if (expr
== NULL
|| !track_expr_p (expr
, true))
5179 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5180 whole subreg, but only the old inner part is really relevant. */
5181 mode
= GET_MODE (loc
);
5182 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
5184 enum machine_mode pseudo_mode
;
5186 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
5187 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
5189 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
5194 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5195 Do the same if we are storing to a register and EXPR occupies
5196 the whole of register LOC; in that case, the whole of EXPR is
5197 being changed. We exclude complex modes from the second case
5198 because the real and imaginary parts are represented as separate
5199 pseudo registers, even if the whole complex value fits into one
5201 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
5203 && !COMPLEX_MODE_P (DECL_MODE (expr
))
5204 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
5205 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
5207 mode
= DECL_MODE (expr
);
5211 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
5217 *offset_out
= offset
;
5221 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5222 want to track. When returning nonnull, make sure that the attributes
5223 on the returned value are updated. */
5226 var_lowpart (enum machine_mode mode
, rtx loc
)
5228 unsigned int offset
, reg_offset
, regno
;
5230 if (GET_MODE (loc
) == mode
)
5233 if (!REG_P (loc
) && !MEM_P (loc
))
5236 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
5239 return adjust_address_nv (loc
, mode
, offset
);
5241 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
5242 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
5244 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
5247 /* Carry information about uses and stores while walking rtx. */
5249 struct count_use_info
5251 /* The insn where the RTX is. */
5254 /* The basic block where insn is. */
5257 /* The array of n_sets sets in the insn, as determined by cselib. */
5258 struct cselib_set
*sets
;
5261 /* True if we're counting stores, false otherwise. */
5265 /* Find a VALUE corresponding to X. */
5267 static inline cselib_val
*
5268 find_use_val (rtx x
, enum machine_mode mode
, struct count_use_info
*cui
)
5274 /* This is called after uses are set up and before stores are
5275 processed by cselib, so it's safe to look up srcs, but not
5276 dsts. So we look up expressions that appear in srcs or in
5277 dest expressions, but we search the sets array for dests of
5281 /* Some targets represent memset and memcpy patterns
5282 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5283 (set (mem:BLK ...) (const_int ...)) or
5284 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5285 in that case, otherwise we end up with mode mismatches. */
5286 if (mode
== BLKmode
&& MEM_P (x
))
5288 for (i
= 0; i
< cui
->n_sets
; i
++)
5289 if (cui
->sets
[i
].dest
== x
)
5290 return cui
->sets
[i
].src_elt
;
5293 return cselib_lookup (x
, mode
, 0, VOIDmode
);
5299 /* Replace all registers and addresses in an expression with VALUE
5300 expressions that map back to them, unless the expression is a
5301 register. If no mapping is or can be performed, returns NULL. */
5304 replace_expr_with_values (rtx loc
)
5306 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
5308 else if (MEM_P (loc
))
5310 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
5311 get_address_mode (loc
), 0,
5314 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
5319 return cselib_subst_to_values (loc
, VOIDmode
);
5322 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5323 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5327 rtx_debug_expr_p (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
5331 return GET_CODE (loc
) == DEBUG_EXPR
;
5334 /* Determine what kind of micro operation to choose for a USE. Return
5335 MO_CLOBBER if no micro operation is to be generated. */
5337 static enum micro_operation_type
5338 use_type (rtx loc
, struct count_use_info
*cui
, enum machine_mode
*modep
)
5342 if (cui
&& cui
->sets
)
5344 if (GET_CODE (loc
) == VAR_LOCATION
)
5346 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
5348 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
5349 if (! VAR_LOC_UNKNOWN_P (ploc
))
5351 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
5354 /* ??? flag_float_store and volatile mems are never
5355 given values, but we could in theory use them for
5357 gcc_assert (val
|| 1);
5365 if (REG_P (loc
) || MEM_P (loc
))
5368 *modep
= GET_MODE (loc
);
5372 || (find_use_val (loc
, GET_MODE (loc
), cui
)
5373 && cselib_lookup (XEXP (loc
, 0),
5374 get_address_mode (loc
), 0,
5380 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5382 if (val
&& !cselib_preserved_value_p (val
))
5390 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
5392 if (loc
== cfa_base_rtx
)
5394 expr
= REG_EXPR (loc
);
5397 return MO_USE_NO_VAR
;
5398 else if (target_for_debug_bind (var_debug_decl (expr
)))
5400 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
5401 false, modep
, NULL
))
5404 return MO_USE_NO_VAR
;
5406 else if (MEM_P (loc
))
5408 expr
= MEM_EXPR (loc
);
5412 else if (target_for_debug_bind (var_debug_decl (expr
)))
5414 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
5416 /* Multi-part variables shouldn't refer to one-part
5417 variable names such as VALUEs (never happens) or
5418 DEBUG_EXPRs (only happens in the presence of debug
5420 && (!MAY_HAVE_DEBUG_INSNS
5421 || !for_each_rtx (&XEXP (loc
, 0), rtx_debug_expr_p
, NULL
)))
5430 /* Log to OUT information about micro-operation MOPT involving X in
5434 log_op_type (rtx x
, basic_block bb
, rtx insn
,
5435 enum micro_operation_type mopt
, FILE *out
)
5437 fprintf (out
, "bb %i op %i insn %i %s ",
5438 bb
->index
, VTI (bb
)->mos
.length (),
5439 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
5440 print_inline_rtx (out
, x
, 2);
5444 /* Tell whether the CONCAT used to holds a VALUE and its location
5445 needs value resolution, i.e., an attempt of mapping the location
5446 back to other incoming values. */
5447 #define VAL_NEEDS_RESOLUTION(x) \
5448 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5449 /* Whether the location in the CONCAT is a tracked expression, that
5450 should also be handled like a MO_USE. */
5451 #define VAL_HOLDS_TRACK_EXPR(x) \
5452 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5453 /* Whether the location in the CONCAT should be handled like a MO_COPY
5455 #define VAL_EXPR_IS_COPIED(x) \
5456 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5457 /* Whether the location in the CONCAT should be handled like a
5458 MO_CLOBBER as well. */
5459 #define VAL_EXPR_IS_CLOBBERED(x) \
5460 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5462 /* All preserved VALUEs. */
5463 static vec
<rtx
> preserved_values
;
5465 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5468 preserve_value (cselib_val
*val
)
5470 cselib_preserve_value (val
);
5471 preserved_values
.safe_push (val
->val_rtx
);
5474 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5475 any rtxes not suitable for CONST use not replaced by VALUEs
5479 non_suitable_const (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
5484 switch (GET_CODE (*x
))
5495 return !MEM_READONLY_P (*x
);
5501 /* Add uses (register and memory references) LOC which will be tracked
5502 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5505 add_uses (rtx
*ploc
, void *data
)
5508 enum machine_mode mode
= VOIDmode
;
5509 struct count_use_info
*cui
= (struct count_use_info
*)data
;
5510 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5512 if (type
!= MO_CLOBBER
)
5514 basic_block bb
= cui
->bb
;
5518 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5519 mo
.insn
= cui
->insn
;
5521 if (type
== MO_VAL_LOC
)
5524 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5527 gcc_assert (cui
->sets
);
5530 && !REG_P (XEXP (vloc
, 0))
5531 && !MEM_P (XEXP (vloc
, 0)))
5534 enum machine_mode address_mode
= get_address_mode (mloc
);
5536 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5539 if (val
&& !cselib_preserved_value_p (val
))
5540 preserve_value (val
);
5543 if (CONSTANT_P (vloc
)
5544 && (GET_CODE (vloc
) != CONST
5545 || for_each_rtx (&vloc
, non_suitable_const
, NULL
)))
5546 /* For constants don't look up any value. */;
5547 else if (!VAR_LOC_UNKNOWN_P (vloc
) && !unsuitable_loc (vloc
)
5548 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5550 enum machine_mode mode2
;
5551 enum micro_operation_type type2
;
5553 bool resolvable
= REG_P (vloc
) || MEM_P (vloc
);
5556 nloc
= replace_expr_with_values (vloc
);
5560 oloc
= shallow_copy_rtx (oloc
);
5561 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5564 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5566 type2
= use_type (vloc
, 0, &mode2
);
5568 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5569 || type2
== MO_CLOBBER
);
5571 if (type2
== MO_CLOBBER
5572 && !cselib_preserved_value_p (val
))
5574 VAL_NEEDS_RESOLUTION (oloc
) = resolvable
;
5575 preserve_value (val
);
5578 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5580 oloc
= shallow_copy_rtx (oloc
);
5581 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5586 else if (type
== MO_VAL_USE
)
5588 enum machine_mode mode2
= VOIDmode
;
5589 enum micro_operation_type type2
;
5590 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5591 rtx vloc
, oloc
= loc
, nloc
;
5593 gcc_assert (cui
->sets
);
5596 && !REG_P (XEXP (oloc
, 0))
5597 && !MEM_P (XEXP (oloc
, 0)))
5600 enum machine_mode address_mode
= get_address_mode (mloc
);
5602 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5605 if (val
&& !cselib_preserved_value_p (val
))
5606 preserve_value (val
);
5609 type2
= use_type (loc
, 0, &mode2
);
5611 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5612 || type2
== MO_CLOBBER
);
5614 if (type2
== MO_USE
)
5615 vloc
= var_lowpart (mode2
, loc
);
5619 /* The loc of a MO_VAL_USE may have two forms:
5621 (concat val src): val is at src, a value-based
5624 (concat (concat val use) src): same as above, with use as
5625 the MO_USE tracked value, if it differs from src.
5629 gcc_checking_assert (REG_P (loc
) || MEM_P (loc
));
5630 nloc
= replace_expr_with_values (loc
);
5635 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5637 oloc
= val
->val_rtx
;
5639 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5641 if (type2
== MO_USE
)
5642 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5643 if (!cselib_preserved_value_p (val
))
5645 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5646 preserve_value (val
);
5650 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5652 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5653 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5654 VTI (bb
)->mos
.safe_push (mo
);
5660 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5663 add_uses_1 (rtx
*x
, void *cui
)
5665 for_each_rtx (x
, add_uses
, cui
);
5668 /* This is the value used during expansion of locations. We want it
5669 to be unbounded, so that variables expanded deep in a recursion
5670 nest are fully evaluated, so that their values are cached
5671 correctly. We avoid recursion cycles through other means, and we
5672 don't unshare RTL, so excess complexity is not a problem. */
5673 #define EXPR_DEPTH (INT_MAX)
5674 /* We use this to keep too-complex expressions from being emitted as
5675 location notes, and then to debug information. Users can trade
5676 compile time for ridiculously complex expressions, although they're
5677 seldom useful, and they may often have to be discarded as not
5678 representable anyway. */
5679 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5681 /* Attempt to reverse the EXPR operation in the debug info and record
5682 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5683 no longer live we can express its value as VAL - 6. */
5686 reverse_op (rtx val
, const_rtx expr
, rtx insn
)
5690 struct elt_loc_list
*l
;
5694 if (GET_CODE (expr
) != SET
)
5697 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5700 src
= SET_SRC (expr
);
5701 switch (GET_CODE (src
))
5708 if (!REG_P (XEXP (src
, 0)))
5713 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5720 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
5723 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5724 if (!v
|| !cselib_preserved_value_p (v
))
5727 /* Use canonical V to avoid creating multiple redundant expressions
5728 for different VALUES equivalent to V. */
5729 v
= canonical_cselib_val (v
);
5731 /* Adding a reverse op isn't useful if V already has an always valid
5732 location. Ignore ENTRY_VALUE, while it is always constant, we should
5733 prefer non-ENTRY_VALUE locations whenever possible. */
5734 for (l
= v
->locs
, count
= 0; l
; l
= l
->next
, count
++)
5735 if (CONSTANT_P (l
->loc
)
5736 && (GET_CODE (l
->loc
) != CONST
|| !references_value_p (l
->loc
, 0)))
5738 /* Avoid creating too large locs lists. */
5739 else if (count
== PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE
))
5742 switch (GET_CODE (src
))
5746 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5748 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5752 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5764 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5766 arg
= XEXP (src
, 1);
5767 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5769 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5770 if (arg
== NULL_RTX
)
5772 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5775 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5777 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5778 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5779 breaks a lot of routines during var-tracking. */
5780 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5786 cselib_add_permanent_equiv (v
, ret
, insn
);
5789 /* Add stores (register and memory references) LOC which will be tracked
5790 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5791 CUIP->insn is instruction which the LOC is part of. */
5794 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5796 enum machine_mode mode
= VOIDmode
, mode2
;
5797 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5798 basic_block bb
= cui
->bb
;
5800 rtx oloc
= loc
, nloc
, src
= NULL
;
5801 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5802 bool track_p
= false;
5804 bool resolve
, preserve
;
5806 if (type
== MO_CLOBBER
)
5813 gcc_assert (loc
!= cfa_base_rtx
);
5814 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5815 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5816 || GET_CODE (expr
) == CLOBBER
)
5818 mo
.type
= MO_CLOBBER
;
5820 if (GET_CODE (expr
) == SET
5821 && SET_DEST (expr
) == loc
5822 && !unsuitable_loc (SET_SRC (expr
))
5823 && find_use_val (loc
, mode
, cui
))
5825 gcc_checking_assert (type
== MO_VAL_SET
);
5826 mo
.u
.loc
= gen_rtx_SET (VOIDmode
, loc
, SET_SRC (expr
));
5831 if (GET_CODE (expr
) == SET
5832 && SET_DEST (expr
) == loc
5833 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5834 src
= var_lowpart (mode2
, SET_SRC (expr
));
5835 loc
= var_lowpart (mode2
, loc
);
5844 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5845 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5847 /* If this is an instruction copying (part of) a parameter
5848 passed by invisible reference to its register location,
5849 pretend it's a SET so that the initial memory location
5850 is discarded, as the parameter register can be reused
5851 for other purposes and we do not track locations based
5852 on generic registers. */
5855 && TREE_CODE (REG_EXPR (loc
)) == PARM_DECL
5856 && DECL_MODE (REG_EXPR (loc
)) != BLKmode
5857 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc
)))
5858 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc
)), 0)
5869 mo
.insn
= cui
->insn
;
5871 else if (MEM_P (loc
)
5872 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5875 if (MEM_P (loc
) && type
== MO_VAL_SET
5876 && !REG_P (XEXP (loc
, 0))
5877 && !MEM_P (XEXP (loc
, 0)))
5880 enum machine_mode address_mode
= get_address_mode (mloc
);
5881 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5885 if (val
&& !cselib_preserved_value_p (val
))
5886 preserve_value (val
);
5889 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5891 mo
.type
= MO_CLOBBER
;
5892 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5896 if (GET_CODE (expr
) == SET
5897 && SET_DEST (expr
) == loc
5898 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5899 src
= var_lowpart (mode2
, SET_SRC (expr
));
5900 loc
= var_lowpart (mode2
, loc
);
5909 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5910 if (same_variable_part_p (SET_SRC (xexpr
),
5912 INT_MEM_OFFSET (loc
)))
5919 mo
.insn
= cui
->insn
;
5924 if (type
!= MO_VAL_SET
)
5925 goto log_and_return
;
5927 v
= find_use_val (oloc
, mode
, cui
);
5930 goto log_and_return
;
5932 resolve
= preserve
= !cselib_preserved_value_p (v
);
5934 if (loc
== stack_pointer_rtx
5935 && hard_frame_pointer_adjustment
!= -1
5937 cselib_set_value_sp_based (v
);
5939 nloc
= replace_expr_with_values (oloc
);
5943 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
5945 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
5947 gcc_assert (oval
!= v
);
5948 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
5950 if (oval
&& !cselib_preserved_value_p (oval
))
5952 micro_operation moa
;
5954 preserve_value (oval
);
5956 moa
.type
= MO_VAL_USE
;
5957 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
5958 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
5959 moa
.insn
= cui
->insn
;
5961 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5962 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5963 moa
.type
, dump_file
);
5964 VTI (bb
)->mos
.safe_push (moa
);
5969 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
5971 if (REG_P (SET_SRC (expr
)) || MEM_P (SET_SRC (expr
)))
5972 nloc
= replace_expr_with_values (SET_SRC (expr
));
5976 /* Avoid the mode mismatch between oexpr and expr. */
5977 if (!nloc
&& mode
!= mode2
)
5979 nloc
= SET_SRC (expr
);
5980 gcc_assert (oloc
== SET_DEST (expr
));
5983 if (nloc
&& nloc
!= SET_SRC (mo
.u
.loc
))
5984 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
5987 if (oloc
== SET_DEST (mo
.u
.loc
))
5988 /* No point in duplicating. */
5990 if (!REG_P (SET_SRC (mo
.u
.loc
)))
5996 if (GET_CODE (mo
.u
.loc
) == SET
5997 && oloc
== SET_DEST (mo
.u
.loc
))
5998 /* No point in duplicating. */
6004 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
6006 if (mo
.u
.loc
!= oloc
)
6007 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
6009 /* The loc of a MO_VAL_SET may have various forms:
6011 (concat val dst): dst now holds val
6013 (concat val (set dst src)): dst now holds val, copied from src
6015 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6016 after replacing mems and non-top-level regs with values.
6018 (concat (concat val dstv) (set dst src)): dst now holds val,
6019 copied from src. dstv is a value-based representation of dst, if
6020 it differs from dst. If resolution is needed, src is a REG, and
6021 its mode is the same as that of val.
6023 (concat (concat val (set dstv srcv)) (set dst src)): src
6024 copied to dst, holding val. dstv and srcv are value-based
6025 representations of dst and src, respectively.
6029 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
6030 reverse_op (v
->val_rtx
, expr
, cui
->insn
);
6035 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
6038 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
6041 if (mo
.type
== MO_CLOBBER
)
6042 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
6043 if (mo
.type
== MO_COPY
)
6044 VAL_EXPR_IS_COPIED (loc
) = 1;
6046 mo
.type
= MO_VAL_SET
;
6049 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6050 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
6051 VTI (bb
)->mos
.safe_push (mo
);
6054 /* Arguments to the call. */
6055 static rtx call_arguments
;
6057 /* Compute call_arguments. */
6060 prepare_call_arguments (basic_block bb
, rtx insn
)
6063 rtx prev
, cur
, next
;
6064 rtx this_arg
= NULL_RTX
;
6065 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
6066 tree obj_type_ref
= NULL_TREE
;
6067 CUMULATIVE_ARGS args_so_far_v
;
6068 cumulative_args_t args_so_far
;
6070 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
6071 args_so_far
= pack_cumulative_args (&args_so_far_v
);
6072 call
= get_call_rtx_from (insn
);
6075 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
6077 rtx symbol
= XEXP (XEXP (call
, 0), 0);
6078 if (SYMBOL_REF_DECL (symbol
))
6079 fndecl
= SYMBOL_REF_DECL (symbol
);
6081 if (fndecl
== NULL_TREE
)
6082 fndecl
= MEM_EXPR (XEXP (call
, 0));
6084 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
6085 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
6087 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
6088 type
= TREE_TYPE (fndecl
);
6089 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
6091 if (TREE_CODE (fndecl
) == INDIRECT_REF
6092 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
6093 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
6098 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
6100 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
6101 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
6103 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
6107 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
6108 link
= CALL_INSN_FUNCTION_USAGE (insn
);
6109 #ifndef PCC_STATIC_STRUCT_RETURN
6110 if (aggregate_value_p (TREE_TYPE (type
), type
)
6111 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
6113 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
6114 enum machine_mode mode
= TYPE_MODE (struct_addr
);
6116 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6118 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6120 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6122 if (reg
== NULL_RTX
)
6124 for (; link
; link
= XEXP (link
, 1))
6125 if (GET_CODE (XEXP (link
, 0)) == USE
6126 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6128 link
= XEXP (link
, 1);
6135 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
6137 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
6139 enum machine_mode mode
;
6140 t
= TYPE_ARG_TYPES (type
);
6141 mode
= TYPE_MODE (TREE_VALUE (t
));
6142 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6143 TREE_VALUE (t
), true);
6144 if (this_arg
&& !REG_P (this_arg
))
6145 this_arg
= NULL_RTX
;
6146 else if (this_arg
== NULL_RTX
)
6148 for (; link
; link
= XEXP (link
, 1))
6149 if (GET_CODE (XEXP (link
, 0)) == USE
6150 && MEM_P (XEXP (XEXP (link
, 0), 0)))
6152 this_arg
= XEXP (XEXP (link
, 0), 0);
6160 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
6162 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
6163 if (GET_CODE (XEXP (link
, 0)) == USE
)
6165 rtx item
= NULL_RTX
;
6166 x
= XEXP (XEXP (link
, 0), 0);
6167 if (GET_MODE (link
) == VOIDmode
6168 || GET_MODE (link
) == BLKmode
6169 || (GET_MODE (link
) != GET_MODE (x
)
6170 && (GET_MODE_CLASS (GET_MODE (link
)) != MODE_INT
6171 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
)))
6172 /* Can't do anything for these, if the original type mode
6173 isn't known or can't be converted. */;
6176 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6177 if (val
&& cselib_preserved_value_p (val
))
6178 item
= val
->val_rtx
;
6179 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
6181 enum machine_mode mode
= GET_MODE (x
);
6183 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
6184 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
6186 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
6188 if (reg
== NULL_RTX
|| !REG_P (reg
))
6190 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
6191 if (val
&& cselib_preserved_value_p (val
))
6193 item
= val
->val_rtx
;
6204 if (!frame_pointer_needed
)
6206 struct adjust_mem_data amd
;
6207 amd
.mem_mode
= VOIDmode
;
6208 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
6209 amd
.side_effects
= NULL_RTX
;
6211 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
6213 gcc_assert (amd
.side_effects
== NULL_RTX
);
6215 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
6216 if (val
&& cselib_preserved_value_p (val
))
6217 item
= val
->val_rtx
;
6218 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
)
6220 /* For non-integer stack argument see also if they weren't
6221 initialized by integers. */
6222 enum machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
6223 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
6225 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
6226 imode
, 0, VOIDmode
);
6227 if (val
&& cselib_preserved_value_p (val
))
6228 item
= lowpart_subreg (GET_MODE (x
), val
->val_rtx
,
6236 if (GET_MODE (item
) != GET_MODE (link
))
6237 item
= lowpart_subreg (GET_MODE (link
), item
, GET_MODE (item
));
6238 if (GET_MODE (x2
) != GET_MODE (link
))
6239 x2
= lowpart_subreg (GET_MODE (link
), x2
, GET_MODE (x2
));
6240 item
= gen_rtx_CONCAT (GET_MODE (link
), x2
, item
);
6242 = gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
6244 if (t
&& t
!= void_list_node
)
6246 tree argtype
= TREE_VALUE (t
);
6247 enum machine_mode mode
= TYPE_MODE (argtype
);
6249 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
6251 argtype
= build_pointer_type (argtype
);
6252 mode
= TYPE_MODE (argtype
);
6254 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
6256 if (TREE_CODE (argtype
) == REFERENCE_TYPE
6257 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
6260 && GET_MODE (reg
) == mode
6261 && GET_MODE_CLASS (mode
) == MODE_INT
6263 && REGNO (x
) == REGNO (reg
)
6264 && GET_MODE (x
) == mode
6267 enum machine_mode indmode
6268 = TYPE_MODE (TREE_TYPE (argtype
));
6269 rtx mem
= gen_rtx_MEM (indmode
, x
);
6270 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
6271 if (val
&& cselib_preserved_value_p (val
))
6273 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
6274 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6279 struct elt_loc_list
*l
;
6282 /* Try harder, when passing address of a constant
6283 pool integer it can be easily read back. */
6284 item
= XEXP (item
, 1);
6285 if (GET_CODE (item
) == SUBREG
)
6286 item
= SUBREG_REG (item
);
6287 gcc_assert (GET_CODE (item
) == VALUE
);
6288 val
= CSELIB_VAL_PTR (item
);
6289 for (l
= val
->locs
; l
; l
= l
->next
)
6290 if (GET_CODE (l
->loc
) == SYMBOL_REF
6291 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
6292 && SYMBOL_REF_DECL (l
->loc
)
6293 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
6295 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
6296 if (tree_fits_shwi_p (initial
))
6298 item
= GEN_INT (tree_to_shwi (initial
));
6299 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
6301 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
6308 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
6314 /* Add debug arguments. */
6316 && TREE_CODE (fndecl
) == FUNCTION_DECL
6317 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
6319 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (fndecl
);
6324 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, ¶m
); ix
+= 2)
6327 tree dtemp
= (**debug_args
)[ix
+ 1];
6328 enum machine_mode mode
= DECL_MODE (dtemp
);
6329 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
6330 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL_KNOWN_SET (dtemp
));
6331 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
6337 /* Reverse call_arguments chain. */
6339 for (cur
= call_arguments
; cur
; cur
= next
)
6341 next
= XEXP (cur
, 1);
6342 XEXP (cur
, 1) = prev
;
6345 call_arguments
= prev
;
6347 x
= get_call_rtx_from (insn
);
6350 x
= XEXP (XEXP (x
, 0), 0);
6351 if (GET_CODE (x
) == SYMBOL_REF
)
6352 /* Don't record anything. */;
6353 else if (CONSTANT_P (x
))
6355 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
6358 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6362 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
6363 if (val
&& cselib_preserved_value_p (val
))
6365 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
6367 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6373 enum machine_mode mode
6374 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
6375 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
6377 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref
));
6379 clobbered
= plus_constant (mode
, clobbered
,
6380 token
* GET_MODE_SIZE (mode
));
6381 clobbered
= gen_rtx_MEM (mode
, clobbered
);
6382 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
6384 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
6388 /* Callback for cselib_record_sets_hook, that records as micro
6389 operations uses and stores in an insn after cselib_record_sets has
6390 analyzed the sets in an insn, but before it modifies the stored
6391 values in the internal tables, unless cselib_record_sets doesn't
6392 call it directly (perhaps because we're not doing cselib in the
6393 first place, in which case sets and n_sets will be 0). */
6396 add_with_sets (rtx insn
, struct cselib_set
*sets
, int n_sets
)
6398 basic_block bb
= BLOCK_FOR_INSN (insn
);
6400 struct count_use_info cui
;
6401 micro_operation
*mos
;
6403 cselib_hook_called
= true;
6408 cui
.n_sets
= n_sets
;
6410 n1
= VTI (bb
)->mos
.length ();
6411 cui
.store_p
= false;
6412 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
6413 n2
= VTI (bb
)->mos
.length () - 1;
6414 mos
= VTI (bb
)->mos
.address ();
6416 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6420 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
6422 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
6434 n2
= VTI (bb
)->mos
.length () - 1;
6437 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
6439 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6457 mo
.u
.loc
= call_arguments
;
6458 call_arguments
= NULL_RTX
;
6460 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6461 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6462 VTI (bb
)->mos
.safe_push (mo
);
6465 n1
= VTI (bb
)->mos
.length ();
6466 /* This will record NEXT_INSN (insn), such that we can
6467 insert notes before it without worrying about any
6468 notes that MO_USEs might emit after the insn. */
6470 note_stores (PATTERN (insn
), add_stores
, &cui
);
6471 n2
= VTI (bb
)->mos
.length () - 1;
6472 mos
= VTI (bb
)->mos
.address ();
6474 /* Order the MO_VAL_USEs first (note_stores does nothing
6475 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6476 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6479 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6481 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6493 n2
= VTI (bb
)->mos
.length () - 1;
6496 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6498 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6511 static enum var_init_status
6512 find_src_status (dataflow_set
*in
, rtx src
)
6514 tree decl
= NULL_TREE
;
6515 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6517 if (! flag_var_tracking_uninit
)
6518 status
= VAR_INIT_STATUS_INITIALIZED
;
6520 if (src
&& REG_P (src
))
6521 decl
= var_debug_decl (REG_EXPR (src
));
6522 else if (src
&& MEM_P (src
))
6523 decl
= var_debug_decl (MEM_EXPR (src
));
6526 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6531 /* SRC is the source of an assignment. Use SET to try to find what
6532 was ultimately assigned to SRC. Return that value if known,
6533 otherwise return SRC itself. */
6536 find_src_set_src (dataflow_set
*set
, rtx src
)
6538 tree decl
= NULL_TREE
; /* The variable being copied around. */
6539 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6541 location_chain nextp
;
6545 if (src
&& REG_P (src
))
6546 decl
= var_debug_decl (REG_EXPR (src
));
6547 else if (src
&& MEM_P (src
))
6548 decl
= var_debug_decl (MEM_EXPR (src
));
6552 decl_or_value dv
= dv_from_decl (decl
);
6554 var
= shared_hash_find (set
->vars
, dv
);
6558 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6559 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6560 nextp
= nextp
->next
)
6561 if (rtx_equal_p (nextp
->loc
, src
))
6563 set_src
= nextp
->set_src
;
6573 /* Compute the changes of variable locations in the basic block BB. */
6576 compute_bb_dataflow (basic_block bb
)
6579 micro_operation
*mo
;
6581 dataflow_set old_out
;
6582 dataflow_set
*in
= &VTI (bb
)->in
;
6583 dataflow_set
*out
= &VTI (bb
)->out
;
6585 dataflow_set_init (&old_out
);
6586 dataflow_set_copy (&old_out
, out
);
6587 dataflow_set_copy (out
, in
);
6589 if (MAY_HAVE_DEBUG_INSNS
)
6590 local_get_addr_cache
= pointer_map_create ();
6592 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
6594 rtx insn
= mo
->insn
;
6599 dataflow_set_clear_at_call (out
);
6604 rtx loc
= mo
->u
.loc
;
6607 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6608 else if (MEM_P (loc
))
6609 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6615 rtx loc
= mo
->u
.loc
;
6619 if (GET_CODE (loc
) == CONCAT
)
6621 val
= XEXP (loc
, 0);
6622 vloc
= XEXP (loc
, 1);
6630 var
= PAT_VAR_LOCATION_DECL (vloc
);
6632 clobber_variable_part (out
, NULL_RTX
,
6633 dv_from_decl (var
), 0, NULL_RTX
);
6636 if (VAL_NEEDS_RESOLUTION (loc
))
6637 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6638 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6639 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6642 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6643 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6644 dv_from_decl (var
), 0,
6645 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6652 rtx loc
= mo
->u
.loc
;
6653 rtx val
, vloc
, uloc
;
6655 vloc
= uloc
= XEXP (loc
, 1);
6656 val
= XEXP (loc
, 0);
6658 if (GET_CODE (val
) == CONCAT
)
6660 uloc
= XEXP (val
, 1);
6661 val
= XEXP (val
, 0);
6664 if (VAL_NEEDS_RESOLUTION (loc
))
6665 val_resolve (out
, val
, vloc
, insn
);
6667 val_store (out
, val
, uloc
, insn
, false);
6669 if (VAL_HOLDS_TRACK_EXPR (loc
))
6671 if (GET_CODE (uloc
) == REG
)
6672 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6674 else if (GET_CODE (uloc
) == MEM
)
6675 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6683 rtx loc
= mo
->u
.loc
;
6684 rtx val
, vloc
, uloc
;
6688 uloc
= XEXP (vloc
, 1);
6689 val
= XEXP (vloc
, 0);
6692 if (GET_CODE (uloc
) == SET
)
6694 dstv
= SET_DEST (uloc
);
6695 srcv
= SET_SRC (uloc
);
6703 if (GET_CODE (val
) == CONCAT
)
6705 dstv
= vloc
= XEXP (val
, 1);
6706 val
= XEXP (val
, 0);
6709 if (GET_CODE (vloc
) == SET
)
6711 srcv
= SET_SRC (vloc
);
6713 gcc_assert (val
!= srcv
);
6714 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6716 dstv
= vloc
= SET_DEST (vloc
);
6718 if (VAL_NEEDS_RESOLUTION (loc
))
6719 val_resolve (out
, val
, srcv
, insn
);
6721 else if (VAL_NEEDS_RESOLUTION (loc
))
6723 gcc_assert (GET_CODE (uloc
) == SET
6724 && GET_CODE (SET_SRC (uloc
)) == REG
);
6725 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6728 if (VAL_HOLDS_TRACK_EXPR (loc
))
6730 if (VAL_EXPR_IS_CLOBBERED (loc
))
6733 var_reg_delete (out
, uloc
, true);
6734 else if (MEM_P (uloc
))
6736 gcc_assert (MEM_P (dstv
));
6737 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
6738 var_mem_delete (out
, dstv
, true);
6743 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6744 rtx src
= NULL
, dst
= uloc
;
6745 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6747 if (GET_CODE (uloc
) == SET
)
6749 src
= SET_SRC (uloc
);
6750 dst
= SET_DEST (uloc
);
6755 if (flag_var_tracking_uninit
)
6757 status
= find_src_status (in
, src
);
6759 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6760 status
= find_src_status (out
, src
);
6763 src
= find_src_set_src (in
, src
);
6767 var_reg_delete_and_set (out
, dst
, !copied_p
,
6769 else if (MEM_P (dst
))
6771 gcc_assert (MEM_P (dstv
));
6772 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
6773 var_mem_delete_and_set (out
, dstv
, !copied_p
,
6778 else if (REG_P (uloc
))
6779 var_regno_delete (out
, REGNO (uloc
));
6780 else if (MEM_P (uloc
))
6782 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
6783 gcc_checking_assert (dstv
== vloc
);
6785 clobber_overlapping_mems (out
, vloc
);
6788 val_store (out
, val
, dstv
, insn
, true);
6794 rtx loc
= mo
->u
.loc
;
6797 if (GET_CODE (loc
) == SET
)
6799 set_src
= SET_SRC (loc
);
6800 loc
= SET_DEST (loc
);
6804 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6806 else if (MEM_P (loc
))
6807 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6814 rtx loc
= mo
->u
.loc
;
6815 enum var_init_status src_status
;
6818 if (GET_CODE (loc
) == SET
)
6820 set_src
= SET_SRC (loc
);
6821 loc
= SET_DEST (loc
);
6824 if (! flag_var_tracking_uninit
)
6825 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6828 src_status
= find_src_status (in
, set_src
);
6830 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6831 src_status
= find_src_status (out
, set_src
);
6834 set_src
= find_src_set_src (in
, set_src
);
6837 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6838 else if (MEM_P (loc
))
6839 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6845 rtx loc
= mo
->u
.loc
;
6848 var_reg_delete (out
, loc
, false);
6849 else if (MEM_P (loc
))
6850 var_mem_delete (out
, loc
, false);
6856 rtx loc
= mo
->u
.loc
;
6859 var_reg_delete (out
, loc
, true);
6860 else if (MEM_P (loc
))
6861 var_mem_delete (out
, loc
, true);
6866 out
->stack_adjust
+= mo
->u
.adjust
;
6871 if (MAY_HAVE_DEBUG_INSNS
)
6873 pointer_map_destroy (local_get_addr_cache
);
6874 local_get_addr_cache
= NULL
;
6876 dataflow_set_equiv_regs (out
);
6877 shared_hash_htab (out
->vars
)
6878 .traverse
<dataflow_set
*, canonicalize_values_mark
> (out
);
6879 shared_hash_htab (out
->vars
)
6880 .traverse
<dataflow_set
*, canonicalize_values_star
> (out
);
6882 shared_hash_htab (out
->vars
)
6883 .traverse
<dataflow_set
*, canonicalize_loc_order_check
> (out
);
6886 changed
= dataflow_set_different (&old_out
, out
);
6887 dataflow_set_destroy (&old_out
);
6891 /* Find the locations of variables in the whole function. */
6894 vt_find_locations (void)
6896 fibheap_t worklist
, pending
, fibheap_swap
;
6897 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
6904 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
6905 bool success
= true;
6907 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
6908 /* Compute reverse completion order of depth first search of the CFG
6909 so that the data-flow runs faster. */
6910 rc_order
= XNEWVEC (int, n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
);
6911 bb_order
= XNEWVEC (int, last_basic_block
);
6912 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
6913 for (i
= 0; i
< n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
; i
++)
6914 bb_order
[rc_order
[i
]] = i
;
6917 worklist
= fibheap_new ();
6918 pending
= fibheap_new ();
6919 visited
= sbitmap_alloc (last_basic_block
);
6920 in_worklist
= sbitmap_alloc (last_basic_block
);
6921 in_pending
= sbitmap_alloc (last_basic_block
);
6922 bitmap_clear (in_worklist
);
6925 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
6926 bitmap_ones (in_pending
);
6928 while (success
&& !fibheap_empty (pending
))
6930 fibheap_swap
= pending
;
6932 worklist
= fibheap_swap
;
6933 sbitmap_swap
= in_pending
;
6934 in_pending
= in_worklist
;
6935 in_worklist
= sbitmap_swap
;
6937 bitmap_clear (visited
);
6939 while (!fibheap_empty (worklist
))
6941 bb
= (basic_block
) fibheap_extract_min (worklist
);
6942 bitmap_clear_bit (in_worklist
, bb
->index
);
6943 gcc_assert (!bitmap_bit_p (visited
, bb
->index
));
6944 if (!bitmap_bit_p (visited
, bb
->index
))
6948 int oldinsz
, oldoutsz
;
6950 bitmap_set_bit (visited
, bb
->index
);
6952 if (VTI (bb
)->in
.vars
)
6955 -= shared_hash_htab (VTI (bb
)->in
.vars
).size ()
6956 + shared_hash_htab (VTI (bb
)->out
.vars
).size ();
6957 oldinsz
= shared_hash_htab (VTI (bb
)->in
.vars
).elements ();
6958 oldoutsz
= shared_hash_htab (VTI (bb
)->out
.vars
).elements ();
6961 oldinsz
= oldoutsz
= 0;
6963 if (MAY_HAVE_DEBUG_INSNS
)
6965 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
6966 bool first
= true, adjust
= false;
6968 /* Calculate the IN set as the intersection of
6969 predecessor OUT sets. */
6971 dataflow_set_clear (in
);
6972 dst_can_be_shared
= true;
6974 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6975 if (!VTI (e
->src
)->flooded
)
6976 gcc_assert (bb_order
[bb
->index
]
6977 <= bb_order
[e
->src
->index
]);
6980 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
6981 first_out
= &VTI (e
->src
)->out
;
6986 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
6992 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
6994 /* Merge and merge_adjust should keep entries in
6996 shared_hash_htab (in
->vars
)
6997 .traverse
<dataflow_set
*,
6998 canonicalize_loc_order_check
> (in
);
7000 if (dst_can_be_shared
)
7002 shared_hash_destroy (in
->vars
);
7003 in
->vars
= shared_hash_copy (first_out
->vars
);
7007 VTI (bb
)->flooded
= true;
7011 /* Calculate the IN set as union of predecessor OUT sets. */
7012 dataflow_set_clear (&VTI (bb
)->in
);
7013 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7014 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
7017 changed
= compute_bb_dataflow (bb
);
7018 htabsz
+= shared_hash_htab (VTI (bb
)->in
.vars
).size ()
7019 + shared_hash_htab (VTI (bb
)->out
.vars
).size ();
7021 if (htabmax
&& htabsz
> htabmax
)
7023 if (MAY_HAVE_DEBUG_INSNS
)
7024 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7025 "variable tracking size limit exceeded with "
7026 "-fvar-tracking-assignments, retrying without");
7028 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
7029 "variable tracking size limit exceeded");
7036 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7038 if (e
->dest
== EXIT_BLOCK_PTR
)
7041 if (bitmap_bit_p (visited
, e
->dest
->index
))
7043 if (!bitmap_bit_p (in_pending
, e
->dest
->index
))
7045 /* Send E->DEST to next round. */
7046 bitmap_set_bit (in_pending
, e
->dest
->index
);
7047 fibheap_insert (pending
,
7048 bb_order
[e
->dest
->index
],
7052 else if (!bitmap_bit_p (in_worklist
, e
->dest
->index
))
7054 /* Add E->DEST to current round. */
7055 bitmap_set_bit (in_worklist
, e
->dest
->index
);
7056 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
7064 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7066 (int)shared_hash_htab (VTI (bb
)->in
.vars
).size (),
7068 (int)shared_hash_htab (VTI (bb
)->out
.vars
).size (),
7070 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
7072 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7074 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
7075 dump_dataflow_set (&VTI (bb
)->in
);
7076 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
7077 dump_dataflow_set (&VTI (bb
)->out
);
7083 if (success
&& MAY_HAVE_DEBUG_INSNS
)
7085 gcc_assert (VTI (bb
)->flooded
);
7088 fibheap_delete (worklist
);
7089 fibheap_delete (pending
);
7090 sbitmap_free (visited
);
7091 sbitmap_free (in_worklist
);
7092 sbitmap_free (in_pending
);
7094 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
7098 /* Print the content of the LIST to dump file. */
7101 dump_attrs_list (attrs list
)
7103 for (; list
; list
= list
->next
)
7105 if (dv_is_decl_p (list
->dv
))
7106 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
7108 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
7109 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
7111 fprintf (dump_file
, "\n");
7114 /* Print the information about variable *SLOT to dump file. */
7117 dump_var_tracking_slot (variable_def
**slot
, void *data ATTRIBUTE_UNUSED
)
7119 variable var
= *slot
;
7123 /* Continue traversing the hash table. */
7127 /* Print the information about variable VAR to dump file. */
7130 dump_var (variable var
)
7133 location_chain node
;
7135 if (dv_is_decl_p (var
->dv
))
7137 const_tree decl
= dv_as_decl (var
->dv
);
7139 if (DECL_NAME (decl
))
7141 fprintf (dump_file
, " name: %s",
7142 IDENTIFIER_POINTER (DECL_NAME (decl
)));
7143 if (dump_flags
& TDF_UID
)
7144 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
7146 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7147 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
7149 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
7150 fprintf (dump_file
, "\n");
7154 fputc (' ', dump_file
);
7155 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
7158 for (i
= 0; i
< var
->n_var_parts
; i
++)
7160 fprintf (dump_file
, " offset %ld\n",
7161 (long)(var
->onepart
? 0 : VAR_PART_OFFSET (var
, i
)));
7162 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
7164 fprintf (dump_file
, " ");
7165 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
7166 fprintf (dump_file
, "[uninit]");
7167 print_rtl_single (dump_file
, node
->loc
);
7172 /* Print the information about variables from hash table VARS to dump file. */
7175 dump_vars (variable_table_type vars
)
7177 if (vars
.elements () > 0)
7179 fprintf (dump_file
, "Variables:\n");
7180 vars
.traverse
<void *, dump_var_tracking_slot
> (NULL
);
7184 /* Print the dataflow set SET to dump file. */
7187 dump_dataflow_set (dataflow_set
*set
)
7191 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
7193 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7197 fprintf (dump_file
, "Reg %d:", i
);
7198 dump_attrs_list (set
->regs
[i
]);
7201 dump_vars (shared_hash_htab (set
->vars
));
7202 fprintf (dump_file
, "\n");
7205 /* Print the IN and OUT sets for each basic block to dump file. */
7208 dump_dataflow_sets (void)
7214 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
7215 fprintf (dump_file
, "IN:\n");
7216 dump_dataflow_set (&VTI (bb
)->in
);
7217 fprintf (dump_file
, "OUT:\n");
7218 dump_dataflow_set (&VTI (bb
)->out
);
7222 /* Return the variable for DV in dropped_values, inserting one if
7223 requested with INSERT. */
7225 static inline variable
7226 variable_from_dropped (decl_or_value dv
, enum insert_option insert
)
7228 variable_def
**slot
;
7230 onepart_enum_t onepart
;
7232 slot
= dropped_values
.find_slot_with_hash (dv
, dv_htab_hash (dv
), insert
);
7240 gcc_checking_assert (insert
== INSERT
);
7242 onepart
= dv_onepart_p (dv
);
7244 gcc_checking_assert (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
);
7246 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7248 empty_var
->refcount
= 1;
7249 empty_var
->n_var_parts
= 0;
7250 empty_var
->onepart
= onepart
;
7251 empty_var
->in_changed_variables
= false;
7252 empty_var
->var_part
[0].loc_chain
= NULL
;
7253 empty_var
->var_part
[0].cur_loc
= NULL
;
7254 VAR_LOC_1PAUX (empty_var
) = NULL
;
7255 set_dv_changed (dv
, true);
7262 /* Recover the one-part aux from dropped_values. */
7264 static struct onepart_aux
*
7265 recover_dropped_1paux (variable var
)
7269 gcc_checking_assert (var
->onepart
);
7271 if (VAR_LOC_1PAUX (var
))
7272 return VAR_LOC_1PAUX (var
);
7274 if (var
->onepart
== ONEPART_VDECL
)
7277 dvar
= variable_from_dropped (var
->dv
, NO_INSERT
);
7282 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (dvar
);
7283 VAR_LOC_1PAUX (dvar
) = NULL
;
7285 return VAR_LOC_1PAUX (var
);
7288 /* Add variable VAR to the hash table of changed variables and
7289 if it has no locations delete it from SET's hash table. */
7292 variable_was_changed (variable var
, dataflow_set
*set
)
7294 hashval_t hash
= dv_htab_hash (var
->dv
);
7298 variable_def
**slot
;
7300 /* Remember this decl or VALUE has been added to changed_variables. */
7301 set_dv_changed (var
->dv
, true);
7303 slot
= changed_variables
.find_slot_with_hash (var
->dv
, hash
, INSERT
);
7307 variable old_var
= *slot
;
7308 gcc_assert (old_var
->in_changed_variables
);
7309 old_var
->in_changed_variables
= false;
7310 if (var
!= old_var
&& var
->onepart
)
7312 /* Restore the auxiliary info from an empty variable
7313 previously created for changed_variables, so it is
7315 gcc_checking_assert (!VAR_LOC_1PAUX (var
));
7316 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (old_var
);
7317 VAR_LOC_1PAUX (old_var
) = NULL
;
7319 variable_htab_free (*slot
);
7322 if (set
&& var
->n_var_parts
== 0)
7324 onepart_enum_t onepart
= var
->onepart
;
7325 variable empty_var
= NULL
;
7326 variable_def
**dslot
= NULL
;
7328 if (onepart
== ONEPART_VALUE
|| onepart
== ONEPART_DEXPR
)
7330 dslot
= dropped_values
.find_slot_with_hash (var
->dv
,
7331 dv_htab_hash (var
->dv
),
7337 gcc_checking_assert (!empty_var
->in_changed_variables
);
7338 if (!VAR_LOC_1PAUX (var
))
7340 VAR_LOC_1PAUX (var
) = VAR_LOC_1PAUX (empty_var
);
7341 VAR_LOC_1PAUX (empty_var
) = NULL
;
7344 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
7350 empty_var
= (variable
) pool_alloc (onepart_pool (onepart
));
7351 empty_var
->dv
= var
->dv
;
7352 empty_var
->refcount
= 1;
7353 empty_var
->n_var_parts
= 0;
7354 empty_var
->onepart
= onepart
;
7357 empty_var
->refcount
++;
7362 empty_var
->refcount
++;
7363 empty_var
->in_changed_variables
= true;
7367 empty_var
->var_part
[0].loc_chain
= NULL
;
7368 empty_var
->var_part
[0].cur_loc
= NULL
;
7369 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (var
);
7370 VAR_LOC_1PAUX (var
) = NULL
;
7376 if (var
->onepart
&& !VAR_LOC_1PAUX (var
))
7377 recover_dropped_1paux (var
);
7379 var
->in_changed_variables
= true;
7386 if (var
->n_var_parts
== 0)
7388 variable_def
**slot
;
7391 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
7394 if (shared_hash_shared (set
->vars
))
7395 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
7397 shared_hash_htab (set
->vars
).clear_slot (slot
);
7403 /* Look for the index in VAR->var_part corresponding to OFFSET.
7404 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7405 referenced int will be set to the index that the part has or should
7406 have, if it should be inserted. */
7409 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
7410 int *insertion_point
)
7419 if (insertion_point
)
7420 *insertion_point
= 0;
7422 return var
->n_var_parts
- 1;
7425 /* Find the location part. */
7427 high
= var
->n_var_parts
;
7430 pos
= (low
+ high
) / 2;
7431 if (VAR_PART_OFFSET (var
, pos
) < offset
)
7438 if (insertion_point
)
7439 *insertion_point
= pos
;
7441 if (pos
< var
->n_var_parts
&& VAR_PART_OFFSET (var
, pos
) == offset
)
7447 static variable_def
**
7448 set_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7449 decl_or_value dv
, HOST_WIDE_INT offset
,
7450 enum var_init_status initialized
, rtx set_src
)
7453 location_chain node
, next
;
7454 location_chain
*nextp
;
7456 onepart_enum_t onepart
;
7461 onepart
= var
->onepart
;
7463 onepart
= dv_onepart_p (dv
);
7465 gcc_checking_assert (offset
== 0 || !onepart
);
7466 gcc_checking_assert (loc
!= dv_as_opaque (dv
));
7468 if (! flag_var_tracking_uninit
)
7469 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7473 /* Create new variable information. */
7474 var
= (variable
) pool_alloc (onepart_pool (onepart
));
7477 var
->n_var_parts
= 1;
7478 var
->onepart
= onepart
;
7479 var
->in_changed_variables
= false;
7481 VAR_LOC_1PAUX (var
) = NULL
;
7483 VAR_PART_OFFSET (var
, 0) = offset
;
7484 var
->var_part
[0].loc_chain
= NULL
;
7485 var
->var_part
[0].cur_loc
= NULL
;
7488 nextp
= &var
->var_part
[0].loc_chain
;
7494 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
7498 if (GET_CODE (loc
) == VALUE
)
7500 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7501 nextp
= &node
->next
)
7502 if (GET_CODE (node
->loc
) == VALUE
)
7504 if (node
->loc
== loc
)
7509 if (canon_value_cmp (node
->loc
, loc
))
7517 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
7525 else if (REG_P (loc
))
7527 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7528 nextp
= &node
->next
)
7529 if (REG_P (node
->loc
))
7531 if (REGNO (node
->loc
) < REGNO (loc
))
7535 if (REGNO (node
->loc
) == REGNO (loc
))
7548 else if (MEM_P (loc
))
7550 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7551 nextp
= &node
->next
)
7552 if (REG_P (node
->loc
))
7554 else if (MEM_P (node
->loc
))
7556 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
7568 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
7569 nextp
= &node
->next
)
7570 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
7578 if (shared_var_p (var
, set
->vars
))
7580 slot
= unshare_variable (set
, slot
, var
, initialized
);
7582 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
7583 nextp
= &(*nextp
)->next
)
7585 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7592 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7594 pos
= find_variable_location_part (var
, offset
, &inspos
);
7598 node
= var
->var_part
[pos
].loc_chain
;
7601 && ((REG_P (node
->loc
) && REG_P (loc
)
7602 && REGNO (node
->loc
) == REGNO (loc
))
7603 || rtx_equal_p (node
->loc
, loc
)))
7605 /* LOC is in the beginning of the chain so we have nothing
7607 if (node
->init
< initialized
)
7608 node
->init
= initialized
;
7609 if (set_src
!= NULL
)
7610 node
->set_src
= set_src
;
7616 /* We have to make a copy of a shared variable. */
7617 if (shared_var_p (var
, set
->vars
))
7619 slot
= unshare_variable (set
, slot
, var
, initialized
);
7626 /* We have not found the location part, new one will be created. */
7628 /* We have to make a copy of the shared variable. */
7629 if (shared_var_p (var
, set
->vars
))
7631 slot
= unshare_variable (set
, slot
, var
, initialized
);
7635 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7636 thus there are at most MAX_VAR_PARTS different offsets. */
7637 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7638 && (!var
->n_var_parts
|| !onepart
));
7640 /* We have to move the elements of array starting at index
7641 inspos to the next position. */
7642 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7643 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7646 gcc_checking_assert (!onepart
);
7647 VAR_PART_OFFSET (var
, pos
) = offset
;
7648 var
->var_part
[pos
].loc_chain
= NULL
;
7649 var
->var_part
[pos
].cur_loc
= NULL
;
7652 /* Delete the location from the list. */
7653 nextp
= &var
->var_part
[pos
].loc_chain
;
7654 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7657 if ((REG_P (node
->loc
) && REG_P (loc
)
7658 && REGNO (node
->loc
) == REGNO (loc
))
7659 || rtx_equal_p (node
->loc
, loc
))
7661 /* Save these values, to assign to the new node, before
7662 deleting this one. */
7663 if (node
->init
> initialized
)
7664 initialized
= node
->init
;
7665 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7666 set_src
= node
->set_src
;
7667 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7668 var
->var_part
[pos
].cur_loc
= NULL
;
7669 pool_free (loc_chain_pool
, node
);
7674 nextp
= &node
->next
;
7677 nextp
= &var
->var_part
[pos
].loc_chain
;
7680 /* Add the location to the beginning. */
7681 node
= (location_chain
) pool_alloc (loc_chain_pool
);
7683 node
->init
= initialized
;
7684 node
->set_src
= set_src
;
7685 node
->next
= *nextp
;
7688 /* If no location was emitted do so. */
7689 if (var
->var_part
[pos
].cur_loc
== NULL
)
7690 variable_was_changed (var
, set
);
7695 /* Set the part of variable's location in the dataflow set SET. The
7696 variable part is specified by variable's declaration in DV and
7697 offset OFFSET and the part's location by LOC. IOPT should be
7698 NO_INSERT if the variable is known to be in SET already and the
7699 variable hash table must not be resized, and INSERT otherwise. */
7702 set_variable_part (dataflow_set
*set
, rtx loc
,
7703 decl_or_value dv
, HOST_WIDE_INT offset
,
7704 enum var_init_status initialized
, rtx set_src
,
7705 enum insert_option iopt
)
7707 variable_def
**slot
;
7709 if (iopt
== NO_INSERT
)
7710 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7713 slot
= shared_hash_find_slot (set
->vars
, dv
);
7715 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7717 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7720 /* Remove all recorded register locations for the given variable part
7721 from dataflow set SET, except for those that are identical to loc.
7722 The variable part is specified by variable's declaration or value
7723 DV and offset OFFSET. */
7725 static variable_def
**
7726 clobber_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7727 HOST_WIDE_INT offset
, rtx set_src
)
7729 variable var
= *slot
;
7730 int pos
= find_variable_location_part (var
, offset
, NULL
);
7734 location_chain node
, next
;
7736 /* Remove the register locations from the dataflow set. */
7737 next
= var
->var_part
[pos
].loc_chain
;
7738 for (node
= next
; node
; node
= next
)
7741 if (node
->loc
!= loc
7742 && (!flag_var_tracking_uninit
7745 || !rtx_equal_p (set_src
, node
->set_src
)))
7747 if (REG_P (node
->loc
))
7752 /* Remove the variable part from the register's
7753 list, but preserve any other variable parts
7754 that might be regarded as live in that same
7756 anextp
= &set
->regs
[REGNO (node
->loc
)];
7757 for (anode
= *anextp
; anode
; anode
= anext
)
7759 anext
= anode
->next
;
7760 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7761 && anode
->offset
== offset
)
7763 pool_free (attrs_pool
, anode
);
7767 anextp
= &anode
->next
;
7771 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7779 /* Remove all recorded register locations for the given variable part
7780 from dataflow set SET, except for those that are identical to loc.
7781 The variable part is specified by variable's declaration or value
7782 DV and offset OFFSET. */
7785 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7786 HOST_WIDE_INT offset
, rtx set_src
)
7788 variable_def
**slot
;
7790 if (!dv_as_opaque (dv
)
7791 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7794 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7798 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7801 /* Delete the part of variable's location from dataflow set SET. The
7802 variable part is specified by its SET->vars slot SLOT and offset
7803 OFFSET and the part's location by LOC. */
7805 static variable_def
**
7806 delete_slot_part (dataflow_set
*set
, rtx loc
, variable_def
**slot
,
7807 HOST_WIDE_INT offset
)
7809 variable var
= *slot
;
7810 int pos
= find_variable_location_part (var
, offset
, NULL
);
7814 location_chain node
, next
;
7815 location_chain
*nextp
;
7819 if (shared_var_p (var
, set
->vars
))
7821 /* If the variable contains the location part we have to
7822 make a copy of the variable. */
7823 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7826 if ((REG_P (node
->loc
) && REG_P (loc
)
7827 && REGNO (node
->loc
) == REGNO (loc
))
7828 || rtx_equal_p (node
->loc
, loc
))
7830 slot
= unshare_variable (set
, slot
, var
,
7831 VAR_INIT_STATUS_UNKNOWN
);
7838 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7839 cur_loc
= VAR_LOC_FROM (var
);
7841 cur_loc
= var
->var_part
[pos
].cur_loc
;
7843 /* Delete the location part. */
7845 nextp
= &var
->var_part
[pos
].loc_chain
;
7846 for (node
= *nextp
; node
; node
= next
)
7849 if ((REG_P (node
->loc
) && REG_P (loc
)
7850 && REGNO (node
->loc
) == REGNO (loc
))
7851 || rtx_equal_p (node
->loc
, loc
))
7853 /* If we have deleted the location which was last emitted
7854 we have to emit new location so add the variable to set
7855 of changed variables. */
7856 if (cur_loc
== node
->loc
)
7859 var
->var_part
[pos
].cur_loc
= NULL
;
7860 if (pos
== 0 && var
->onepart
&& VAR_LOC_1PAUX (var
))
7861 VAR_LOC_FROM (var
) = NULL
;
7863 pool_free (loc_chain_pool
, node
);
7868 nextp
= &node
->next
;
7871 if (var
->var_part
[pos
].loc_chain
== NULL
)
7875 while (pos
< var
->n_var_parts
)
7877 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
7882 variable_was_changed (var
, set
);
7888 /* Delete the part of variable's location from dataflow set SET. The
7889 variable part is specified by variable's declaration or value DV
7890 and offset OFFSET and the part's location by LOC. */
7893 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7894 HOST_WIDE_INT offset
)
7896 variable_def
**slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7900 delete_slot_part (set
, loc
, slot
, offset
);
7904 /* Structure for passing some other parameters to function
7905 vt_expand_loc_callback. */
7906 struct expand_loc_callback_data
7908 /* The variables and values active at this point. */
7909 variable_table_type vars
;
7911 /* Stack of values and debug_exprs under expansion, and their
7913 stack_vec
<rtx
, 4> expanding
;
7915 /* Stack of values and debug_exprs whose expansion hit recursion
7916 cycles. They will have VALUE_RECURSED_INTO marked when added to
7917 this list. This flag will be cleared if any of its dependencies
7918 resolves to a valid location. So, if the flag remains set at the
7919 end of the search, we know no valid location for this one can
7921 stack_vec
<rtx
, 4> pending
;
7923 /* The maximum depth among the sub-expressions under expansion.
7924 Zero indicates no expansion so far. */
7928 /* Allocate the one-part auxiliary data structure for VAR, with enough
7929 room for COUNT dependencies. */
7932 loc_exp_dep_alloc (variable var
, int count
)
7936 gcc_checking_assert (var
->onepart
);
7938 /* We can be called with COUNT == 0 to allocate the data structure
7939 without any dependencies, e.g. for the backlinks only. However,
7940 if we are specifying a COUNT, then the dependency list must have
7941 been emptied before. It would be possible to adjust pointers or
7942 force it empty here, but this is better done at an earlier point
7943 in the algorithm, so we instead leave an assertion to catch
7945 gcc_checking_assert (!count
7946 || VAR_LOC_DEP_VEC (var
) == NULL
7947 || VAR_LOC_DEP_VEC (var
)->is_empty ());
7949 if (VAR_LOC_1PAUX (var
) && VAR_LOC_DEP_VEC (var
)->space (count
))
7952 allocsize
= offsetof (struct onepart_aux
, deps
)
7953 + vec
<loc_exp_dep
, va_heap
, vl_embed
>::embedded_size (count
);
7955 if (VAR_LOC_1PAUX (var
))
7957 VAR_LOC_1PAUX (var
) = XRESIZEVAR (struct onepart_aux
,
7958 VAR_LOC_1PAUX (var
), allocsize
);
7959 /* If the reallocation moves the onepaux structure, the
7960 back-pointer to BACKLINKS in the first list member will still
7961 point to its old location. Adjust it. */
7962 if (VAR_LOC_DEP_LST (var
))
7963 VAR_LOC_DEP_LST (var
)->pprev
= VAR_LOC_DEP_LSTP (var
);
7967 VAR_LOC_1PAUX (var
) = XNEWVAR (struct onepart_aux
, allocsize
);
7968 *VAR_LOC_DEP_LSTP (var
) = NULL
;
7969 VAR_LOC_FROM (var
) = NULL
;
7970 VAR_LOC_DEPTH (var
).complexity
= 0;
7971 VAR_LOC_DEPTH (var
).entryvals
= 0;
7973 VAR_LOC_DEP_VEC (var
)->embedded_init (count
);
7976 /* Remove all entries from the vector of active dependencies of VAR,
7977 removing them from the back-links lists too. */
7980 loc_exp_dep_clear (variable var
)
7982 while (VAR_LOC_DEP_VEC (var
) && !VAR_LOC_DEP_VEC (var
)->is_empty ())
7984 loc_exp_dep
*led
= &VAR_LOC_DEP_VEC (var
)->last ();
7986 led
->next
->pprev
= led
->pprev
;
7988 *led
->pprev
= led
->next
;
7989 VAR_LOC_DEP_VEC (var
)->pop ();
7993 /* Insert an active dependency from VAR on X to the vector of
7994 dependencies, and add the corresponding back-link to X's list of
7995 back-links in VARS. */
7998 loc_exp_insert_dep (variable var
, rtx x
, variable_table_type vars
)
8004 dv
= dv_from_rtx (x
);
8006 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8007 an additional look up? */
8008 xvar
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8012 xvar
= variable_from_dropped (dv
, NO_INSERT
);
8013 gcc_checking_assert (xvar
);
8016 /* No point in adding the same backlink more than once. This may
8017 arise if say the same value appears in two complex expressions in
8018 the same loc_list, or even more than once in a single
8020 if (VAR_LOC_DEP_LST (xvar
) && VAR_LOC_DEP_LST (xvar
)->dv
== var
->dv
)
8023 if (var
->onepart
== NOT_ONEPART
)
8024 led
= (loc_exp_dep
*) pool_alloc (loc_exp_dep_pool
);
8028 memset (&empty
, 0, sizeof (empty
));
8029 VAR_LOC_DEP_VEC (var
)->quick_push (empty
);
8030 led
= &VAR_LOC_DEP_VEC (var
)->last ();
8035 loc_exp_dep_alloc (xvar
, 0);
8036 led
->pprev
= VAR_LOC_DEP_LSTP (xvar
);
8037 led
->next
= *led
->pprev
;
8039 led
->next
->pprev
= &led
->next
;
8043 /* Create active dependencies of VAR on COUNT values starting at
8044 VALUE, and corresponding back-links to the entries in VARS. Return
8045 true if we found any pending-recursion results. */
8048 loc_exp_dep_set (variable var
, rtx result
, rtx
*value
, int count
,
8049 variable_table_type vars
)
8051 bool pending_recursion
= false;
8053 gcc_checking_assert (VAR_LOC_DEP_VEC (var
) == NULL
8054 || VAR_LOC_DEP_VEC (var
)->is_empty ());
8056 /* Set up all dependencies from last_child (as set up at the end of
8057 the loop above) to the end. */
8058 loc_exp_dep_alloc (var
, count
);
8064 if (!pending_recursion
)
8065 pending_recursion
= !result
&& VALUE_RECURSED_INTO (x
);
8067 loc_exp_insert_dep (var
, x
, vars
);
8070 return pending_recursion
;
8073 /* Notify the back-links of IVAR that are pending recursion that we
8074 have found a non-NIL value for it, so they are cleared for another
8075 attempt to compute a current location. */
8078 notify_dependents_of_resolved_value (variable ivar
, variable_table_type vars
)
8080 loc_exp_dep
*led
, *next
;
8082 for (led
= VAR_LOC_DEP_LST (ivar
); led
; led
= next
)
8084 decl_or_value dv
= led
->dv
;
8089 if (dv_is_value_p (dv
))
8091 rtx value
= dv_as_value (dv
);
8093 /* If we have already resolved it, leave it alone. */
8094 if (!VALUE_RECURSED_INTO (value
))
8097 /* Check that VALUE_RECURSED_INTO, true from the test above,
8098 implies NO_LOC_P. */
8099 gcc_checking_assert (NO_LOC_P (value
));
8101 /* We won't notify variables that are being expanded,
8102 because their dependency list is cleared before
8104 NO_LOC_P (value
) = false;
8105 VALUE_RECURSED_INTO (value
) = false;
8107 gcc_checking_assert (dv_changed_p (dv
));
8111 gcc_checking_assert (dv_onepart_p (dv
) != NOT_ONEPART
);
8112 if (!dv_changed_p (dv
))
8116 var
= vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8119 var
= variable_from_dropped (dv
, NO_INSERT
);
8122 notify_dependents_of_resolved_value (var
, vars
);
8125 next
->pprev
= led
->pprev
;
8133 static rtx
vt_expand_loc_callback (rtx x
, bitmap regs
,
8134 int max_depth
, void *data
);
8136 /* Return the combined depth, when one sub-expression evaluated to
8137 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8139 static inline expand_depth
8140 update_depth (expand_depth saved_depth
, expand_depth best_depth
)
8142 /* If we didn't find anything, stick with what we had. */
8143 if (!best_depth
.complexity
)
8146 /* If we found hadn't found anything, use the depth of the current
8147 expression. Do NOT add one extra level, we want to compute the
8148 maximum depth among sub-expressions. We'll increment it later,
8150 if (!saved_depth
.complexity
)
8153 /* Combine the entryval count so that regardless of which one we
8154 return, the entryval count is accurate. */
8155 best_depth
.entryvals
= saved_depth
.entryvals
8156 = best_depth
.entryvals
+ saved_depth
.entryvals
;
8158 if (saved_depth
.complexity
< best_depth
.complexity
)
8164 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8165 DATA for cselib expand callback. If PENDRECP is given, indicate in
8166 it whether any sub-expression couldn't be fully evaluated because
8167 it is pending recursion resolution. */
8170 vt_expand_var_loc_chain (variable var
, bitmap regs
, void *data
, bool *pendrecp
)
8172 struct expand_loc_callback_data
*elcd
8173 = (struct expand_loc_callback_data
*) data
;
8174 location_chain loc
, next
;
8176 int first_child
, result_first_child
, last_child
;
8177 bool pending_recursion
;
8178 rtx loc_from
= NULL
;
8179 struct elt_loc_list
*cloc
= NULL
;
8180 expand_depth depth
= { 0, 0 }, saved_depth
= elcd
->depth
;
8181 int wanted_entryvals
, found_entryvals
= 0;
8183 /* Clear all backlinks pointing at this, so that we're not notified
8184 while we're active. */
8185 loc_exp_dep_clear (var
);
8188 if (var
->onepart
== ONEPART_VALUE
)
8190 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (var
->dv
));
8192 gcc_checking_assert (cselib_preserved_value_p (val
));
8197 first_child
= result_first_child
= last_child
8198 = elcd
->expanding
.length ();
8200 wanted_entryvals
= found_entryvals
;
8202 /* Attempt to expand each available location in turn. */
8203 for (next
= loc
= var
->n_var_parts
? var
->var_part
[0].loc_chain
: NULL
;
8204 loc
|| cloc
; loc
= next
)
8206 result_first_child
= last_child
;
8210 loc_from
= cloc
->loc
;
8213 if (unsuitable_loc (loc_from
))
8218 loc_from
= loc
->loc
;
8222 gcc_checking_assert (!unsuitable_loc (loc_from
));
8224 elcd
->depth
.complexity
= elcd
->depth
.entryvals
= 0;
8225 result
= cselib_expand_value_rtx_cb (loc_from
, regs
, EXPR_DEPTH
,
8226 vt_expand_loc_callback
, data
);
8227 last_child
= elcd
->expanding
.length ();
8231 depth
= elcd
->depth
;
8233 gcc_checking_assert (depth
.complexity
8234 || result_first_child
== last_child
);
8236 if (last_child
- result_first_child
!= 1)
8238 if (!depth
.complexity
&& GET_CODE (result
) == ENTRY_VALUE
)
8243 if (depth
.complexity
<= EXPR_USE_DEPTH
)
8245 if (depth
.entryvals
<= wanted_entryvals
)
8247 else if (!found_entryvals
|| depth
.entryvals
< found_entryvals
)
8248 found_entryvals
= depth
.entryvals
;
8254 /* Set it up in case we leave the loop. */
8255 depth
.complexity
= depth
.entryvals
= 0;
8257 result_first_child
= first_child
;
8260 if (!loc_from
&& wanted_entryvals
< found_entryvals
)
8262 /* We found entries with ENTRY_VALUEs and skipped them. Since
8263 we could not find any expansions without ENTRY_VALUEs, but we
8264 found at least one with them, go back and get an entry with
8265 the minimum number ENTRY_VALUE count that we found. We could
8266 avoid looping, but since each sub-loc is already resolved,
8267 the re-expansion should be trivial. ??? Should we record all
8268 attempted locs as dependencies, so that we retry the
8269 expansion should any of them change, in the hope it can give
8270 us a new entry without an ENTRY_VALUE? */
8271 elcd
->expanding
.truncate (first_child
);
8275 /* Register all encountered dependencies as active. */
8276 pending_recursion
= loc_exp_dep_set
8277 (var
, result
, elcd
->expanding
.address () + result_first_child
,
8278 last_child
- result_first_child
, elcd
->vars
);
8280 elcd
->expanding
.truncate (first_child
);
8282 /* Record where the expansion came from. */
8283 gcc_checking_assert (!result
|| !pending_recursion
);
8284 VAR_LOC_FROM (var
) = loc_from
;
8285 VAR_LOC_DEPTH (var
) = depth
;
8287 gcc_checking_assert (!depth
.complexity
== !result
);
8289 elcd
->depth
= update_depth (saved_depth
, depth
);
8291 /* Indicate whether any of the dependencies are pending recursion
8294 *pendrecp
= pending_recursion
;
8296 if (!pendrecp
|| !pending_recursion
)
8297 var
->var_part
[0].cur_loc
= result
;
8302 /* Callback for cselib_expand_value, that looks for expressions
8303 holding the value in the var-tracking hash tables. Return X for
8304 standard processing, anything else is to be used as-is. */
8307 vt_expand_loc_callback (rtx x
, bitmap regs
,
8308 int max_depth ATTRIBUTE_UNUSED
,
8311 struct expand_loc_callback_data
*elcd
8312 = (struct expand_loc_callback_data
*) data
;
8316 bool pending_recursion
= false;
8317 bool from_empty
= false;
8319 switch (GET_CODE (x
))
8322 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
8324 vt_expand_loc_callback
, data
);
8329 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
8330 GET_MODE (SUBREG_REG (x
)),
8333 /* Invalid SUBREGs are ok in debug info. ??? We could try
8334 alternate expansions for the VALUE as well. */
8336 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
8342 dv
= dv_from_rtx (x
);
8349 elcd
->expanding
.safe_push (x
);
8351 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8352 gcc_checking_assert (!VALUE_RECURSED_INTO (x
) || NO_LOC_P (x
));
8356 gcc_checking_assert (VALUE_RECURSED_INTO (x
) || !dv_changed_p (dv
));
8360 var
= elcd
->vars
.find_with_hash (dv
, dv_htab_hash (dv
));
8365 var
= variable_from_dropped (dv
, INSERT
);
8368 gcc_checking_assert (var
);
8370 if (!dv_changed_p (dv
))
8372 gcc_checking_assert (!NO_LOC_P (x
));
8373 gcc_checking_assert (var
->var_part
[0].cur_loc
);
8374 gcc_checking_assert (VAR_LOC_1PAUX (var
));
8375 gcc_checking_assert (VAR_LOC_1PAUX (var
)->depth
.complexity
);
8377 elcd
->depth
= update_depth (elcd
->depth
, VAR_LOC_1PAUX (var
)->depth
);
8379 return var
->var_part
[0].cur_loc
;
8382 VALUE_RECURSED_INTO (x
) = true;
8383 /* This is tentative, but it makes some tests simpler. */
8384 NO_LOC_P (x
) = true;
8386 gcc_checking_assert (var
->n_var_parts
== 1 || from_empty
);
8388 result
= vt_expand_var_loc_chain (var
, regs
, data
, &pending_recursion
);
8390 if (pending_recursion
)
8392 gcc_checking_assert (!result
);
8393 elcd
->pending
.safe_push (x
);
8397 NO_LOC_P (x
) = !result
;
8398 VALUE_RECURSED_INTO (x
) = false;
8399 set_dv_changed (dv
, false);
8402 notify_dependents_of_resolved_value (var
, elcd
->vars
);
8408 /* While expanding variables, we may encounter recursion cycles
8409 because of mutual (possibly indirect) dependencies between two
8410 particular variables (or values), say A and B. If we're trying to
8411 expand A when we get to B, which in turn attempts to expand A, if
8412 we can't find any other expansion for B, we'll add B to this
8413 pending-recursion stack, and tentatively return NULL for its
8414 location. This tentative value will be used for any other
8415 occurrences of B, unless A gets some other location, in which case
8416 it will notify B that it is worth another try at computing a
8417 location for it, and it will use the location computed for A then.
8418 At the end of the expansion, the tentative NULL locations become
8419 final for all members of PENDING that didn't get a notification.
8420 This function performs this finalization of NULL locations. */
8423 resolve_expansions_pending_recursion (vec
<rtx
, va_heap
> *pending
)
8425 while (!pending
->is_empty ())
8427 rtx x
= pending
->pop ();
8430 if (!VALUE_RECURSED_INTO (x
))
8433 gcc_checking_assert (NO_LOC_P (x
));
8434 VALUE_RECURSED_INTO (x
) = false;
8435 dv
= dv_from_rtx (x
);
8436 gcc_checking_assert (dv_changed_p (dv
));
8437 set_dv_changed (dv
, false);
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack).  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();					\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)
8463 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8464 equivalences in VARS, updating their CUR_LOCs in the process. */
8467 vt_expand_loc (rtx loc
, variable_table_type vars
)
8469 struct expand_loc_callback_data data
;
8472 if (!MAY_HAVE_DEBUG_INSNS
)
8475 INIT_ELCD (data
, vars
);
8477 result
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
8478 vt_expand_loc_callback
, &data
);
8480 FINI_ELCD (data
, result
);
8485 /* Expand the one-part VARiable to a location, using the equivalences
8486 in VARS, updating their CUR_LOCs in the process. */
8489 vt_expand_1pvar (variable var
, variable_table_type vars
)
8491 struct expand_loc_callback_data data
;
8494 gcc_checking_assert (var
->onepart
&& var
->n_var_parts
== 1);
8496 if (!dv_changed_p (var
->dv
))
8497 return var
->var_part
[0].cur_loc
;
8499 INIT_ELCD (data
, vars
);
8501 loc
= vt_expand_var_loc_chain (var
, scratch_regs
, &data
, NULL
);
8503 gcc_checking_assert (data
.expanding
.is_empty ());
8505 FINI_ELCD (data
, loc
);
8510 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8511 additional parameters: WHERE specifies whether the note shall be emitted
8512 before or after instruction INSN. */
8515 emit_note_insn_var_location (variable_def
**varp
, emit_note_data
*data
)
8517 variable var
= *varp
;
8518 rtx insn
= data
->insn
;
8519 enum emit_note_where where
= data
->where
;
8520 variable_table_type vars
= data
->vars
;
8522 int i
, j
, n_var_parts
;
8524 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
8525 HOST_WIDE_INT last_limit
;
8526 tree type_size_unit
;
8527 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
8528 rtx loc
[MAX_VAR_PARTS
];
8532 gcc_checking_assert (var
->onepart
== NOT_ONEPART
8533 || var
->onepart
== ONEPART_VDECL
);
8535 decl
= dv_as_decl (var
->dv
);
8541 for (i
= 0; i
< var
->n_var_parts
; i
++)
8542 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
8543 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
8544 for (i
= 0; i
< var
->n_var_parts
; i
++)
8546 enum machine_mode mode
, wider_mode
;
8548 HOST_WIDE_INT offset
;
8550 if (i
== 0 && var
->onepart
)
8552 gcc_checking_assert (var
->n_var_parts
== 1);
8554 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8555 loc2
= vt_expand_1pvar (var
, vars
);
8559 if (last_limit
< VAR_PART_OFFSET (var
, i
))
8564 else if (last_limit
> VAR_PART_OFFSET (var
, i
))
8566 offset
= VAR_PART_OFFSET (var
, i
);
8567 loc2
= var
->var_part
[i
].cur_loc
;
8568 if (loc2
&& GET_CODE (loc2
) == MEM
8569 && GET_CODE (XEXP (loc2
, 0)) == VALUE
)
8571 rtx depval
= XEXP (loc2
, 0);
8573 loc2
= vt_expand_loc (loc2
, vars
);
8576 loc_exp_insert_dep (var
, depval
, vars
);
8583 gcc_checking_assert (GET_CODE (loc2
) != VALUE
);
8584 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
8585 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
8587 initialized
= lc
->init
;
8593 offsets
[n_var_parts
] = offset
;
8599 loc
[n_var_parts
] = loc2
;
8600 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
8601 if (mode
== VOIDmode
&& var
->onepart
)
8602 mode
= DECL_MODE (decl
);
8603 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8605 /* Attempt to merge adjacent registers or memory. */
8606 wider_mode
= GET_MODE_WIDER_MODE (mode
);
8607 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
8608 if (last_limit
<= VAR_PART_OFFSET (var
, j
))
8610 if (j
< var
->n_var_parts
8611 && wider_mode
!= VOIDmode
8612 && var
->var_part
[j
].cur_loc
8613 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
8614 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
8615 && last_limit
== (var
->onepart
? 0 : VAR_PART_OFFSET (var
, j
))
8616 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
8617 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
8621 if (REG_P (loc
[n_var_parts
])
8622 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
8623 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
8624 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
8627 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
8628 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
8630 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
8631 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
8634 if (!REG_P (new_loc
)
8635 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
8638 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
8641 else if (MEM_P (loc
[n_var_parts
])
8642 && GET_CODE (XEXP (loc2
, 0)) == PLUS
8643 && REG_P (XEXP (XEXP (loc2
, 0), 0))
8644 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
8646 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
8647 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
8648 XEXP (XEXP (loc2
, 0), 0))
8649 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
8650 == GET_MODE_SIZE (mode
))
8651 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
8652 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8653 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
8654 XEXP (XEXP (loc2
, 0), 0))
8655 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
8656 + GET_MODE_SIZE (mode
)
8657 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
8658 new_loc
= adjust_address_nv (loc
[n_var_parts
],
8664 loc
[n_var_parts
] = new_loc
;
8666 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
8672 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8673 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
8676 if (! flag_var_tracking_uninit
)
8677 initialized
= VAR_INIT_STATUS_INITIALIZED
;
8681 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
,
8683 else if (n_var_parts
== 1)
8687 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
8688 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
8692 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
,
8695 else if (n_var_parts
)
8699 for (i
= 0; i
< n_var_parts
; i
++)
8701 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
8703 parallel
= gen_rtx_PARALLEL (VOIDmode
,
8704 gen_rtvec_v (n_var_parts
, loc
));
8705 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
8706 parallel
, (int) initialized
);
8709 if (where
!= EMIT_NOTE_BEFORE_INSN
)
8711 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8712 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
8713 NOTE_DURING_CALL_P (note
) = true;
8717 /* Make sure that the call related notes come first. */
8718 while (NEXT_INSN (insn
)
8720 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8721 && NOTE_DURING_CALL_P (insn
))
8722 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8723 insn
= NEXT_INSN (insn
);
8725 && ((NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
8726 && NOTE_DURING_CALL_P (insn
))
8727 || NOTE_KIND (insn
) == NOTE_INSN_CALL_ARG_LOCATION
))
8728 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
8730 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
8732 NOTE_VAR_LOCATION (note
) = note_vl
;
8734 set_dv_changed (var
->dv
, false);
8735 gcc_assert (var
->in_changed_variables
);
8736 var
->in_changed_variables
= false;
8737 changed_variables
.clear_slot (varp
);
8739 /* Continue traversing the hash table. */
8743 /* While traversing changed_variables, push onto DATA (a stack of RTX
8744 values) entries that aren't user variables. */
8747 var_track_values_to_stack (variable_def
**slot
,
8748 vec
<rtx
, va_heap
> *changed_values_stack
)
8750 variable var
= *slot
;
8752 if (var
->onepart
== ONEPART_VALUE
)
8753 changed_values_stack
->safe_push (dv_as_value (var
->dv
));
8754 else if (var
->onepart
== ONEPART_DEXPR
)
8755 changed_values_stack
->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var
->dv
)));
8760 /* Remove from changed_variables the entry whose DV corresponds to
8761 value or debug_expr VAL. */
8763 remove_value_from_changed_variables (rtx val
)
8765 decl_or_value dv
= dv_from_rtx (val
);
8766 variable_def
**slot
;
8769 slot
= changed_variables
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8772 var
->in_changed_variables
= false;
8773 changed_variables
.clear_slot (slot
);
8776 /* If VAL (a value or debug_expr) has backlinks to variables actively
8777 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8778 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8779 have dependencies of their own to notify. */
8782 notify_dependents_of_changed_value (rtx val
, variable_table_type htab
,
8783 vec
<rtx
, va_heap
> *changed_values_stack
)
8785 variable_def
**slot
;
8788 decl_or_value dv
= dv_from_rtx (val
);
8790 slot
= changed_variables
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8793 slot
= htab
.find_slot_with_hash (dv
, dv_htab_hash (dv
), NO_INSERT
);
8795 slot
= dropped_values
.find_slot_with_hash (dv
, dv_htab_hash (dv
),
8799 while ((led
= VAR_LOC_DEP_LST (var
)))
8801 decl_or_value ldv
= led
->dv
;
8804 /* Deactivate and remove the backlink, as it was “used up”. It
8805 makes no sense to attempt to notify the same entity again:
8806 either it will be recomputed and re-register an active
8807 dependency, or it will still have the changed mark. */
8809 led
->next
->pprev
= led
->pprev
;
8811 *led
->pprev
= led
->next
;
8815 if (dv_changed_p (ldv
))
8818 switch (dv_onepart_p (ldv
))
8822 set_dv_changed (ldv
, true);
8823 changed_values_stack
->safe_push (dv_as_rtx (ldv
));
8827 ivar
= htab
.find_with_hash (ldv
, dv_htab_hash (ldv
));
8828 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar
));
8829 variable_was_changed (ivar
, NULL
);
8833 pool_free (loc_exp_dep_pool
, led
);
8834 ivar
= htab
.find_with_hash (ldv
, dv_htab_hash (ldv
));
8837 int i
= ivar
->n_var_parts
;
8840 rtx loc
= ivar
->var_part
[i
].cur_loc
;
8842 if (loc
&& GET_CODE (loc
) == MEM
8843 && XEXP (loc
, 0) == val
)
8845 variable_was_changed (ivar
, NULL
);
8858 /* Take out of changed_variables any entries that don't refer to use
8859 variables. Back-propagate change notifications from values and
8860 debug_exprs to their active dependencies in HTAB or in
8861 CHANGED_VARIABLES. */
8864 process_changed_values (variable_table_type htab
)
8868 stack_vec
<rtx
, 20> changed_values_stack
;
8870 /* Move values from changed_variables to changed_values_stack. */
8872 .traverse
<vec
<rtx
, va_heap
>*, var_track_values_to_stack
>
8873 (&changed_values_stack
);
8875 /* Back-propagate change notifications in values while popping
8876 them from the stack. */
8877 for (n
= i
= changed_values_stack
.length ();
8878 i
> 0; i
= changed_values_stack
.length ())
8880 val
= changed_values_stack
.pop ();
8881 notify_dependents_of_changed_value (val
, htab
, &changed_values_stack
);
8883 /* This condition will hold when visiting each of the entries
8884 originally in changed_variables. We can't remove them
8885 earlier because this could drop the backlinks before we got a
8886 chance to use them. */
8889 remove_value_from_changed_variables (val
);
8895 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8896 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8897 the notes shall be emitted before of after instruction INSN. */
8900 emit_notes_for_changes (rtx insn
, enum emit_note_where where
,
8903 emit_note_data data
;
8904 variable_table_type htab
= shared_hash_htab (vars
);
8906 if (!changed_variables
.elements ())
8909 if (MAY_HAVE_DEBUG_INSNS
)
8910 process_changed_values (htab
);
8917 .traverse
<emit_note_data
*, emit_note_insn_var_location
> (&data
);
8920 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8921 same variable in hash table DATA or is not there at all. */
8924 emit_notes_for_differences_1 (variable_def
**slot
, variable_table_type new_vars
)
8926 variable old_var
, new_var
;
8929 new_var
= new_vars
.find_with_hash (old_var
->dv
, dv_htab_hash (old_var
->dv
));
8933 /* Variable has disappeared. */
8934 variable empty_var
= NULL
;
8936 if (old_var
->onepart
== ONEPART_VALUE
8937 || old_var
->onepart
== ONEPART_DEXPR
)
8939 empty_var
= variable_from_dropped (old_var
->dv
, NO_INSERT
);
8942 gcc_checking_assert (!empty_var
->in_changed_variables
);
8943 if (!VAR_LOC_1PAUX (old_var
))
8945 VAR_LOC_1PAUX (old_var
) = VAR_LOC_1PAUX (empty_var
);
8946 VAR_LOC_1PAUX (empty_var
) = NULL
;
8949 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var
));
8955 empty_var
= (variable
) pool_alloc (onepart_pool (old_var
->onepart
));
8956 empty_var
->dv
= old_var
->dv
;
8957 empty_var
->refcount
= 0;
8958 empty_var
->n_var_parts
= 0;
8959 empty_var
->onepart
= old_var
->onepart
;
8960 empty_var
->in_changed_variables
= false;
8963 if (empty_var
->onepart
)
8965 /* Propagate the auxiliary data to (ultimately)
8966 changed_variables. */
8967 empty_var
->var_part
[0].loc_chain
= NULL
;
8968 empty_var
->var_part
[0].cur_loc
= NULL
;
8969 VAR_LOC_1PAUX (empty_var
) = VAR_LOC_1PAUX (old_var
);
8970 VAR_LOC_1PAUX (old_var
) = NULL
;
8972 variable_was_changed (empty_var
, NULL
);
8973 /* Continue traversing the hash table. */
8976 /* Update cur_loc and one-part auxiliary data, before new_var goes
8977 through variable_was_changed. */
8978 if (old_var
!= new_var
&& new_var
->onepart
)
8980 gcc_checking_assert (VAR_LOC_1PAUX (new_var
) == NULL
);
8981 VAR_LOC_1PAUX (new_var
) = VAR_LOC_1PAUX (old_var
);
8982 VAR_LOC_1PAUX (old_var
) = NULL
;
8983 new_var
->var_part
[0].cur_loc
= old_var
->var_part
[0].cur_loc
;
8985 if (variable_different_p (old_var
, new_var
))
8986 variable_was_changed (new_var
, NULL
);
8988 /* Continue traversing the hash table. */
8992 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8996 emit_notes_for_differences_2 (variable_def
**slot
, variable_table_type old_vars
)
8998 variable old_var
, new_var
;
9001 old_var
= old_vars
.find_with_hash (new_var
->dv
, dv_htab_hash (new_var
->dv
));
9005 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
9006 new_var
->var_part
[i
].cur_loc
= NULL
;
9007 variable_was_changed (new_var
, NULL
);
9010 /* Continue traversing the hash table. */
9014 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9018 emit_notes_for_differences (rtx insn
, dataflow_set
*old_set
,
9019 dataflow_set
*new_set
)
9021 shared_hash_htab (old_set
->vars
)
9022 .traverse
<variable_table_type
, emit_notes_for_differences_1
>
9023 (shared_hash_htab (new_set
->vars
));
9024 shared_hash_htab (new_set
->vars
)
9025 .traverse
<variable_table_type
, emit_notes_for_differences_2
>
9026 (shared_hash_htab (old_set
->vars
));
9027 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
9030 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9033 next_non_note_insn_var_location (rtx insn
)
9037 insn
= NEXT_INSN (insn
);
9040 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
9047 /* Emit the notes for changes of location parts in the basic block BB. */
9050 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
9053 micro_operation
*mo
;
9055 dataflow_set_clear (set
);
9056 dataflow_set_copy (set
, &VTI (bb
)->in
);
9058 FOR_EACH_VEC_ELT (VTI (bb
)->mos
, i
, mo
)
9060 rtx insn
= mo
->insn
;
9061 rtx next_insn
= next_non_note_insn_var_location (insn
);
9066 dataflow_set_clear_at_call (set
);
9067 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
9069 rtx arguments
= mo
->u
.loc
, *p
= &arguments
, note
;
9072 XEXP (XEXP (*p
, 0), 1)
9073 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
9074 shared_hash_htab (set
->vars
));
9075 /* If expansion is successful, keep it in the list. */
9076 if (XEXP (XEXP (*p
, 0), 1))
9078 /* Otherwise, if the following item is data_value for it,
9080 else if (XEXP (*p
, 1)
9081 && REG_P (XEXP (XEXP (*p
, 0), 0))
9082 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
9083 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
9085 && REGNO (XEXP (XEXP (*p
, 0), 0))
9086 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
9088 *p
= XEXP (XEXP (*p
, 1), 1);
9089 /* Just drop this item. */
9093 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
9094 NOTE_VAR_LOCATION (note
) = arguments
;
9100 rtx loc
= mo
->u
.loc
;
9103 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9105 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
9107 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9113 rtx loc
= mo
->u
.loc
;
9117 if (GET_CODE (loc
) == CONCAT
)
9119 val
= XEXP (loc
, 0);
9120 vloc
= XEXP (loc
, 1);
9128 var
= PAT_VAR_LOCATION_DECL (vloc
);
9130 clobber_variable_part (set
, NULL_RTX
,
9131 dv_from_decl (var
), 0, NULL_RTX
);
9134 if (VAL_NEEDS_RESOLUTION (loc
))
9135 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
9136 set_variable_part (set
, val
, dv_from_decl (var
), 0,
9137 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9140 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
9141 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
9142 dv_from_decl (var
), 0,
9143 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
9146 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9152 rtx loc
= mo
->u
.loc
;
9153 rtx val
, vloc
, uloc
;
9155 vloc
= uloc
= XEXP (loc
, 1);
9156 val
= XEXP (loc
, 0);
9158 if (GET_CODE (val
) == CONCAT
)
9160 uloc
= XEXP (val
, 1);
9161 val
= XEXP (val
, 0);
9164 if (VAL_NEEDS_RESOLUTION (loc
))
9165 val_resolve (set
, val
, vloc
, insn
);
9167 val_store (set
, val
, uloc
, insn
, false);
9169 if (VAL_HOLDS_TRACK_EXPR (loc
))
9171 if (GET_CODE (uloc
) == REG
)
9172 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9174 else if (GET_CODE (uloc
) == MEM
)
9175 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
9179 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
9185 rtx loc
= mo
->u
.loc
;
9186 rtx val
, vloc
, uloc
;
9190 uloc
= XEXP (vloc
, 1);
9191 val
= XEXP (vloc
, 0);
9194 if (GET_CODE (uloc
) == SET
)
9196 dstv
= SET_DEST (uloc
);
9197 srcv
= SET_SRC (uloc
);
9205 if (GET_CODE (val
) == CONCAT
)
9207 dstv
= vloc
= XEXP (val
, 1);
9208 val
= XEXP (val
, 0);
9211 if (GET_CODE (vloc
) == SET
)
9213 srcv
= SET_SRC (vloc
);
9215 gcc_assert (val
!= srcv
);
9216 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
9218 dstv
= vloc
= SET_DEST (vloc
);
9220 if (VAL_NEEDS_RESOLUTION (loc
))
9221 val_resolve (set
, val
, srcv
, insn
);
9223 else if (VAL_NEEDS_RESOLUTION (loc
))
9225 gcc_assert (GET_CODE (uloc
) == SET
9226 && GET_CODE (SET_SRC (uloc
)) == REG
);
9227 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
9230 if (VAL_HOLDS_TRACK_EXPR (loc
))
9232 if (VAL_EXPR_IS_CLOBBERED (loc
))
9235 var_reg_delete (set
, uloc
, true);
9236 else if (MEM_P (uloc
))
9238 gcc_assert (MEM_P (dstv
));
9239 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (uloc
));
9240 var_mem_delete (set
, dstv
, true);
9245 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
9246 rtx src
= NULL
, dst
= uloc
;
9247 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
9249 if (GET_CODE (uloc
) == SET
)
9251 src
= SET_SRC (uloc
);
9252 dst
= SET_DEST (uloc
);
9257 status
= find_src_status (set
, src
);
9259 src
= find_src_set_src (set
, src
);
9263 var_reg_delete_and_set (set
, dst
, !copied_p
,
9265 else if (MEM_P (dst
))
9267 gcc_assert (MEM_P (dstv
));
9268 gcc_assert (MEM_ATTRS (dstv
) == MEM_ATTRS (dst
));
9269 var_mem_delete_and_set (set
, dstv
, !copied_p
,
9274 else if (REG_P (uloc
))
9275 var_regno_delete (set
, REGNO (uloc
));
9276 else if (MEM_P (uloc
))
9278 gcc_checking_assert (GET_CODE (vloc
) == MEM
);
9279 gcc_checking_assert (vloc
== dstv
);
9281 clobber_overlapping_mems (set
, vloc
);
9284 val_store (set
, val
, dstv
, insn
, true);
9286 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9293 rtx loc
= mo
->u
.loc
;
9296 if (GET_CODE (loc
) == SET
)
9298 set_src
= SET_SRC (loc
);
9299 loc
= SET_DEST (loc
);
9303 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9306 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
9309 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9316 rtx loc
= mo
->u
.loc
;
9317 enum var_init_status src_status
;
9320 if (GET_CODE (loc
) == SET
)
9322 set_src
= SET_SRC (loc
);
9323 loc
= SET_DEST (loc
);
9326 src_status
= find_src_status (set
, set_src
);
9327 set_src
= find_src_set_src (set
, set_src
);
9330 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
9332 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
9334 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9341 rtx loc
= mo
->u
.loc
;
9344 var_reg_delete (set
, loc
, false);
9346 var_mem_delete (set
, loc
, false);
9348 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
9354 rtx loc
= mo
->u
.loc
;
9357 var_reg_delete (set
, loc
, true);
9359 var_mem_delete (set
, loc
, true);
9361 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
9367 set
->stack_adjust
+= mo
->u
.adjust
;
9373 /* Emit notes for the whole function. */
9376 vt_emit_notes (void)
9381 gcc_assert (!changed_variables
.elements ());
9383 /* Free memory occupied by the out hash tables, as they aren't used
9386 dataflow_set_clear (&VTI (bb
)->out
);
9388 /* Enable emitting notes by functions (mainly by set_variable_part and
9389 delete_variable_part). */
9392 if (MAY_HAVE_DEBUG_INSNS
)
9394 dropped_values
.create (cselib_get_next_uid () * 2);
9395 loc_exp_dep_pool
= create_alloc_pool ("loc_exp_dep pool",
9396 sizeof (loc_exp_dep
), 64);
9399 dataflow_set_init (&cur
);
9403 /* Emit the notes for changes of variable locations between two
9404 subsequent basic blocks. */
9405 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
9407 if (MAY_HAVE_DEBUG_INSNS
)
9408 local_get_addr_cache
= pointer_map_create ();
9410 /* Emit the notes for the changes in the basic block itself. */
9411 emit_notes_in_bb (bb
, &cur
);
9413 if (MAY_HAVE_DEBUG_INSNS
)
9414 pointer_map_destroy (local_get_addr_cache
);
9415 local_get_addr_cache
= NULL
;
9417 /* Free memory occupied by the in hash table, we won't need it
9419 dataflow_set_clear (&VTI (bb
)->in
);
9421 #ifdef ENABLE_CHECKING
9422 shared_hash_htab (cur
.vars
)
9423 .traverse
<variable_table_type
, emit_notes_for_differences_1
>
9424 (shared_hash_htab (empty_shared_hash
));
9426 dataflow_set_destroy (&cur
);
9428 if (MAY_HAVE_DEBUG_INSNS
)
9429 dropped_values
.dispose ();
9434 /* If there is a declaration and offset associated with register/memory RTL
9435 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
9438 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
9442 if (REG_ATTRS (rtl
))
9444 *declp
= REG_EXPR (rtl
);
9445 *offsetp
= REG_OFFSET (rtl
);
9449 else if (MEM_P (rtl
))
9451 if (MEM_ATTRS (rtl
))
9453 *declp
= MEM_EXPR (rtl
);
9454 *offsetp
= INT_MEM_OFFSET (rtl
);
9461 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9465 record_entry_value (cselib_val
*val
, rtx rtl
)
9467 rtx ev
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
9469 ENTRY_VALUE_EXP (ev
) = rtl
;
9471 cselib_add_permanent_equiv (val
, ev
, get_insns ());
9474 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9477 vt_add_function_parameter (tree parm
)
9479 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
9480 rtx incoming
= DECL_INCOMING_RTL (parm
);
9482 enum machine_mode mode
;
9483 HOST_WIDE_INT offset
;
9487 if (TREE_CODE (parm
) != PARM_DECL
)
9490 if (!decl_rtl
|| !incoming
)
9493 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
9496 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9497 rewrite the incoming location of parameters passed on the stack
9498 into MEMs based on the argument pointer, so that incoming doesn't
9499 depend on a pseudo. */
9500 if (MEM_P (incoming
)
9501 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
9502 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
9503 && XEXP (XEXP (incoming
, 0), 0)
9504 == crtl
->args
.internal_arg_pointer
9505 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
9507 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
9508 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
9509 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
9511 = replace_equiv_address_nv (incoming
,
9512 plus_constant (Pmode
,
9513 arg_pointer_rtx
, off
));
9516 #ifdef HAVE_window_save
9517 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9518 If the target machine has an explicit window save instruction, the
9519 actual entry value is the corresponding OUTGOING_REGNO instead. */
9520 if (HAVE_window_save
&& !crtl
->uses_only_leaf_regs
)
9522 if (REG_P (incoming
)
9523 && HARD_REGISTER_P (incoming
)
9524 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
9527 p
.incoming
= incoming
;
9529 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
9530 OUTGOING_REGNO (REGNO (incoming
)), 0);
9531 p
.outgoing
= incoming
;
9532 vec_safe_push (windowed_parm_regs
, p
);
9534 else if (MEM_P (incoming
)
9535 && REG_P (XEXP (incoming
, 0))
9536 && HARD_REGISTER_P (XEXP (incoming
, 0)))
9538 rtx reg
= XEXP (incoming
, 0);
9539 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
9543 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
9545 vec_safe_push (windowed_parm_regs
, p
);
9546 incoming
= replace_equiv_address_nv (incoming
, reg
);
9552 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
9554 if (MEM_P (incoming
))
9556 /* This means argument is passed by invisible reference. */
9562 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
9564 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
9565 GET_MODE (decl_rtl
));
9574 /* If that DECL_RTL wasn't a pseudo that got spilled to
9575 memory, bail out. Otherwise, the spill slot sharing code
9576 will force the memory to reference spill_slot_decl (%sfp),
9577 so we don't match above. That's ok, the pseudo must have
9578 referenced the entire parameter, so just reset OFFSET. */
9579 if (decl
!= get_spill_slot_decl (false))
9584 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
9587 out
= &VTI (ENTRY_BLOCK_PTR
)->out
;
9589 dv
= dv_from_decl (parm
);
9591 if (target_for_debug_bind (parm
)
9592 /* We can't deal with these right now, because this kind of
9593 variable is single-part. ??? We could handle parallels
9594 that describe multiple locations for the same single
9595 value, but ATM we don't. */
9596 && GET_CODE (incoming
) != PARALLEL
)
9601 /* ??? We shouldn't ever hit this, but it may happen because
9602 arguments passed by invisible reference aren't dealt with
9603 above: incoming-rtl will have Pmode rather than the
9604 expected mode for the type. */
9608 lowpart
= var_lowpart (mode
, incoming
);
9612 val
= cselib_lookup_from_insn (lowpart
, mode
, true,
9613 VOIDmode
, get_insns ());
9615 /* ??? Float-typed values in memory are not handled by
9619 preserve_value (val
);
9620 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
9621 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9622 dv
= dv_from_value (val
->val_rtx
);
9625 if (MEM_P (incoming
))
9627 val
= cselib_lookup_from_insn (XEXP (incoming
, 0), mode
, true,
9628 VOIDmode
, get_insns ());
9631 preserve_value (val
);
9632 incoming
= replace_equiv_address_nv (incoming
, val
->val_rtx
);
9637 if (REG_P (incoming
))
9639 incoming
= var_lowpart (mode
, incoming
);
9640 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
9641 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
9643 set_variable_part (out
, incoming
, dv
, offset
,
9644 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9645 if (dv_is_value_p (dv
))
9647 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv
)), incoming
);
9648 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
9649 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
9651 enum machine_mode indmode
9652 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
9653 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
9654 cselib_val
*val
= cselib_lookup_from_insn (mem
, indmode
, true,
9659 preserve_value (val
);
9660 record_entry_value (val
, mem
);
9661 set_variable_part (out
, mem
, dv_from_value (val
->val_rtx
), 0,
9662 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9667 else if (MEM_P (incoming
))
9669 incoming
= var_lowpart (mode
, incoming
);
9670 set_variable_part (out
, incoming
, dv
, offset
,
9671 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
9675 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9678 vt_add_function_parameters (void)
9682 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9683 parm
; parm
= DECL_CHAIN (parm
))
9684 vt_add_function_parameter (parm
);
9686 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
9688 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
9690 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
9691 vexpr
= TREE_OPERAND (vexpr
, 0);
9693 if (TREE_CODE (vexpr
) == PARM_DECL
9694 && DECL_ARTIFICIAL (vexpr
)
9695 && !DECL_IGNORED_P (vexpr
)
9696 && DECL_NAMELESS (vexpr
))
9697 vt_add_function_parameter (vexpr
);
9701 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9702 ensure it isn't flushed during cselib_reset_table.
9703 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9704 has been eliminated. */
9707 vt_init_cfa_base (void)
9711 #ifdef FRAME_POINTER_CFA_OFFSET
9712 cfa_base_rtx
= frame_pointer_rtx
;
9713 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9715 cfa_base_rtx
= arg_pointer_rtx
;
9716 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
9718 if (cfa_base_rtx
== hard_frame_pointer_rtx
9719 || !fixed_regs
[REGNO (cfa_base_rtx
)])
9721 cfa_base_rtx
= NULL_RTX
;
9724 if (!MAY_HAVE_DEBUG_INSNS
)
9727 /* Tell alias analysis that cfa_base_rtx should share
9728 find_base_term value with stack pointer or hard frame pointer. */
9729 if (!frame_pointer_needed
)
9730 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
9731 else if (!crtl
->stack_realign_tried
)
9732 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
9734 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
9735 VOIDmode
, get_insns ());
9736 preserve_value (val
);
9737 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
/* NOTE(review): this region was damaged during extraction -- each original
   source line is split across several physical lines and a number of
   statements (braces, variable declarations, loop headers such as
   FOR_EACH_BB, mo.u.adjust assignments, returns) are missing entirely.
   The annotations below describe only what the surviving fragments show;
   restore the full text from upstream var-tracking.c before compiling.  */
9740 /* Allocate and initialize the data structures for variable tracking
9741 and parse the RTL to get the micro operations. */
9744 vt_initialize (void)
9747 HOST_WIDE_INT fp_cfa_offset
= -1;
/* Allocate per-basic-block aux data and the allocation pools used by
   the pass (attrs, variables, location chains, shared hash tables).  */
9749 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
9751 attrs_pool
= create_alloc_pool ("attrs_def pool",
9752 sizeof (struct attrs_def
), 1024);
9753 var_pool
= create_alloc_pool ("variable_def pool",
9754 sizeof (struct variable_def
)
9755 + (MAX_VAR_PARTS
- 1)
9756 * sizeof (((variable
)NULL
)->var_part
[0]), 64);
9757 loc_chain_pool
= create_alloc_pool ("location_chain_def pool",
9758 sizeof (struct location_chain_def
),
9760 shared_hash_pool
= create_alloc_pool ("shared_hash_def pool",
9761 sizeof (struct shared_hash_def
), 256);
9762 empty_shared_hash
= (shared_hash
) pool_alloc (shared_hash_pool
);
9763 empty_shared_hash
->refcount
= 1;
9764 empty_shared_hash
->htab
.create (1);
9765 changed_variables
.create (10);
9767 /* Init the IN and OUT sets. */
/* Presumably iterated over all basic blocks here -- the loop header is
   missing from this extract; verify against upstream.  */
9770 VTI (bb
)->visited
= false;
9771 VTI (bb
)->flooded
= false;
9772 dataflow_set_init (&VTI (bb
)->in
);
9773 dataflow_set_init (&VTI (bb
)->out
);
9774 VTI (bb
)->permp
= NULL
;
/* Debug-insn mode: set up cselib and the caches used for VALUE
   tracking; otherwise leave the pointers cleared.  */
9777 if (MAY_HAVE_DEBUG_INSNS
)
9779 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
9780 scratch_regs
= BITMAP_ALLOC (NULL
);
9781 valvar_pool
= create_alloc_pool ("small variable_def pool",
9782 sizeof (struct variable_def
), 256);
9783 preserved_values
.create (256);
9784 global_get_addr_cache
= pointer_map_create ();
9788 scratch_regs
= NULL
;
9790 global_get_addr_cache
= NULL
;
/* Preserve a VALUE for the CFA base register and record permanent
   equivalences between it and the stack pointer.  */
9793 if (MAY_HAVE_DEBUG_INSNS
)
9799 #ifdef FRAME_POINTER_CFA_OFFSET
9800 reg
= frame_pointer_rtx
;
9801 ofst
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9803 reg
= arg_pointer_rtx
;
9804 ofst
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
9807 ofst
-= INCOMING_FRAME_SP_OFFSET
;
9809 val
= cselib_lookup_from_insn (reg
, GET_MODE (reg
), 1,
9810 VOIDmode
, get_insns ());
9811 preserve_value (val
);
9812 cselib_preserve_cfa_base_value (val
, REGNO (reg
));
9813 expr
= plus_constant (GET_MODE (stack_pointer_rtx
),
9814 stack_pointer_rtx
, -ofst
);
9815 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9819 val
= cselib_lookup_from_insn (stack_pointer_rtx
,
9820 GET_MODE (stack_pointer_rtx
), 1,
9821 VOIDmode
, get_insns ());
9822 preserve_value (val
);
9823 expr
= plus_constant (GET_MODE (reg
), reg
, ofst
);
9824 cselib_add_permanent_equiv (val
, expr
, get_insns ());
9828 /* In order to factor out the adjustments made to the stack pointer or to
9829 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9830 instead of individual location lists, we're going to rewrite MEMs based
9831 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9832 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9833 resp. arg_pointer_rtx. We can do this either when there is no frame
9834 pointer in the function and stack adjustments are consistent for all
9835 basic blocks or when there is a frame pointer and no stack realignment.
9836 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9837 has been eliminated. */
9838 if (!frame_pointer_needed
)
9842 if (!vt_stack_adjustments ())
9845 #ifdef FRAME_POINTER_CFA_OFFSET
9846 reg
= frame_pointer_rtx
;
9848 reg
= arg_pointer_rtx
;
9850 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
9853 if (GET_CODE (elim
) == PLUS
)
9854 elim
= XEXP (elim
, 0);
9855 if (elim
== stack_pointer_rtx
)
9856 vt_init_cfa_base ();
/* Frame pointer present and no stack realignment attempted: compute
   the frame pointer's CFA offset for later fp_setter handling.  */
9859 else if (!crtl
->stack_realign_tried
)
9863 #ifdef FRAME_POINTER_CFA_OFFSET
9864 reg
= frame_pointer_rtx
;
9865 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
9867 reg
= arg_pointer_rtx
;
9868 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
9870 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
9873 if (GET_CODE (elim
) == PLUS
)
9875 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
9876 elim
= XEXP (elim
, 0);
9878 if (elim
!= hard_frame_pointer_rtx
)
9885 /* If the stack is realigned and a DRAP register is used, we're going to
9886 rewrite MEMs based on it representing incoming locations of parameters
9887 passed on the stack into MEMs based on the argument pointer. Although
9888 we aren't going to rewrite other MEMs, we still need to initialize the
9889 virtual CFA pointer in order to ensure that the argument pointer will
9890 be seen as a constant throughout the function.
9892 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9893 else if (stack_realign_drap
)
9897 #ifdef FRAME_POINTER_CFA_OFFSET
9898 reg
= frame_pointer_rtx
;
9900 reg
= arg_pointer_rtx
;
9902 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
9905 if (GET_CODE (elim
) == PLUS
)
9906 elim
= XEXP (elim
, 0);
9907 if (elim
== hard_frame_pointer_rtx
)
9908 vt_init_cfa_base ();
9912 hard_frame_pointer_adjustment
= -1;
9914 vt_add_function_parameters ();
/* Scan each basic block's insns and record micro-operations, keeping
   the running stack adjustment consistent (asserted below).  */
9919 HOST_WIDE_INT pre
, post
= 0;
9920 basic_block first_bb
, last_bb
;
9922 if (MAY_HAVE_DEBUG_INSNS
)
9924 cselib_record_sets_hook
= add_with_sets
;
9925 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9926 fprintf (dump_file
, "first value: %i\n",
9927 cselib_get_next_uid ());
/* Extend the scan region across fallthru-connected single-pred
   successor blocks.  */
9934 if (bb
->next_bb
== EXIT_BLOCK_PTR
9935 || ! single_pred_p (bb
->next_bb
))
9937 e
= find_edge (bb
, bb
->next_bb
);
9938 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
9944 /* Add the micro-operations to the vector. */
9945 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
9947 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
9948 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
9949 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
9950 insn
= NEXT_INSN (insn
))
9954 if (!frame_pointer_needed
)
9956 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
/* Pre-modifying stack adjustment becomes an MO_ADJUST micro-op.  */
9960 mo
.type
= MO_ADJUST
;
9963 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9964 log_op_type (PATTERN (insn
), bb
, insn
,
9965 MO_ADJUST
, dump_file
);
9966 VTI (bb
)->mos
.safe_push (mo
);
9967 VTI (bb
)->out
.stack_adjust
+= pre
;
9971 cselib_hook_called
= false;
9972 adjust_insn (bb
, insn
);
9973 if (MAY_HAVE_DEBUG_INSNS
)
9976 prepare_call_arguments (bb
, insn
);
9977 cselib_process_insn (insn
);
9978 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9980 print_rtl_single (dump_file
, insn
);
9981 dump_cselib_table (dump_file
);
9984 if (!cselib_hook_called
)
9985 add_with_sets (insn
, 0, 0);
/* Post-modifying stack adjustment gets its own MO_ADJUST micro-op.  */
9988 if (!frame_pointer_needed
&& post
)
9991 mo
.type
= MO_ADJUST
;
9994 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9995 log_op_type (PATTERN (insn
), bb
, insn
,
9996 MO_ADJUST
, dump_file
);
9997 VTI (bb
)->mos
.safe_push (mo
);
9998 VTI (bb
)->out
.stack_adjust
+= post
;
/* When the frame pointer is established, switch the CFA base and
   disassociate sp from fp in cselib.  */
10001 if (fp_cfa_offset
!= -1
10002 && hard_frame_pointer_adjustment
== -1
10003 && fp_setter_insn (insn
))
10005 vt_init_cfa_base ();
10006 hard_frame_pointer_adjustment
= fp_cfa_offset
;
10007 /* Disassociate sp from fp now. */
10008 if (MAY_HAVE_DEBUG_INSNS
)
10011 cselib_invalidate_rtx (stack_pointer_rtx
);
10012 v
= cselib_lookup (stack_pointer_rtx
, Pmode
, 1,
10014 if (v
&& !cselib_preserved_value_p (v
))
10016 cselib_set_value_sp_based (v
);
10017 preserve_value (v
);
10023 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
/* Flush cselib back to only the preserved VALUEs between regions.  */
10028 if (MAY_HAVE_DEBUG_INSNS
)
10030 cselib_preserve_only_values ();
10031 cselib_reset_table (cselib_get_next_uid ());
10032 cselib_record_sets_hook
= NULL
;
10036 hard_frame_pointer_adjustment
= -1;
10037 VTI (ENTRY_BLOCK_PTR
)->flooded
= true;
10038 cfa_base_rtx
= NULL_RTX
;
/* NOTE(review): extraction-damaged region -- lines are split mid-token and
   the function's signature, basic-block loop header and some braces are
   missing; restore from upstream var-tracking.c before compiling.  */
10042 /* This is *not* reset after each function. It gives each
10043 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10044 a unique label number. */
10046 static int debug_label_num
= 1;
10048 /* Get rid of all debug insns from the insn stream. */
10051 delete_debug_insns (void)
/* Nothing to do unless debug insns can exist at all.  */
10056 if (!MAY_HAVE_DEBUG_INSNS
)
10061 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
10062 if (DEBUG_INSN_P (insn
))
/* Debug bindings of named labels without RTL are converted into
   NOTE_INSN_DELETED_DEBUG_LABEL notes rather than being dropped.  */
10064 tree decl
= INSN_VAR_LOCATION_DECL (insn
);
10065 if (TREE_CODE (decl
) == LABEL_DECL
10066 && DECL_NAME (decl
)
10067 && !DECL_RTL_SET_P (decl
))
10069 PUT_CODE (insn
, NOTE
);
10070 NOTE_KIND (insn
) = NOTE_INSN_DELETED_DEBUG_LABEL
;
10071 NOTE_DELETED_LABEL_NAME (insn
)
10072 = IDENTIFIER_POINTER (DECL_NAME (decl
));
10073 SET_DECL_RTL (decl
, insn
);
10074 CODE_LABEL_NUMBER (insn
) = debug_label_num
++;
/* All other debug insns are simply deleted.  */
10077 delete_insn (insn
);
10082 /* Run a fast, BB-local only version of var tracking, to take care of
10083 information that we don't do global analysis on, such that not all
10084 information is lost. If SKIPPED holds, we're skipping the global
10085 pass entirely, so we should try to use information it would have
10086 handled as well.. */
10089 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
10091 /* ??? Just skip it all for now. */
10092 delete_debug_insns ();
/* NOTE(review): extraction-damaged region -- the function signature
   (presumably vt_finalize per the comment; confirm against upstream) and
   the basic-block loop headers are missing, and lines are split mid-token.
   Restore from upstream var-tracking.c before compiling.  */
10095 /* Free the data structures needed for variable tracking. */
/* Release the per-block micro-operation vectors and dataflow sets.  */
10104 VTI (bb
)->mos
.release ();
10109 dataflow_set_destroy (&VTI (bb
)->in
);
10110 dataflow_set_destroy (&VTI (bb
)->out
);
10111 if (VTI (bb
)->permp
)
10113 dataflow_set_destroy (VTI (bb
)->permp
);
10114 XDELETE (VTI (bb
)->permp
);
/* Free the aux data, hash tables and allocation pools created in
   vt_initialize.  */
10117 free_aux_for_blocks ();
10118 empty_shared_hash
->htab
.dispose ();
10119 changed_variables
.dispose ();
10120 free_alloc_pool (attrs_pool
);
10121 free_alloc_pool (var_pool
);
10122 free_alloc_pool (loc_chain_pool
);
10123 free_alloc_pool (shared_hash_pool
);
/* Debug-insn-specific state: address cache, dependency pool, VALUE
   variable pool, preserved-values vector and scratch register bitmap.  */
10125 if (MAY_HAVE_DEBUG_INSNS
)
10127 if (global_get_addr_cache
)
10128 pointer_map_destroy (global_get_addr_cache
);
10129 global_get_addr_cache
= NULL
;
10130 if (loc_exp_dep_pool
)
10131 free_alloc_pool (loc_exp_dep_pool
);
10132 loc_exp_dep_pool
= NULL
;
10133 free_alloc_pool (valvar_pool
);
10134 preserved_values
.release ();
10136 BITMAP_FREE (scratch_regs
);
10137 scratch_regs
= NULL
;
10140 #ifdef HAVE_window_save
10141 vec_free (windowed_parm_regs
);
10145 XDELETEVEC (vui_vec
);
/* NOTE(review): extraction-damaged region -- lines are split mid-token and
   several statements (return statements, braces, timevar-bracketed emit
   call) are missing; restore from upstream var-tracking.c before
   compiling.  */
10150 /* The entry point to variable tracking pass. */
10152 static inline unsigned int
10153 variable_tracking_main_1 (void)
/* A negative flag_var_tracking_assignments means assignments tracking
   is disabled: drop the debug insns and bail out.  */
10157 if (flag_var_tracking_assignments
< 0)
10159 delete_debug_insns ();
/* Give up on very large, densely connected CFGs; run only the cheap
   local pass in that case.  */
10163 if (n_basic_blocks_for_fn (cfun
) > 500 &&
10164 n_edges
/ n_basic_blocks_for_fn (cfun
) >= 20)
10166 vt_debug_insns_local (true);
10170 mark_dfs_back_edges ();
10171 if (!vt_initialize ())
10174 vt_debug_insns_local (true);
10178 success
= vt_find_locations ();
/* If the dataflow solver gave up while assignment tracking was on,
   retry once with assignments disabled.  */
10180 if (!success
&& flag_var_tracking_assignments
> 0)
10184 delete_debug_insns ();
10186 /* This is later restored by our caller. */
10187 flag_var_tracking_assignments
= 0;
10189 success
= vt_initialize ();
10190 gcc_assert (success
);
10192 success
= vt_find_locations ();
10198 vt_debug_insns_local (false);
10202 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10204 dump_dataflow_sets ();
10205 dump_reg_info (dump_file
);
10206 dump_flow_info (dump_file
, dump_flags
);
/* Note emission is timed separately under TV_VAR_TRACKING_EMIT.  */
10209 timevar_push (TV_VAR_TRACKING_EMIT
);
10211 timevar_pop (TV_VAR_TRACKING_EMIT
);
10214 vt_debug_insns_local (false);
10219 variable_tracking_main (void)
10222 int save
= flag_var_tracking_assignments
;
10224 ret
= variable_tracking_main_1 ();
10226 flag_var_tracking_assignments
= save
;
10232 gate_handle_var_tracking (void)
10234 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
10241 const pass_data pass_data_variable_tracking
=
10243 RTL_PASS
, /* type */
10244 "vartrack", /* name */
10245 OPTGROUP_NONE
, /* optinfo_flags */
10246 true, /* has_gate */
10247 true, /* has_execute */
10248 TV_VAR_TRACKING
, /* tv_id */
10249 0, /* properties_required */
10250 0, /* properties_provided */
10251 0, /* properties_destroyed */
10252 0, /* todo_flags_start */
10253 ( TODO_verify_rtl_sharing
| TODO_verify_flow
), /* todo_flags_finish */
10256 class pass_variable_tracking
: public rtl_opt_pass
10259 pass_variable_tracking (gcc::context
*ctxt
)
10260 : rtl_opt_pass (pass_data_variable_tracking
, ctxt
)
10263 /* opt_pass methods: */
10264 bool gate () { return gate_handle_var_tracking (); }
10265 unsigned int execute () { return variable_tracking_main (); }
10267 }; // class pass_variable_tracking
10269 } // anon namespace
10272 make_pass_variable_tracking (gcc::context
*ctxt
)
10274 return new pass_variable_tracking (ctxt
);