1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and, for each physical register, a linked list of the variable parts currently stored in that register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
118 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
119 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
120 Currently the value is the same as IDENTIFIER_NODE, which has such
121 a property. If this compile time assertion ever fails, make sure that
122 the new tree code that equals (int) VALUE has the same property. */
/* Compile-time assertion: the array size becomes -1 (invalid) if the
   condition does not hold, forcing a compilation error. */
123 extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
125 /* Type of micro operation. */
126 enum micro_operation_type
128 MO_USE
, /* Use location (REG or MEM). */
129 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
130 or the variable is not trackable. */
131 MO_VAL_USE
, /* Use location which is associated with a value. */
132 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
133 MO_VAL_SET
, /* Set location associated with a value. */
134 MO_SET
, /* Set location. */
135 MO_COPY
, /* Copy the same portion of a variable from one
136 location to another. */
137 MO_CLOBBER
, /* Clobber location. */
138 MO_CALL
, /* Call insn. */
139 MO_ADJUST
/* Adjust stack pointer. */
/* Human-readable names for the micro_operation_type values above —
   presumably for dump-file output; the array initializer lines are
   missing from this extract.  TODO confirm against the full source. */
143 static const char * const ATTRIBUTE_UNUSED
144 micro_operation_type_name
[] = {
157 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
158 Notes emitted as AFTER_CALL are to take effect during the call,
159 rather than after the call. */
/* NOTE(review): the enum keyword/opening line is missing from this
   extract; only the enumerators are visible. */
162 EMIT_NOTE_BEFORE_INSN
,
163 EMIT_NOTE_AFTER_INSN
,
164 EMIT_NOTE_AFTER_CALL_INSN
167 /* Structure holding information about micro operation. */
168 typedef struct micro_operation_def
170 /* Type of micro operation. */
171 enum micro_operation_type type
;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* Location. For MO_SET and MO_COPY, this is the SET that
182 performs the assignment, if known, otherwise it is the target
183 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
184 CONCAT of the VALUE and the LOC associated with it. For
185 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
186 associated with it. */
189 /* Stack adjustment. */
190 HOST_WIDE_INT adjust
;
/* Vector support for micro_operation (object vector, heap-allocated). */
194 DEF_VEC_O(micro_operation
);
195 DEF_VEC_ALLOC_O(micro_operation
,heap
);
197 /* A declaration of a variable, or an RTL value being handled like a
/* Opaque pointer: either a tree DECL or a VALUE rtx — see the dv_*
   accessors below for how the two are distinguished. */
199 typedef void *decl_or_value
;
201 /* Structure for passing some other parameters to function
202 emit_note_insn_var_location. */
203 typedef struct emit_note_data_def
205 /* The instruction which the note will be emitted before/after. */
208 /* Where the note will be emitted (before/after insn)? */
209 enum emit_note_where where
;
211 /* The variables and values active at this point. */
215 /* Description of location of a part of a variable. The content of a physical
216 register is described by a chain of these structures.
217 The chains are pretty short (usually 1 or 2 elements) and thus
218 chain is the best data structure. */
219 typedef struct attrs_def
221 /* Pointer to next member of the list. */
222 struct attrs_def
*next
;
224 /* The rtx of register. */
227 /* The declaration corresponding to LOC. */
230 /* Offset from start of DECL. */
231 HOST_WIDE_INT offset
;
234 /* Structure holding a refcounted hash table. If refcount > 1,
235 it must be first unshared before modified. */
236 typedef struct shared_hash_def
238 /* Reference count. */
241 /* Actual hash table. */
245 /* Structure holding the IN or OUT set for a basic block. */
246 typedef struct dataflow_set_def
248 /* Adjustment of stack offset. */
249 HOST_WIDE_INT stack_adjust
;
251 /* Attributes for registers (lists of attrs). */
252 attrs regs
[FIRST_PSEUDO_REGISTER
];
254 /* Variable locations. */
257 /* Vars that are being traversed. */
258 shared_hash traversed_vars
;
261 /* The structure (one for each basic block) containing the information
262 needed for variable tracking. */
263 typedef struct variable_tracking_info_def
265 /* The vector of micro operations. */
266 VEC(micro_operation
, heap
) *mos
;
268 /* The IN and OUT set for dataflow analysis. */
272 /* The permanent-in dataflow set for this block. This is used to
273 hold values for which we had to compute entry values. ??? This
274 should probably be dynamically allocated, to avoid using more
275 memory in non-debug builds. */
278 /* Has the block been visited in DFS? */
281 /* Has the block been flooded in VTA? */
284 } *variable_tracking_info
;
286 /* Structure for chaining the locations. */
287 typedef struct location_chain_def
289 /* Next element in the chain. */
290 struct location_chain_def
*next
;
292 /* The location (REG, MEM or VALUE). */
295 /* The "value" stored in this location. */
299 enum var_init_status init
;
302 /* Structure describing one part of variable. */
303 typedef struct variable_part_def
305 /* Chain of locations of the part. */
306 location_chain loc_chain
;
308 /* Location which was last emitted to location list. */
311 /* The offset in the variable. */
312 HOST_WIDE_INT offset
;
315 /* Maximum number of location parts. */
316 #define MAX_VAR_PARTS 16
318 /* Structure describing where the variable is located. */
319 typedef struct variable_def
321 /* The declaration of the variable, or an RTL value being handled
322 like a declaration. */
325 /* Reference count. */
328 /* Number of variable parts. */
331 /* True if this variable changed (any of its) cur_loc fields
332 during the current emit_notes_for_changes resp.
333 emit_notes_for_differences call. */
334 bool cur_loc_changed
;
336 /* True if this variable_def struct is currently in the
337 changed_variables hash table. */
338 bool in_changed_variables
;
340 /* The variable parts. */
/* Trailing array, really sized 1 or MAX_VAR_PARTS depending on the
   alloc pool used — see dv_pool and the var_pool/valvar_pool comments. */
341 variable_part var_part
[1];
343 typedef const struct variable_def
*const_variable
;
345 /* Structure for chaining backlinks from referenced VALUEs to
346 DVs that are referencing them. */
347 typedef struct value_chain_def
349 /* Next value_chain entry. */
350 struct value_chain_def
*next
;
352 /* The declaration of the variable, or an RTL value
353 being handled like a declaration, whose var_parts[0].loc_chain
354 references the VALUE owning this value_chain. */
357 /* Reference count. */
360 typedef const struct value_chain_def
*const_value_chain
;
362 /* Pointer to the BB's information specific to variable tracking pass. */
363 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
365 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
366 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
368 /* Alloc pool for struct attrs_def. */
369 static alloc_pool attrs_pool
;
371 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
372 static alloc_pool var_pool
;
374 /* Alloc pool for struct variable_def with a single var_part entry. */
375 static alloc_pool valvar_pool
;
377 /* Alloc pool for struct location_chain_def. */
378 static alloc_pool loc_chain_pool
;
380 /* Alloc pool for struct shared_hash_def. */
381 static alloc_pool shared_hash_pool
;
383 /* Alloc pool for struct value_chain_def. */
384 static alloc_pool value_chain_pool
;
386 /* Changed variables, notes will be emitted for them. */
387 static htab_t changed_variables
;
389 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
390 static htab_t value_chains
;
392 /* Shall notes be emitted? */
393 static bool emit_notes
;
395 /* Empty shared hashtable. */
396 static shared_hash empty_shared_hash
;
398 /* Scratch register bitmap used by cselib_expand_value_rtx. */
399 static bitmap scratch_regs
= NULL
;
401 /* Variable used to tell whether cselib_process_insn called our hook. */
402 static bool cselib_hook_called
;
404 /* Local function prototypes. */
405 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
407 static void insn_stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
409 static bool vt_stack_adjustments (void);
410 static rtx
compute_cfa_pointer (HOST_WIDE_INT
);
411 static hashval_t
variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs
*);
416 static void attrs_list_clear (attrs
*);
417 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
418 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
419 static void attrs_list_copy (attrs
*, attrs
);
420 static void attrs_list_union (attrs
*, attrs
);
422 static void **unshare_variable (dataflow_set
*set
, void **slot
, variable var
,
423 enum var_init_status
);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t
, htab_t
);
426 static tree
var_debug_decl (tree
);
427 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
428 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
429 enum var_init_status
, rtx
);
430 static void var_reg_delete (dataflow_set
*, rtx
, bool);
431 static void var_regno_delete (dataflow_set
*, int);
432 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
433 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
434 enum var_init_status
, rtx
);
435 static void var_mem_delete (dataflow_set
*, rtx
, bool);
437 static void dataflow_set_init (dataflow_set
*);
438 static void dataflow_set_clear (dataflow_set
*);
439 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
443 static location_chain
find_loc_in_1pdv (rtx
, variable
, htab_t
);
444 static bool canon_value_cmp (rtx
, rtx
);
445 static int loc_cmp (rtx
, rtx
);
446 static bool variable_part_different_p (variable_part
*, variable_part
*);
447 static bool onepart_variable_different_p (variable
, variable
);
448 static bool variable_different_p (variable
, variable
);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
451 static void dataflow_set_destroy (dataflow_set
*);
453 static bool contains_symbol_ref (rtx
);
454 static bool track_expr_p (tree
, bool);
455 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
456 static int add_uses (rtx
*, void *);
457 static void add_uses_1 (rtx
*, void *);
458 static void add_stores (rtx
, const_rtx
, void *);
459 static bool compute_bb_dataflow (basic_block
);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs
);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable
);
465 static void dump_vars (htab_t
);
466 static void dump_dataflow_set (dataflow_set
*);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable
, dataflow_set
*);
470 static void **set_slot_part (dataflow_set
*, rtx
, void **,
471 decl_or_value
, HOST_WIDE_INT
,
472 enum var_init_status
, rtx
);
473 static void set_variable_part (dataflow_set
*, rtx
,
474 decl_or_value
, HOST_WIDE_INT
,
475 enum var_init_status
, rtx
, enum insert_option
);
476 static void **clobber_slot_part (dataflow_set
*, rtx
,
477 void **, HOST_WIDE_INT
, rtx
);
478 static void clobber_variable_part (dataflow_set
*, rtx
,
479 decl_or_value
, HOST_WIDE_INT
, rtx
);
480 static void **delete_slot_part (dataflow_set
*, rtx
, void **, HOST_WIDE_INT
);
481 static void delete_variable_part (dataflow_set
*, rtx
,
482 decl_or_value
, HOST_WIDE_INT
);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx
, enum emit_note_where
, shared_hash
);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx
, dataflow_set
*, dataflow_set
*);
488 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
492 static void vt_add_function_parameters (void);
493 static bool vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains
497 PRE- and POST-modifying stack pointer.
498 This function is similar to stack_adjust_offset. */
/* Accumulates into *PRE and *POST; callers are expected to have
   initialized both.  NOTE(review): the function's return type line,
   braces, and the switch-case labels distinguishing PRE_MODIFY /
   POST_MODIFY / PRE_DEC / POST_DEC / PRE_INC / POST_INC below are
   missing from this extract — do not restructure without the full
   source. */
501 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
504 rtx src
= SET_SRC (pattern
);
505 rtx dest
= SET_DEST (pattern
);
508 if (dest
== stack_pointer_rtx
)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code
= GET_CODE (src
);
512 if (! (code
== PLUS
|| code
== MINUS
)
513 || XEXP (src
, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src
, 1)))
518 *post
+= INTVAL (XEXP (src
, 1));
520 *post
-= INTVAL (XEXP (src
, 1));
522 else if (MEM_P (dest
))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src
= XEXP (dest
, 0);
526 code
= GET_CODE (src
);
532 if (XEXP (src
, 0) == stack_pointer_rtx
)
534 rtx val
= XEXP (XEXP (src
, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
539 if (code
== PRE_MODIFY
)
540 *pre
-= INTVAL (val
);
542 *post
-= INTVAL (val
);
/* The four guarded adjustments below use the size of DEST's mode;
   each belongs to a different (missing) auto-modify case label. */
548 if (XEXP (src
, 0) == stack_pointer_rtx
)
550 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
556 if (XEXP (src
, 0) == stack_pointer_rtx
)
558 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
564 if (XEXP (src
, 0) == stack_pointer_rtx
)
566 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
572 if (XEXP (src
, 0) == stack_pointer_rtx
)
574 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
585 /* Given an INSN, calculate the amount of stack adjustment it contains
586 PRE- and POST-modifying stack pointer. */
/* For frame-related insns, a REG_FRAME_RELATED_EXPR note (when present)
   overrides PATTERN (insn) as the pattern to analyze. */
589 insn_stack_adjust_offset_pre_post (rtx insn
, HOST_WIDE_INT
*pre
,
597 pattern
= PATTERN (insn
);
598 if (RTX_FRAME_RELATED_P (insn
))
600 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
602 pattern
= XEXP (expr
, 0);
605 if (GET_CODE (pattern
) == SET
)
606 stack_adjust_offset_pre_post (pattern
, pre
, post
);
607 else if (GET_CODE (pattern
) == PARALLEL
608 || GET_CODE (pattern
) == SEQUENCE
)
612 /* There may be stack adjustments inside compound insns. Search
/* Iterate the PARALLEL/SEQUENCE elements in reverse, analyzing each
   contained SET. */
614 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
615 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
616 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
620 /* Compute stack adjustments for all blocks by traversing DFS tree.
621 Return true when the adjustments on all incoming edges are consistent.
622 Heavily borrowed from pre_and_rev_post_order_compute. */
/* Iterative DFS over the CFG using an explicit edge-iterator stack.
   NOTE(review): declarations of `sp' (stack index) and `insn', the
   main traversal loop header, and the failure/cleanup paths are
   missing from this extract. */
625 vt_stack_adjustments (void)
627 edge_iterator
*stack
;
630 /* Initialize entry block. */
631 VTI (ENTRY_BLOCK_PTR
)->visited
= true;
632 VTI (ENTRY_BLOCK_PTR
)->in
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
633 VTI (ENTRY_BLOCK_PTR
)->out
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
635 /* Allocate stack for back-tracking up CFG. */
636 stack
= XNEWVEC (edge_iterator
, n_basic_blocks
+ 1);
639 /* Push the first edge on to the stack. */
640 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR
->succs
);
648 /* Look at the edge on the top of the stack. */
650 src
= ei_edge (ei
)->src
;
651 dest
= ei_edge (ei
)->dest
;
653 /* Check if the edge destination has been visited yet. */
654 if (!VTI (dest
)->visited
)
657 HOST_WIDE_INT pre
, post
, offset
;
658 VTI (dest
)->visited
= true;
659 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
/* Accumulate the stack adjustments of every insn in DEST to obtain
   its OUT adjustment. */
661 if (dest
!= EXIT_BLOCK_PTR
)
662 for (insn
= BB_HEAD (dest
);
663 insn
!= NEXT_INSN (BB_END (dest
));
664 insn
= NEXT_INSN (insn
))
667 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
668 offset
+= pre
+ post
;
671 VTI (dest
)->out
.stack_adjust
= offset
;
673 if (EDGE_COUNT (dest
->succs
) > 0)
674 /* Since the DEST node has been visited for the first
675 time, check its successors. */
676 stack
[sp
++] = ei_start (dest
->succs
);
680 /* Check whether the adjustments on the edges are the same. */
681 if (VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
687 if (! ei_one_before_end_p (ei
))
688 /* Go to the next edge. */
689 ei_next (&stack
[sp
- 1]);
691 /* Return to previous level if there are no more edges. */
700 /* Compute a CFA-based value for the stack pointer. */
/* Builds a (plus BASE_REG const) expression relative to the CFA base
   register; the base is the frame pointer when FRAME_POINTER_CFA_OFFSET
   is defined, otherwise the arg pointer. */
703 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
707 #ifdef FRAME_POINTER_CFA_OFFSET
708 adjustment
-= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
709 cfa
= plus_constant (frame_pointer_rtx
, adjustment
);
711 adjustment
-= ARG_POINTER_CFA_OFFSET (current_function_decl
);
712 cfa
= plus_constant (arg_pointer_rtx
, adjustment
);
718 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
719 or -1 if the replacement shouldn't be done. */
720 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
722 /* Data for adjust_mems callback. */
724 struct adjust_mem_data
727 enum machine_mode mem_mode
;
728 HOST_WIDE_INT stack_adjust
;
732 /* Helper function for adjusting used MEMs. */
/* simplify_replace_fn_rtx callback: rewrites sp/fp references inside
   MEM addresses to CFA-based expressions, strips auto-modify address
   forms ({PRE,POST}_{INC,DEC,MODIFY}), and records the displaced
   side effects in AMD->side_effects.  NOTE(review): the switch-case
   labels and several intervening lines are missing from this
   extract. */
735 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
737 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
738 rtx mem
, addr
= loc
, tem
;
739 enum machine_mode mem_mode_save
;
741 switch (GET_CODE (loc
))
744 /* Don't do any sp or fp replacements outside of MEM addresses. */
745 if (amd
->mem_mode
== VOIDmode
)
747 if (loc
== stack_pointer_rtx
748 && !frame_pointer_needed
)
749 return compute_cfa_pointer (amd
->stack_adjust
);
750 else if (loc
== hard_frame_pointer_rtx
751 && frame_pointer_needed
752 && hard_frame_pointer_adjustment
!= -1)
753 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
/* MEM handling: delegitimize, recursively adjust the address with
   mem_mode temporarily set to the MEM's mode, then re-canonicalize. */
759 mem
= targetm
.delegitimize_address (mem
);
760 if (mem
!= loc
&& !MEM_P (mem
))
761 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
764 addr
= XEXP (mem
, 0);
765 mem_mode_save
= amd
->mem_mode
;
766 amd
->mem_mode
= GET_MODE (mem
);
767 store_save
= amd
->store
;
769 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
770 amd
->store
= store_save
;
771 amd
->mem_mode
= mem_mode_save
;
773 addr
= targetm
.delegitimize_address (addr
);
774 if (addr
!= XEXP (mem
, 0))
775 mem
= replace_equiv_address_nv (mem
, addr
);
777 mem
= avoid_constant_pool_reference (mem
);
/* Auto-increment/decrement: rewrite as an explicit PLUS of the
   containing MEM's mode size, sign chosen by INC vs DEC. */
781 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
782 GEN_INT (GET_CODE (loc
) == PRE_INC
783 ? GET_MODE_SIZE (amd
->mem_mode
)
784 : -GET_MODE_SIZE (amd
->mem_mode
)));
788 addr
= XEXP (loc
, 0);
789 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
790 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
791 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
792 GEN_INT ((GET_CODE (loc
) == PRE_INC
793 || GET_CODE (loc
) == POST_INC
)
794 ? GET_MODE_SIZE (amd
->mem_mode
)
795 : -GET_MODE_SIZE (amd
->mem_mode
)));
/* Record the register update displaced from the address as a SET to be
   appended to the insn pattern later (see adjust_insn). */
796 amd
->side_effects
= alloc_EXPR_LIST (0,
797 gen_rtx_SET (VOIDmode
,
803 addr
= XEXP (loc
, 1);
806 addr
= XEXP (loc
, 0);
807 gcc_assert (amd
->mem_mode
!= VOIDmode
);
808 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
809 amd
->side_effects
= alloc_EXPR_LIST (0,
810 gen_rtx_SET (VOIDmode
,
816 /* First try without delegitimization of whole MEMs and
817 avoid_constant_pool_reference, which is more likely to succeed. */
818 store_save
= amd
->store
;
820 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
822 amd
->store
= store_save
;
823 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
824 if (mem
== SUBREG_REG (loc
))
826 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
827 GET_MODE (SUBREG_REG (loc
)),
831 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
832 GET_MODE (SUBREG_REG (loc
)),
/* Fall back to a raw SUBREG when simplify_gen_subreg fails. */
836 return gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
843 /* Helper function for replacement of uses. */
/* note_uses callback: run adjust_mems over *X and queue the change
   (validate_change with in_group=true; changes are applied/cancelled
   elsewhere as a group). */
846 adjust_mem_uses (rtx
*x
, void *data
)
848 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
850 validate_change (NULL_RTX
, x
, new_x
, true);
853 /* Helper function for replacement of stores. */
/* note_stores callback: adjust only SET_DEST of EXPR; the const cast
   is needed because note_stores passes a const_rtx. */
856 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
860 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
862 if (new_dest
!= SET_DEST (expr
))
864 rtx xexpr
= CONST_CAST_RTX (expr
);
865 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
870 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
871 replace them with their value in the insn and add the side-effects
872 as other sets to the insn. */
875 adjust_insn (basic_block bb
, rtx insn
)
877 struct adjust_mem_data amd
;
879 amd
.mem_mode
= VOIDmode
;
/* Negated: BB's OUT adjustment converts sp-relative back to CFA-relative. */
880 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
881 amd
.side_effects
= NULL_RTX
;
884 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
887 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
889 /* For read-only MEMs containing some constant, prefer those
891 set
= single_set (insn
);
892 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
894 rtx note
= find_reg_equal_equiv_note (insn
);
896 if (note
&& CONSTANT_P (XEXP (note
, 0)))
897 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
/* If adjust_mems stripped auto-modify addresses, append the displaced
   register updates as extra SETs inside a (possibly new) PARALLEL. */
900 if (amd
.side_effects
)
902 rtx
*pat
, new_pat
, s
;
905 pat
= &PATTERN (insn
);
906 if (GET_CODE (*pat
) == COND_EXEC
)
907 pat
= &COND_EXEC_CODE (*pat
);
908 if (GET_CODE (*pat
) == PARALLEL
)
909 oldn
= XVECLEN (*pat
, 0);
/* Count queued side effects; NOTE(review): the loop body advancing
   `s' is missing from this extract. */
912 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
914 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
915 if (GET_CODE (*pat
) == PARALLEL
)
916 for (i
= 0; i
< oldn
; i
++)
917 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
919 XVECEXP (new_pat
, 0, 0) = *pat
;
920 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
921 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
922 free_EXPR_LIST_list (&amd
.side_effects
);
923 validate_change (NULL_RTX
, pat
, new_pat
, true);
927 /* Return true if a decl_or_value DV is a DECL or NULL. */
/* Relies on the check_value_val compile-time assertion: the VALUE rtx
   code cannot collide with any tree code appearing here. */
929 dv_is_decl_p (decl_or_value dv
)
931 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
934 /* Return true if a decl_or_value is a VALUE rtl. */
936 dv_is_value_p (decl_or_value dv
)
938 return dv
&& !dv_is_decl_p (dv
);
941 /* Return the decl in the decl_or_value. */
943 dv_as_decl (decl_or_value dv
)
945 #ifdef ENABLE_CHECKING
946 gcc_assert (dv_is_decl_p (dv
));
951 /* Return the value in the decl_or_value. */
953 dv_as_value (decl_or_value dv
)
955 #ifdef ENABLE_CHECKING
956 gcc_assert (dv_is_value_p (dv
));
961 /* Return the opaque pointer in the decl_or_value. */
963 dv_as_opaque (decl_or_value dv
)
968 /* Return true if a decl_or_value must not have more than one variable
/* VALUEs and DEBUG_EXPR_DECLs are always one-part; ordinary decls are
   one-part only when target_for_debug_bind says so, and nothing is
   one-part without debug insns. */
971 dv_onepart_p (decl_or_value dv
)
975 if (!MAY_HAVE_DEBUG_INSNS
)
978 if (dv_is_value_p (dv
))
981 decl
= dv_as_decl (dv
);
986 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
989 return (target_for_debug_bind (decl
) != NULL_TREE
);
992 /* Return the variable pool to be used for dv, depending on whether it
993 can have multiple parts or not. */
994 static inline alloc_pool
995 dv_pool (decl_or_value dv
)
997 return dv_onepart_p (dv
) ? valvar_pool
: var_pool
;
1000 /* Build a decl_or_value out of a decl. */
1001 static inline decl_or_value
1002 dv_from_decl (tree decl
)
1006 #ifdef ENABLE_CHECKING
1007 gcc_assert (dv_is_decl_p (dv
));
1012 /* Build a decl_or_value out of a value. */
1013 static inline decl_or_value
1014 dv_from_value (rtx value
)
1018 #ifdef ENABLE_CHECKING
1019 gcc_assert (dv_is_value_p (dv
));
/* Extern (non-static) so it can be called from a debugger. */
1024 extern void debug_dv (decl_or_value dv
);
1027 debug_dv (decl_or_value dv
)
1029 if (dv_is_value_p (dv
))
1030 debug_rtx (dv_as_value (dv
));
1032 debug_generic_stmt (dv_as_decl (dv
));
1035 typedef unsigned int dvuid
;
1037 /* Return the uid of DV. */
1040 dv_uid (decl_or_value dv
)
1042 if (dv_is_value_p (dv
))
1043 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
1045 return DECL_UID (dv_as_decl (dv
));
1048 /* Compute the hash from the uid. */
1050 static inline hashval_t
1051 dv_uid2hash (dvuid uid
)
1056 /* The hash function for a mask table in a shared_htab chain. */
1058 static inline hashval_t
1059 dv_htab_hash (decl_or_value dv
)
1061 return dv_uid2hash (dv_uid (dv
));
1064 /* The hash function for variable_htab, computes the hash value
1065 from the declaration of variable X. */
1068 variable_htab_hash (const void *x
)
1070 const_variable
const v
= (const_variable
) x
;
1072 return dv_htab_hash (v
->dv
);
1075 /* Compare the declaration of variable X with declaration Y. */
/* htab equality callback: X is a stored variable, Y is the lookup key
   (a decl_or_value); compares by pointer identity. */
1078 variable_htab_eq (const void *x
, const void *y
)
1080 const_variable
const v
= (const_variable
) x
;
1081 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1083 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
1086 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
/* Drops one reference; only when the count reaches zero are the
   location chains and the variable itself returned to their pools.
   NOTE(review): the decrement and early-return lines are missing from
   this extract. */
1089 variable_htab_free (void *elem
)
1092 variable var
= (variable
) elem
;
1093 location_chain node
, next
;
1095 gcc_assert (var
->refcount
> 0);
1098 if (var
->refcount
> 0)
1101 for (i
= 0; i
< var
->n_var_parts
; i
++)
1103 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1106 pool_free (loc_chain_pool
, node
);
1108 var
->var_part
[i
].loc_chain
= NULL
;
/* Variable goes back to whichever pool it was allocated from. */
1110 pool_free (dv_pool (var
->dv
), var
);
1113 /* The hash function for value_chains htab, computes the hash value
1117 value_chain_htab_hash (const void *x
)
1119 const_value_chain
const v
= (const_value_chain
) x
;
1121 return dv_htab_hash (v
->dv
);
1124 /* Compare the VALUE X with VALUE Y. */
1127 value_chain_htab_eq (const void *x
, const void *y
)
1129 const_value_chain
const v
= (const_value_chain
) x
;
1130 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1132 return dv_as_opaque (v
->dv
) == dv_as_opaque (dv
);
1135 /* Initialize the set (array) SET of attrs to empty lists. */
1138 init_attrs_list_set (attrs
*set
)
1142 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1146 /* Make the list *LISTP empty. */
1149 attrs_list_clear (attrs
*listp
)
1153 for (list
= *listp
; list
; list
= next
)
1156 pool_free (attrs_pool
, list
);
1161 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1164 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1166 for (; list
; list
= list
->next
)
1167 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1172 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1175 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1176 HOST_WIDE_INT offset
, rtx loc
)
1180 list
= (attrs
) pool_alloc (attrs_pool
);
1183 list
->offset
= offset
;
1184 list
->next
= *listp
;
1188 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1191 attrs_list_copy (attrs
*dstp
, attrs src
)
1195 attrs_list_clear (dstp
);
1196 for (; src
; src
= src
->next
)
1198 n
= (attrs
) pool_alloc (attrs_pool
);
1201 n
->offset
= src
->offset
;
1207 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1210 attrs_list_union (attrs
*dstp
, attrs src
)
1212 for (; src
; src
= src
->next
)
1214 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1215 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1219 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1223 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1225 gcc_assert (!*dstp
);
1226 for (; src
; src
= src
->next
)
1228 if (!dv_onepart_p (src
->dv
))
1229 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1231 for (src
= src2
; src
; src
= src
->next
)
1233 if (!dv_onepart_p (src
->dv
)
1234 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1235 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1239 /* Shared hashtable support. */
1241 /* Return true if VARS is shared. */
1244 shared_hash_shared (shared_hash vars
)
1246 return vars
->refcount
> 1;
1249 /* Return the hash table for VARS. */
1251 static inline htab_t
1252 shared_hash_htab (shared_hash vars
)
1257 /* Return true if VAR is shared, or maybe because VARS is shared. */
1260 shared_var_p (variable var
, shared_hash vars
)
1262 /* Don't count an entry in the changed_variables table as a duplicate. */
1263 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1264 || shared_hash_shared (vars
));
1267 /* Copy variables into a new hash table. */
1270 shared_hash_unshare (shared_hash vars
)
1272 shared_hash new_vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
1273 gcc_assert (vars
->refcount
> 1);
1274 new_vars
->refcount
= 1;
1276 = htab_create (htab_elements (vars
->htab
) + 3, variable_htab_hash
,
1277 variable_htab_eq
, variable_htab_free
);
1278 vars_copy (new_vars
->htab
, vars
->htab
);
1283 /* Increment reference counter on VARS and return it. */
1285 static inline shared_hash
1286 shared_hash_copy (shared_hash vars
)
1292 /* Decrement reference counter and destroy hash table if not shared
1296 shared_hash_destroy (shared_hash vars
)
1298 gcc_assert (vars
->refcount
> 0);
1299 if (--vars
->refcount
== 0)
1301 htab_delete (vars
->htab
);
1302 pool_free (shared_hash_pool
, vars
);
1306 /* Unshare *PVARS if shared and return slot for DV. If INS is
1307 INSERT, insert it if not already present. */
1309 static inline void **
1310 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1311 hashval_t dvhash
, enum insert_option ins
)
1313 if (shared_hash_shared (*pvars
))
1314 *pvars
= shared_hash_unshare (*pvars
);
1315 return htab_find_slot_with_hash (shared_hash_htab (*pvars
), dv
, dvhash
, ins
);
1318 static inline void **
1319 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1320 enum insert_option ins
)
1322 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1325 /* Return slot for DV, if it is already present in the hash table.
1326 If it is not present, insert it only VARS is not shared, otherwise
1329 static inline void **
1330 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1332 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1333 shared_hash_shared (vars
)
1334 ? NO_INSERT
: INSERT
);
1337 static inline void **
1338 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1340 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1343 /* Return slot for DV only if it is already present in the hash table. */
1345 static inline void **
1346 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1349 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1353 static inline void **
1354 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1356 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1359 /* Return variable for DV or NULL if not already present in the hash
1362 static inline variable
1363 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1365 return (variable
) htab_find_with_hash (shared_hash_htab (vars
), dv
, dvhash
);
1368 static inline variable
1369 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1371 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1374 /* Return true if TVAL is better than CVAL as a canonival value. We
1375 choose lowest-numbered VALUEs, using the RTX address as a
1376 tie-breaker. The idea is to arrange them into a star topology,
1377 such that all of them are at most one step away from the canonical
1378 value, and the canonical value has backlinks to all of them, in
1379 addition to all the actual locations. We don't enforce this
1380 topology throughout the entire dataflow analysis, though.
1384 canon_value_cmp (rtx tval
, rtx cval
)
1387 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1390 static bool dst_can_be_shared
;
1392 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1395 unshare_variable (dataflow_set
*set
, void **slot
, variable var
,
1396 enum var_init_status initialized
)
1401 new_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
1402 new_var
->dv
= var
->dv
;
1403 new_var
->refcount
= 1;
1405 new_var
->n_var_parts
= var
->n_var_parts
;
1406 new_var
->cur_loc_changed
= var
->cur_loc_changed
;
1407 var
->cur_loc_changed
= false;
1408 new_var
->in_changed_variables
= false;
1410 if (! flag_var_tracking_uninit
)
1411 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1413 for (i
= 0; i
< var
->n_var_parts
; i
++)
1415 location_chain node
;
1416 location_chain
*nextp
;
1418 new_var
->var_part
[i
].offset
= var
->var_part
[i
].offset
;
1419 nextp
= &new_var
->var_part
[i
].loc_chain
;
1420 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1422 location_chain new_lc
;
1424 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1425 new_lc
->next
= NULL
;
1426 if (node
->init
> initialized
)
1427 new_lc
->init
= node
->init
;
1429 new_lc
->init
= initialized
;
1430 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1431 new_lc
->set_src
= node
->set_src
;
1433 new_lc
->set_src
= NULL
;
1434 new_lc
->loc
= node
->loc
;
1437 nextp
= &new_lc
->next
;
1440 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1443 dst_can_be_shared
= false;
1444 if (shared_hash_shared (set
->vars
))
1445 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1446 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1447 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1449 if (var
->in_changed_variables
)
1452 = htab_find_slot_with_hash (changed_variables
, var
->dv
,
1453 dv_htab_hash (var
->dv
), NO_INSERT
);
1454 gcc_assert (*cslot
== (void *) var
);
1455 var
->in_changed_variables
= false;
1456 variable_htab_free (var
);
1458 new_var
->in_changed_variables
= true;
1463 /* Add a variable from *SLOT to hash table DATA and increase its reference
1467 vars_copy_1 (void **slot
, void *data
)
1469 htab_t dst
= (htab_t
) data
;
1473 src
= (variable
) *slot
;
1476 dstp
= htab_find_slot_with_hash (dst
, src
->dv
,
1477 dv_htab_hash (src
->dv
),
1481 /* Continue traversing the hash table. */
1485 /* Copy all variables from hash table SRC to hash table DST. */
1488 vars_copy (htab_t dst
, htab_t src
)
1490 htab_traverse_noresize (src
, vars_copy_1
, dst
);
1493 /* Map a decl to its main debug decl. */
1496 var_debug_decl (tree decl
)
1498 if (decl
&& DECL_P (decl
)
1499 && DECL_DEBUG_EXPR_IS_FROM (decl
) && DECL_DEBUG_EXPR (decl
)
1500 && DECL_P (DECL_DEBUG_EXPR (decl
)))
1501 decl
= DECL_DEBUG_EXPR (decl
);
1506 /* Set the register LOC to contain DV, OFFSET. */
1509 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1510 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1511 enum insert_option iopt
)
1514 bool decl_p
= dv_is_decl_p (dv
);
1517 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1519 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1520 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1521 && node
->offset
== offset
)
1524 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1525 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1528 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1531 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1534 tree decl
= REG_EXPR (loc
);
1535 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1537 var_reg_decl_set (set
, loc
, initialized
,
1538 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1541 static enum var_init_status
1542 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1546 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1548 if (! flag_var_tracking_uninit
)
1549 return VAR_INIT_STATUS_INITIALIZED
;
1551 var
= shared_hash_find (set
->vars
, dv
);
1554 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1556 location_chain nextp
;
1557 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1558 if (rtx_equal_p (nextp
->loc
, loc
))
1560 ret_val
= nextp
->init
;
1569 /* Delete current content of register LOC in dataflow set SET and set
1570 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1571 MODIFY is true, any other live copies of the same variable part are
1572 also deleted from the dataflow set, otherwise the variable part is
1573 assumed to be copied from another location holding the same
1577 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1578 enum var_init_status initialized
, rtx set_src
)
1580 tree decl
= REG_EXPR (loc
);
1581 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1585 decl
= var_debug_decl (decl
);
1587 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1588 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1590 nextp
= &set
->regs
[REGNO (loc
)];
1591 for (node
= *nextp
; node
; node
= next
)
1594 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1596 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1597 pool_free (attrs_pool
, node
);
1603 nextp
= &node
->next
;
1607 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1608 var_reg_set (set
, loc
, initialized
, set_src
);
1611 /* Delete the association of register LOC in dataflow set SET with any
1612 variables that aren't onepart. If CLOBBER is true, also delete any
1613 other live copies of the same variable part, and delete the
1614 association with onepart dvs too. */
1617 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1619 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1624 tree decl
= REG_EXPR (loc
);
1625 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1627 decl
= var_debug_decl (decl
);
1629 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1632 for (node
= *nextp
; node
; node
= next
)
1635 if (clobber
|| !dv_onepart_p (node
->dv
))
1637 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1638 pool_free (attrs_pool
, node
);
1642 nextp
= &node
->next
;
1646 /* Delete content of register with number REGNO in dataflow set SET. */
1649 var_regno_delete (dataflow_set
*set
, int regno
)
1651 attrs
*reg
= &set
->regs
[regno
];
1654 for (node
= *reg
; node
; node
= next
)
1657 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1658 pool_free (attrs_pool
, node
);
1663 /* Set the location of DV, OFFSET as the MEM LOC. */
1666 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1667 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1668 enum insert_option iopt
)
1670 if (dv_is_decl_p (dv
))
1671 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1673 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1676 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1678 Adjust the address first if it is stack pointer based. */
1681 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1684 tree decl
= MEM_EXPR (loc
);
1685 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1687 var_mem_decl_set (set
, loc
, initialized
,
1688 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1691 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1692 dataflow set SET to LOC. If MODIFY is true, any other live copies
1693 of the same variable part are also deleted from the dataflow set,
1694 otherwise the variable part is assumed to be copied from another
1695 location holding the same part.
1696 Adjust the address first if it is stack pointer based. */
1699 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1700 enum var_init_status initialized
, rtx set_src
)
1702 tree decl
= MEM_EXPR (loc
);
1703 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1705 decl
= var_debug_decl (decl
);
1707 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1708 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1711 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
1712 var_mem_set (set
, loc
, initialized
, set_src
);
1715 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1716 true, also delete any other live copies of the same variable part.
1717 Adjust the address first if it is stack pointer based. */
1720 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1722 tree decl
= MEM_EXPR (loc
);
1723 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1725 decl
= var_debug_decl (decl
);
1727 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1728 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
1731 /* Bind a value to a location it was just stored in. If MODIFIED
1732 holds, assume the location was modified, detaching it from any
1733 values bound to it. */
1736 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
, bool modified
)
1738 cselib_val
*v
= CSELIB_VAL_PTR (val
);
1740 gcc_assert (cselib_preserved_value_p (v
));
1744 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
1745 print_inline_rtx (dump_file
, val
, 0);
1746 fprintf (dump_file
, " stored in ");
1747 print_inline_rtx (dump_file
, loc
, 0);
1750 struct elt_loc_list
*l
;
1751 for (l
= v
->locs
; l
; l
= l
->next
)
1753 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
1754 print_inline_rtx (dump_file
, l
->loc
, 0);
1757 fprintf (dump_file
, "\n");
1763 var_regno_delete (set
, REGNO (loc
));
1764 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1765 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1767 else if (MEM_P (loc
))
1768 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1769 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1771 set_variable_part (set
, loc
, dv_from_value (val
), 0,
1772 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1775 /* Reset this node, detaching all its equivalences. Return the slot
1776 in the variable hash table that holds dv, if there is one. */
1779 val_reset (dataflow_set
*set
, decl_or_value dv
)
1781 variable var
= shared_hash_find (set
->vars
, dv
) ;
1782 location_chain node
;
1785 if (!var
|| !var
->n_var_parts
)
1788 gcc_assert (var
->n_var_parts
== 1);
1791 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1792 if (GET_CODE (node
->loc
) == VALUE
1793 && canon_value_cmp (node
->loc
, cval
))
1796 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1797 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
1799 /* Redirect the equivalence link to the new canonical
1800 value, or simply remove it if it would point at
1803 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
1804 0, node
->init
, node
->set_src
, NO_INSERT
);
1805 delete_variable_part (set
, dv_as_value (dv
),
1806 dv_from_value (node
->loc
), 0);
1811 decl_or_value cdv
= dv_from_value (cval
);
1813 /* Keep the remaining values connected, accummulating links
1814 in the canonical value. */
1815 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1817 if (node
->loc
== cval
)
1819 else if (GET_CODE (node
->loc
) == REG
)
1820 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1821 node
->set_src
, NO_INSERT
);
1822 else if (GET_CODE (node
->loc
) == MEM
)
1823 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1824 node
->set_src
, NO_INSERT
);
1826 set_variable_part (set
, node
->loc
, cdv
, 0,
1827 node
->init
, node
->set_src
, NO_INSERT
);
1831 /* We remove this last, to make sure that the canonical value is not
1832 removed to the point of requiring reinsertion. */
1834 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
1836 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
1838 /* ??? Should we make sure there aren't other available values or
1839 variables whose values involve this one other than by
1840 equivalence? E.g., at the very least we should reset MEMs, those
1841 shouldn't be too hard to find cselib-looking up the value as an
1842 address, then locating the resulting value in our own hash
1846 /* Find the values in a given location and map the val to another
1847 value, if it is unique, or add the location as one holding the
1851 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
)
1853 decl_or_value dv
= dv_from_value (val
);
1855 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1858 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
1860 fprintf (dump_file
, "head: ");
1861 print_inline_rtx (dump_file
, val
, 0);
1862 fputs (" is at ", dump_file
);
1863 print_inline_rtx (dump_file
, loc
, 0);
1864 fputc ('\n', dump_file
);
1867 val_reset (set
, dv
);
1871 attrs node
, found
= NULL
;
1873 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1874 if (dv_is_value_p (node
->dv
)
1875 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
1879 /* Map incoming equivalences. ??? Wouldn't it be nice if
1880 we just started sharing the location lists? Maybe a
1881 circular list ending at the value itself or some
1883 set_variable_part (set
, dv_as_value (node
->dv
),
1884 dv_from_value (val
), node
->offset
,
1885 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1886 set_variable_part (set
, val
, node
->dv
, node
->offset
,
1887 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1890 /* If we didn't find any equivalence, we need to remember that
1891 this value is held in the named register. */
1893 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1894 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1896 else if (MEM_P (loc
))
1897 /* ??? Merge equivalent MEMs. */
1898 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1899 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1901 /* ??? Merge equivalent expressions. */
1902 set_variable_part (set
, loc
, dv_from_value (val
), 0,
1903 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1906 /* Initialize dataflow set SET to be empty.
1907 VARS_SIZE is the initial size of hash table VARS. */
1910 dataflow_set_init (dataflow_set
*set
)
1912 init_attrs_list_set (set
->regs
);
1913 set
->vars
= shared_hash_copy (empty_shared_hash
);
1914 set
->stack_adjust
= 0;
1915 set
->traversed_vars
= NULL
;
1918 /* Delete the contents of dataflow set SET. */
1921 dataflow_set_clear (dataflow_set
*set
)
1925 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1926 attrs_list_clear (&set
->regs
[i
]);
1928 shared_hash_destroy (set
->vars
);
1929 set
->vars
= shared_hash_copy (empty_shared_hash
);
1932 /* Copy the contents of dataflow set SRC to DST. */
1935 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
1939 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1940 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
1942 shared_hash_destroy (dst
->vars
);
1943 dst
->vars
= shared_hash_copy (src
->vars
);
1944 dst
->stack_adjust
= src
->stack_adjust
;
1947 /* Information for merging lists of locations for a given offset of variable.
1949 struct variable_union_info
1951 /* Node of the location chain. */
1954 /* The sum of positions in the input chains. */
1957 /* The position in the chain of DST dataflow set. */
1961 /* Buffer for location list sorting and its allocated size. */
1962 static struct variable_union_info
*vui_vec
;
1963 static int vui_allocated
;
1965 /* Compare function for qsort, order the structures by POS element. */
1968 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
1970 const struct variable_union_info
*const i1
=
1971 (const struct variable_union_info
*) n1
;
1972 const struct variable_union_info
*const i2
=
1973 ( const struct variable_union_info
*) n2
;
1975 if (i1
->pos
!= i2
->pos
)
1976 return i1
->pos
- i2
->pos
;
1978 return (i1
->pos_dst
- i2
->pos_dst
);
1981 /* Compute union of location parts of variable *SLOT and the same variable
1982 from hash table DATA. Compute "sorted" union of the location chains
1983 for common offsets, i.e. the locations of a variable part are sorted by
1984 a priority where the priority is the sum of the positions in the 2 chains
1985 (if a location is only in one list the position in the second list is
1986 defined to be larger than the length of the chains).
1987 When we are updating the location parts the newest location is in the
1988 beginning of the chain, so when we do the described "sorted" union
1989 we keep the newest locations in the beginning. */
1992 variable_union (void **slot
, void *data
)
1996 dataflow_set
*set
= (dataflow_set
*) data
;
1999 src
= (variable
) *slot
;
2000 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2001 if (!dstp
|| !*dstp
)
2005 dst_can_be_shared
= false;
2007 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2011 /* Continue traversing the hash table. */
2015 dst
= (variable
) *dstp
;
2017 gcc_assert (src
->n_var_parts
);
2019 /* We can combine one-part variables very efficiently, because their
2020 entries are in canonical order. */
2021 if (dv_onepart_p (src
->dv
))
2023 location_chain
*nodep
, dnode
, snode
;
2025 gcc_assert (src
->n_var_parts
== 1);
2026 gcc_assert (dst
->n_var_parts
== 1);
2028 snode
= src
->var_part
[0].loc_chain
;
2031 restart_onepart_unshared
:
2032 nodep
= &dst
->var_part
[0].loc_chain
;
2038 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2042 location_chain nnode
;
2044 if (shared_var_p (dst
, set
->vars
))
2046 dstp
= unshare_variable (set
, dstp
, dst
,
2047 VAR_INIT_STATUS_INITIALIZED
);
2048 dst
= (variable
)*dstp
;
2049 goto restart_onepart_unshared
;
2052 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2053 nnode
->loc
= snode
->loc
;
2054 nnode
->init
= snode
->init
;
2055 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2056 nnode
->set_src
= NULL
;
2058 nnode
->set_src
= snode
->set_src
;
2059 nnode
->next
= dnode
;
2062 #ifdef ENABLE_CHECKING
2064 gcc_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2068 snode
= snode
->next
;
2070 nodep
= &dnode
->next
;
2077 /* Count the number of location parts, result is K. */
2078 for (i
= 0, j
= 0, k
= 0;
2079 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2081 if (src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2086 else if (src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2091 k
+= src
->n_var_parts
- i
;
2092 k
+= dst
->n_var_parts
- j
;
2094 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2095 thus there are at most MAX_VAR_PARTS different offsets. */
2096 gcc_assert (dv_onepart_p (dst
->dv
) ? k
== 1 : k
<= MAX_VAR_PARTS
);
2098 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2100 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2101 dst
= (variable
)*dstp
;
2104 i
= src
->n_var_parts
- 1;
2105 j
= dst
->n_var_parts
- 1;
2106 dst
->n_var_parts
= k
;
2108 for (k
--; k
>= 0; k
--)
2110 location_chain node
, node2
;
2112 if (i
>= 0 && j
>= 0
2113 && src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2115 /* Compute the "sorted" union of the chains, i.e. the locations which
2116 are in both chains go first, they are sorted by the sum of
2117 positions in the chains. */
2120 struct variable_union_info
*vui
;
2122 /* If DST is shared compare the location chains.
2123 If they are different we will modify the chain in DST with
2124 high probability so make a copy of DST. */
2125 if (shared_var_p (dst
, set
->vars
))
2127 for (node
= src
->var_part
[i
].loc_chain
,
2128 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2129 node
= node
->next
, node2
= node2
->next
)
2131 if (!((REG_P (node2
->loc
)
2132 && REG_P (node
->loc
)
2133 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2134 || rtx_equal_p (node2
->loc
, node
->loc
)))
2136 if (node2
->init
< node
->init
)
2137 node2
->init
= node
->init
;
2143 dstp
= unshare_variable (set
, dstp
, dst
,
2144 VAR_INIT_STATUS_UNKNOWN
);
2145 dst
= (variable
)*dstp
;
2150 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2153 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2158 /* The most common case, much simpler, no qsort is needed. */
2159 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2160 dst
->var_part
[k
].loc_chain
= dstnode
;
2161 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2163 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2164 if (!((REG_P (dstnode
->loc
)
2165 && REG_P (node
->loc
)
2166 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2167 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2169 location_chain new_node
;
2171 /* Copy the location from SRC. */
2172 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2173 new_node
->loc
= node
->loc
;
2174 new_node
->init
= node
->init
;
2175 if (!node
->set_src
|| MEM_P (node
->set_src
))
2176 new_node
->set_src
= NULL
;
2178 new_node
->set_src
= node
->set_src
;
2179 node2
->next
= new_node
;
2186 if (src_l
+ dst_l
> vui_allocated
)
2188 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2189 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2194 /* Fill in the locations from DST. */
2195 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2196 node
= node
->next
, jj
++)
2199 vui
[jj
].pos_dst
= jj
;
2201 /* Pos plus value larger than a sum of 2 valid positions. */
2202 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2205 /* Fill in the locations from SRC. */
2207 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2208 node
= node
->next
, ii
++)
2210 /* Find location from NODE. */
2211 for (jj
= 0; jj
< dst_l
; jj
++)
2213 if ((REG_P (vui
[jj
].lc
->loc
)
2214 && REG_P (node
->loc
)
2215 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2216 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2218 vui
[jj
].pos
= jj
+ ii
;
2222 if (jj
>= dst_l
) /* The location has not been found. */
2224 location_chain new_node
;
2226 /* Copy the location from SRC. */
2227 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2228 new_node
->loc
= node
->loc
;
2229 new_node
->init
= node
->init
;
2230 if (!node
->set_src
|| MEM_P (node
->set_src
))
2231 new_node
->set_src
= NULL
;
2233 new_node
->set_src
= node
->set_src
;
2234 vui
[n
].lc
= new_node
;
2235 vui
[n
].pos_dst
= src_l
+ dst_l
;
2236 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
2243 /* Special case still very common case. For dst_l == 2
2244 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2245 vui[i].pos == i + src_l + dst_l. */
2246 if (vui
[0].pos
> vui
[1].pos
)
2248 /* Order should be 1, 0, 2... */
2249 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
2250 vui
[1].lc
->next
= vui
[0].lc
;
2253 vui
[0].lc
->next
= vui
[2].lc
;
2254 vui
[n
- 1].lc
->next
= NULL
;
2257 vui
[0].lc
->next
= NULL
;
2262 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2263 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
2265 /* Order should be 0, 2, 1, 3... */
2266 vui
[0].lc
->next
= vui
[2].lc
;
2267 vui
[2].lc
->next
= vui
[1].lc
;
2270 vui
[1].lc
->next
= vui
[3].lc
;
2271 vui
[n
- 1].lc
->next
= NULL
;
2274 vui
[1].lc
->next
= NULL
;
2279 /* Order should be 0, 1, 2... */
2281 vui
[n
- 1].lc
->next
= NULL
;
2284 for (; ii
< n
; ii
++)
2285 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2289 qsort (vui
, n
, sizeof (struct variable_union_info
),
2290 variable_union_info_cmp_pos
);
2292 /* Reconnect the nodes in sorted order. */
2293 for (ii
= 1; ii
< n
; ii
++)
2294 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2295 vui
[n
- 1].lc
->next
= NULL
;
2296 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2299 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2304 else if ((i
>= 0 && j
>= 0
2305 && src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2308 dst
->var_part
[k
] = dst
->var_part
[j
];
2311 else if ((i
>= 0 && j
>= 0
2312 && src
->var_part
[i
].offset
> dst
->var_part
[j
].offset
)
2315 location_chain
*nextp
;
2317 /* Copy the chain from SRC. */
2318 nextp
= &dst
->var_part
[k
].loc_chain
;
2319 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2321 location_chain new_lc
;
2323 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
2324 new_lc
->next
= NULL
;
2325 new_lc
->init
= node
->init
;
2326 if (!node
->set_src
|| MEM_P (node
->set_src
))
2327 new_lc
->set_src
= NULL
;
2329 new_lc
->set_src
= node
->set_src
;
2330 new_lc
->loc
= node
->loc
;
2333 nextp
= &new_lc
->next
;
2336 dst
->var_part
[k
].offset
= src
->var_part
[i
].offset
;
2339 dst
->var_part
[k
].cur_loc
= NULL
;
2342 if (flag_var_tracking_uninit
)
2343 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
2345 location_chain node
, node2
;
2346 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2347 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
2348 if (rtx_equal_p (node
->loc
, node2
->loc
))
2350 if (node
->init
> node2
->init
)
2351 node2
->init
= node
->init
;
2355 /* Continue traversing the hash table. */
2359 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2362 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
2366 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2367 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
2369 if (dst
->vars
== empty_shared_hash
)
2371 shared_hash_destroy (dst
->vars
);
2372 dst
->vars
= shared_hash_copy (src
->vars
);
2375 htab_traverse (shared_hash_htab (src
->vars
), variable_union
, dst
);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
/* Whether the value is in changed_variables hash table.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether the decl is in changed_variables hash table.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
2387 /* Record that DV has been added into resp. removed from changed_variables
2391 set_dv_changed (decl_or_value dv
, bool newv
)
2393 if (dv_is_value_p (dv
))
2394 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
2396 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
2399 /* Return true if DV is present in changed_variables hash table. */
2402 dv_changed_p (decl_or_value dv
)
2404 return (dv_is_value_p (dv
)
2405 ? VALUE_CHANGED (dv_as_value (dv
))
2406 : DECL_CHANGED (dv_as_decl (dv
)));
2409 /* Return a location list node whose loc is rtx_equal to LOC, in the
2410 location list of a one-part variable or value VAR, or in that of
2411 any values recursively mentioned in the location lists. */
2413 static location_chain
2414 find_loc_in_1pdv (rtx loc
, variable var
, htab_t vars
)
2416 location_chain node
;
2421 gcc_assert (dv_onepart_p (var
->dv
));
2423 if (!var
->n_var_parts
)
2426 gcc_assert (var
->var_part
[0].offset
== 0);
2428 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2429 if (rtx_equal_p (loc
, node
->loc
))
2431 else if (GET_CODE (node
->loc
) == VALUE
2432 && !VALUE_RECURSED_INTO (node
->loc
))
2434 decl_or_value dv
= dv_from_value (node
->loc
);
2435 variable var
= (variable
)
2436 htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
2440 location_chain where
;
2441 VALUE_RECURSED_INTO (node
->loc
) = true;
2442 if ((where
= find_loc_in_1pdv (loc
, var
, vars
)))
2444 VALUE_RECURSED_INTO (node
->loc
) = false;
2447 VALUE_RECURSED_INTO (node
->loc
) = false;
2454 /* Hash table iteration argument passed to variable_merge. */
2457 /* The set in which the merge is to be inserted. */
2459 /* The set that we're iterating in. */
2461 /* The set that may contain the other dv we are to merge with. */
2463 /* Number of onepart dvs in src. */
2464 int src_onepart_cnt
;
2467 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2468 loc_cmp order, and it is maintained as such. */
2471 insert_into_intersection (location_chain
*nodep
, rtx loc
,
2472 enum var_init_status status
)
2474 location_chain node
;
2477 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
2478 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
2480 node
->init
= MIN (node
->init
, status
);
2486 node
= (location_chain
) pool_alloc (loc_chain_pool
);
2489 node
->set_src
= NULL
;
2490 node
->init
= status
;
2491 node
->next
= *nodep
;
2495 /* Insert in DEST the intersection the locations present in both
2496 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2497 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2501 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
2502 location_chain s1node
, variable s2var
)
2504 dataflow_set
*s1set
= dsm
->cur
;
2505 dataflow_set
*s2set
= dsm
->src
;
2506 location_chain found
;
2508 for (; s1node
; s1node
= s1node
->next
)
2510 if (s1node
->loc
== val
)
2513 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
2514 shared_hash_htab (s2set
->vars
))))
2516 insert_into_intersection (dest
, s1node
->loc
,
2517 MIN (s1node
->init
, found
->init
));
2521 if (GET_CODE (s1node
->loc
) == VALUE
2522 && !VALUE_RECURSED_INTO (s1node
->loc
))
2524 decl_or_value dv
= dv_from_value (s1node
->loc
);
2525 variable svar
= shared_hash_find (s1set
->vars
, dv
);
2528 if (svar
->n_var_parts
== 1)
2530 VALUE_RECURSED_INTO (s1node
->loc
) = true;
2531 intersect_loc_chains (val
, dest
, dsm
,
2532 svar
->var_part
[0].loc_chain
,
2534 VALUE_RECURSED_INTO (s1node
->loc
) = false;
/* ??? if the location is equivalent to any location in src,
   searched recursively

     add to dst the values needed to represent the equivalence

     telling whether locations S is equivalent to another dv's
     location list:

       for each location D in the list

	 if S and D satisfy rtx_equal_p, then it is present

	 else if D is a value, recurse without cycles

	 else if S and D have the same CODE and MODE

	   for each operand oS and the corresponding oD

	     if oS and oD are not equivalent, then S and D are not equivalent

	     else if they are RTX vectors

	       if any vector oS element is not equivalent to its respective
	       oD, then S and D are not equivalent.  */
2570 /* Return -1 if X should be before Y in a location list for a 1-part
2571 variable, 1 if Y should be before X, and 0 if they're equivalent
2572 and should not appear in the list. */
2575 loc_cmp (rtx x
, rtx y
)
2578 RTX_CODE code
= GET_CODE (x
);
2588 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2589 if (REGNO (x
) == REGNO (y
))
2591 else if (REGNO (x
) < REGNO (y
))
2604 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2605 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
2611 if (GET_CODE (x
) == VALUE
)
2613 if (GET_CODE (y
) != VALUE
)
2615 /* Don't assert the modes are the same, that is true only
2616 when not recursing. (subreg:QI (value:SI 1:1) 0)
2617 and (subreg:QI (value:DI 2:2) 0) can be compared,
2618 even when the modes are different. */
2619 if (canon_value_cmp (x
, y
))
2625 if (GET_CODE (y
) == VALUE
)
2628 if (GET_CODE (x
) == GET_CODE (y
))
2629 /* Compare operands below. */;
2630 else if (GET_CODE (x
) < GET_CODE (y
))
2635 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2637 if (GET_CODE (x
) == DEBUG_EXPR
)
2639 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2640 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
2642 #ifdef ENABLE_CHECKING
2643 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2644 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
2649 fmt
= GET_RTX_FORMAT (code
);
2650 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2654 if (XWINT (x
, i
) == XWINT (y
, i
))
2656 else if (XWINT (x
, i
) < XWINT (y
, i
))
2663 if (XINT (x
, i
) == XINT (y
, i
))
2665 else if (XINT (x
, i
) < XINT (y
, i
))
2672 /* Compare the vector length first. */
2673 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
2674 /* Compare the vectors elements. */;
2675 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
2680 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2681 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
2682 XVECEXP (y
, i
, j
))))
2687 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
2693 if (XSTR (x
, i
) == XSTR (y
, i
))
2699 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
2707 /* These are just backpointers, so they don't matter. */
2714 /* It is believed that rtx's at this level will never
2715 contain anything but integers and other rtx's,
2716 except for within LABEL_REFs and SYMBOL_REFs. */
2724 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2725 from VALUE to DVP. */
2728 add_value_chain (rtx
*loc
, void *dvp
)
2730 decl_or_value dv
, ldv
;
2731 value_chain vc
, nvc
;
2734 if (GET_CODE (*loc
) == VALUE
)
2735 ldv
= dv_from_value (*loc
);
2736 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
2737 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
2741 if (dv_as_opaque (ldv
) == dvp
)
2744 dv
= (decl_or_value
) dvp
;
2745 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
2749 vc
= (value_chain
) pool_alloc (value_chain_pool
);
2753 *slot
= (void *) vc
;
2757 for (vc
= ((value_chain
) *slot
)->next
; vc
; vc
= vc
->next
)
2758 if (dv_as_opaque (vc
->dv
) == dv_as_opaque (dv
))
2766 vc
= (value_chain
) *slot
;
2767 nvc
= (value_chain
) pool_alloc (value_chain_pool
);
2769 nvc
->next
= vc
->next
;
2775 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2776 from those VALUEs to DVP. */
2779 add_value_chains (decl_or_value dv
, rtx loc
)
2781 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
2783 add_value_chain (&loc
, dv_as_opaque (dv
));
2789 loc
= XEXP (loc
, 0);
2790 for_each_rtx (&loc
, add_value_chain
, dv_as_opaque (dv
));
2793 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2794 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
2795 that is something we never can express in .debug_info and can prevent
2796 reverse ops from being used. */
2799 add_cselib_value_chains (decl_or_value dv
)
2801 struct elt_loc_list
**l
;
2803 for (l
= &CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; *l
;)
2804 if (GET_CODE ((*l
)->loc
) == ASM_OPERANDS
)
2808 for_each_rtx (&(*l
)->loc
, add_value_chain
, dv_as_opaque (dv
));
2813 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2814 from VALUE to DVP. */
2817 remove_value_chain (rtx
*loc
, void *dvp
)
2819 decl_or_value dv
, ldv
;
2823 if (GET_CODE (*loc
) == VALUE
)
2824 ldv
= dv_from_value (*loc
);
2825 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
2826 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
2830 if (dv_as_opaque (ldv
) == dvp
)
2833 dv
= (decl_or_value
) dvp
;
2834 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
2836 for (vc
= (value_chain
) *slot
; vc
->next
; vc
= vc
->next
)
2837 if (dv_as_opaque (vc
->next
->dv
) == dv_as_opaque (dv
))
2839 value_chain dvc
= vc
->next
;
2840 gcc_assert (dvc
->refcount
> 0);
2841 if (--dvc
->refcount
== 0)
2843 vc
->next
= dvc
->next
;
2844 pool_free (value_chain_pool
, dvc
);
2845 if (vc
->next
== NULL
&& vc
== (value_chain
) *slot
)
2847 pool_free (value_chain_pool
, vc
);
2848 htab_clear_slot (value_chains
, slot
);
2856 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2857 from those VALUEs to DVP. */
2860 remove_value_chains (decl_or_value dv
, rtx loc
)
2862 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
2864 remove_value_chain (&loc
, dv_as_opaque (dv
));
2870 loc
= XEXP (loc
, 0);
2871 for_each_rtx (&loc
, remove_value_chain
, dv_as_opaque (dv
));
2875 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
2879 remove_cselib_value_chains (decl_or_value dv
)
2881 struct elt_loc_list
*l
;
2883 for (l
= CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; l
; l
= l
->next
)
2884 for_each_rtx (&l
->loc
, remove_value_chain
, dv_as_opaque (dv
));
2887 /* Check the order of entries in one-part variables. */
2890 canonicalize_loc_order_check (void **slot
, void *data ATTRIBUTE_UNUSED
)
2892 variable var
= (variable
) *slot
;
2893 decl_or_value dv
= var
->dv
;
2894 location_chain node
, next
;
2896 #ifdef ENABLE_RTL_CHECKING
2898 for (i
= 0; i
< var
->n_var_parts
; i
++)
2899 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
2900 gcc_assert (!var
->cur_loc_changed
&& !var
->in_changed_variables
);
2903 if (!dv_onepart_p (dv
))
2906 gcc_assert (var
->n_var_parts
== 1);
2907 node
= var
->var_part
[0].loc_chain
;
2910 while ((next
= node
->next
))
2912 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
2920 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2921 more likely to be chosen as canonical for an equivalence set.
2922 Ensure less likely values can reach more likely neighbors, making
2923 the connections bidirectional. */
2926 canonicalize_values_mark (void **slot
, void *data
)
2928 dataflow_set
*set
= (dataflow_set
*)data
;
2929 variable var
= (variable
) *slot
;
2930 decl_or_value dv
= var
->dv
;
2932 location_chain node
;
2934 if (!dv_is_value_p (dv
))
2937 gcc_assert (var
->n_var_parts
== 1);
2939 val
= dv_as_value (dv
);
2941 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2942 if (GET_CODE (node
->loc
) == VALUE
)
2944 if (canon_value_cmp (node
->loc
, val
))
2945 VALUE_RECURSED_INTO (val
) = true;
2948 decl_or_value odv
= dv_from_value (node
->loc
);
2949 void **oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
2951 oslot
= set_slot_part (set
, val
, oslot
, odv
, 0,
2952 node
->init
, NULL_RTX
);
2954 VALUE_RECURSED_INTO (node
->loc
) = true;
2961 /* Remove redundant entries from equivalence lists in onepart
2962 variables, canonicalizing equivalence sets into star shapes. */
2965 canonicalize_values_star (void **slot
, void *data
)
2967 dataflow_set
*set
= (dataflow_set
*)data
;
2968 variable var
= (variable
) *slot
;
2969 decl_or_value dv
= var
->dv
;
2970 location_chain node
;
2977 if (!dv_onepart_p (dv
))
2980 gcc_assert (var
->n_var_parts
== 1);
2982 if (dv_is_value_p (dv
))
2984 cval
= dv_as_value (dv
);
2985 if (!VALUE_RECURSED_INTO (cval
))
2987 VALUE_RECURSED_INTO (cval
) = false;
2997 gcc_assert (var
->n_var_parts
== 1);
2999 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3000 if (GET_CODE (node
->loc
) == VALUE
)
3003 if (VALUE_RECURSED_INTO (node
->loc
))
3005 if (canon_value_cmp (node
->loc
, cval
))
3014 if (!has_marks
|| dv_is_decl_p (dv
))
3017 /* Keep it marked so that we revisit it, either after visiting a
3018 child node, or after visiting a new parent that might be
3020 VALUE_RECURSED_INTO (val
) = true;
3022 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3023 if (GET_CODE (node
->loc
) == VALUE
3024 && VALUE_RECURSED_INTO (node
->loc
))
3028 VALUE_RECURSED_INTO (cval
) = false;
3029 dv
= dv_from_value (cval
);
3030 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3033 gcc_assert (dv_is_decl_p (var
->dv
));
3034 /* The canonical value was reset and dropped.
3036 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3039 var
= (variable
)*slot
;
3040 gcc_assert (dv_is_value_p (var
->dv
));
3041 if (var
->n_var_parts
== 0)
3043 gcc_assert (var
->n_var_parts
== 1);
3047 VALUE_RECURSED_INTO (val
) = false;
3052 /* Push values to the canonical one. */
3053 cdv
= dv_from_value (cval
);
3054 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3056 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3057 if (node
->loc
!= cval
)
3059 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3060 node
->init
, NULL_RTX
);
3061 if (GET_CODE (node
->loc
) == VALUE
)
3063 decl_or_value ndv
= dv_from_value (node
->loc
);
3065 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3068 if (canon_value_cmp (node
->loc
, val
))
3070 /* If it could have been a local minimum, it's not any more,
3071 since it's now neighbor to cval, so it may have to push
3072 to it. Conversely, if it wouldn't have prevailed over
3073 val, then whatever mark it has is fine: if it was to
3074 push, it will now push to a more canonical node, but if
3075 it wasn't, then it has already pushed any values it might
3077 VALUE_RECURSED_INTO (node
->loc
) = true;
3078 /* Make sure we visit node->loc by ensuring we cval is
3080 VALUE_RECURSED_INTO (cval
) = true;
3082 else if (!VALUE_RECURSED_INTO (node
->loc
))
3083 /* If we have no need to "recurse" into this node, it's
3084 already "canonicalized", so drop the link to the old
3086 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3088 else if (GET_CODE (node
->loc
) == REG
)
3090 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3092 /* Change an existing attribute referring to dv so that it
3093 refers to cdv, removing any duplicate this might
3094 introduce, and checking that no previous duplicates
3095 existed, all in a single pass. */
3099 if (list
->offset
== 0
3100 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3101 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3108 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3111 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3116 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3118 *listp
= list
->next
;
3119 pool_free (attrs_pool
, list
);
3124 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3127 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3129 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3134 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3136 *listp
= list
->next
;
3137 pool_free (attrs_pool
, list
);
3142 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3151 if (list
->offset
== 0
3152 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3153 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3163 cslot
= set_slot_part (set
, val
, cslot
, cdv
, 0,
3164 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3166 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3168 /* Variable may have been unshared. */
3169 var
= (variable
)*slot
;
3170 gcc_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3171 && var
->var_part
[0].loc_chain
->next
== NULL
);
3173 if (VALUE_RECURSED_INTO (cval
))
3174 goto restart_with_cval
;
3179 /* Bind one-part variables to the canonical value in an equivalence
3180 set. Not doing this causes dataflow convergence failure in rare
3181 circumstances, see PR42873. Unfortunately we can't do this
3182 efficiently as part of canonicalize_values_star, since we may not
3183 have determined or even seen the canonical value of a set when we
3184 get to a variable that references another member of the set. */
3187 canonicalize_vars_star (void **slot
, void *data
)
3189 dataflow_set
*set
= (dataflow_set
*)data
;
3190 variable var
= (variable
) *slot
;
3191 decl_or_value dv
= var
->dv
;
3192 location_chain node
;
3197 location_chain cnode
;
3199 if (!dv_onepart_p (dv
) || dv_is_value_p (dv
))
3202 gcc_assert (var
->n_var_parts
== 1);
3204 node
= var
->var_part
[0].loc_chain
;
3206 if (GET_CODE (node
->loc
) != VALUE
)
3209 gcc_assert (!node
->next
);
3212 /* Push values to the canonical one. */
3213 cdv
= dv_from_value (cval
);
3214 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3217 cvar
= (variable
)*cslot
;
3218 gcc_assert (cvar
->n_var_parts
== 1);
3220 cnode
= cvar
->var_part
[0].loc_chain
;
3222 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3223 that are not “more canonical” than it. */
3224 if (GET_CODE (cnode
->loc
) != VALUE
3225 || !canon_value_cmp (cnode
->loc
, cval
))
3228 /* CVAL was found to be non-canonical. Change the variable to point
3229 to the canonical VALUE. */
3230 gcc_assert (!cnode
->next
);
3233 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3234 node
->init
, node
->set_src
);
3235 slot
= clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3240 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3241 corresponding entry in DSM->src. Multi-part variables are combined
3242 with variable_union, whereas onepart dvs are combined with
3246 variable_merge_over_cur (void **s1slot
, void *data
)
3248 struct dfset_merge
*dsm
= (struct dfset_merge
*)data
;
3249 dataflow_set
*dst
= dsm
->dst
;
3251 variable s1var
= (variable
) *s1slot
;
3252 variable s2var
, dvar
= NULL
;
3253 decl_or_value dv
= s1var
->dv
;
3254 bool onepart
= dv_onepart_p (dv
);
3257 location_chain node
, *nodep
;
3259 /* If the incoming onepart variable has an empty location list, then
3260 the intersection will be just as empty. For other variables,
3261 it's always union. */
3262 gcc_assert (s1var
->n_var_parts
);
3263 gcc_assert (s1var
->var_part
[0].loc_chain
);
3266 return variable_union (s1slot
, dst
);
3268 gcc_assert (s1var
->n_var_parts
== 1);
3269 gcc_assert (s1var
->var_part
[0].offset
== 0);
3271 dvhash
= dv_htab_hash (dv
);
3272 if (dv_is_value_p (dv
))
3273 val
= dv_as_value (dv
);
3277 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3280 dst_can_be_shared
= false;
3284 dsm
->src_onepart_cnt
--;
3285 gcc_assert (s2var
->var_part
[0].loc_chain
);
3286 gcc_assert (s2var
->n_var_parts
== 1);
3287 gcc_assert (s2var
->var_part
[0].offset
== 0);
3289 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3292 dvar
= (variable
)*dstslot
;
3293 gcc_assert (dvar
->refcount
== 1);
3294 gcc_assert (dvar
->n_var_parts
== 1);
3295 gcc_assert (dvar
->var_part
[0].offset
== 0);
3296 nodep
= &dvar
->var_part
[0].loc_chain
;
3304 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3306 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3308 *dstslot
= dvar
= s2var
;
3313 dst_can_be_shared
= false;
3315 intersect_loc_chains (val
, nodep
, dsm
,
3316 s1var
->var_part
[0].loc_chain
, s2var
);
3322 dvar
= (variable
) pool_alloc (dv_pool (dv
));
3325 dvar
->n_var_parts
= 1;
3326 dvar
->cur_loc_changed
= false;
3327 dvar
->in_changed_variables
= false;
3328 dvar
->var_part
[0].offset
= 0;
3329 dvar
->var_part
[0].loc_chain
= node
;
3330 dvar
->var_part
[0].cur_loc
= NULL
;
3333 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
3335 gcc_assert (!*dstslot
);
3343 nodep
= &dvar
->var_part
[0].loc_chain
;
3344 while ((node
= *nodep
))
3346 location_chain
*nextp
= &node
->next
;
3348 if (GET_CODE (node
->loc
) == REG
)
3352 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
3353 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
3354 && dv_is_value_p (list
->dv
))
3358 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
3360 /* If this value became canonical for another value that had
3361 this register, we want to leave it alone. */
3362 else if (dv_as_value (list
->dv
) != val
)
3364 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
3366 node
->init
, NULL_RTX
);
3367 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
3369 /* Since nextp points into the removed node, we can't
3370 use it. The pointer to the next node moved to nodep.
3371 However, if the variable we're walking is unshared
3372 during our walk, we'll keep walking the location list
3373 of the previously-shared variable, in which case the
3374 node won't have been removed, and we'll want to skip
3375 it. That's why we test *nodep here. */
3381 /* Canonicalization puts registers first, so we don't have to
3387 if (dvar
!= (variable
)*dstslot
)
3388 dvar
= (variable
)*dstslot
;
3389 nodep
= &dvar
->var_part
[0].loc_chain
;
3393 /* Mark all referenced nodes for canonicalization, and make sure
3394 we have mutual equivalence links. */
3395 VALUE_RECURSED_INTO (val
) = true;
3396 for (node
= *nodep
; node
; node
= node
->next
)
3397 if (GET_CODE (node
->loc
) == VALUE
)
3399 VALUE_RECURSED_INTO (node
->loc
) = true;
3400 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
3401 node
->init
, NULL
, INSERT
);
3404 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3405 gcc_assert (*dstslot
== dvar
);
3406 canonicalize_values_star (dstslot
, dst
);
3407 #ifdef ENABLE_CHECKING
3409 == shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
));
3411 dvar
= (variable
)*dstslot
;
3415 bool has_value
= false, has_other
= false;
3417 /* If we have one value and anything else, we're going to
3418 canonicalize this, so make sure all values have an entry in
3419 the table and are marked for canonicalization. */
3420 for (node
= *nodep
; node
; node
= node
->next
)
3422 if (GET_CODE (node
->loc
) == VALUE
)
3424 /* If this was marked during register canonicalization,
3425 we know we have to canonicalize values. */
3440 if (has_value
&& has_other
)
3442 for (node
= *nodep
; node
; node
= node
->next
)
3444 if (GET_CODE (node
->loc
) == VALUE
)
3446 decl_or_value dv
= dv_from_value (node
->loc
);
3449 if (shared_hash_shared (dst
->vars
))
3450 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
3452 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
3456 variable var
= (variable
) pool_alloc (dv_pool (dv
));
3459 var
->n_var_parts
= 1;
3460 var
->cur_loc_changed
= false;
3461 var
->in_changed_variables
= false;
3462 var
->var_part
[0].offset
= 0;
3463 var
->var_part
[0].loc_chain
= NULL
;
3464 var
->var_part
[0].cur_loc
= NULL
;
3468 VALUE_RECURSED_INTO (node
->loc
) = true;
3472 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3473 gcc_assert (*dstslot
== dvar
);
3474 canonicalize_values_star (dstslot
, dst
);
3475 #ifdef ENABLE_CHECKING
3477 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
3480 dvar
= (variable
)*dstslot
;
3484 if (!onepart_variable_different_p (dvar
, s2var
))
3486 variable_htab_free (dvar
);
3487 *dstslot
= dvar
= s2var
;
3490 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
3492 variable_htab_free (dvar
);
3493 *dstslot
= dvar
= s1var
;
3495 dst_can_be_shared
= false;
3498 dst_can_be_shared
= false;
3503 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3504 multi-part variable. Unions of multi-part variables and
3505 intersections of one-part ones will be handled in
3506 variable_merge_over_cur(). */
3509 variable_merge_over_src (void **s2slot
, void *data
)
3511 struct dfset_merge
*dsm
= (struct dfset_merge
*)data
;
3512 dataflow_set
*dst
= dsm
->dst
;
3513 variable s2var
= (variable
) *s2slot
;
3514 decl_or_value dv
= s2var
->dv
;
3515 bool onepart
= dv_onepart_p (dv
);
3519 void **dstp
= shared_hash_find_slot (dst
->vars
, dv
);
3525 dsm
->src_onepart_cnt
++;
3529 /* Combine dataflow set information from SRC2 into DST, using PDST
3530 to carry over information across passes. */
3533 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
3535 dataflow_set cur
= *dst
;
3536 dataflow_set
*src1
= &cur
;
3537 struct dfset_merge dsm
;
3539 size_t src1_elems
, src2_elems
;
3541 src1_elems
= htab_elements (shared_hash_htab (src1
->vars
));
3542 src2_elems
= htab_elements (shared_hash_htab (src2
->vars
));
3543 dataflow_set_init (dst
);
3544 dst
->stack_adjust
= cur
.stack_adjust
;
3545 shared_hash_destroy (dst
->vars
);
3546 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
3547 dst
->vars
->refcount
= 1;
3549 = htab_create (MAX (src1_elems
, src2_elems
), variable_htab_hash
,
3550 variable_htab_eq
, variable_htab_free
);
3552 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3553 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
3558 dsm
.src_onepart_cnt
= 0;
3560 htab_traverse (shared_hash_htab (dsm
.src
->vars
), variable_merge_over_src
,
3562 htab_traverse (shared_hash_htab (dsm
.cur
->vars
), variable_merge_over_cur
,
3565 if (dsm
.src_onepart_cnt
)
3566 dst_can_be_shared
= false;
3568 dataflow_set_destroy (src1
);
3571 /* Mark register equivalences. */
3574 dataflow_set_equiv_regs (dataflow_set
*set
)
3579 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3581 rtx canon
[NUM_MACHINE_MODES
];
3583 memset (canon
, 0, sizeof (canon
));
3585 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3586 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
3588 rtx val
= dv_as_value (list
->dv
);
3589 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
3592 if (canon_value_cmp (val
, cval
))
3596 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3597 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3599 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3604 if (dv_is_value_p (list
->dv
))
3606 rtx val
= dv_as_value (list
->dv
);
3611 VALUE_RECURSED_INTO (val
) = true;
3612 set_variable_part (set
, val
, dv_from_value (cval
), 0,
3613 VAR_INIT_STATUS_INITIALIZED
,
3617 VALUE_RECURSED_INTO (cval
) = true;
3618 set_variable_part (set
, cval
, list
->dv
, 0,
3619 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
3622 for (listp
= &set
->regs
[i
]; (list
= *listp
);
3623 listp
= list
? &list
->next
: listp
)
3624 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3626 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3632 if (dv_is_value_p (list
->dv
))
3634 rtx val
= dv_as_value (list
->dv
);
3635 if (!VALUE_RECURSED_INTO (val
))
3639 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
3640 canonicalize_values_star (slot
, set
);
3647 /* Remove any redundant values in the location list of VAR, which must
3648 be unshared and 1-part. */
3651 remove_duplicate_values (variable var
)
3653 location_chain node
, *nodep
;
3655 gcc_assert (dv_onepart_p (var
->dv
));
3656 gcc_assert (var
->n_var_parts
== 1);
3657 gcc_assert (var
->refcount
== 1);
3659 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
3661 if (GET_CODE (node
->loc
) == VALUE
)
3663 if (VALUE_RECURSED_INTO (node
->loc
))
3665 /* Remove duplicate value node. */
3666 *nodep
= node
->next
;
3667 pool_free (loc_chain_pool
, node
);
3671 VALUE_RECURSED_INTO (node
->loc
) = true;
3673 nodep
= &node
->next
;
3676 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3677 if (GET_CODE (node
->loc
) == VALUE
)
3679 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
3680 VALUE_RECURSED_INTO (node
->loc
) = false;
3685 /* Hash table iteration argument passed to variable_post_merge. */
3686 struct dfset_post_merge
3688 /* The new input set for the current block. */
3690 /* Pointer to the permanent input set for the current block, or
3692 dataflow_set
**permp
;
3695 /* Create values for incoming expressions associated with one-part
3696 variables that don't have value numbers for them. */
3699 variable_post_merge_new_vals (void **slot
, void *info
)
3701 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
3702 dataflow_set
*set
= dfpm
->set
;
3703 variable var
= (variable
)*slot
;
3704 location_chain node
;
3706 if (!dv_onepart_p (var
->dv
) || !var
->n_var_parts
)
3709 gcc_assert (var
->n_var_parts
== 1);
3711 if (dv_is_decl_p (var
->dv
))
3713 bool check_dupes
= false;
3716 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3718 if (GET_CODE (node
->loc
) == VALUE
)
3719 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
3720 else if (GET_CODE (node
->loc
) == REG
)
3722 attrs att
, *attp
, *curp
= NULL
;
3724 if (var
->refcount
!= 1)
3726 slot
= unshare_variable (set
, slot
, var
,
3727 VAR_INIT_STATUS_INITIALIZED
);
3728 var
= (variable
)*slot
;
3732 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
3734 if (att
->offset
== 0
3735 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3737 if (dv_is_value_p (att
->dv
))
3739 rtx cval
= dv_as_value (att
->dv
);
3744 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
3752 if ((*curp
)->offset
== 0
3753 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
3754 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
3757 curp
= &(*curp
)->next
;
3768 *dfpm
->permp
= XNEW (dataflow_set
);
3769 dataflow_set_init (*dfpm
->permp
);
3772 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
3773 att
; att
= att
->next
)
3774 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3776 gcc_assert (att
->offset
== 0);
3777 gcc_assert (dv_is_value_p (att
->dv
));
3778 val_reset (set
, att
->dv
);
3785 cval
= dv_as_value (cdv
);
3789 /* Create a unique value to hold this register,
3790 that ought to be found and reused in
3791 subsequent rounds. */
3793 gcc_assert (!cselib_lookup (node
->loc
,
3794 GET_MODE (node
->loc
), 0));
3795 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1);
3796 cselib_preserve_value (v
);
3797 cselib_invalidate_rtx (node
->loc
);
3799 cdv
= dv_from_value (cval
);
3802 "Created new value %u:%u for reg %i\n",
3803 v
->uid
, v
->hash
, REGNO (node
->loc
));
3806 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
3807 VAR_INIT_STATUS_INITIALIZED
,
3808 cdv
, 0, NULL
, INSERT
);
3814 /* Remove attribute referring to the decl, which now
3815 uses the value for the register, already existing or
3816 to be added when we bring perm in. */
3819 pool_free (attrs_pool
, att
);
3824 remove_duplicate_values (var
);
3830 /* Reset values in the permanent set that are not associated with the
3831 chosen expression. */
/* NOTE(review): htab_traverse callback over the permanent set.  Text below is
   a damaged extraction (upstream line numbers fused in, interior lines
   missing at every gap in the numbering) — kept byte-identical.  */
3834 variable_post_merge_perm_vals (void **pslot
, void *info
)
/* Unpack the dataflow set being adjusted from the traversal cookie.  */
3836 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
3837 dataflow_set
*set
= dfpm
->set
;
3838 variable pvar
= (variable
)*pslot
, var
;
3839 location_chain pnode
;
/* Permanent-set entries are one-part values with a single REG location.  */
3843 gcc_assert (dv_is_value_p (pvar
->dv
))
;
3844 gcc_assert (pvar
->n_var_parts
== 1);
3845 pnode
= pvar
->var_part
[0].loc_chain
;
3847 gcc_assert (!pnode
->next
);
3848 gcc_assert (REG_P (pnode
->loc
));
3852 var
= shared_hash_find (set
->vars
, dv
);
/* If the register's location is already present for this value, keep it;
   otherwise reset the value (control flow around this test lost to gaps).  */
3855 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
3857 val_reset (set
, dv
);
/* Scan the attrs chain of the register for an existing zero-offset,
   same-mode value binding.  */
3860 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
3861 if (att
->offset
== 0
3862 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
3863 && dv_is_value_p (att
->dv
))
3866 /* If there is a value associated with this register already, create
3868 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
3870 rtx cval
= dv_as_value (att
->dv
);
/* Cross-link the two values so either can stand in for the other.  */
3871 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
3872 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
3877 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
/* Merge the permanent entry's locations into SET's own entry.  */
3879 variable_union (pslot
, set
);
3885 /* Just checking stuff and registering register attributes for
/* NOTE(review): damaged extraction — kept byte-identical.  Runs the three
   post-merge fixups over SET's variable hash table: new-value creation,
   permanent-set value propagation, then canonicalization passes.  */
3889 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
3891 struct dfset_post_merge dfpm
;
3896 htab_traverse (shared_hash_htab (set
->vars
), variable_post_merge_new_vals
,
3899 htab_traverse (shared_hash_htab ((*permp
)->vars
),
3900 variable_post_merge_perm_vals
, &dfpm
);
/* Canonicalize values first, then variables, after the merges above.  */
3901 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_values_star
, set
);
3902 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_vars_star
, set
);
3905 /* Return a node whose loc is a MEM that refers to EXPR in the
3906 location list of a one-part variable or value VAR, or in that of
3907 any values recursively mentioned in the location lists. */
/* NOTE(review): damaged extraction — kept byte-identical.  Recursion is
   bounded by the VALUE_RECURSED_INTO flag set/cleared below.  */
3909 static location_chain
3910 find_mem_expr_in_1pdv (tree expr
, rtx val
, htab_t vars
)
3912 location_chain node
;
3915 location_chain where
= NULL
;
3920 gcc_assert (GET_CODE (val
) == VALUE
);
3922 gcc_assert (!VALUE_RECURSED_INTO (val
));
/* Look up the variable entry for VAL in the hash table VARS.  */
3924 dv
= dv_from_value (val
);
3925 var
= (variable
) htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
3930 gcc_assert (dv_onepart_p (var
->dv
));
3932 if (!var
->n_var_parts
)
3935 gcc_assert (var
->var_part
[0].offset
== 0);
/* Mark VAL so mutually-referencing values can't loop forever.  */
3937 VALUE_RECURSED_INTO (val
) = true;
3939 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
/* Direct hit: a MEM whose MEM_EXPR is EXPR at offset 0.  */
3940 if (MEM_P (node
->loc
) && MEM_EXPR (node
->loc
) == expr
3941 && MEM_OFFSET (node
->loc
) == 0)
/* Otherwise recurse into any not-yet-visited VALUE in the chain.  */
3946 else if (GET_CODE (node
->loc
) == VALUE
3947 && !VALUE_RECURSED_INTO (node
->loc
)
3948 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
3951 VALUE_RECURSED_INTO (val
) = false;
3956 /* Return TRUE if the value of MEM may vary across a call. */
/* NOTE(review): damaged extraction — kept byte-identical.  A MEM dies at a
   call if its base decl may be aliased, or is a writable global.  */
3959 mem_dies_at_call (rtx mem
)
3961 tree expr
= MEM_EXPR (mem
);
3967 decl
= get_base_address (expr
);
3975 return (may_be_aliased (decl
)
3976 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
3979 /* Remove all MEMs from the location list of a hash table entry for a
3980 one-part variable, except those whose MEM attributes map back to
3981 the variable itself, directly or within a VALUE. */
/* NOTE(review): htab_traverse callback; damaged extraction with many interior
   lines missing (loop bodies, braces) — kept byte-identical.  Only one-part
   decl variables are processed; the rest fall through untouched.  */
3984 dataflow_set_preserve_mem_locs (void **slot
, void *data
)
3986 dataflow_set
*set
= (dataflow_set
*) data
;
3987 variable var
= (variable
) *slot
;
3989 if (dv_is_decl_p (var
->dv
) && dv_onepart_p (var
->dv
))
3991 tree decl
= dv_as_decl (var
->dv
);
3992 location_chain loc
, *locp
;
3993 bool changed
= false;
3995 if (!var
->n_var_parts
)
3998 gcc_assert (var
->n_var_parts
== 1);
/* First pass (shared variable): decide whether any change is needed
   before paying for unsharing.  */
4000 if (shared_var_p (var
, set
->vars
))
4002 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4004 /* We want to remove dying MEMs that doesn't refer to
4006 if (GET_CODE (loc
->loc
) == MEM
4007 && (MEM_EXPR (loc
->loc
) != decl
4008 || MEM_OFFSET (loc
->loc
))
4009 && !mem_dies_at_call (loc
->loc
))
4011 /* We want to move here MEMs that do refer to DECL. */
4012 else if (GET_CODE (loc
->loc
) == VALUE
4013 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4014 shared_hash_htab (set
->vars
)))
/* Unshare before mutating the location chain in place.  */
4021 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4022 var
= (variable
)*slot
;
4023 gcc_assert (var
->n_var_parts
== 1);
4026 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4029 rtx old_loc
= loc
->loc
;
4030 if (GET_CODE (old_loc
) == VALUE
)
4032 location_chain mem_node
4033 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4034 shared_hash_htab (set
->vars
));
4036 /* ??? This picks up only one out of multiple MEMs that
4037 refer to the same variable. Do we ever need to be
4038 concerned about dealing with more than one, or, given
4039 that they should all map to the same variable
4040 location, their addresses will have been merged and
4041 they will be regarded as equivalent? */
/* Replace the VALUE node with the MEM found inside it, keeping the
   weaker of the two init statuses.  */
4044 loc
->loc
= mem_node
->loc
;
4045 loc
->set_src
= mem_node
->set_src
;
4046 loc
->init
= MIN (loc
->init
, mem_node
->init
);
/* Keep non-MEMs, MEMs of DECL itself at offset 0, and MEMs that
   survive the call; drop the rest below.  */
4050 if (GET_CODE (loc
->loc
) != MEM
4051 || (MEM_EXPR (loc
->loc
) == decl
4052 && MEM_OFFSET (loc
->loc
) == 0)
4053 || !mem_dies_at_call (loc
->loc
))
4055 if (old_loc
!= loc
->loc
&& emit_notes
)
4057 if (old_loc
== var
->var_part
[0].cur_loc
)
/* Invalidate the cached current location so a new note is emitted.  */
4060 var
->var_part
[0].cur_loc
= NULL
;
4061 var
->cur_loc_changed
= true;
4063 add_value_chains (var
->dv
, loc
->loc
);
4064 remove_value_chains (var
->dv
, old_loc
);
4072 remove_value_chains (var
->dv
, old_loc
);
4073 if (old_loc
== var
->var_part
[0].cur_loc
)
4076 var
->var_part
[0].cur_loc
= NULL
;
4077 var
->cur_loc_changed
= true;
/* Release the dropped location-chain node.  */
4081 pool_free (loc_chain_pool
, loc
);
4084 if (!var
->var_part
[0].loc_chain
)
4090 variable_was_changed (var
, set
);
4096 /* Remove all MEMs from the location list of a hash table entry for a
/* NOTE(review): htab_traverse callback, sibling of
   dataflow_set_preserve_mem_locs but for VALUE entries; damaged extraction —
   kept byte-identical.  Drops MEM locations that die at a call.  */
4100 dataflow_set_remove_mem_locs (void **slot
, void *data
)
4102 dataflow_set
*set
= (dataflow_set
*) data
;
4103 variable var
= (variable
) *slot
;
4105 if (dv_is_value_p (var
->dv
))
4107 location_chain loc
, *locp
;
4108 bool changed
= false;
4110 gcc_assert (var
->n_var_parts
== 1);
/* Shared entry: scan first, unshare only if a dying MEM exists.  */
4112 if (shared_var_p (var
, set
->vars
))
4114 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4115 if (GET_CODE (loc
->loc
) == MEM
4116 && mem_dies_at_call (loc
->loc
))
4122 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4123 var
= (variable
)*slot
;
4124 gcc_assert (var
->n_var_parts
== 1);
4127 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
/* Keep anything that is not a call-clobbered MEM.  */
4130 if (GET_CODE (loc
->loc
) != MEM
4131 || !mem_dies_at_call (loc
->loc
))
4138 remove_value_chains (var
->dv
, loc
->loc
);
4140 /* If we have deleted the location which was last emitted
4141 we have to emit new location so add the variable to set
4142 of changed variables. */
4143 if (var
->var_part
[0].cur_loc
== loc
->loc
)
4146 var
->var_part
[0].cur_loc
= NULL
;
4147 var
->cur_loc_changed
= true;
4149 pool_free (loc_chain_pool
, loc
);
4152 if (!var
->var_part
[0].loc_chain
)
4158 variable_was_changed (var
, set
);
4164 /* Remove all variable-location information about call-clobbered
4165 registers, as well as associations between MEMs and VALUEs. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4168 dataflow_set_clear_at_call (dataflow_set
*set
)
/* Forget every call-used hard register.  */
4172 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
4173 if (TEST_HARD_REG_BIT (call_used_reg_set
, r
))
4174 var_regno_delete (set
, r
);
4176 if (MAY_HAVE_DEBUG_INSNS
)
/* traversed_vars guards against re-entering shared tables while the
   two MEM-pruning traversals below mutate SET->vars.  */
4178 set
->traversed_vars
= set
->vars
;
4179 htab_traverse (shared_hash_htab (set
->vars
),
4180 dataflow_set_preserve_mem_locs
, set
);
4181 set
->traversed_vars
= set
->vars
;
4182 htab_traverse (shared_hash_htab (set
->vars
), dataflow_set_remove_mem_locs
,
4184 set
->traversed_vars
= NULL
;
4188 /* Flag whether two dataflow sets being compared contain different data. */
/* NOTE(review): file-scope flag written by dataflow_set_different_1 and read
   by dataflow_set_different; not thread-safe, matching the rest of the pass.  */
4190 dataflow_set_different_value
;
/* NOTE(review): damaged extraction — kept byte-identical.  Asymmetric helper:
   checks whether VP1 has a location not matched in VP2 (callers invoke it in
   both directions).  */
4193 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4195 location_chain lc1
, lc2
;
/* For each location of VP1, search VP2's chain for a match.  */
4197 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4199 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
/* Registers compare by number, everything else by rtx_equal_p.  */
4201 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4203 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4206 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4215 /* Return true if one-part variables VAR1 and VAR2 are different.
4216 They must be in canonical order. */
/* NOTE(review): damaged extraction — kept byte-identical.  Relies on both
   chains being canonically sorted, so a single parallel walk with loc_cmp
   suffices (comparison loop partly lost to extraction gaps).  */
4219 onepart_variable_different_p (variable var1
, variable var2
)
4221 location_chain lc1
, lc2
;
4226 gcc_assert (var1
->n_var_parts
== 1);
4227 gcc_assert (var2
->n_var_parts
== 1);
4229 lc1
= var1
->var_part
[0].loc_chain
;
4230 lc2
= var2
->var_part
[0].loc_chain
;
4237 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4246 /* Return true if variables VAR1 and VAR2 are different. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4249 variable_different_p (variable var1
, variable var2
)
4256 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4259 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4261 if (var1
->var_part
[i
].offset
!= var2
->var_part
[i
].offset
)
4263 /* One-part values have locations in a canonical order. */
4264 if (i
== 0 && var1
->var_part
[i
].offset
== 0 && dv_onepart_p (var1
->dv
))
4266 gcc_assert (var1
->n_var_parts
== 1);
4267 gcc_assert (dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
));
4268 return onepart_variable_different_p (var1
, var2
);
/* Multi-part case: variable_part_different_p is one-directional, so
   compare in both directions.  */
4270 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4272 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
4278 /* Compare variable *SLOT with the same variable in hash table DATA
4279 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
/* NOTE(review): htab_traverse callback; damaged extraction — kept
   byte-identical.  Returns 0 to stop the traversal once a difference is
   found (the return statements themselves fell into extraction gaps).  */
4282 dataflow_set_different_1 (void **slot
, void *data
)
4284 htab_t htab
= (htab_t
) data
;
4285 variable var1
, var2
;
4287 var1
= (variable
) *slot
;
/* Look up the counterpart of VAR1 in the other set's table.  */
4288 var2
= (variable
) htab_find_with_hash (htab
, var1
->dv
,
4289 dv_htab_hash (var1
->dv
));
/* Counterpart missing: the sets differ.  */
4292 dataflow_set_different_value
= true;
4294 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4296 fprintf (dump_file
, "dataflow difference found: removal of:\n");
4300 /* Stop traversing the hash table. */
4304 if (variable_different_p (var1
, var2
))
4306 dataflow_set_different_value
= true;
4308 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4310 fprintf (dump_file
, "dataflow difference found: old and new follow:\n");
4315 /* Stop traversing the hash table. */
4319 /* Continue traversing the hash table. */
4323 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
/* NOTE(review): damaged extraction — kept byte-identical.  Fast paths:
   identical shared vars => equal; differing element counts => different.  */
4326 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
4328 if (old_set
->vars
== new_set
->vars
)
4331 if (htab_elements (shared_hash_htab (old_set
->vars
))
4332 != htab_elements (shared_hash_htab (new_set
->vars
)))
/* Full comparison via the per-entry callback, result in the global flag.  */
4335 dataflow_set_different_value
= false;
4337 htab_traverse (shared_hash_htab (old_set
->vars
), dataflow_set_different_1
,
4338 shared_hash_htab (new_set
->vars
));
4339 /* No need to traverse the second hashtab, if both have the same number
4340 of elements and the second one had all entries found in the first one,
4341 then it can't have any extra entries. */
4342 return dataflow_set_different_value
;
4345 /* Free the contents of dataflow set SET. */
/* NOTE(review): damaged extraction — kept byte-identical.  Releases each
   hard register's attrs list, then drops the shared variable table.  */
4348 dataflow_set_destroy (dataflow_set
*set
)
4352 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4353 attrs_list_clear (&set
->regs
[i
]);
4355 shared_hash_destroy (set
->vars
);
4359 /* Return true if RTL X contains a SYMBOL_REF. */
/* NOTE(review): damaged extraction — kept byte-identical.  Standard recursive
   RTL walk over the 'e' and 'E' operands of X.  */
4362 contains_symbol_ref (rtx x
)
4371 code
= GET_CODE (x
);
4372 if (code
== SYMBOL_REF
)
4375 fmt
= GET_RTX_FORMAT (code
);
4376 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4380 if (contains_symbol_ref (XEXP (x
, i
)))
4383 else if (fmt
[i
] == 'E')
4386 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4387 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
4395 /* Shall EXPR be tracked? */
/* NOTE(review): damaged extraction — kept byte-identical.  Series of early
   rejections; reaching the end means EXPR is trackable and its "changed"
   flags are cleared.  */
4398 track_expr_p (tree expr
, bool need_rtl
)
4403 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
4404 return DECL_RTL_SET_P (expr
);
4406 /* If EXPR is not a parameter or a variable do not track it. */
4407 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
4410 /* It also must have a name... */
4411 if (!DECL_NAME (expr
) && need_rtl
)
4414 /* ... and a RTL assigned to it. */
4415 decl_rtl
= DECL_RTL_IF_SET (expr
);
4416 if (!decl_rtl
&& need_rtl
)
4419 /* If this expression is really a debug alias of some other declaration, we
4420 don't need to track this expression if the ultimate declaration is
4423 if (DECL_DEBUG_EXPR_IS_FROM (realdecl
) && DECL_DEBUG_EXPR (realdecl
))
4425 realdecl
= DECL_DEBUG_EXPR (realdecl
);
4426 /* ??? We don't yet know how to emit DW_OP_piece for variable
4427 that has been SRA'ed. */
4428 if (!DECL_P (realdecl
))
4432 /* Do not track EXPR if REALDECL it should be ignored for debugging
4434 if (DECL_IGNORED_P (realdecl
))
4437 /* Do not track global variables until we are able to emit correct location
4439 if (TREE_STATIC (realdecl
))
4442 /* When the EXPR is a DECL for alias of some variable (see example)
4443 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4444 DECL_RTL contains SYMBOL_REF.
4447 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4450 if (decl_rtl
&& MEM_P (decl_rtl
)
4451 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
4454 /* If RTX is a memory it should not be very large (because it would be
4455 an array or struct). */
4456 if (decl_rtl
&& MEM_P (decl_rtl
))
4458 /* Do not track structures and arrays. */
4459 if (GET_MODE (decl_rtl
) == BLKmode
4460 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
4462 if (MEM_SIZE (decl_rtl
)
4463 && INTVAL (MEM_SIZE (decl_rtl
)) > MAX_VAR_PARTS
)
/* Accepted: reset the change markers on both decls.  */
4467 DECL_CHANGED (expr
) = 0;
4468 DECL_CHANGED (realdecl
) = 0;
4472 /* Determine whether a given LOC refers to the same variable part as
/* NOTE(review): damaged extraction — kept byte-identical.  Extracts
   decl/offset from LOC (REG or MEM), maps both decls through their debug
   aliases, then compares decl and offset.  */
4476 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
4479 HOST_WIDE_INT offset2
;
4481 if (! DECL_P (expr
))
4486 expr2
= REG_EXPR (loc
);
4487 offset2
= REG_OFFSET (loc
);
4489 else if (MEM_P (loc
))
4491 expr2
= MEM_EXPR (loc
);
4492 offset2
= INT_MEM_OFFSET (loc
);
4497 if (! expr2
|| ! DECL_P (expr2
))
/* Normalize both sides through the debug-decl mapping before comparing.  */
4500 expr
= var_debug_decl (expr
);
4501 expr2
= var_debug_decl (expr2
);
4503 return (expr
== expr2
&& offset
== offset2
);
4506 /* LOC is a REG or MEM that we would like to track if possible.
4507 If EXPR is null, we don't know what expression LOC refers to,
4508 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4509 LOC is an lvalue register.
4511 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4512 is something we can track. When returning true, store the mode of
4513 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4514 from EXPR in *OFFSET_OUT (if nonnull). */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4517 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
4518 enum machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
4520 enum machine_mode mode
;
4522 if (expr
== NULL
|| !track_expr_p (expr
, true))
4525 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4526 whole subreg, but only the old inner part is really relevant. */
4527 mode
= GET_MODE (loc
);
4528 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
4530 enum machine_mode pseudo_mode
;
4532 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
4533 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
/* Narrow to the pseudo's mode; adjust OFFSET to the lowpart.  */
4535 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
4540 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4541 Do the same if we are storing to a register and EXPR occupies
4542 the whole of register LOC; in that case, the whole of EXPR is
4543 being changed. We exclude complex modes from the second case
4544 because the real and imaginary parts are represented as separate
4545 pseudo registers, even if the whole complex value fits into one
4547 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
4549 && !COMPLEX_MODE_P (DECL_MODE (expr
))
4550 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
4551 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
4553 mode
= DECL_MODE (expr
);
/* Reject offsets a variable_part cannot represent.  */
4557 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
4563 *offset_out
= offset
;
4567 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4568 want to track. When returning nonnull, make sure that the attributes
4569 on the returned value are updated. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4572 var_lowpart (enum machine_mode mode
, rtx loc
)
4574 unsigned int offset
, reg_offset
, regno
;
4576 if (!REG_P (loc
) && !MEM_P (loc
))
/* Already in the requested mode: nothing to adjust.  */
4579 if (GET_MODE (loc
) == mode
)
4582 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
/* MEM case: re-address; REG case below rebuilds the register with
   updated REG_ATTRS.  */
4585 return adjust_address_nv (loc
, mode
, offset
);
4587 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
4588 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
4590 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
4593 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
4594 hard_frame_pointer_rtx is being mapped to it. */
4595 static rtx cfa_base_rtx
;
4597 /* Carry information about uses and stores while walking rtx. */
/* NOTE(review): damaged extraction — the insn, bb, n_sets and store_p field
   declarations fell into gaps; only their comments and the sets field
   remain visible.  Kept byte-identical.  */
4599 struct count_use_info
4601 /* The insn where the RTX is. */
4604 /* The basic block where insn is. */
4607 /* The array of n_sets sets in the insn, as determined by cselib. */
4608 struct cselib_set
*sets
;
4611 /* True if we're counting stores, false otherwise. */
4615 /* Find a VALUE corresponding to X. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4617 static inline cselib_val
*
4618 find_use_val (rtx x
, enum machine_mode mode
, struct count_use_info
*cui
)
4624 /* This is called after uses are set up and before stores are
4625 processed bycselib, so it's safe to look up srcs, but not
4626 dsts. So we look up expressions that appear in srcs or in
4627 dest expressions, but we search the sets array for dests of
/* Destination lookup: scan the cselib-recorded sets for X.  */
4631 for (i
= 0; i
< cui
->n_sets
; i
++)
4632 if (cui
->sets
[i
].dest
== x
)
4633 return cui
->sets
[i
].src_elt
;
/* Source lookup: plain cselib query, no insertion (last arg 0).  */
4636 return cselib_lookup (x
, mode
, 0);
4642 /* Helper function to get mode of MEM's address. */
/* NOTE(review): damaged extraction — kept byte-identical.  Falls back to the
   target's address-space mode when the address rtx has VOIDmode (e.g. a
   constant address).  */
4644 static inline enum machine_mode
4645 get_address_mode (rtx mem
)
4647 enum machine_mode mode
= GET_MODE (XEXP (mem
, 0));
4648 if (mode
!= VOIDmode
)
4650 return targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (mem
));
4653 /* Replace all registers and addresses in an expression with VALUE
4654 expressions that map back to them, unless the expression is a
4655 register. If no mapping is or can be performed, returns NULL. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4658 replace_expr_with_values (rtx loc
)
4662 else if (MEM_P (loc
))
/* Rewrite a MEM's address by its cselib VALUE, if one exists.  */
4664 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
4665 get_address_mode (loc
), 0);
4667 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
/* Non-REG, non-MEM: substitute values throughout the expression.  */
4672 return cselib_subst_to_values (loc
);
4675 /* Determine what kind of micro operation to choose for a USE. Return
4676 MO_CLOBBER if no micro operation is to be generated. */
/* NOTE(review): damaged extraction with many interior lines (returns,
   braces) missing — kept byte-identical.  Classifies LOC into the
   MO_USE/MO_USE_NO_VAR/MO_VAL_* micro-operation kinds.  */
4678 static enum micro_operation_type
4679 use_type (rtx loc
, struct count_use_info
*cui
, enum machine_mode
*modep
)
4683 if (cui
&& cui
->sets
)
4685 if (GET_CODE (loc
) == VAR_LOCATION
)
4687 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
4689 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
4690 if (! VAR_LOC_UNKNOWN_P (ploc
))
/* Ensure a VALUE exists for the bound location (insert = 1).  */
4692 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1);
4694 /* ??? flag_float_store and volatile mems are never
4695 given values, but we could in theory use them for
4697 gcc_assert (val
|| 1);
4705 if (REG_P (loc
) || MEM_P (loc
))
4708 *modep
= GET_MODE (loc
);
4712 || (find_use_val (loc
, GET_MODE (loc
), cui
)
4713 && cselib_lookup (XEXP (loc
, 0),
4714 get_address_mode (loc
), 0)))
4719 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
4721 if (val
&& !cselib_preserved_value_p (val
))
/* Only hard registers reach here.  */
4729 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
4731 if (loc
== cfa_base_rtx
)
4733 expr
= REG_EXPR (loc
);
4736 return MO_USE_NO_VAR
;
4737 else if (target_for_debug_bind (var_debug_decl (expr
)))
4739 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
4740 false, modep
, NULL
))
4743 return MO_USE_NO_VAR
;
4745 else if (MEM_P (loc
))
4747 expr
= MEM_EXPR (loc
);
4751 else if (target_for_debug_bind (var_debug_decl (expr
)))
4753 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
4754 false, modep
, NULL
))
4763 /* Log to OUT information about micro-operation MOPT involving X in
/* NOTE(review): damaged extraction — kept byte-identical.  Dump-file helper;
   the op index is the current length of the block's micro-op vector.  */
4767 log_op_type (rtx x
, basic_block bb
, rtx insn
,
4768 enum micro_operation_type mopt
, FILE *out
)
4770 fprintf (out
, "bb %i op %i insn %i %s ",
4771 bb
->index
, VEC_length (micro_operation
, VTI (bb
)->mos
),
4772 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
4773 print_inline_rtx (out
, x
, 2);
4777 /* Tell whether the CONCAT used to holds a VALUE and its location
4778 needs value resolution, i.e., an attempt of mapping the location
4779 back to other incoming values. */
4780 #define VAL_NEEDS_RESOLUTION(x) \
4781 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4782 /* Whether the location in the CONCAT is a tracked expression, that
4783 should also be handled like a MO_USE. */
4784 #define VAL_HOLDS_TRACK_EXPR(x) \
4785 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4786 /* Whether the location in the CONCAT should be handled like a MO_COPY
4788 #define VAL_EXPR_IS_COPIED(x) \
4789 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4790 /* Whether the location in the CONCAT should be handled like a
4791 MO_CLOBBER as well. */
4792 #define VAL_EXPR_IS_CLOBBERED(x) \
4793 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4794 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4795 a reverse operation that should be handled afterwards. */
4796 #define VAL_EXPR_HAS_REVERSE(x) \
4797 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4799 /* All preserved VALUEs. */
/* NOTE(review): heap vector populated by preserve_value below.  */
4800 static VEC (rtx
, heap
) *preserved_values
;
4802 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
/* NOTE(review): damaged extraction — kept byte-identical.  */
4805 preserve_value (cselib_val
*val
)
4807 cselib_preserve_value (val
);
4808 VEC_safe_push (rtx
, heap
, preserved_values
, val
->val_rtx
);
4811 /* Helper function for MO_VAL_LOC handling. Return non-zero if
4812 any rtxes not suitable for CONST use not replaced by VALUEs
/* NOTE(review): for_each_rtx callback; damaged extraction (most switch cases
   lost) — kept byte-identical.  */
4816 non_suitable_const (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
4821 switch (GET_CODE (*x
))
/* A MEM is acceptable only if read-only.  */
4832 return !MEM_READONLY_P (*x
);
4838 /* Add uses (register and memory references) LOC which will be tracked
4839 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
/* NOTE(review): for_each_rtx callback; this is the largest and most damaged
   block in the chunk (dozens of interior lines missing).  Kept byte-identical.
   Builds a micro_operation for LOC and pushes it on VTI(bb)->mos; the
   MO_VAL_LOC and MO_VAL_USE branches wrap the location in CONCATs carrying
   cselib VALUEs.  TODO: restore from upstream var-tracking.c before any
   behavioral change.  */
4842 add_uses (rtx
*ploc
, void *data
)
4845 enum machine_mode mode
= VOIDmode
;
4846 struct count_use_info
*cui
= (struct count_use_info
*)data
;
4847 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
4849 if (type
!= MO_CLOBBER
)
4851 basic_block bb
= cui
->bb
;
4855 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
4856 mo
.insn
= cui
->insn
;
4858 if (type
== MO_VAL_LOC
)
4861 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
4864 gcc_assert (cui
->sets
);
/* Complex MEM address (not REG/MEM/cfa_base+const): record a MO_VAL_USE
   for the address value first.  */
4867 && !REG_P (XEXP (vloc
, 0))
4868 && !MEM_P (XEXP (vloc
, 0))
4869 && (GET_CODE (XEXP (vloc
, 0)) != PLUS
4870 || XEXP (XEXP (vloc
, 0), 0) != cfa_base_rtx
4871 || !CONST_INT_P (XEXP (XEXP (vloc
, 0), 1))))
4874 enum machine_mode address_mode
= get_address_mode (mloc
);
4876 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0);
4878 if (val
&& !cselib_preserved_value_p (val
))
4880 micro_operation moa
;
4881 preserve_value (val
);
4882 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
4883 moa
.type
= MO_VAL_USE
;
4884 moa
.insn
= cui
->insn
;
4885 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
4886 val
->val_rtx
, mloc
);
4887 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4888 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
4889 moa
.type
, dump_file
);
4890 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
/* Constants unsuitable for CONST use get no value lookup at all.  */
4894 if (CONSTANT_P (vloc
)
4895 && (GET_CODE (vloc
) != CONST
4896 || for_each_rtx (&vloc
, non_suitable_const
, NULL
)))
4897 /* For constants don't look up any value. */;
4898 else if (!VAR_LOC_UNKNOWN_P (vloc
)
4899 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
4901 enum machine_mode mode2
;
4902 enum micro_operation_type type2
;
4903 rtx nloc
= replace_expr_with_values (vloc
);
4907 oloc
= shallow_copy_rtx (oloc
);
4908 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
/* Pair the VALUE with the (possibly rewritten) VAR_LOCATION.  */
4911 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
4913 type2
= use_type (vloc
, 0, &mode2
);
4915 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
4916 || type2
== MO_CLOBBER
);
4918 if (type2
== MO_CLOBBER
4919 && !cselib_preserved_value_p (val
))
4921 VAL_NEEDS_RESOLUTION (oloc
) = 1;
4922 preserve_value (val
);
4925 else if (!VAR_LOC_UNKNOWN_P (vloc
))
4927 oloc
= shallow_copy_rtx (oloc
);
4928 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
4933 else if (type
== MO_VAL_USE
)
4935 enum machine_mode mode2
= VOIDmode
;
4936 enum micro_operation_type type2
;
4937 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
4938 rtx vloc
, oloc
= loc
, nloc
;
4940 gcc_assert (cui
->sets
);
/* Same complex-address handling as the MO_VAL_LOC branch above.  */
4943 && !REG_P (XEXP (oloc
, 0))
4944 && !MEM_P (XEXP (oloc
, 0))
4945 && (GET_CODE (XEXP (oloc
, 0)) != PLUS
4946 || XEXP (XEXP (oloc
, 0), 0) != cfa_base_rtx
4947 || !CONST_INT_P (XEXP (XEXP (oloc
, 0), 1))))
4950 enum machine_mode address_mode
= get_address_mode (mloc
);
4952 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0);
4954 if (val
&& !cselib_preserved_value_p (val
))
4956 micro_operation moa
;
4957 preserve_value (val
);
4958 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
4959 moa
.type
= MO_VAL_USE
;
4960 moa
.insn
= cui
->insn
;
4961 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
4962 val
->val_rtx
, mloc
);
4963 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4964 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
4965 moa
.type
, dump_file
);
4966 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
4970 type2
= use_type (loc
, 0, &mode2
);
4972 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
4973 || type2
== MO_CLOBBER
);
4975 if (type2
== MO_USE
)
4976 vloc
= var_lowpart (mode2
, loc
);
4980 /* The loc of a MO_VAL_USE may have two forms:
4982 (concat val src): val is at src, a value-based
4985 (concat (concat val use) src): same as above, with use as
4986 the MO_USE tracked value, if it differs from src.
4990 nloc
= replace_expr_with_values (loc
);
4995 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
4997 oloc
= val
->val_rtx
;
4999 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5001 if (type2
== MO_USE
)
5002 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5003 if (!cselib_preserved_value_p (val
))
5005 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5006 preserve_value (val
);
5010 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5012 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5013 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5014 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5020 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
/* NOTE(review): thin wrapper running add_uses over every sub-rtx of *X.  */
5023 add_uses_1 (rtx
*x
, void *cui
)
5025 for_each_rtx (x
, add_uses
, cui
);
5028 /* Attempt to reverse the EXPR operation in the debug info. Say for
5029 reg1 = reg2 + 6 even when reg2 is no longer live we
5030 can express its value as VAL - 6. */
/* NOTE(review): damaged extraction (several early-return lines and switch
   cases missing) — kept byte-identical.  Returns a CONCAT pairing the
   source operand's VALUE with the inverted expression, or (per gaps)
   presumably NULL_RTX when no inversion is possible — TODO confirm against
   upstream.  */
5033 reverse_op (rtx val
, const_rtx expr
)
5039 if (GET_CODE (expr
) != SET
)
5042 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5045 src
= SET_SRC (expr
);
5046 switch (GET_CODE (src
))
5060 if (!REG_P (XEXP (src
, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src
)))
/* The first operand must already have a preserved VALUE.  */
5063 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0);
5064 if (!v
|| !cselib_preserved_value_p (v
))
5067 switch (GET_CODE (src
))
/* Unary case: apply the same unary code back to VAL.  */
5071 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5073 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5077 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5089 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
/* Binary case: second operand must reduce to a constant/symbol so the
   operation can be inverted (e.g. PLUS -> MINUS).  */
5091 arg
= XEXP (src
, 1);
5092 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5094 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5095 if (arg
== NULL_RTX
)
5097 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5100 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5102 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5103 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5104 breaks a lot of routines during var-tracking. */
5105 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5111 return gen_rtx_CONCAT (GET_MODE (v
->val_rtx
), v
->val_rtx
, ret
);
5114 /* Add stores (register and memory references) LOC which will be tracked
5115 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5116 CUIP->insn is instruction which the LOC is part of. */
/* NOTE(review): note_stores callback; the second giant, heavily damaged
   block (dozens of interior lines missing, including most brace structure).
   Kept byte-identical.  Classifies the store as MO_CLOBBER / MO_COPY /
   MO_SET / MO_VAL_SET, wraps MO_VAL_SET locations in CONCATs per the long
   comment at upstream lines 5310-5326, and pushes the micro-op.  TODO:
   restore from upstream var-tracking.c before any behavioral change.  */
5119 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5121 enum machine_mode mode
= VOIDmode
, mode2
;
5122 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5123 basic_block bb
= cui
->bb
;
5125 rtx oloc
= loc
, nloc
, src
= NULL
;
5126 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5127 bool track_p
= false;
5129 bool resolve
, preserve
;
5132 if (type
== MO_CLOBBER
)
/* Register-destination path.  */
5139 gcc_assert (loc
!= cfa_base_rtx
);
5140 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5141 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5142 || GET_CODE (expr
) == CLOBBER
)
5144 mo
.type
= MO_CLOBBER
;
5149 if (GET_CODE (expr
) == SET
&& SET_DEST (expr
) == loc
)
5150 src
= var_lowpart (mode2
, SET_SRC (expr
));
5151 loc
= var_lowpart (mode2
, loc
);
5160 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
/* Same variable part implies a copy rather than a new set.  */
5161 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5168 mo
.insn
= cui
->insn
;
/* Memory-destination path.  */
5170 else if (MEM_P (loc
)
5171 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
/* Complex MEM address: emit a MO_VAL_USE for the address value, as in
   add_uses.  */
5174 if (MEM_P (loc
) && type
== MO_VAL_SET
5175 && !REG_P (XEXP (loc
, 0))
5176 && !MEM_P (XEXP (loc
, 0))
5177 && (GET_CODE (XEXP (loc
, 0)) != PLUS
5178 || XEXP (XEXP (loc
, 0), 0) != cfa_base_rtx
5179 || !CONST_INT_P (XEXP (XEXP (loc
, 0), 1))))
5182 enum machine_mode address_mode
= get_address_mode (mloc
);
5183 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5186 if (val
&& !cselib_preserved_value_p (val
))
5188 preserve_value (val
);
5189 mo
.type
= MO_VAL_USE
;
5190 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
5191 mo
.u
.loc
= gen_rtx_CONCAT (address_mode
, val
->val_rtx
, mloc
);
5192 mo
.insn
= cui
->insn
;
5193 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5194 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
,
5195 mo
.type
, dump_file
);
5196 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5200 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5202 mo
.type
= MO_CLOBBER
;
5203 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5207 if (GET_CODE (expr
) == SET
&& SET_DEST (expr
) == loc
)
5208 src
= var_lowpart (mode2
, SET_SRC (expr
));
5209 loc
= var_lowpart (mode2
, loc
);
5218 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5219 if (same_variable_part_p (SET_SRC (xexpr
),
5221 INT_MEM_OFFSET (loc
)))
5228 mo
.insn
= cui
->insn
;
/* Below: MO_VAL_SET augmentation with cselib VALUEs.  */
5233 if (type
!= MO_VAL_SET
)
5234 goto log_and_return
;
5236 v
= find_use_val (oloc
, mode
, cui
);
5239 goto log_and_return
;
5241 resolve
= preserve
= !cselib_preserved_value_p (v
);
5243 nloc
= replace_expr_with_values (oloc
);
/* COND_EXEC stores may leave the old value live; record a use of it.  */
5247 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
5249 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0);
5251 gcc_assert (oval
!= v
);
5252 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
5254 if (!cselib_preserved_value_p (oval
))
5256 micro_operation moa
;
5258 preserve_value (oval
);
5260 moa
.type
= MO_VAL_USE
;
5261 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
5262 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
5263 moa
.insn
= cui
->insn
;
5265 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5266 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5267 moa
.type
, dump_file
);
5268 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5273 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
5275 nloc
= replace_expr_with_values (SET_SRC (expr
));
5277 /* Avoid the mode mismatch between oexpr and expr. */
5278 if (!nloc
&& mode
!= mode2
)
5280 nloc
= SET_SRC (expr
);
5281 gcc_assert (oloc
== SET_DEST (expr
));
5285 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
5288 if (oloc
== SET_DEST (mo
.u
.loc
))
5289 /* No point in duplicating. */
5291 if (!REG_P (SET_SRC (mo
.u
.loc
)))
5297 if (GET_CODE (mo
.u
.loc
) == SET
5298 && oloc
== SET_DEST (mo
.u
.loc
))
5299 /* No point in duplicating. */
5305 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
5307 if (mo
.u
.loc
!= oloc
)
5308 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
5310 /* The loc of a MO_VAL_SET may have various forms:
5312 (concat val dst): dst now holds val
5314 (concat val (set dst src)): dst now holds val, copied from src
5316 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5317 after replacing mems and non-top-level regs with values.
5319 (concat (concat val dstv) (set dst src)): dst now holds val,
5320 copied from src. dstv is a value-based representation of dst, if
5321 it differs from dst. If resolution is needed, src is a REG, and
5322 its mode is the same as that of val.
5324 (concat (concat val (set dstv srcv)) (set dst src)): src
5325 copied to dst, holding val. dstv and srcv are value-based
5326 representations of dst and src, respectively.
5330 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
5332 reverse
= reverse_op (v
->val_rtx
, expr
);
5335 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, reverse
);
5336 VAL_EXPR_HAS_REVERSE (loc
) = 1;
/* Transfer the classification flags onto the CONCAT.  */
5343 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
5346 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
5349 if (mo
.type
== MO_CLOBBER
)
5350 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
5351 if (mo
.type
== MO_COPY
)
5352 VAL_EXPR_IS_COPIED (loc
) = 1;
5354 mo
.type
= MO_VAL_SET
;
5357 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5358 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5359 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5362 /* Callback for cselib_record_sets_hook, that records as micro
5363 operations uses and stores in an insn after cselib_record_sets has
5364 analyzed the sets in an insn, but before it modifies the stored
5365 values in the internal tables, unless cselib_record_sets doesn't
5366 call it directly (perhaps because we're not doing cselib in the
5367 first place, in which case sets and n_sets will be 0). */
5370 add_with_sets (rtx insn
, struct cselib_set
*sets
, int n_sets
)
5372 basic_block bb
= BLOCK_FOR_INSN (insn
);
5374 struct count_use_info cui
;
5375 micro_operation
*mos
;
5377 cselib_hook_called
= true;
5382 cui
.n_sets
= n_sets
;
5384 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
5385 cui
.store_p
= false;
5386 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
5387 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5388 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
5390 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5394 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
5396 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
5408 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5411 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
5413 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
5431 mo
.u
.loc
= NULL_RTX
;
5433 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5434 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
5435 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5438 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
5439 /* This will record NEXT_INSN (insn), such that we can
5440 insert notes before it without worrying about any
5441 notes that MO_USEs might emit after the insn. */
5443 note_stores (PATTERN (insn
), add_stores
, &cui
);
5444 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5445 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
5447 /* Order the MO_VAL_USEs first (note_stores does nothing
5448 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5449 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5452 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
5454 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
5466 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5469 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
5471 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
5484 static enum var_init_status
5485 find_src_status (dataflow_set
*in
, rtx src
)
5487 tree decl
= NULL_TREE
;
5488 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
5490 if (! flag_var_tracking_uninit
)
5491 status
= VAR_INIT_STATUS_INITIALIZED
;
5493 if (src
&& REG_P (src
))
5494 decl
= var_debug_decl (REG_EXPR (src
));
5495 else if (src
&& MEM_P (src
))
5496 decl
= var_debug_decl (MEM_EXPR (src
));
5499 status
= get_init_value (in
, src
, dv_from_decl (decl
));
5504 /* SRC is the source of an assignment. Use SET to try to find what
5505 was ultimately assigned to SRC. Return that value if known,
5506 otherwise return SRC itself. */
5509 find_src_set_src (dataflow_set
*set
, rtx src
)
5511 tree decl
= NULL_TREE
; /* The variable being copied around. */
5512 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
5514 location_chain nextp
;
5518 if (src
&& REG_P (src
))
5519 decl
= var_debug_decl (REG_EXPR (src
));
5520 else if (src
&& MEM_P (src
))
5521 decl
= var_debug_decl (MEM_EXPR (src
));
5525 decl_or_value dv
= dv_from_decl (decl
);
5527 var
= shared_hash_find (set
->vars
, dv
);
5531 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
5532 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
5533 nextp
= nextp
->next
)
5534 if (rtx_equal_p (nextp
->loc
, src
))
5536 set_src
= nextp
->set_src
;
5546 /* Compute the changes of variable locations in the basic block BB. */
5549 compute_bb_dataflow (basic_block bb
)
5552 micro_operation
*mo
;
5554 dataflow_set old_out
;
5555 dataflow_set
*in
= &VTI (bb
)->in
;
5556 dataflow_set
*out
= &VTI (bb
)->out
;
5558 dataflow_set_init (&old_out
);
5559 dataflow_set_copy (&old_out
, out
);
5560 dataflow_set_copy (out
, in
);
5562 for (i
= 0; VEC_iterate (micro_operation
, VTI (bb
)->mos
, i
, mo
); i
++)
5564 rtx insn
= mo
->insn
;
5569 dataflow_set_clear_at_call (out
);
5574 rtx loc
= mo
->u
.loc
;
5577 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
5578 else if (MEM_P (loc
))
5579 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
5585 rtx loc
= mo
->u
.loc
;
5589 if (GET_CODE (loc
) == CONCAT
)
5591 val
= XEXP (loc
, 0);
5592 vloc
= XEXP (loc
, 1);
5600 var
= PAT_VAR_LOCATION_DECL (vloc
);
5602 clobber_variable_part (out
, NULL_RTX
,
5603 dv_from_decl (var
), 0, NULL_RTX
);
5606 if (VAL_NEEDS_RESOLUTION (loc
))
5607 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
5608 set_variable_part (out
, val
, dv_from_decl (var
), 0,
5609 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
5612 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
5613 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
5614 dv_from_decl (var
), 0,
5615 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
5622 rtx loc
= mo
->u
.loc
;
5623 rtx val
, vloc
, uloc
;
5625 vloc
= uloc
= XEXP (loc
, 1);
5626 val
= XEXP (loc
, 0);
5628 if (GET_CODE (val
) == CONCAT
)
5630 uloc
= XEXP (val
, 1);
5631 val
= XEXP (val
, 0);
5634 if (VAL_NEEDS_RESOLUTION (loc
))
5635 val_resolve (out
, val
, vloc
, insn
);
5637 val_store (out
, val
, uloc
, insn
, false);
5639 if (VAL_HOLDS_TRACK_EXPR (loc
))
5641 if (GET_CODE (uloc
) == REG
)
5642 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
5644 else if (GET_CODE (uloc
) == MEM
)
5645 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
5653 rtx loc
= mo
->u
.loc
;
5654 rtx val
, vloc
, uloc
, reverse
= NULL_RTX
;
5657 if (VAL_EXPR_HAS_REVERSE (loc
))
5659 reverse
= XEXP (loc
, 1);
5660 vloc
= XEXP (loc
, 0);
5662 uloc
= XEXP (vloc
, 1);
5663 val
= XEXP (vloc
, 0);
5666 if (GET_CODE (val
) == CONCAT
)
5668 vloc
= XEXP (val
, 1);
5669 val
= XEXP (val
, 0);
5672 if (GET_CODE (vloc
) == SET
)
5674 rtx vsrc
= SET_SRC (vloc
);
5676 gcc_assert (val
!= vsrc
);
5677 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
5679 vloc
= SET_DEST (vloc
);
5681 if (VAL_NEEDS_RESOLUTION (loc
))
5682 val_resolve (out
, val
, vsrc
, insn
);
5684 else if (VAL_NEEDS_RESOLUTION (loc
))
5686 gcc_assert (GET_CODE (uloc
) == SET
5687 && GET_CODE (SET_SRC (uloc
)) == REG
);
5688 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
5691 if (VAL_HOLDS_TRACK_EXPR (loc
))
5693 if (VAL_EXPR_IS_CLOBBERED (loc
))
5696 var_reg_delete (out
, uloc
, true);
5697 else if (MEM_P (uloc
))
5698 var_mem_delete (out
, uloc
, true);
5702 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
5704 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
5706 if (GET_CODE (uloc
) == SET
)
5708 set_src
= SET_SRC (uloc
);
5709 uloc
= SET_DEST (uloc
);
5714 if (flag_var_tracking_uninit
)
5716 status
= find_src_status (in
, set_src
);
5718 if (status
== VAR_INIT_STATUS_UNKNOWN
)
5719 status
= find_src_status (out
, set_src
);
5722 set_src
= find_src_set_src (in
, set_src
);
5726 var_reg_delete_and_set (out
, uloc
, !copied_p
,
5728 else if (MEM_P (uloc
))
5729 var_mem_delete_and_set (out
, uloc
, !copied_p
,
5733 else if (REG_P (uloc
))
5734 var_regno_delete (out
, REGNO (uloc
));
5736 val_store (out
, val
, vloc
, insn
, true);
5739 val_store (out
, XEXP (reverse
, 0), XEXP (reverse
, 1),
5746 rtx loc
= mo
->u
.loc
;
5749 if (GET_CODE (loc
) == SET
)
5751 set_src
= SET_SRC (loc
);
5752 loc
= SET_DEST (loc
);
5756 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
5758 else if (MEM_P (loc
))
5759 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
5766 rtx loc
= mo
->u
.loc
;
5767 enum var_init_status src_status
;
5770 if (GET_CODE (loc
) == SET
)
5772 set_src
= SET_SRC (loc
);
5773 loc
= SET_DEST (loc
);
5776 if (! flag_var_tracking_uninit
)
5777 src_status
= VAR_INIT_STATUS_INITIALIZED
;
5780 src_status
= find_src_status (in
, set_src
);
5782 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
5783 src_status
= find_src_status (out
, set_src
);
5786 set_src
= find_src_set_src (in
, set_src
);
5789 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
5790 else if (MEM_P (loc
))
5791 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
5797 rtx loc
= mo
->u
.loc
;
5800 var_reg_delete (out
, loc
, false);
5801 else if (MEM_P (loc
))
5802 var_mem_delete (out
, loc
, false);
5808 rtx loc
= mo
->u
.loc
;
5811 var_reg_delete (out
, loc
, true);
5812 else if (MEM_P (loc
))
5813 var_mem_delete (out
, loc
, true);
5818 out
->stack_adjust
+= mo
->u
.adjust
;
5823 if (MAY_HAVE_DEBUG_INSNS
)
5825 dataflow_set_equiv_regs (out
);
5826 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_mark
,
5828 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_star
,
5831 htab_traverse (shared_hash_htab (out
->vars
),
5832 canonicalize_loc_order_check
, out
);
5835 changed
= dataflow_set_different (&old_out
, out
);
5836 dataflow_set_destroy (&old_out
);
5840 /* Find the locations of variables in the whole function. */
5843 vt_find_locations (void)
5845 fibheap_t worklist
, pending
, fibheap_swap
;
5846 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
5853 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
5854 bool success
= true;
5856 /* Compute reverse completion order of depth first search of the CFG
5857 so that the data-flow runs faster. */
5858 rc_order
= XNEWVEC (int, n_basic_blocks
- NUM_FIXED_BLOCKS
);
5859 bb_order
= XNEWVEC (int, last_basic_block
);
5860 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
5861 for (i
= 0; i
< n_basic_blocks
- NUM_FIXED_BLOCKS
; i
++)
5862 bb_order
[rc_order
[i
]] = i
;
5865 worklist
= fibheap_new ();
5866 pending
= fibheap_new ();
5867 visited
= sbitmap_alloc (last_basic_block
);
5868 in_worklist
= sbitmap_alloc (last_basic_block
);
5869 in_pending
= sbitmap_alloc (last_basic_block
);
5870 sbitmap_zero (in_worklist
);
5873 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
5874 sbitmap_ones (in_pending
);
5876 while (success
&& !fibheap_empty (pending
))
5878 fibheap_swap
= pending
;
5880 worklist
= fibheap_swap
;
5881 sbitmap_swap
= in_pending
;
5882 in_pending
= in_worklist
;
5883 in_worklist
= sbitmap_swap
;
5885 sbitmap_zero (visited
);
5887 while (!fibheap_empty (worklist
))
5889 bb
= (basic_block
) fibheap_extract_min (worklist
);
5890 RESET_BIT (in_worklist
, bb
->index
);
5891 if (!TEST_BIT (visited
, bb
->index
))
5895 int oldinsz
, oldoutsz
;
5897 SET_BIT (visited
, bb
->index
);
5899 if (VTI (bb
)->in
.vars
)
5902 -= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
5903 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
5905 = htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
));
5907 = htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
));
5910 oldinsz
= oldoutsz
= 0;
5912 if (MAY_HAVE_DEBUG_INSNS
)
5914 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
5915 bool first
= true, adjust
= false;
5917 /* Calculate the IN set as the intersection of
5918 predecessor OUT sets. */
5920 dataflow_set_clear (in
);
5921 dst_can_be_shared
= true;
5923 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5924 if (!VTI (e
->src
)->flooded
)
5925 gcc_assert (bb_order
[bb
->index
]
5926 <= bb_order
[e
->src
->index
]);
5929 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
5930 first_out
= &VTI (e
->src
)->out
;
5935 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
5941 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
5943 /* Merge and merge_adjust should keep entries in
5945 htab_traverse (shared_hash_htab (in
->vars
),
5946 canonicalize_loc_order_check
,
5949 if (dst_can_be_shared
)
5951 shared_hash_destroy (in
->vars
);
5952 in
->vars
= shared_hash_copy (first_out
->vars
);
5956 VTI (bb
)->flooded
= true;
5960 /* Calculate the IN set as union of predecessor OUT sets. */
5961 dataflow_set_clear (&VTI (bb
)->in
);
5962 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5963 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
5966 changed
= compute_bb_dataflow (bb
);
5967 htabsz
+= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
5968 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
5970 if (htabmax
&& htabsz
> htabmax
)
5972 if (MAY_HAVE_DEBUG_INSNS
)
5973 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
5974 "variable tracking size limit exceeded with "
5975 "-fvar-tracking-assignments, retrying without");
5977 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
5978 "variable tracking size limit exceeded");
5985 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5987 if (e
->dest
== EXIT_BLOCK_PTR
)
5990 if (TEST_BIT (visited
, e
->dest
->index
))
5992 if (!TEST_BIT (in_pending
, e
->dest
->index
))
5994 /* Send E->DEST to next round. */
5995 SET_BIT (in_pending
, e
->dest
->index
);
5996 fibheap_insert (pending
,
5997 bb_order
[e
->dest
->index
],
6001 else if (!TEST_BIT (in_worklist
, e
->dest
->index
))
6003 /* Add E->DEST to current round. */
6004 SET_BIT (in_worklist
, e
->dest
->index
);
6005 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
6013 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6015 (int)htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
)),
6017 (int)htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
)),
6019 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
6021 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6023 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
6024 dump_dataflow_set (&VTI (bb
)->in
);
6025 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
6026 dump_dataflow_set (&VTI (bb
)->out
);
6032 if (success
&& MAY_HAVE_DEBUG_INSNS
)
6034 gcc_assert (VTI (bb
)->flooded
);
6037 fibheap_delete (worklist
);
6038 fibheap_delete (pending
);
6039 sbitmap_free (visited
);
6040 sbitmap_free (in_worklist
);
6041 sbitmap_free (in_pending
);
6046 /* Print the content of the LIST to dump file. */
6049 dump_attrs_list (attrs list
)
6051 for (; list
; list
= list
->next
)
6053 if (dv_is_decl_p (list
->dv
))
6054 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
6056 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
6057 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
6059 fprintf (dump_file
, "\n");
6062 /* Print the information about variable *SLOT to dump file. */
6065 dump_var_slot (void **slot
, void *data ATTRIBUTE_UNUSED
)
6067 variable var
= (variable
) *slot
;
6071 /* Continue traversing the hash table. */
6075 /* Print the information about variable VAR to dump file. */
6078 dump_var (variable var
)
6081 location_chain node
;
6083 if (dv_is_decl_p (var
->dv
))
6085 const_tree decl
= dv_as_decl (var
->dv
);
6087 if (DECL_NAME (decl
))
6089 fprintf (dump_file
, " name: %s",
6090 IDENTIFIER_POINTER (DECL_NAME (decl
)));
6091 if (dump_flags
& TDF_UID
)
6092 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
6094 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
6095 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
6097 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
6098 fprintf (dump_file
, "\n");
6102 fputc (' ', dump_file
);
6103 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
6106 for (i
= 0; i
< var
->n_var_parts
; i
++)
6108 fprintf (dump_file
, " offset %ld\n",
6109 (long) var
->var_part
[i
].offset
);
6110 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
6112 fprintf (dump_file
, " ");
6113 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
6114 fprintf (dump_file
, "[uninit]");
6115 print_rtl_single (dump_file
, node
->loc
);
6120 /* Print the information about variables from hash table VARS to dump file. */
6123 dump_vars (htab_t vars
)
6125 if (htab_elements (vars
) > 0)
6127 fprintf (dump_file
, "Variables:\n");
6128 htab_traverse (vars
, dump_var_slot
, NULL
);
6132 /* Print the dataflow set SET to dump file. */
6135 dump_dataflow_set (dataflow_set
*set
)
6139 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
6141 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6145 fprintf (dump_file
, "Reg %d:", i
);
6146 dump_attrs_list (set
->regs
[i
]);
6149 dump_vars (shared_hash_htab (set
->vars
));
6150 fprintf (dump_file
, "\n");
6153 /* Print the IN and OUT sets for each basic block to dump file. */
6156 dump_dataflow_sets (void)
6162 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
6163 fprintf (dump_file
, "IN:\n");
6164 dump_dataflow_set (&VTI (bb
)->in
);
6165 fprintf (dump_file
, "OUT:\n");
6166 dump_dataflow_set (&VTI (bb
)->out
);
6170 /* Add variable VAR to the hash table of changed variables and
6171 if it has no locations delete it from SET's hash table. */
6174 variable_was_changed (variable var
, dataflow_set
*set
)
6176 hashval_t hash
= dv_htab_hash (var
->dv
);
6181 bool old_cur_loc_changed
= false;
6183 /* Remember this decl or VALUE has been added to changed_variables. */
6184 set_dv_changed (var
->dv
, true);
6186 slot
= htab_find_slot_with_hash (changed_variables
,
6192 variable old_var
= (variable
) *slot
;
6193 gcc_assert (old_var
->in_changed_variables
);
6194 old_var
->in_changed_variables
= false;
6195 old_cur_loc_changed
= old_var
->cur_loc_changed
;
6196 variable_htab_free (*slot
);
6198 if (set
&& var
->n_var_parts
== 0)
6202 empty_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
6203 empty_var
->dv
= var
->dv
;
6204 empty_var
->refcount
= 1;
6205 empty_var
->n_var_parts
= 0;
6206 empty_var
->cur_loc_changed
= true;
6207 empty_var
->in_changed_variables
= true;
6214 var
->in_changed_variables
= true;
6215 /* If within processing one uop a variable is deleted
6216 and then readded, we need to assume it has changed. */
6217 if (old_cur_loc_changed
)
6218 var
->cur_loc_changed
= true;
6225 if (var
->n_var_parts
== 0)
6230 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
6233 if (shared_hash_shared (set
->vars
))
6234 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
6236 htab_clear_slot (shared_hash_htab (set
->vars
), slot
);
6242 /* Look for the index in VAR->var_part corresponding to OFFSET.
6243 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6244 referenced int will be set to the index that the part has or should
6245 have, if it should be inserted. */
6248 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
6249 int *insertion_point
)
6253 /* Find the location part. */
6255 high
= var
->n_var_parts
;
6258 pos
= (low
+ high
) / 2;
6259 if (var
->var_part
[pos
].offset
< offset
)
6266 if (insertion_point
)
6267 *insertion_point
= pos
;
6269 if (pos
< var
->n_var_parts
&& var
->var_part
[pos
].offset
== offset
)
6276 set_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6277 decl_or_value dv
, HOST_WIDE_INT offset
,
6278 enum var_init_status initialized
, rtx set_src
)
6281 location_chain node
, next
;
6282 location_chain
*nextp
;
6284 bool onepart
= dv_onepart_p (dv
);
6286 gcc_assert (offset
== 0 || !onepart
);
6287 gcc_assert (loc
!= dv_as_opaque (dv
));
6289 var
= (variable
) *slot
;
6291 if (! flag_var_tracking_uninit
)
6292 initialized
= VAR_INIT_STATUS_INITIALIZED
;
6296 /* Create new variable information. */
6297 var
= (variable
) pool_alloc (dv_pool (dv
));
6300 var
->n_var_parts
= 1;
6301 var
->cur_loc_changed
= false;
6302 var
->in_changed_variables
= false;
6303 var
->var_part
[0].offset
= offset
;
6304 var
->var_part
[0].loc_chain
= NULL
;
6305 var
->var_part
[0].cur_loc
= NULL
;
6308 nextp
= &var
->var_part
[0].loc_chain
;
6314 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
6318 if (GET_CODE (loc
) == VALUE
)
6320 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6321 nextp
= &node
->next
)
6322 if (GET_CODE (node
->loc
) == VALUE
)
6324 if (node
->loc
== loc
)
6329 if (canon_value_cmp (node
->loc
, loc
))
6337 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
6345 else if (REG_P (loc
))
6347 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6348 nextp
= &node
->next
)
6349 if (REG_P (node
->loc
))
6351 if (REGNO (node
->loc
) < REGNO (loc
))
6355 if (REGNO (node
->loc
) == REGNO (loc
))
6368 else if (MEM_P (loc
))
6370 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6371 nextp
= &node
->next
)
6372 if (REG_P (node
->loc
))
6374 else if (MEM_P (node
->loc
))
6376 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
6388 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6389 nextp
= &node
->next
)
6390 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
6398 if (shared_var_p (var
, set
->vars
))
6400 slot
= unshare_variable (set
, slot
, var
, initialized
);
6401 var
= (variable
)*slot
;
6402 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
6403 nextp
= &(*nextp
)->next
)
6405 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
6412 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
6414 pos
= find_variable_location_part (var
, offset
, &inspos
);
6418 node
= var
->var_part
[pos
].loc_chain
;
6421 && ((REG_P (node
->loc
) && REG_P (loc
)
6422 && REGNO (node
->loc
) == REGNO (loc
))
6423 || rtx_equal_p (node
->loc
, loc
)))
6425 /* LOC is in the beginning of the chain so we have nothing
6427 if (node
->init
< initialized
)
6428 node
->init
= initialized
;
6429 if (set_src
!= NULL
)
6430 node
->set_src
= set_src
;
6436 /* We have to make a copy of a shared variable. */
6437 if (shared_var_p (var
, set
->vars
))
6439 slot
= unshare_variable (set
, slot
, var
, initialized
);
6440 var
= (variable
)*slot
;
6446 /* We have not found the location part, new one will be created. */
6448 /* We have to make a copy of the shared variable. */
6449 if (shared_var_p (var
, set
->vars
))
6451 slot
= unshare_variable (set
, slot
, var
, initialized
);
6452 var
= (variable
)*slot
;
6455 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6456 thus there are at most MAX_VAR_PARTS different offsets. */
6457 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
6458 && (!var
->n_var_parts
|| !dv_onepart_p (var
->dv
)));
6460 /* We have to move the elements of array starting at index
6461 inspos to the next position. */
6462 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
6463 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
6466 var
->var_part
[pos
].offset
= offset
;
6467 var
->var_part
[pos
].loc_chain
= NULL
;
6468 var
->var_part
[pos
].cur_loc
= NULL
;
6471 /* Delete the location from the list. */
6472 nextp
= &var
->var_part
[pos
].loc_chain
;
6473 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
6476 if ((REG_P (node
->loc
) && REG_P (loc
)
6477 && REGNO (node
->loc
) == REGNO (loc
))
6478 || rtx_equal_p (node
->loc
, loc
))
6480 /* Save these values, to assign to the new node, before
6481 deleting this one. */
6482 if (node
->init
> initialized
)
6483 initialized
= node
->init
;
6484 if (node
->set_src
!= NULL
&& set_src
== NULL
)
6485 set_src
= node
->set_src
;
6486 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
6488 var
->var_part
[pos
].cur_loc
= NULL
;
6489 var
->cur_loc_changed
= true;
6491 pool_free (loc_chain_pool
, node
);
6496 nextp
= &node
->next
;
6499 nextp
= &var
->var_part
[pos
].loc_chain
;
6502 /* Add the location to the beginning. */
6503 node
= (location_chain
) pool_alloc (loc_chain_pool
);
6505 node
->init
= initialized
;
6506 node
->set_src
= set_src
;
6507 node
->next
= *nextp
;
6510 if (onepart
&& emit_notes
)
6511 add_value_chains (var
->dv
, loc
);
6513 /* If no location was emitted do so. */
6514 if (var
->var_part
[pos
].cur_loc
== NULL
)
6515 variable_was_changed (var
, set
);
6520 /* Set the part of variable's location in the dataflow set SET. The
6521 variable part is specified by variable's declaration in DV and
6522 offset OFFSET and the part's location by LOC. IOPT should be
6523 NO_INSERT if the variable is known to be in SET already and the
6524 variable hash table must not be resized, and INSERT otherwise. */
6527 set_variable_part (dataflow_set
*set
, rtx loc
,
6528 decl_or_value dv
, HOST_WIDE_INT offset
,
6529 enum var_init_status initialized
, rtx set_src
,
6530 enum insert_option iopt
)
6534 if (iopt
== NO_INSERT
)
6535 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6538 slot
= shared_hash_find_slot (set
->vars
, dv
);
6540 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
6542 slot
= set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
6545 /* Remove all recorded register locations for the given variable part
6546 from dataflow set SET, except for those that are identical to loc.
6547 The variable part is specified by variable's declaration or value
6548 DV and offset OFFSET. */
6551 clobber_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6552 HOST_WIDE_INT offset
, rtx set_src
)
6554 variable var
= (variable
) *slot
;
6555 int pos
= find_variable_location_part (var
, offset
, NULL
);
6559 location_chain node
, next
;
6561 /* Remove the register locations from the dataflow set. */
6562 next
= var
->var_part
[pos
].loc_chain
;
6563 for (node
= next
; node
; node
= next
)
6566 if (node
->loc
!= loc
6567 && (!flag_var_tracking_uninit
6570 || !rtx_equal_p (set_src
, node
->set_src
)))
6572 if (REG_P (node
->loc
))
6577 /* Remove the variable part from the register's
6578 list, but preserve any other variable parts
6579 that might be regarded as live in that same
6581 anextp
= &set
->regs
[REGNO (node
->loc
)];
6582 for (anode
= *anextp
; anode
; anode
= anext
)
6584 anext
= anode
->next
;
6585 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
6586 && anode
->offset
== offset
)
6588 pool_free (attrs_pool
, anode
);
6592 anextp
= &anode
->next
;
6596 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
6604 /* Remove all recorded register locations for the given variable part
6605 from dataflow set SET, except for those that are identical to loc.
6606 The variable part is specified by variable's declaration or value
6607 DV and offset OFFSET. */
6610 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
6611 HOST_WIDE_INT offset
, rtx set_src
)
6615 if (!dv_as_opaque (dv
)
6616 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
6619 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6623 slot
= clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
6626 /* Delete the part of variable's location from dataflow set SET. The
6627 variable part is specified by its SET->vars slot SLOT and offset
6628 OFFSET and the part's location by LOC. */
6631 delete_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6632 HOST_WIDE_INT offset
)
6634 variable var
= (variable
) *slot
;
6635 int pos
= find_variable_location_part (var
, offset
, NULL
);
6639 location_chain node
, next
;
6640 location_chain
*nextp
;
6643 if (shared_var_p (var
, set
->vars
))
6645 /* If the variable contains the location part we have to
6646 make a copy of the variable. */
6647 for (node
= var
->var_part
[pos
].loc_chain
; node
;
6650 if ((REG_P (node
->loc
) && REG_P (loc
)
6651 && REGNO (node
->loc
) == REGNO (loc
))
6652 || rtx_equal_p (node
->loc
, loc
))
6654 slot
= unshare_variable (set
, slot
, var
,
6655 VAR_INIT_STATUS_UNKNOWN
);
6656 var
= (variable
)*slot
;
6662 /* Delete the location part. */
6664 nextp
= &var
->var_part
[pos
].loc_chain
;
6665 for (node
= *nextp
; node
; node
= next
)
6668 if ((REG_P (node
->loc
) && REG_P (loc
)
6669 && REGNO (node
->loc
) == REGNO (loc
))
6670 || rtx_equal_p (node
->loc
, loc
))
6672 if (emit_notes
&& pos
== 0 && dv_onepart_p (var
->dv
))
6673 remove_value_chains (var
->dv
, node
->loc
);
6674 /* If we have deleted the location which was last emitted
6675 we have to emit new location so add the variable to set
6676 of changed variables. */
6677 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
6680 var
->var_part
[pos
].cur_loc
= NULL
;
6681 var
->cur_loc_changed
= true;
6683 pool_free (loc_chain_pool
, node
);
6688 nextp
= &node
->next
;
6691 if (var
->var_part
[pos
].loc_chain
== NULL
)
6696 var
->cur_loc_changed
= true;
6697 while (pos
< var
->n_var_parts
)
6699 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
6704 variable_was_changed (var
, set
);
6710 /* Delete the part of variable's location from dataflow set SET. The
6711 variable part is specified by variable's declaration or value DV
6712 and offset OFFSET and the part's location by LOC. */
6715 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
6716 HOST_WIDE_INT offset
)
6718 void **slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6722 slot
= delete_slot_part (set
, loc
, slot
, offset
);
6725 /* Structure for passing some other parameters to function
6726 vt_expand_loc_callback. */
6727 struct expand_loc_callback_data
6729 /* The variables and values active at this point. */
6732 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6733 Non-NULL should be returned if vt_expand_loc would return
6734 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6735 computed and cur_loc recomputed when possible (but just once
6736 per emit_notes_for_changes call). */
6739 /* True if expansion of subexpressions had to recompute some
6740 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6741 whose cur_loc has been already recomputed during current
6742 emit_notes_for_changes call. */
6743 bool cur_loc_changed
;
6746 /* Callback for cselib_expand_value, that looks for expressions
6747 holding the value in the var-tracking hash tables. Return X for
6748 standard processing, anything else is to be used as-is. */
6751 vt_expand_loc_callback (rtx x
, bitmap regs
, int max_depth
, void *data
)
6753 struct expand_loc_callback_data
*elcd
6754 = (struct expand_loc_callback_data
*) data
;
6755 bool dummy
= elcd
->dummy
;
6756 bool cur_loc_changed
= elcd
->cur_loc_changed
;
6760 rtx result
, subreg
, xret
;
6762 switch (GET_CODE (x
))
6767 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
6769 vt_expand_loc_callback
, data
))
6775 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
6777 vt_expand_loc_callback
, data
);
6782 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
6783 GET_MODE (SUBREG_REG (x
)),
6786 /* Invalid SUBREGs are ok in debug info. ??? We could try
6787 alternate expansions for the VALUE as well. */
6789 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
6794 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
6799 dv
= dv_from_value (x
);
6807 if (VALUE_RECURSED_INTO (x
))
6810 var
= (variable
) htab_find_with_hash (elcd
->vars
, dv
, dv_htab_hash (dv
));
6814 if (dummy
&& dv_changed_p (dv
))
6815 elcd
->cur_loc_changed
= true;
6819 if (var
->n_var_parts
== 0)
6822 elcd
->cur_loc_changed
= true;
6826 gcc_assert (var
->n_var_parts
== 1);
6828 VALUE_RECURSED_INTO (x
) = true;
6831 if (var
->var_part
[0].cur_loc
)
6835 if (cselib_dummy_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
6837 vt_expand_loc_callback
, data
))
6841 result
= cselib_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
6843 vt_expand_loc_callback
, data
);
6845 set_dv_changed (dv
, false);
6847 if (!result
&& dv_changed_p (dv
))
6849 set_dv_changed (dv
, false);
6850 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
6851 if (loc
->loc
== var
->var_part
[0].cur_loc
)
6855 elcd
->cur_loc_changed
= cur_loc_changed
;
6856 if (cselib_dummy_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
6857 vt_expand_loc_callback
,
6865 result
= cselib_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
6866 vt_expand_loc_callback
,
6872 if (dummy
&& (result
|| var
->var_part
[0].cur_loc
))
6873 var
->cur_loc_changed
= true;
6874 var
->var_part
[0].cur_loc
= loc
? loc
->loc
: NULL_RTX
;
6878 if (var
->cur_loc_changed
)
6879 elcd
->cur_loc_changed
= true;
6880 else if (!result
&& var
->var_part
[0].cur_loc
== NULL_RTX
)
6881 elcd
->cur_loc_changed
= cur_loc_changed
;
6884 VALUE_RECURSED_INTO (x
) = false;
6891 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
/* NOTE(review): extraction artifact - each statement below is split
   across several physical lines and original lines 6892-6894, 6896,
   6898, 6900-6903, 6907 and 6910-6912 (the tail of the comment above,
   braces, the !MAY_HAVE_DEBUG_INSNS early return and the final return
   of LOC) are missing from this chunk.  Recover the missing lines from
   upstream var-tracking.c before changing any code here.  */
6895 vt_expand_loc (rtx loc
, htab_t vars
)
/* Callback state handed through to cselib: carries VARS and the
   cur_loc_changed flag consumed by vt_expand_loc_callback.  */
6897 struct expand_loc_callback_data data
;
6899 if (!MAY_HAVE_DEBUG_INSNS
)
6904 data
.cur_loc_changed
= false;
/* Expand LOC with a recursion depth limit of 5, resolving VALUEs and
   DEBUG_EXPR_DECLs through vt_expand_loc_callback and the
   var-tracking hash tables.  */
6905 loc
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, 5,
6906 vt_expand_loc_callback
, &data
);
/* A MEM result may still contain target-specific address arithmetic;
   give the target a chance to rewrite it into symbolic form.  */
6908 if (loc
&& MEM_P (loc
))
6909 loc
= targetm
.delegitimize_address (loc
);
6913 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
6914 would succeed or not), without actually allocating new rtxes. */
/* NOTE(review): extraction artifact - several original lines are
   missing here (function head/return type, braces, and - presumably -
   the declaration of RET and the final 'return ret').  Recover them
   from upstream var-tracking.c before changing any code here.  */
6917 vt_expand_loc_dummy (rtx loc
, htab_t vars
, bool *pcur_loc_changed
)
6919 struct expand_loc_callback_data data
;
/* Only meaningful when debug insns exist; callers must not reach here
   otherwise.  */
6922 gcc_assert (MAY_HAVE_DEBUG_INSNS
);
6925 data
.cur_loc_changed
= false;
/* Same expansion as vt_expand_loc (depth limit 5), but via the dummy
   cselib entry point that allocates no new rtxes.  */
6926 ret
= cselib_dummy_expand_value_rtx_cb (loc
, scratch_regs
, 5,
6927 vt_expand_loc_callback
, &data
);
/* Report back whether any cur_loc had to be recomputed during the
   dummy expansion.  */
6928 *pcur_loc_changed
= data
.cur_loc_changed
;
6932 #ifdef ENABLE_RTL_CHECKING
6933 /* Used to verify that cur_loc_changed updating is safe. */
6934 static struct pointer_map_t
*emitted_notes
;
6937 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6938 additional parameters: WHERE specifies whether the note shall be emitted
6939 before or after instruction INSN. */
6942 emit_note_insn_var_location (void **varp
, void *data
)
6944 variable var
= (variable
) *varp
;
6945 rtx insn
= ((emit_note_data
*)data
)->insn
;
6946 enum emit_note_where where
= ((emit_note_data
*)data
)->where
;
6947 htab_t vars
= ((emit_note_data
*)data
)->vars
;
6949 int i
, j
, n_var_parts
;
6951 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
6952 HOST_WIDE_INT last_limit
;
6953 tree type_size_unit
;
6954 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
6955 rtx loc
[MAX_VAR_PARTS
];
6959 if (dv_is_value_p (var
->dv
))
6960 goto value_or_debug_decl
;
6962 decl
= dv_as_decl (var
->dv
);
6964 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
6965 goto value_or_debug_decl
;
6970 if (!MAY_HAVE_DEBUG_INSNS
)
6972 for (i
= 0; i
< var
->n_var_parts
; i
++)
6973 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
6975 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
6976 var
->cur_loc_changed
= true;
6978 if (var
->n_var_parts
== 0)
6979 var
->cur_loc_changed
= true;
6981 #ifndef ENABLE_RTL_CHECKING
6982 if (!var
->cur_loc_changed
)
6985 for (i
= 0; i
< var
->n_var_parts
; i
++)
6987 enum machine_mode mode
, wider_mode
;
6990 if (last_limit
< var
->var_part
[i
].offset
)
6995 else if (last_limit
> var
->var_part
[i
].offset
)
6997 offsets
[n_var_parts
] = var
->var_part
[i
].offset
;
6998 if (!var
->var_part
[i
].cur_loc
)
7003 loc2
= vt_expand_loc (var
->var_part
[i
].cur_loc
, vars
);
7009 loc
[n_var_parts
] = loc2
;
7010 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
7011 if (mode
== VOIDmode
&& dv_onepart_p (var
->dv
))
7012 mode
= DECL_MODE (decl
);
7013 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7014 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
7016 initialized
= lc
->init
;
7020 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7022 /* Attempt to merge adjacent registers or memory. */
7023 wider_mode
= GET_MODE_WIDER_MODE (mode
);
7024 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
7025 if (last_limit
<= var
->var_part
[j
].offset
)
7027 if (j
< var
->n_var_parts
7028 && wider_mode
!= VOIDmode
7029 && var
->var_part
[j
].cur_loc
7030 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
7031 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
7032 && last_limit
== var
->var_part
[j
].offset
7033 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
7034 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
7038 if (REG_P (loc
[n_var_parts
])
7039 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
7040 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
7041 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
7044 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
7045 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
7047 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
7048 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
7051 if (!REG_P (new_loc
)
7052 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
7055 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
7058 else if (MEM_P (loc
[n_var_parts
])
7059 && GET_CODE (XEXP (loc2
, 0)) == PLUS
7060 && REG_P (XEXP (XEXP (loc2
, 0), 0))
7061 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
7063 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
7064 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
7065 XEXP (XEXP (loc2
, 0), 0))
7066 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
7067 == GET_MODE_SIZE (mode
))
7068 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
7069 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7070 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
7071 XEXP (XEXP (loc2
, 0), 0))
7072 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7073 + GET_MODE_SIZE (mode
)
7074 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
7075 new_loc
= adjust_address_nv (loc
[n_var_parts
],
7081 loc
[n_var_parts
] = new_loc
;
7083 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7089 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7090 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
7093 if (! flag_var_tracking_uninit
)
7094 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7098 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
,
7100 else if (n_var_parts
== 1)
7103 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
7105 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
,
7108 else if (n_var_parts
)
7112 for (i
= 0; i
< n_var_parts
; i
++)
7114 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
7116 parallel
= gen_rtx_PARALLEL (VOIDmode
,
7117 gen_rtvec_v (n_var_parts
, loc
));
7118 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
7119 parallel
, (int) initialized
);
7122 #ifdef ENABLE_RTL_CHECKING
7125 void **note_slot
= pointer_map_insert (emitted_notes
, decl
);
7126 rtx pnote
= (rtx
) *note_slot
;
7127 if (!var
->cur_loc_changed
&& (pnote
|| PAT_VAR_LOCATION_LOC (note_vl
)))
7130 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote
),
7131 PAT_VAR_LOCATION_LOC (note_vl
)));
7133 *note_slot
= (void *) note_vl
;
7135 if (!var
->cur_loc_changed
)
7139 if (where
!= EMIT_NOTE_BEFORE_INSN
)
7141 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
7142 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
7143 NOTE_DURING_CALL_P (note
) = true;
7146 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
7147 NOTE_VAR_LOCATION (note
) = note_vl
;
7150 set_dv_changed (var
->dv
, false);
7151 var
->cur_loc_changed
= false;
7152 gcc_assert (var
->in_changed_variables
);
7153 var
->in_changed_variables
= false;
7154 htab_clear_slot (changed_variables
, varp
);
7156 /* Continue traversing the hash table. */
7159 value_or_debug_decl
:
7160 if (dv_changed_p (var
->dv
) && var
->n_var_parts
)
7163 bool cur_loc_changed
;
7165 if (var
->var_part
[0].cur_loc
7166 && vt_expand_loc_dummy (var
->var_part
[0].cur_loc
, vars
,
7169 for (lc
= var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7170 if (lc
->loc
!= var
->var_part
[0].cur_loc
7171 && vt_expand_loc_dummy (lc
->loc
, vars
, &cur_loc_changed
))
7173 var
->var_part
[0].cur_loc
= lc
? lc
->loc
: NULL_RTX
;
7178 DEF_VEC_P (variable
);
7179 DEF_VEC_ALLOC_P (variable
, heap
);
7181 /* Stack of variable_def pointers that need processing with
7182 check_changed_vars_2. */
7184 static VEC (variable
, heap
) *changed_variables_stack
;
7186 /* VALUEs with no variables that need set_dv_changed (val, false)
7187 called before check_changed_vars_3. */
7189 static VEC (rtx
, heap
) *changed_values_stack
;
7191 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7194 check_changed_vars_0 (decl_or_value dv
, htab_t htab
)
7197 = (value_chain
) htab_find_with_hash (value_chains
, dv
, dv_htab_hash (dv
));
7201 for (vc
= vc
->next
; vc
; vc
= vc
->next
)
7202 if (!dv_changed_p (vc
->dv
))
7205 = (variable
) htab_find_with_hash (htab
, vc
->dv
,
7206 dv_htab_hash (vc
->dv
));
7209 set_dv_changed (vc
->dv
, true);
7210 VEC_safe_push (variable
, heap
, changed_variables_stack
, vcvar
);
7212 else if (dv_is_value_p (vc
->dv
))
7214 set_dv_changed (vc
->dv
, true);
7215 VEC_safe_push (rtx
, heap
, changed_values_stack
,
7216 dv_as_value (vc
->dv
));
7217 check_changed_vars_0 (vc
->dv
, htab
);
7222 /* Populate changed_variables_stack with variable_def pointers
7223 that need variable_was_changed called on them. */
7226 check_changed_vars_1 (void **slot
, void *data
)
7228 variable var
= (variable
) *slot
;
7229 htab_t htab
= (htab_t
) data
;
7231 if (dv_is_value_p (var
->dv
)
7232 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7233 check_changed_vars_0 (var
->dv
, htab
);
7237 /* Add VAR to changed_variables and also for VALUEs add recursively
7238 all DVs that aren't in changed_variables yet but reference the
7239 VALUE from its loc_chain. */
7242 check_changed_vars_2 (variable var
, htab_t htab
)
7244 variable_was_changed (var
, NULL
);
7245 if (dv_is_value_p (var
->dv
)
7246 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7247 check_changed_vars_0 (var
->dv
, htab
);
7250 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7251 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7252 it needs and are also in changed variables) and track whether
7253 cur_loc (or anything it uses to compute location) had to change
7254 during the current emit_notes_for_changes call. */
7257 check_changed_vars_3 (void **slot
, void *data
)
7259 variable var
= (variable
) *slot
;
7260 htab_t vars
= (htab_t
) data
;
7263 bool cur_loc_changed
;
7265 if (dv_is_value_p (var
->dv
)
7266 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7269 for (i
= 0; i
< var
->n_var_parts
; i
++)
7271 if (var
->var_part
[i
].cur_loc
7272 && vt_expand_loc_dummy (var
->var_part
[i
].cur_loc
, vars
,
7275 if (cur_loc_changed
)
7276 var
->cur_loc_changed
= true;
7279 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7280 if (lc
->loc
!= var
->var_part
[i
].cur_loc
7281 && vt_expand_loc_dummy (lc
->loc
, vars
, &cur_loc_changed
))
7283 if (lc
|| var
->var_part
[i
].cur_loc
)
7284 var
->cur_loc_changed
= true;
7285 var
->var_part
[i
].cur_loc
= lc
? lc
->loc
: NULL_RTX
;
7287 if (var
->n_var_parts
== 0)
7288 var
->cur_loc_changed
= true;
7292 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7293 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7294 shall be emitted before of after instruction INSN. */
7297 emit_notes_for_changes (rtx insn
, enum emit_note_where where
,
7300 emit_note_data data
;
7301 htab_t htab
= shared_hash_htab (vars
);
7303 if (!htab_elements (changed_variables
))
7306 if (MAY_HAVE_DEBUG_INSNS
)
7308 /* Unfortunately this has to be done in two steps, because
7309 we can't traverse a hashtab into which we are inserting
7310 through variable_was_changed. */
7311 htab_traverse (changed_variables
, check_changed_vars_1
, htab
);
7312 while (VEC_length (variable
, changed_variables_stack
) > 0)
7313 check_changed_vars_2 (VEC_pop (variable
, changed_variables_stack
),
7315 while (VEC_length (rtx
, changed_values_stack
) > 0)
7316 set_dv_changed (dv_from_value (VEC_pop (rtx
, changed_values_stack
)),
7318 htab_traverse (changed_variables
, check_changed_vars_3
, htab
);
7325 htab_traverse (changed_variables
, emit_note_insn_var_location
, &data
);
7328 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7329 same variable in hash table DATA or is not there at all. */
7332 emit_notes_for_differences_1 (void **slot
, void *data
)
7334 htab_t new_vars
= (htab_t
) data
;
7335 variable old_var
, new_var
;
7337 old_var
= (variable
) *slot
;
7338 new_var
= (variable
) htab_find_with_hash (new_vars
, old_var
->dv
,
7339 dv_htab_hash (old_var
->dv
));
7343 /* Variable has disappeared. */
7346 empty_var
= (variable
) pool_alloc (dv_pool (old_var
->dv
));
7347 empty_var
->dv
= old_var
->dv
;
7348 empty_var
->refcount
= 0;
7349 empty_var
->n_var_parts
= 0;
7350 empty_var
->cur_loc_changed
= false;
7351 empty_var
->in_changed_variables
= false;
7352 if (dv_onepart_p (old_var
->dv
))
7356 gcc_assert (old_var
->n_var_parts
== 1);
7357 for (lc
= old_var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7358 remove_value_chains (old_var
->dv
, lc
->loc
);
7360 variable_was_changed (empty_var
, NULL
);
7361 /* Continue traversing the hash table. */
7364 if (variable_different_p (old_var
, new_var
))
7366 if (dv_onepart_p (old_var
->dv
))
7368 location_chain lc1
, lc2
;
7370 gcc_assert (old_var
->n_var_parts
== 1);
7371 gcc_assert (new_var
->n_var_parts
== 1);
7372 lc1
= old_var
->var_part
[0].loc_chain
;
7373 lc2
= new_var
->var_part
[0].loc_chain
;
7376 && ((REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
7377 || rtx_equal_p (lc1
->loc
, lc2
->loc
)))
7382 for (; lc2
; lc2
= lc2
->next
)
7383 add_value_chains (old_var
->dv
, lc2
->loc
);
7384 for (; lc1
; lc1
= lc1
->next
)
7385 remove_value_chains (old_var
->dv
, lc1
->loc
);
7387 variable_was_changed (new_var
, NULL
);
7389 /* Update cur_loc. */
7390 if (old_var
!= new_var
)
7393 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
7395 new_var
->var_part
[i
].cur_loc
= NULL
;
7396 if (old_var
->n_var_parts
!= new_var
->n_var_parts
7397 || old_var
->var_part
[i
].offset
!= new_var
->var_part
[i
].offset
)
7398 new_var
->cur_loc_changed
= true;
7399 else if (old_var
->var_part
[i
].cur_loc
!= NULL
)
7402 rtx cur_loc
= old_var
->var_part
[i
].cur_loc
;
7404 for (lc
= new_var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7405 if (lc
->loc
== cur_loc
7406 || rtx_equal_p (cur_loc
, lc
->loc
))
7408 new_var
->var_part
[i
].cur_loc
= lc
->loc
;
7412 new_var
->cur_loc_changed
= true;
7417 /* Continue traversing the hash table. */
7421 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7425 emit_notes_for_differences_2 (void **slot
, void *data
)
7427 htab_t old_vars
= (htab_t
) data
;
7428 variable old_var
, new_var
;
7430 new_var
= (variable
) *slot
;
7431 old_var
= (variable
) htab_find_with_hash (old_vars
, new_var
->dv
,
7432 dv_htab_hash (new_var
->dv
));
7436 /* Variable has appeared. */
7437 if (dv_onepart_p (new_var
->dv
))
7441 gcc_assert (new_var
->n_var_parts
== 1);
7442 for (lc
= new_var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7443 add_value_chains (new_var
->dv
, lc
->loc
);
7445 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
7446 new_var
->var_part
[i
].cur_loc
= NULL
;
7447 variable_was_changed (new_var
, NULL
);
7450 /* Continue traversing the hash table. */
7454 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
/* NOTE(review): extraction artifact - the tail of the comment above
   (original lines 7455-7457) and this function's head/braces are
   missing from this chunk; consult upstream var-tracking.c.  */
7458 emit_notes_for_differences (rtx insn
, dataflow_set
*old_set
,
7459 dataflow_set
*new_set
)
/* Pass 1: queue variables that disappeared from or differ in NEW_SET
   relative to OLD_SET (see emit_notes_for_differences_1 above).  */
7461 htab_traverse (shared_hash_htab (old_set
->vars
),
7462 emit_notes_for_differences_1
,
7463 shared_hash_htab (new_set
->vars
));
/* Pass 2: queue variables that newly appeared in NEW_SET (see
   emit_notes_for_differences_2 above).  */
7464 htab_traverse (shared_hash_htab (new_set
->vars
),
7465 emit_notes_for_differences_2
,
7466 shared_hash_htab (old_set
->vars
));
/* Emit NOTE_INSN_VAR_LOCATION notes before INSN for everything queued,
   expanding locations against NEW_SET's variable table.  */
7467 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
7470 /* Emit the notes for changes of location parts in the basic block BB. */
7473 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
7476 micro_operation
*mo
;
7478 dataflow_set_clear (set
);
7479 dataflow_set_copy (set
, &VTI (bb
)->in
);
7481 for (i
= 0; VEC_iterate (micro_operation
, VTI (bb
)->mos
, i
, mo
); i
++)
7483 rtx insn
= mo
->insn
;
7488 dataflow_set_clear_at_call (set
);
7489 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
7494 rtx loc
= mo
->u
.loc
;
7497 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
7499 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
7501 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
7507 rtx loc
= mo
->u
.loc
;
7511 if (GET_CODE (loc
) == CONCAT
)
7513 val
= XEXP (loc
, 0);
7514 vloc
= XEXP (loc
, 1);
7522 var
= PAT_VAR_LOCATION_DECL (vloc
);
7524 clobber_variable_part (set
, NULL_RTX
,
7525 dv_from_decl (var
), 0, NULL_RTX
);
7528 if (VAL_NEEDS_RESOLUTION (loc
))
7529 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
7530 set_variable_part (set
, val
, dv_from_decl (var
), 0,
7531 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
7534 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
7535 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
7536 dv_from_decl (var
), 0,
7537 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
7540 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
7546 rtx loc
= mo
->u
.loc
;
7547 rtx val
, vloc
, uloc
;
7549 vloc
= uloc
= XEXP (loc
, 1);
7550 val
= XEXP (loc
, 0);
7552 if (GET_CODE (val
) == CONCAT
)
7554 uloc
= XEXP (val
, 1);
7555 val
= XEXP (val
, 0);
7558 if (VAL_NEEDS_RESOLUTION (loc
))
7559 val_resolve (set
, val
, vloc
, insn
);
7561 val_store (set
, val
, uloc
, insn
, false);
7563 if (VAL_HOLDS_TRACK_EXPR (loc
))
7565 if (GET_CODE (uloc
) == REG
)
7566 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
7568 else if (GET_CODE (uloc
) == MEM
)
7569 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
7573 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
7579 rtx loc
= mo
->u
.loc
;
7580 rtx val
, vloc
, uloc
, reverse
= NULL_RTX
;
7583 if (VAL_EXPR_HAS_REVERSE (loc
))
7585 reverse
= XEXP (loc
, 1);
7586 vloc
= XEXP (loc
, 0);
7588 uloc
= XEXP (vloc
, 1);
7589 val
= XEXP (vloc
, 0);
7592 if (GET_CODE (val
) == CONCAT
)
7594 vloc
= XEXP (val
, 1);
7595 val
= XEXP (val
, 0);
7598 if (GET_CODE (vloc
) == SET
)
7600 rtx vsrc
= SET_SRC (vloc
);
7602 gcc_assert (val
!= vsrc
);
7603 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
7605 vloc
= SET_DEST (vloc
);
7607 if (VAL_NEEDS_RESOLUTION (loc
))
7608 val_resolve (set
, val
, vsrc
, insn
);
7610 else if (VAL_NEEDS_RESOLUTION (loc
))
7612 gcc_assert (GET_CODE (uloc
) == SET
7613 && GET_CODE (SET_SRC (uloc
)) == REG
);
7614 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
7617 if (VAL_HOLDS_TRACK_EXPR (loc
))
7619 if (VAL_EXPR_IS_CLOBBERED (loc
))
7622 var_reg_delete (set
, uloc
, true);
7623 else if (MEM_P (uloc
))
7624 var_mem_delete (set
, uloc
, true);
7628 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
7630 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
7632 if (GET_CODE (uloc
) == SET
)
7634 set_src
= SET_SRC (uloc
);
7635 uloc
= SET_DEST (uloc
);
7640 status
= find_src_status (set
, set_src
);
7642 set_src
= find_src_set_src (set
, set_src
);
7646 var_reg_delete_and_set (set
, uloc
, !copied_p
,
7648 else if (MEM_P (uloc
))
7649 var_mem_delete_and_set (set
, uloc
, !copied_p
,
7653 else if (REG_P (uloc
))
7654 var_regno_delete (set
, REGNO (uloc
));
7656 val_store (set
, val
, vloc
, insn
, true);
7659 val_store (set
, XEXP (reverse
, 0), XEXP (reverse
, 1),
7662 emit_notes_for_changes (NEXT_INSN (insn
), EMIT_NOTE_BEFORE_INSN
,
7669 rtx loc
= mo
->u
.loc
;
7672 if (GET_CODE (loc
) == SET
)
7674 set_src
= SET_SRC (loc
);
7675 loc
= SET_DEST (loc
);
7679 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
7682 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
7685 emit_notes_for_changes (NEXT_INSN (insn
), EMIT_NOTE_BEFORE_INSN
,
7692 rtx loc
= mo
->u
.loc
;
7693 enum var_init_status src_status
;
7696 if (GET_CODE (loc
) == SET
)
7698 set_src
= SET_SRC (loc
);
7699 loc
= SET_DEST (loc
);
7702 src_status
= find_src_status (set
, set_src
);
7703 set_src
= find_src_set_src (set
, set_src
);
7706 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
7708 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
7710 emit_notes_for_changes (NEXT_INSN (insn
), EMIT_NOTE_BEFORE_INSN
,
7717 rtx loc
= mo
->u
.loc
;
7720 var_reg_delete (set
, loc
, false);
7722 var_mem_delete (set
, loc
, false);
7724 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
7730 rtx loc
= mo
->u
.loc
;
7733 var_reg_delete (set
, loc
, true);
7735 var_mem_delete (set
, loc
, true);
7737 emit_notes_for_changes (NEXT_INSN (insn
), EMIT_NOTE_BEFORE_INSN
,
7743 set
->stack_adjust
+= mo
->u
.adjust
;
7749 /* Emit notes for the whole function. */
7752 vt_emit_notes (void)
7757 #ifdef ENABLE_RTL_CHECKING
7758 emitted_notes
= pointer_map_create ();
7760 gcc_assert (!htab_elements (changed_variables
));
7762 /* Free memory occupied by the out hash tables, as they aren't used
7765 dataflow_set_clear (&VTI (bb
)->out
);
7767 /* Enable emitting notes by functions (mainly by set_variable_part and
7768 delete_variable_part). */
7771 if (MAY_HAVE_DEBUG_INSNS
)
7776 for (i
= 0; VEC_iterate (rtx
, preserved_values
, i
, val
); i
++)
7777 add_cselib_value_chains (dv_from_value (val
));
7778 changed_variables_stack
= VEC_alloc (variable
, heap
, 40);
7779 changed_values_stack
= VEC_alloc (rtx
, heap
, 40);
7782 dataflow_set_init (&cur
);
7786 /* Emit the notes for changes of variable locations between two
7787 subsequent basic blocks. */
7788 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
7790 /* Emit the notes for the changes in the basic block itself. */
7791 emit_notes_in_bb (bb
, &cur
);
7793 /* Free memory occupied by the in hash table, we won't need it
7795 dataflow_set_clear (&VTI (bb
)->in
);
7797 #ifdef ENABLE_CHECKING
7798 htab_traverse (shared_hash_htab (cur
.vars
),
7799 emit_notes_for_differences_1
,
7800 shared_hash_htab (empty_shared_hash
));
7801 if (MAY_HAVE_DEBUG_INSNS
)
7806 for (i
= 0; VEC_iterate (rtx
, preserved_values
, i
, val
); i
++)
7807 remove_cselib_value_chains (dv_from_value (val
));
7808 gcc_assert (htab_elements (value_chains
) == 0);
7811 dataflow_set_destroy (&cur
);
7813 if (MAY_HAVE_DEBUG_INSNS
)
7815 VEC_free (variable
, heap
, changed_variables_stack
);
7816 VEC_free (rtx
, heap
, changed_values_stack
);
7819 #ifdef ENABLE_RTL_CHECKING
7820 pointer_map_destroy (emitted_notes
);
7825 /* If there is a declaration and offset associated with register/memory RTL
7826 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
/* NOTE(review): extraction artifact - original lines between the
   visible ones are missing (function head/return type, braces, the
   outer REG_P test and the true/false return statements - presumably).
   Recover them from upstream var-tracking.c before editing.  */
7829 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
/* Register case: take decl/offset from the REG's attributes.  */
7833 if (REG_ATTRS (rtl
))
7835 *declp
= REG_EXPR (rtl
);
7836 *offsetp
= REG_OFFSET (rtl
);
/* Memory case: take decl/offset from the MEM's attributes.  */
7840 else if (MEM_P (rtl
))
7842 if (MEM_ATTRS (rtl
))
7844 *declp
= MEM_EXPR (rtl
);
7845 *offsetp
= INT_MEM_OFFSET (rtl
);
7852 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
7855 vt_add_function_parameters (void)
7859 for (parm
= DECL_ARGUMENTS (current_function_decl
);
7860 parm
; parm
= TREE_CHAIN (parm
))
7862 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
7863 rtx incoming
= DECL_INCOMING_RTL (parm
);
7865 enum machine_mode mode
;
7866 HOST_WIDE_INT offset
;
7870 if (TREE_CODE (parm
) != PARM_DECL
)
7873 if (!DECL_NAME (parm
))
7876 if (!decl_rtl
|| !incoming
)
7879 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
7882 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
7884 if (REG_P (incoming
) || MEM_P (incoming
))
7886 /* This means argument is passed by invisible reference. */
7889 incoming
= gen_rtx_MEM (GET_MODE (decl_rtl
), incoming
);
7893 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
7895 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
7896 GET_MODE (decl_rtl
));
7905 /* Assume that DECL_RTL was a pseudo that got spilled to
7906 memory. The spill slot sharing code will force the
7907 memory to reference spill_slot_decl (%sfp), so we don't
7908 match above. That's ok, the pseudo must have referenced
7909 the entire parameter, so just reset OFFSET. */
7910 gcc_assert (decl
== get_spill_slot_decl (false));
7914 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
7917 out
= &VTI (ENTRY_BLOCK_PTR
)->out
;
7919 dv
= dv_from_decl (parm
);
7921 if (target_for_debug_bind (parm
)
7922 /* We can't deal with these right now, because this kind of
7923 variable is single-part. ??? We could handle parallels
7924 that describe multiple locations for the same single
7925 value, but ATM we don't. */
7926 && GET_CODE (incoming
) != PARALLEL
)
7930 /* ??? We shouldn't ever hit this, but it may happen because
7931 arguments passed by invisible reference aren't dealt with
7932 above: incoming-rtl will have Pmode rather than the
7933 expected mode for the type. */
7937 val
= cselib_lookup (var_lowpart (mode
, incoming
), mode
, true);
7939 /* ??? Float-typed values in memory are not handled by
7943 preserve_value (val
);
7944 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
7945 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
7946 dv
= dv_from_value (val
->val_rtx
);
7950 if (REG_P (incoming
))
7952 incoming
= var_lowpart (mode
, incoming
);
7953 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
7954 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
7956 set_variable_part (out
, incoming
, dv
, offset
,
7957 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
7959 else if (MEM_P (incoming
))
7961 incoming
= var_lowpart (mode
, incoming
);
7962 set_variable_part (out
, incoming
, dv
, offset
,
7963 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
7967 if (MAY_HAVE_DEBUG_INSNS
)
7969 cselib_preserve_only_values ();
7970 cselib_reset_table (cselib_get_next_uid ());
7975 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
/* NOTE(review): extraction artifact - original lines 7976-7977, 7979,
   7982, 7984, 7986, 7990-7991 and 7995 onward are missing here
   (function head/return type, braces, a guard on EXPR being non-null
   and the 'return true'/'return false' tails of the PARALLEL loop -
   presumably).  Recover them from upstream var-tracking.c.  */
7978 fp_setter (rtx insn
)
7980 rtx pat
= PATTERN (insn
);
/* A frame-related insn may describe its frame effect in a
   REG_FRAME_RELATED_EXPR note instead of its pattern; prefer the
   note's expression when present.  */
7981 if (RTX_FRAME_RELATED_P (insn
))
7983 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
7985 pat
= XEXP (expr
, 0);
/* Single SET: the insn sets the frame pointer iff its destination is
   hard_frame_pointer_rtx.  */
7987 if (GET_CODE (pat
) == SET
)
7988 return SET_DEST (pat
) == hard_frame_pointer_rtx
;
/* PARALLEL: scan each member SET for a hard-frame-pointer
   destination.  */
7989 else if (GET_CODE (pat
) == PARALLEL
)
7992 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
7993 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
7994 && SET_DEST (XVECEXP (pat
, 0, i
)) == hard_frame_pointer_rtx
)
8000 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8001 ensure it isn't flushed during cselib_reset_table.
8002 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8003 has been eliminated. */
8006 vt_init_cfa_base (void)
8010 #ifdef FRAME_POINTER_CFA_OFFSET
8011 cfa_base_rtx
= frame_pointer_rtx
;
8013 cfa_base_rtx
= arg_pointer_rtx
;
8015 if (cfa_base_rtx
== hard_frame_pointer_rtx
8016 || !fixed_regs
[REGNO (cfa_base_rtx
)])
8018 cfa_base_rtx
= NULL_RTX
;
8021 if (!MAY_HAVE_DEBUG_INSNS
)
8024 val
= cselib_lookup (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1);
8025 preserve_value (val
);
8026 cselib_preserve_cfa_base_value (val
);
8027 val
->locs
->setting_insn
= get_insns ();
8028 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR
)->out
, cfa_base_rtx
,
8029 VAR_INIT_STATUS_INITIALIZED
, dv_from_value (val
->val_rtx
),
8030 0, NULL_RTX
, INSERT
);
8033 /* Allocate and initialize the data structures for variable tracking
8034 and parse the RTL to get the micro operations. */
8037 vt_initialize (void)
8039 basic_block bb
, prologue_bb
= NULL
;
8040 HOST_WIDE_INT fp_cfa_offset
= -1;
8042 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
8044 attrs_pool
= create_alloc_pool ("attrs_def pool",
8045 sizeof (struct attrs_def
), 1024);
8046 var_pool
= create_alloc_pool ("variable_def pool",
8047 sizeof (struct variable_def
)
8048 + (MAX_VAR_PARTS
- 1)
8049 * sizeof (((variable
)NULL
)->var_part
[0]), 64);
8050 loc_chain_pool
= create_alloc_pool ("location_chain_def pool",
8051 sizeof (struct location_chain_def
),
8053 shared_hash_pool
= create_alloc_pool ("shared_hash_def pool",
8054 sizeof (struct shared_hash_def
), 256);
8055 empty_shared_hash
= (shared_hash
) pool_alloc (shared_hash_pool
);
8056 empty_shared_hash
->refcount
= 1;
8057 empty_shared_hash
->htab
8058 = htab_create (1, variable_htab_hash
, variable_htab_eq
,
8059 variable_htab_free
);
8060 changed_variables
= htab_create (10, variable_htab_hash
, variable_htab_eq
,
8061 variable_htab_free
);
8062 if (MAY_HAVE_DEBUG_INSNS
)
8064 value_chain_pool
= create_alloc_pool ("value_chain_def pool",
8065 sizeof (struct value_chain_def
),
8067 value_chains
= htab_create (32, value_chain_htab_hash
,
8068 value_chain_htab_eq
, NULL
);
8071 /* Init the IN and OUT sets. */
8074 VTI (bb
)->visited
= false;
8075 VTI (bb
)->flooded
= false;
8076 dataflow_set_init (&VTI (bb
)->in
);
8077 dataflow_set_init (&VTI (bb
)->out
);
8078 VTI (bb
)->permp
= NULL
;
8081 if (MAY_HAVE_DEBUG_INSNS
)
8083 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
8084 scratch_regs
= BITMAP_ALLOC (NULL
);
8085 valvar_pool
= create_alloc_pool ("small variable_def pool",
8086 sizeof (struct variable_def
), 256);
8087 preserved_values
= VEC_alloc (rtx
, heap
, 256);
8091 scratch_regs
= NULL
;
8095 if (!frame_pointer_needed
)
8099 if (!vt_stack_adjustments ())
8102 #ifdef FRAME_POINTER_CFA_OFFSET
8103 reg
= frame_pointer_rtx
;
8105 reg
= arg_pointer_rtx
;
8107 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
8110 if (GET_CODE (elim
) == PLUS
)
8111 elim
= XEXP (elim
, 0);
8112 if (elim
== stack_pointer_rtx
)
8113 vt_init_cfa_base ();
8116 else if (!crtl
->stack_realign_tried
)
8120 #ifdef FRAME_POINTER_CFA_OFFSET
8121 reg
= frame_pointer_rtx
;
8122 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
8124 reg
= arg_pointer_rtx
;
8125 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
8127 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
8130 if (GET_CODE (elim
) == PLUS
)
8132 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
8133 elim
= XEXP (elim
, 0);
8135 if (elim
!= hard_frame_pointer_rtx
)
8138 prologue_bb
= single_succ (ENTRY_BLOCK_PTR
);
8142 hard_frame_pointer_adjustment
= -1;
8147 HOST_WIDE_INT pre
, post
= 0;
8148 basic_block first_bb
, last_bb
;
8150 if (MAY_HAVE_DEBUG_INSNS
)
8152 cselib_record_sets_hook
= add_with_sets
;
8153 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8154 fprintf (dump_file
, "first value: %i\n",
8155 cselib_get_next_uid ());
8162 if (bb
->next_bb
== EXIT_BLOCK_PTR
8163 || ! single_pred_p (bb
->next_bb
))
8165 e
= find_edge (bb
, bb
->next_bb
);
8166 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
8172 /* Add the micro-operations to the vector. */
8173 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
8175 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
8176 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
8177 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
8178 insn
= NEXT_INSN (insn
))
8182 if (!frame_pointer_needed
)
8184 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
8188 mo
.type
= MO_ADJUST
;
8191 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8192 log_op_type (PATTERN (insn
), bb
, insn
,
8193 MO_ADJUST
, dump_file
);
8194 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
,
8196 VTI (bb
)->out
.stack_adjust
+= pre
;
8200 cselib_hook_called
= false;
8201 adjust_insn (bb
, insn
);
8202 if (MAY_HAVE_DEBUG_INSNS
)
8204 cselib_process_insn (insn
);
8205 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8207 print_rtl_single (dump_file
, insn
);
8208 dump_cselib_table (dump_file
);
8211 if (!cselib_hook_called
)
8212 add_with_sets (insn
, 0, 0);
8215 if (!frame_pointer_needed
&& post
)
8218 mo
.type
= MO_ADJUST
;
8221 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8222 log_op_type (PATTERN (insn
), bb
, insn
,
8223 MO_ADJUST
, dump_file
);
8224 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
,
8226 VTI (bb
)->out
.stack_adjust
+= post
;
8229 if (bb
== prologue_bb
8230 && hard_frame_pointer_adjustment
== -1
8231 && RTX_FRAME_RELATED_P (insn
)
8232 && fp_setter (insn
))
8234 vt_init_cfa_base ();
8235 hard_frame_pointer_adjustment
= fp_cfa_offset
;
8239 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
8244 if (MAY_HAVE_DEBUG_INSNS
)
8246 cselib_preserve_only_values ();
8247 cselib_reset_table (cselib_get_next_uid ());
8248 cselib_record_sets_hook
= NULL
;
8252 hard_frame_pointer_adjustment
= -1;
8253 VTI (ENTRY_BLOCK_PTR
)->flooded
= true;
8254 vt_add_function_parameters ();
8255 cfa_base_rtx
= NULL_RTX
;
8259 /* Get rid of all debug insns from the insn stream. */
/* NOTE(review): this chunk is a lossy extraction -- the jumps in the
   embedded source-line numbers (8259 -> 8262 -> 8267 ...) show that the
   function's return type, braces, local declarations and the body of the
   innermost `if' are missing here.  Do not treat this fragment as
   compilable on its own.  */
8262 delete_debug_insns (void)
/* Fast path: if this compilation cannot contain debug insns at all,
   there is nothing to scan for.  */
8267 if (!MAY_HAVE_DEBUG_INSNS
)
/* The _SAFE iterator keeps NEXT one step ahead of INSN, so the matched
   insn can presumably be deleted during the walk -- TODO confirm: the
   deletion statement itself is not visible in this extraction.  */
8272 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
8273 if (DEBUG_INSN_P (insn
))
8278 /* Run a fast, BB-local only version of var tracking, to take care of
8279 information that we don't do global analysis on, such that not all
8280 information is lost. If SKIPPED holds, we're skipping the global
8281 pass entirely, so we should try to use information it would have
8282 handled as well. */
8285 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
8287 /* ??? Just skip it all for now. */
/* Placeholder implementation: instead of any BB-local analysis we simply
   drop every debug insn, which is why SKIPPED is currently unused (hence
   the ATTRIBUTE_UNUSED marker on the parameter).  */
8288 delete_debug_insns ();
8291 /* Free the data structures needed for variable tracking. */
/* NOTE(review): lossy extraction -- the function header (presumably
   `static void vt_finalize (void)') and the FOR_EACH_BB loop heads are
   missing between the numbered fragments.  From the per-block VTI
   accesses this is evidently the teardown that undoes the allocations
   made during vt_initialize, first per basic block, then globally.  */
/* Per-block cleanup: release each block's micro-operation vector ...  */
8300 VEC_free (micro_operation
, heap
, VTI (bb
)->mos
);
/* ... and destroy its IN and OUT dataflow sets.  */
8305 dataflow_set_destroy (&VTI (bb
)->in
);
8306 dataflow_set_destroy (&VTI (bb
)->out
);
/* PERMP is only allocated on demand, so destroy and free it only when
   it is non-null.  */
8307 if (VTI (bb
)->permp
)
8309 dataflow_set_destroy (VTI (bb
)->permp
);
8310 XDELETE (VTI (bb
)->permp
);
/* Global cleanup: drop the per-block aux data, the shared hash tables
   and the allocation pools used by the pass.  */
8313 free_aux_for_blocks ();
8314 htab_delete (empty_shared_hash
->htab
);
8315 htab_delete (changed_variables
);
8316 free_alloc_pool (attrs_pool
);
8317 free_alloc_pool (var_pool
);
8318 free_alloc_pool (loc_chain_pool
);
8319 free_alloc_pool (shared_hash_pool
);
/* The following structures exist only when debug insns are possible
   (they back the cselib value-tracking machinery).  */
8321 if (MAY_HAVE_DEBUG_INSNS
)
8323 htab_delete (value_chains
);
8324 free_alloc_pool (value_chain_pool
);
8325 free_alloc_pool (valvar_pool
);
8326 VEC_free (rtx
, heap
, preserved_values
);
/* Free the scratch-register bitmap and null the pointer so a stale
   reference cannot be used after this pass finishes.  */
8328 BITMAP_FREE (scratch_regs
);
8329 scratch_regs
= NULL
;
8333 XDELETEVEC (vui_vec
);
8338 /* The entry point to variable tracking pass. */
/* NOTE(review): lossy extraction -- braces, local declarations and the
   return statements are missing between the numbered fragments; the
   control flow below is reconstructed from what is visible.  */
8340 static inline unsigned int
8341 variable_tracking_main_1 (void)
/* A negative flag_var_tracking_assignments means "assignments tracking
   disabled": debug insns are removed up front.  */
8345 if (flag_var_tracking_assignments
< 0)
8347 delete_debug_insns ();
/* Bail-out heuristic for very large, densely connected CFGs: more than
   500 blocks with an average of >= 20 edges per block.  In that case run
   only the cheap BB-local fallback (SKIPPED == true).  */
8351 if (n_basic_blocks
> 500 && n_edges
/ n_basic_blocks
>= 20)
8353 vt_debug_insns_local (true);
8357 mark_dfs_back_edges ();
/* If the pass data structures cannot be set up, fall back to the
   BB-local variant as well.  */
8358 if (!vt_initialize ())
8361 vt_debug_insns_local (true);
8365 success
= vt_find_locations ();
/* If the dataflow solver gave up while assignment tracking was
   explicitly enabled, retry once without assignment tracking: drop the
   debug insns, clear the flag, and redo initialization + solving.  */
8367 if (!success
&& flag_var_tracking_assignments
> 0)
8371 delete_debug_insns ();
8373 /* This is later restored by our caller. */
8374 flag_var_tracking_assignments
= 0;
/* Re-initialization is expected to succeed now that debug insns are
   gone -- hence the assert rather than another fallback.  */
8376 success
= vt_initialize ();
8377 gcc_assert (success
);
8379 success
= vt_find_locations ();
8385 vt_debug_insns_local (false);
/* Optional detailed dumps of the computed dataflow sets.  */
8389 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8391 dump_dataflow_sets ();
8392 dump_flow_info (dump_file
, dump_flags
);
8398 vt_debug_insns_local (false);
/* Wrapper around variable_tracking_main_1 that saves and restores
   flag_var_tracking_assignments, since the worker may clear the flag
   during its no-assignments retry (see the "later restored by our
   caller" comment there).  NOTE(review): the return type line and
   braces are missing from this extraction.  */
8403 variable_tracking_main (void)
8406 int save
= flag_var_tracking_assignments
;
8408 ret
= variable_tracking_main_1 ();
/* Restore the caller-visible flag value regardless of what the worker
   did to it.  */
8410 flag_var_tracking_assignments
= save
;
/* Gate function for the pass: run variable tracking only when the
   -fvar-tracking flag is enabled.  NOTE(review): the return type line
   and braces are missing from this extraction.  */
8416 gate_handle_var_tracking (void)
8418 return (flag_var_tracking
);
8423 struct rtl_opt_pass pass_variable_tracking
=
8427 "vartrack", /* name */
8428 gate_handle_var_tracking
, /* gate */
8429 variable_tracking_main
, /* execute */
8432 0, /* static_pass_number */
8433 TV_VAR_TRACKING
, /* tv_id */
8434 0, /* properties_required */
8435 0, /* properties_provided */
8436 0, /* properties_destroyed */
8437 0, /* todo_flags_start */
8438 TODO_dump_func
| TODO_verify_rtl_sharing
/* todo_flags_finish */