/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.
   How does the variable tracking pass work?

   First, it scans the RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments,
   separately for each basic block, and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   set < clobber < post-modifying stack adjustment.
   Then, a forward dataflow analysis is performed to find out how the locations
   of variables change through the code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of the OUT sets of BB's
   predecessors; the OUT set for BB is copied from the IN set for BB and
   is changed according to the micro operations in BB.
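
   As an illustrative sketch only (the actual worklist iteration lives in
   vt_find_locations and compute_bb_dataflow, declared below), the
   propagation step for a single basic block BB looks roughly like:

     dataflow_set_clear (&VTI (bb)->in);
     FOR_EACH_EDGE (e, ei, bb->preds)
       dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
     changed = compute_bb_dataflow (bb);

   where compute_bb_dataflow copies IN to OUT and then applies the micro
   operations recorded for BB; blocks whose OUT set changed put their
   successors back on the worklist.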
   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting the offset of variables addressed using the stack
   pointer), the table of structures describing the locations of parts of a
   variable and, for each physical register, a linked list of attributes.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
   efficiently deleting the appropriate variable parts when we set or clobber
   the register.
   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so a linked list is a good data structure here.
   For example, the register allocator may assign the same register to
   variables A and B, so both of them can be present in that register's list
   at the same time.
   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted at appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each variable
   before each instruction; we only emit these notes where the location of a
   variable changes (this means that we also emit notes for changes between
   the OUT set of the previous block and the IN set of the current block).
   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
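
/* As a purely illustrative example of the note shape (the precise rtl is
   built by emit_note_insn_var_location below): for an "int x" that lives
   in a register at some point, the pass emits roughly

     (note (var_location x (reg:SI dx)))

   while a "long long y" split between two locations gets a PARALLEL of
   EXPR_LISTs pairing each location with its offset within the variable:

     (note (var_location y (parallel
              [(expr_list (reg:SI ax) (const_int 0))
               (expr_list (mem:SI (plus (reg sp) (const_int 12)))
                          (const_int 4))])))  */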
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
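
/* The declaration above is the classic negative-array-size compile-time
   assertion: the array gets size 1 when the condition holds and the invalid
   size -1 (a compile error) when it does not.  A minimal standalone
   illustration of the same idiom, with a made-up condition:

     extern char assert_int_is_at_least_32_bits[sizeof (int) >= 4 ? 1 : -1];

   Nothing is ever defined or linked for such a declaration; it only forces
   the compiler to evaluate the condition.  */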
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;

DEF_VEC_O(micro_operation);
DEF_VEC_ALLOC_O(micro_operation,heap);
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  htab_t vars;
} emit_note_data;
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;
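
/* Purely as an example of this data structure (the names are made up):
   if variables "a" and "b" both live in hard register 1 at some point,
   the register's attribute list is a two-element chain

     { loc = (reg 1), dv = decl of "a", offset = 0, next = &second }
     { loc = (reg 1), dv = decl of "b", offset = 0, next = NULL }

   and attrs_list_member / attrs_list_insert below walk and extend such
   chains.  */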
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be first unshared before modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  htab_t htab;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  VEC(micro_operation, heap) *mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;
/* Structure describing one part of a variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to the location list.  */
  rtx cur_loc;

  /* The offset in the variable.  */
  HOST_WIDE_INT offset;
} variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  int n_var_parts;

  /* True if any of this variable's cur_loc fields changed
     during the current emit_notes_for_changes resp.
     emit_notes_for_differences call.  */
  bool cur_loc_changed;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
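
/* Note the trailing var_part[1] member: a variable_def is allocated with
   room for as many parts as it actually needs.  As a sketch, the pool
   sizing used for that (see vt_initialize) is

     sizeof (struct variable_def)
       + (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0])

   for var_pool, while valvar_pool (one-part variables and values) only
   needs room for a single part.  */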
/* Structure for chaining backlinks from referenced VALUEs to
   DVs that are referencing them.  */
typedef struct value_chain_def
{
  /* Next value_chain entry.  */
  struct value_chain_def *next;

  /* The declaration of the variable, or an RTL value
     being handled like a declaration, whose var_parts[0].loc_chain
     references the VALUE owning this value_chain.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;
} *value_chain;
typedef const struct value_chain_def *const_value_chain;
/* Pointer to the BB's information specific to the variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
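
/* Illustrative use of the macro above: for a MEM rtx M referring to part
   of a tracked stack variable,

     tree decl = MEM_EXPR (M);
     HOST_WIDE_INT offset = INT_MEM_OFFSET (M);

   yield the declaration and the byte offset within it (0 when no
   MEM_OFFSET is recorded), i.e. exactly the (decl, offset) pair the
   tables below are keyed on.  */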
/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct value_chain_def.  */
static alloc_pool value_chain_pool;

/* Changed variables, notes will be emitted for them.  */
static htab_t changed_variables;

/* Links from VALUEs to DVs referencing them in their current loc_chains.  */
static htab_t value_chains;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);
static rtx compute_cfa_pointer (HOST_WIDE_INT);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static void **unshare_variable (dataflow_set *set, void **slot, variable var,
				enum var_init_status);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static int dump_var_slot (void **, void *);
static void dump_var (variable);
static void dump_vars (htab_t);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void variable_was_changed (variable, dataflow_set *);
static void **set_slot_part (dataflow_set *, rtx, void **,
			     decl_or_value, HOST_WIDE_INT,
			     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static void **clobber_slot_part (dataflow_set *, rtx,
				 void **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static int emit_note_insn_var_location (void **, void *);
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
static int emit_notes_for_differences_1 (void **, void *);
static int emit_notes_for_differences_2 (void **, void *);
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */
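
/* For example (illustrative), a push such as

     (set (mem:SI (pre_dec:SI (reg/f:SI sp))) (reg:SI ax))

   adds GET_MODE_SIZE (SImode) to *PRE, while a plain stack-pointer
   adjustment

     (set (reg/f:SI sp) (plus:SI (reg/f:SI sp) (const_int -16)))

   subtracts -16 from *POST, i.e. adds 16.  */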
499 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
502 rtx src
= SET_SRC (pattern
);
503 rtx dest
= SET_DEST (pattern
);
506 if (dest
== stack_pointer_rtx
)
508 /* (set (reg sp) (plus (reg sp) (const_int))) */
509 code
= GET_CODE (src
);
510 if (! (code
== PLUS
|| code
== MINUS
)
511 || XEXP (src
, 0) != stack_pointer_rtx
512 || !CONST_INT_P (XEXP (src
, 1)))
516 *post
+= INTVAL (XEXP (src
, 1));
518 *post
-= INTVAL (XEXP (src
, 1));
520 else if (MEM_P (dest
))
522 /* (set (mem (pre_dec (reg sp))) (foo)) */
523 src
= XEXP (dest
, 0);
524 code
= GET_CODE (src
);
530 if (XEXP (src
, 0) == stack_pointer_rtx
)
532 rtx val
= XEXP (XEXP (src
, 1), 1);
533 /* We handle only adjustments by constant amount. */
534 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
537 if (code
== PRE_MODIFY
)
538 *pre
-= INTVAL (val
);
540 *post
-= INTVAL (val
);
546 if (XEXP (src
, 0) == stack_pointer_rtx
)
548 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
554 if (XEXP (src
, 0) == stack_pointer_rtx
)
556 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
562 if (XEXP (src
, 0) == stack_pointer_rtx
)
564 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
570 if (XEXP (src
, 0) == stack_pointer_rtx
)
572 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  */
587 insn_stack_adjust_offset_pre_post (rtx insn
, HOST_WIDE_INT
*pre
,
595 pattern
= PATTERN (insn
);
596 if (RTX_FRAME_RELATED_P (insn
))
598 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
600 pattern
= XEXP (expr
, 0);
603 if (GET_CODE (pattern
) == SET
)
604 stack_adjust_offset_pre_post (pattern
, pre
, post
);
605 else if (GET_CODE (pattern
) == PARALLEL
606 || GET_CODE (pattern
) == SEQUENCE
)
610 /* There may be stack adjustments inside compound insns. Search
612 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
613 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
614 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
/* Compute stack adjustments for all blocks by traversing the DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */
623 vt_stack_adjustments (void)
625 edge_iterator
*stack
;
628 /* Initialize entry block. */
629 VTI (ENTRY_BLOCK_PTR
)->visited
= true;
630 VTI (ENTRY_BLOCK_PTR
)->in
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
631 VTI (ENTRY_BLOCK_PTR
)->out
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
633 /* Allocate stack for back-tracking up CFG. */
634 stack
= XNEWVEC (edge_iterator
, n_basic_blocks
+ 1);
637 /* Push the first edge on to the stack. */
638 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR
->succs
);
646 /* Look at the edge on the top of the stack. */
648 src
= ei_edge (ei
)->src
;
649 dest
= ei_edge (ei
)->dest
;
651 /* Check if the edge destination has been visited yet. */
652 if (!VTI (dest
)->visited
)
655 HOST_WIDE_INT pre
, post
, offset
;
656 VTI (dest
)->visited
= true;
657 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
659 if (dest
!= EXIT_BLOCK_PTR
)
660 for (insn
= BB_HEAD (dest
);
661 insn
!= NEXT_INSN (BB_END (dest
));
662 insn
= NEXT_INSN (insn
))
665 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
666 offset
+= pre
+ post
;
669 VTI (dest
)->out
.stack_adjust
= offset
;
671 if (EDGE_COUNT (dest
->succs
) > 0)
672 /* Since the DEST node has been visited for the first
673 time, check its successors. */
674 stack
[sp
++] = ei_start (dest
->succs
);
678 /* Check whether the adjustments on the edges are the same. */
679 if (VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
685 if (! ei_one_before_end_p (ei
))
686 /* Go to the next edge. */
687 ei_next (&stack
[sp
- 1]);
689 /* Return to previous level if there are no more edges. */
698 /* Compute a CFA-based value for the stack pointer. */
701 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment
-= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
707 cfa
= plus_constant (frame_pointer_rtx
, adjustment
);
709 adjustment
-= ARG_POINTER_CFA_OFFSET (current_function_decl
);
710 cfa
= plus_constant (arg_pointer_rtx
, adjustment
);
716 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
717 or -1 if the replacement shouldn't be done. */
718 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
720 /* Data for adjust_mems callback. */
722 struct adjust_mem_data
725 enum machine_mode mem_mode
;
726 HOST_WIDE_INT stack_adjust
;
730 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
731 transformation of wider mode arithmetics to narrower mode,
732 -1 if it is suitable and subexpressions shouldn't be
733 traversed and 0 if it is suitable and subexpressions should
734 be traversed. Called through for_each_rtx. */
737 use_narrower_mode_test (rtx
*loc
, void *data
)
739 rtx subreg
= (rtx
) data
;
741 if (CONSTANT_P (*loc
))
743 switch (GET_CODE (*loc
))
746 if (cselib_lookup (*loc
, GET_MODE (SUBREG_REG (subreg
)), 0))
754 if (for_each_rtx (&XEXP (*loc
, 0), use_narrower_mode_test
, data
))
763 /* Transform X into narrower mode MODE from wider mode WMODE. */
766 use_narrower_mode (rtx x
, enum machine_mode mode
, enum machine_mode wmode
)
770 return lowpart_subreg (mode
, x
, wmode
);
771 switch (GET_CODE (x
))
774 return lowpart_subreg (mode
, x
, wmode
);
778 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
779 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
780 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
782 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
783 return simplify_gen_binary (ASHIFT
, mode
, op0
, XEXP (x
, 1));
789 /* Helper function for adjusting used MEMs. */
792 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
794 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
795 rtx mem
, addr
= loc
, tem
;
796 enum machine_mode mem_mode_save
;
798 switch (GET_CODE (loc
))
801 /* Don't do any sp or fp replacements outside of MEM addresses. */
802 if (amd
->mem_mode
== VOIDmode
)
804 if (loc
== stack_pointer_rtx
805 && !frame_pointer_needed
)
806 return compute_cfa_pointer (amd
->stack_adjust
);
807 else if (loc
== hard_frame_pointer_rtx
808 && frame_pointer_needed
809 && hard_frame_pointer_adjustment
!= -1)
810 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
816 mem
= targetm
.delegitimize_address (mem
);
817 if (mem
!= loc
&& !MEM_P (mem
))
818 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
821 addr
= XEXP (mem
, 0);
822 mem_mode_save
= amd
->mem_mode
;
823 amd
->mem_mode
= GET_MODE (mem
);
824 store_save
= amd
->store
;
826 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
827 amd
->store
= store_save
;
828 amd
->mem_mode
= mem_mode_save
;
830 addr
= targetm
.delegitimize_address (addr
);
831 if (addr
!= XEXP (mem
, 0))
832 mem
= replace_equiv_address_nv (mem
, addr
);
834 mem
= avoid_constant_pool_reference (mem
);
838 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
839 GEN_INT (GET_CODE (loc
) == PRE_INC
840 ? GET_MODE_SIZE (amd
->mem_mode
)
841 : -GET_MODE_SIZE (amd
->mem_mode
)));
845 addr
= XEXP (loc
, 0);
846 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
847 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
848 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
849 GEN_INT ((GET_CODE (loc
) == PRE_INC
850 || GET_CODE (loc
) == POST_INC
)
851 ? GET_MODE_SIZE (amd
->mem_mode
)
852 : -GET_MODE_SIZE (amd
->mem_mode
)));
853 amd
->side_effects
= alloc_EXPR_LIST (0,
854 gen_rtx_SET (VOIDmode
,
860 addr
= XEXP (loc
, 1);
863 addr
= XEXP (loc
, 0);
864 gcc_assert (amd
->mem_mode
!= VOIDmode
);
865 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
866 amd
->side_effects
= alloc_EXPR_LIST (0,
867 gen_rtx_SET (VOIDmode
,
873 /* First try without delegitimization of whole MEMs and
874 avoid_constant_pool_reference, which is more likely to succeed. */
875 store_save
= amd
->store
;
877 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
879 amd
->store
= store_save
;
880 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
881 if (mem
== SUBREG_REG (loc
))
886 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
887 GET_MODE (SUBREG_REG (loc
)),
891 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
892 GET_MODE (SUBREG_REG (loc
)),
895 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
897 if (MAY_HAVE_DEBUG_INSNS
898 && GET_CODE (tem
) == SUBREG
899 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
900 || GET_CODE (SUBREG_REG (tem
)) == MINUS
901 || GET_CODE (SUBREG_REG (tem
)) == MULT
902 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
903 && GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
904 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
905 && GET_MODE_SIZE (GET_MODE (tem
))
906 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem
)))
907 && subreg_lowpart_p (tem
)
908 && !for_each_rtx (&SUBREG_REG (tem
), use_narrower_mode_test
, tem
))
909 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
910 GET_MODE (SUBREG_REG (tem
)));
918 /* Helper function for replacement of uses. */
921 adjust_mem_uses (rtx
*x
, void *data
)
923 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
925 validate_change (NULL_RTX
, x
, new_x
, true);
928 /* Helper function for replacement of stores. */
931 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
935 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
937 if (new_dest
!= SET_DEST (expr
))
939 rtx xexpr
= CONST_CAST_RTX (expr
);
940 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
945 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
946 replace them with their value in the insn and add the side-effects
947 as other sets to the insn. */
950 adjust_insn (basic_block bb
, rtx insn
)
952 struct adjust_mem_data amd
;
954 amd
.mem_mode
= VOIDmode
;
955 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
956 amd
.side_effects
= NULL_RTX
;
959 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
962 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
964 /* For read-only MEMs containing some constant, prefer those
966 set
= single_set (insn
);
967 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
969 rtx note
= find_reg_equal_equiv_note (insn
);
971 if (note
&& CONSTANT_P (XEXP (note
, 0)))
972 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
975 if (amd
.side_effects
)
977 rtx
*pat
, new_pat
, s
;
980 pat
= &PATTERN (insn
);
981 if (GET_CODE (*pat
) == COND_EXEC
)
982 pat
= &COND_EXEC_CODE (*pat
);
983 if (GET_CODE (*pat
) == PARALLEL
)
984 oldn
= XVECLEN (*pat
, 0);
987 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
989 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
990 if (GET_CODE (*pat
) == PARALLEL
)
991 for (i
= 0; i
< oldn
; i
++)
992 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
994 XVECEXP (new_pat
, 0, 0) = *pat
;
995 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
996 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
997 free_EXPR_LIST_list (&amd
.side_effects
);
998 validate_change (NULL_RTX
, pat
, new_pat
, true);
/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}
1016 /* Return the decl in the decl_or_value. */
1018 dv_as_decl (decl_or_value dv
)
1020 #ifdef ENABLE_CHECKING
1021 gcc_assert (dv_is_decl_p (dv
));
1026 /* Return the value in the decl_or_value. */
1028 dv_as_value (decl_or_value dv
)
1030 #ifdef ENABLE_CHECKING
1031 gcc_assert (dv_is_value_p (dv
));
1036 /* Return the opaque pointer in the decl_or_value. */
1037 static inline void *
1038 dv_as_opaque (decl_or_value dv
)
1043 /* Return true if a decl_or_value must not have more than one variable
1046 dv_onepart_p (decl_or_value dv
)
1050 if (!MAY_HAVE_DEBUG_INSNS
)
1053 if (dv_is_value_p (dv
))
1056 decl
= dv_as_decl (dv
);
1061 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1064 return (target_for_debug_bind (decl
) != NULL_TREE
);
1067 /* Return the variable pool to be used for dv, depending on whether it
1068 can have multiple parts or not. */
1069 static inline alloc_pool
1070 dv_pool (decl_or_value dv
)
1072 return dv_onepart_p (dv
) ? valvar_pool
: var_pool
;
1075 /* Build a decl_or_value out of a decl. */
1076 static inline decl_or_value
1077 dv_from_decl (tree decl
)
1081 #ifdef ENABLE_CHECKING
1082 gcc_assert (dv_is_decl_p (dv
));
1087 /* Build a decl_or_value out of a value. */
1088 static inline decl_or_value
1089 dv_from_value (rtx value
)
1093 #ifdef ENABLE_CHECKING
1094 gcc_assert (dv_is_value_p (dv
));
1099 extern void debug_dv (decl_or_value dv
);
1102 debug_dv (decl_or_value dv
)
1104 if (dv_is_value_p (dv
))
1105 debug_rtx (dv_as_value (dv
));
1107 debug_generic_stmt (dv_as_decl (dv
));
1110 typedef unsigned int dvuid
;
1112 /* Return the uid of DV. */
1115 dv_uid (decl_or_value dv
)
1117 if (dv_is_value_p (dv
))
1118 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
1120 return DECL_UID (dv_as_decl (dv
));
1123 /* Compute the hash from the uid. */
1125 static inline hashval_t
1126 dv_uid2hash (dvuid uid
)
1131 /* The hash function for a mask table in a shared_htab chain. */
1133 static inline hashval_t
1134 dv_htab_hash (decl_or_value dv
)
1136 return dv_uid2hash (dv_uid (dv
));
1139 /* The hash function for variable_htab, computes the hash value
1140 from the declaration of variable X. */
1143 variable_htab_hash (const void *x
)
1145 const_variable
const v
= (const_variable
) x
;
1147 return dv_htab_hash (v
->dv
);
1150 /* Compare the declaration of variable X with declaration Y. */
1153 variable_htab_eq (const void *x
, const void *y
)
1155 const_variable
const v
= (const_variable
) x
;
1156 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1158 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
1161 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1164 variable_htab_free (void *elem
)
1167 variable var
= (variable
) elem
;
1168 location_chain node
, next
;
1170 gcc_assert (var
->refcount
> 0);
1173 if (var
->refcount
> 0)
1176 for (i
= 0; i
< var
->n_var_parts
; i
++)
1178 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1181 pool_free (loc_chain_pool
, node
);
1183 var
->var_part
[i
].loc_chain
= NULL
;
1185 pool_free (dv_pool (var
->dv
), var
);
1188 /* The hash function for value_chains htab, computes the hash value
1192 value_chain_htab_hash (const void *x
)
1194 const_value_chain
const v
= (const_value_chain
) x
;
1196 return dv_htab_hash (v
->dv
);
1199 /* Compare the VALUE X with VALUE Y. */
1202 value_chain_htab_eq (const void *x
, const void *y
)
1204 const_value_chain
const v
= (const_value_chain
) x
;
1205 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1207 return dv_as_opaque (v
->dv
) == dv_as_opaque (dv
);
1210 /* Initialize the set (array) SET of attrs to empty lists. */
1213 init_attrs_list_set (attrs
*set
)
1217 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1221 /* Make the list *LISTP empty. */
1224 attrs_list_clear (attrs
*listp
)
1228 for (list
= *listp
; list
; list
= next
)
1231 pool_free (attrs_pool
, list
);
1236 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1239 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1241 for (; list
; list
= list
->next
)
1242 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1247 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1250 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1251 HOST_WIDE_INT offset
, rtx loc
)
1255 list
= (attrs
) pool_alloc (attrs_pool
);
1258 list
->offset
= offset
;
1259 list
->next
= *listp
;
1263 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1266 attrs_list_copy (attrs
*dstp
, attrs src
)
1270 attrs_list_clear (dstp
);
1271 for (; src
; src
= src
->next
)
1273 n
= (attrs
) pool_alloc (attrs_pool
);
1276 n
->offset
= src
->offset
;
1282 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1285 attrs_list_union (attrs
*dstp
, attrs src
)
1287 for (; src
; src
= src
->next
)
1289 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1290 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1294 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1298 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1300 gcc_assert (!*dstp
);
1301 for (; src
; src
= src
->next
)
1303 if (!dv_onepart_p (src
->dv
))
1304 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1306 for (src
= src2
; src
; src
= src
->next
)
1308 if (!dv_onepart_p (src
->dv
)
1309 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1310 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline htab_t
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}
/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab
    = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
		   variable_htab_eq, variable_htab_free);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      htab_delete (vars->htab);
      pool_free (shared_hash_pool, vars);
    }
}
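
/* The functions above implement copy-on-write sharing of the variable
   hash tables: copying a dataflow set just bumps the reference count,
   and the first writer unshares.  A hedged usage sketch (the real
   callers are shared_hash_find_slot_unshare_1 and friends below):

     if (shared_hash_shared (set->vars))
       set->vars = shared_hash_unshare (set->vars);
     slot = htab_find_slot_with_hash (shared_hash_htab (set->vars),
				      dv, dv_htab_hash (dv), INSERT);

   so a table is physically copied only when some block needs a private,
   modified version of a set it inherited from a predecessor.  */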
1381 /* Unshare *PVARS if shared and return slot for DV. If INS is
1382 INSERT, insert it if not already present. */
1384 static inline void **
1385 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1386 hashval_t dvhash
, enum insert_option ins
)
1388 if (shared_hash_shared (*pvars
))
1389 *pvars
= shared_hash_unshare (*pvars
);
1390 return htab_find_slot_with_hash (shared_hash_htab (*pvars
), dv
, dvhash
, ins
);
1393 static inline void **
1394 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1395 enum insert_option ins
)
1397 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */
1404 static inline void **
1405 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1407 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1408 shared_hash_shared (vars
)
1409 ? NO_INSERT
: INSERT
);
1412 static inline void **
1413 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1415 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1418 /* Return slot for DV only if it is already present in the hash table. */
1420 static inline void **
1421 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1424 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1428 static inline void **
1429 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1431 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1434 /* Return variable for DV or NULL if not already present in the hash
1437 static inline variable
1438 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1440 return (variable
) htab_find_with_hash (shared_hash_htab (vars
), dv
, dvhash
);
1443 static inline variable
1444 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1446 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
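
/* Illustrative example: if the VALUEs with uids 7, 12 and 30 are found
   equivalent, then

     canon_value_cmp (val7, val12)    is true,
     canon_value_cmp (val30, val7)    is false,
     canon_value_cmp (val7, NULL_RTX) is true,

   so uid 7 becomes the canonical value and the others are linked
   directly to it (12 -> 7, 30 -> 7) instead of forming an arbitrary
   chain such as 30 -> 12 -> 7.  */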
static bool dst_can_be_shared;
1467 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1470 unshare_variable (dataflow_set
*set
, void **slot
, variable var
,
1471 enum var_init_status initialized
)
1476 new_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
1477 new_var
->dv
= var
->dv
;
1478 new_var
->refcount
= 1;
1480 new_var
->n_var_parts
= var
->n_var_parts
;
1481 new_var
->cur_loc_changed
= var
->cur_loc_changed
;
1482 var
->cur_loc_changed
= false;
1483 new_var
->in_changed_variables
= false;
1485 if (! flag_var_tracking_uninit
)
1486 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1488 for (i
= 0; i
< var
->n_var_parts
; i
++)
1490 location_chain node
;
1491 location_chain
*nextp
;
1493 new_var
->var_part
[i
].offset
= var
->var_part
[i
].offset
;
1494 nextp
= &new_var
->var_part
[i
].loc_chain
;
1495 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1497 location_chain new_lc
;
1499 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1500 new_lc
->next
= NULL
;
1501 if (node
->init
> initialized
)
1502 new_lc
->init
= node
->init
;
1504 new_lc
->init
= initialized
;
1505 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1506 new_lc
->set_src
= node
->set_src
;
1508 new_lc
->set_src
= NULL
;
1509 new_lc
->loc
= node
->loc
;
1512 nextp
= &new_lc
->next
;
1515 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1518 dst_can_be_shared
= false;
1519 if (shared_hash_shared (set
->vars
))
1520 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1521 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1522 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1524 if (var
->in_changed_variables
)
1527 = htab_find_slot_with_hash (changed_variables
, var
->dv
,
1528 dv_htab_hash (var
->dv
), NO_INSERT
);
1529 gcc_assert (*cslot
== (void *) var
);
1530 var
->in_changed_variables
= false;
1531 variable_htab_free (var
);
1533 new_var
->in_changed_variables
= true;
1538 /* Copy all variables from hash table SRC to hash table DST. */
1541 vars_copy (htab_t dst
, htab_t src
)
1546 FOR_EACH_HTAB_ELEMENT (src
, var
, variable
, hi
)
1550 dstp
= htab_find_slot_with_hash (dst
, var
->dv
,
1551 dv_htab_hash (var
->dv
),
1557 /* Map a decl to its main debug decl. */
1560 var_debug_decl (tree decl
)
1562 if (decl
&& DECL_P (decl
)
1563 && DECL_DEBUG_EXPR_IS_FROM (decl
))
1565 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1566 if (debugdecl
&& DECL_P (debugdecl
))
1573 /* Set the register LOC to contain DV, OFFSET. */
1576 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1577 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1578 enum insert_option iopt
)
1581 bool decl_p
= dv_is_decl_p (dv
);
1584 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1586 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1587 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1588 && node
->offset
== offset
)
1591 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1592 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1595 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1598 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1601 tree decl
= REG_EXPR (loc
);
1602 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1604 var_reg_decl_set (set
, loc
, initialized
,
1605 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1608 static enum var_init_status
1609 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1613 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1615 if (! flag_var_tracking_uninit
)
1616 return VAR_INIT_STATUS_INITIALIZED
;
1618 var
= shared_hash_find (set
->vars
, dv
);
1621 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1623 location_chain nextp
;
1624 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1625 if (rtx_equal_p (nextp
->loc
, loc
))
1627 ret_val
= nextp
->init
;
1636 /* Delete current content of register LOC in dataflow set SET and set
1637 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1638 MODIFY is true, any other live copies of the same variable part are
1639 also deleted from the dataflow set, otherwise the variable part is
1640 assumed to be copied from another location holding the same
1644 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1645 enum var_init_status initialized
, rtx set_src
)
1647 tree decl
= REG_EXPR (loc
);
1648 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1652 decl
= var_debug_decl (decl
);
1654 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1655 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1657 nextp
= &set
->regs
[REGNO (loc
)];
1658 for (node
= *nextp
; node
; node
= next
)
1661 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1663 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1664 pool_free (attrs_pool
, node
);
1670 nextp
= &node
->next
;
1674 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1675 var_reg_set (set
, loc
, initialized
, set_src
);
1678 /* Delete the association of register LOC in dataflow set SET with any
1679 variables that aren't onepart. If CLOBBER is true, also delete any
1680 other live copies of the same variable part, and delete the
1681 association with onepart dvs too. */
1684 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1686 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1691 tree decl
= REG_EXPR (loc
);
1692 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1694 decl
= var_debug_decl (decl
);
1696 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1699 for (node
= *nextp
; node
; node
= next
)
1702 if (clobber
|| !dv_onepart_p (node
->dv
))
1704 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1705 pool_free (attrs_pool
, node
);
1709 nextp
= &node
->next
;
1713 /* Delete content of register with number REGNO in dataflow set SET. */
1716 var_regno_delete (dataflow_set
*set
, int regno
)
1718 attrs
*reg
= &set
->regs
[regno
];
1721 for (node
= *reg
; node
; node
= next
)
1724 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1725 pool_free (attrs_pool
, node
);
1730 /* Set the location of DV, OFFSET as the MEM LOC. */
1733 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1734 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1735 enum insert_option iopt
)
1737 if (dv_is_decl_p (dv
))
1738 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1740 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1743 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1745 Adjust the address first if it is stack pointer based. */
1748 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1751 tree decl
= MEM_EXPR (loc
);
1752 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1754 var_mem_decl_set (set
, loc
, initialized
,
1755 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1758 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1759 dataflow set SET to LOC. If MODIFY is true, any other live copies
1760 of the same variable part are also deleted from the dataflow set,
1761 otherwise the variable part is assumed to be copied from another
1762 location holding the same part.
1763 Adjust the address first if it is stack pointer based. */
1766 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1767 enum var_init_status initialized
, rtx set_src
)
1769 tree decl
= MEM_EXPR (loc
);
1770 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1772 decl
= var_debug_decl (decl
);
1774 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1775 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1778 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
1779 var_mem_set (set
, loc
, initialized
, set_src
);
1782 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1783 true, also delete any other live copies of the same variable part.
1784 Adjust the address first if it is stack pointer based. */
1787 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1789 tree decl
= MEM_EXPR (loc
);
1790 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1792 decl
= var_debug_decl (decl
);
1794 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1795 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
1798 /* Bind a value to a location it was just stored in. If MODIFIED
1799 holds, assume the location was modified, detaching it from any
1800 values bound to it. */
1803 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
, bool modified
)
1805 cselib_val
*v
= CSELIB_VAL_PTR (val
);
1807 gcc_assert (cselib_preserved_value_p (v
));
1811 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
1812 print_inline_rtx (dump_file
, val
, 0);
1813 fprintf (dump_file
, " stored in ");
1814 print_inline_rtx (dump_file
, loc
, 0);
1817 struct elt_loc_list
*l
;
1818 for (l
= v
->locs
; l
; l
= l
->next
)
1820 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
1821 print_inline_rtx (dump_file
, l
->loc
, 0);
1824 fprintf (dump_file
, "\n");
1830 var_regno_delete (set
, REGNO (loc
));
1831 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1832 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1834 else if (MEM_P (loc
))
1835 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1836 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1838 set_variable_part (set
, loc
, dv_from_value (val
), 0,
1839 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1842 /* Reset this node, detaching all its equivalences. Return the slot
1843 in the variable hash table that holds dv, if there is one. */
1846 val_reset (dataflow_set
*set
, decl_or_value dv
)
1848 variable var
= shared_hash_find (set
->vars
, dv
) ;
1849 location_chain node
;
1852 if (!var
|| !var
->n_var_parts
)
1855 gcc_assert (var
->n_var_parts
== 1);
1858 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1859 if (GET_CODE (node
->loc
) == VALUE
1860 && canon_value_cmp (node
->loc
, cval
))
1863 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1864 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
1866 /* Redirect the equivalence link to the new canonical
1867 value, or simply remove it if it would point at
1870 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
1871 0, node
->init
, node
->set_src
, NO_INSERT
);
1872 delete_variable_part (set
, dv_as_value (dv
),
1873 dv_from_value (node
->loc
), 0);
1878 decl_or_value cdv
= dv_from_value (cval
);
/* Keep the remaining values connected, accumulating links
   in the canonical value.  */
1882 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1884 if (node
->loc
== cval
)
1886 else if (GET_CODE (node
->loc
) == REG
)
1887 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1888 node
->set_src
, NO_INSERT
);
1889 else if (GET_CODE (node
->loc
) == MEM
)
1890 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1891 node
->set_src
, NO_INSERT
);
1893 set_variable_part (set
, node
->loc
, cdv
, 0,
1894 node
->init
, node
->set_src
, NO_INSERT
);
1898 /* We remove this last, to make sure that the canonical value is not
1899 removed to the point of requiring reinsertion. */
1901 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
1903 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
1905 /* ??? Should we make sure there aren't other available values or
1906 variables whose values involve this one other than by
1907 equivalence? E.g., at the very least we should reset MEMs, those
1908 shouldn't be too hard to find cselib-looking up the value as an
1909 address, then locating the resulting value in our own hash
1913 /* Find the values in a given location and map the val to another
1914 value, if it is unique, or add the location as one holding the
1918 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
)
1920 decl_or_value dv
= dv_from_value (val
);
1922 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1925 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
1927 fprintf (dump_file
, "head: ");
1928 print_inline_rtx (dump_file
, val
, 0);
1929 fputs (" is at ", dump_file
);
1930 print_inline_rtx (dump_file
, loc
, 0);
1931 fputc ('\n', dump_file
);
1934 val_reset (set
, dv
);
1938 attrs node
, found
= NULL
;
1940 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1941 if (dv_is_value_p (node
->dv
)
1942 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
1946 /* Map incoming equivalences. ??? Wouldn't it be nice if
1947 we just started sharing the location lists? Maybe a
1948 circular list ending at the value itself or some
1950 set_variable_part (set
, dv_as_value (node
->dv
),
1951 dv_from_value (val
), node
->offset
,
1952 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1953 set_variable_part (set
, val
, node
->dv
, node
->offset
,
1954 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1957 /* If we didn't find any equivalence, we need to remember that
1958 this value is held in the named register. */
1960 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1961 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1963 else if (MEM_P (loc
))
1964 /* ??? Merge equivalent MEMs. */
1965 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1966 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1968 /* ??? Merge equivalent expressions. */
1969 set_variable_part (set
, loc
, dv_from_value (val
), 0,
1970 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1973 /* Initialize dataflow set SET to be empty.
1974 VARS_SIZE is the initial size of hash table VARS. */
1977 dataflow_set_init (dataflow_set
*set
)
1979 init_attrs_list_set (set
->regs
);
1980 set
->vars
= shared_hash_copy (empty_shared_hash
);
1981 set
->stack_adjust
= 0;
1982 set
->traversed_vars
= NULL
;
1985 /* Delete the contents of dataflow set SET. */
1988 dataflow_set_clear (dataflow_set
*set
)
1992 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1993 attrs_list_clear (&set
->regs
[i
]);
1995 shared_hash_destroy (set
->vars
);
1996 set
->vars
= shared_hash_copy (empty_shared_hash
);
1999 /* Copy the contents of dataflow set SRC to DST. */
2002 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2006 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2007 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2009 shared_hash_destroy (dst
->vars
);
2010 dst
->vars
= shared_hash_copy (src
->vars
);
2011 dst
->stack_adjust
= src
->stack_adjust
;
2014 /* Information for merging lists of locations for a given offset of variable.
2016 struct variable_union_info
2018 /* Node of the location chain. */
2021 /* The sum of positions in the input chains. */
2024 /* The position in the chain of DST dataflow set. */
2028 /* Buffer for location list sorting and its allocated size. */
2029 static struct variable_union_info
*vui_vec
;
2030 static int vui_allocated
;
2032 /* Compare function for qsort, order the structures by POS element. */
2035 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2037 const struct variable_union_info
*const i1
=
2038 (const struct variable_union_info
*) n1
;
2039 const struct variable_union_info
*const i2
=
2040 ( const struct variable_union_info
*) n2
;
2042 if (i1
->pos
!= i2
->pos
)
2043 return i1
->pos
- i2
->pos
;
2045 return (i1
->pos_dst
- i2
->pos_dst
);
2048 /* Compute union of location parts of variable *SLOT and the same variable
2049 from hash table DATA. Compute "sorted" union of the location chains
2050 for common offsets, i.e. the locations of a variable part are sorted by
2051 a priority where the priority is the sum of the positions in the 2 chains
2052 (if a location is only in one list the position in the second list is
2053 defined to be larger than the length of the chains).
2054 When we are updating the location parts the newest location is in the
2055 beginning of the chain, so when we do the described "sorted" union
2056 we keep the newest locations in the beginning. */
2059 variable_union (variable src
, dataflow_set
*set
)
2065 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2066 if (!dstp
|| !*dstp
)
2070 dst_can_be_shared
= false;
2072 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2076 /* Continue traversing the hash table. */
2080 dst
= (variable
) *dstp
;
2082 gcc_assert (src
->n_var_parts
);
2084 /* We can combine one-part variables very efficiently, because their
2085 entries are in canonical order. */
2086 if (dv_onepart_p (src
->dv
))
2088 location_chain
*nodep
, dnode
, snode
;
2090 gcc_assert (src
->n_var_parts
== 1
2091 && dst
->n_var_parts
== 1);
2093 snode
= src
->var_part
[0].loc_chain
;
2096 restart_onepart_unshared
:
2097 nodep
= &dst
->var_part
[0].loc_chain
;
2103 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2107 location_chain nnode
;
2109 if (shared_var_p (dst
, set
->vars
))
2111 dstp
= unshare_variable (set
, dstp
, dst
,
2112 VAR_INIT_STATUS_INITIALIZED
);
2113 dst
= (variable
)*dstp
;
2114 goto restart_onepart_unshared
;
2117 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2118 nnode
->loc
= snode
->loc
;
2119 nnode
->init
= snode
->init
;
2120 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2121 nnode
->set_src
= NULL
;
2123 nnode
->set_src
= snode
->set_src
;
2124 nnode
->next
= dnode
;
2127 #ifdef ENABLE_CHECKING
2129 gcc_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2133 snode
= snode
->next
;
2135 nodep
= &dnode
->next
;
2142 /* Count the number of location parts, result is K. */
2143 for (i
= 0, j
= 0, k
= 0;
2144 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2146 if (src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2151 else if (src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2156 k
+= src
->n_var_parts
- i
;
2157 k
+= dst
->n_var_parts
- j
;
2159 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2160 thus there are at most MAX_VAR_PARTS different offsets. */
2161 gcc_assert (dv_onepart_p (dst
->dv
) ? k
== 1 : k
<= MAX_VAR_PARTS
);
2163 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2165 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2166 dst
= (variable
)*dstp
;
2169 i
= src
->n_var_parts
- 1;
2170 j
= dst
->n_var_parts
- 1;
2171 dst
->n_var_parts
= k
;
2173 for (k
--; k
>= 0; k
--)
2175 location_chain node
, node2
;
2177 if (i
>= 0 && j
>= 0
2178 && src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2180 /* Compute the "sorted" union of the chains, i.e. the locations which
2181 are in both chains go first, they are sorted by the sum of
2182 positions in the chains. */
2185 struct variable_union_info
*vui
;
2187 /* If DST is shared compare the location chains.
2188 If they are different we will modify the chain in DST with
2189 high probability so make a copy of DST. */
2190 if (shared_var_p (dst
, set
->vars
))
2192 for (node
= src
->var_part
[i
].loc_chain
,
2193 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2194 node
= node
->next
, node2
= node2
->next
)
2196 if (!((REG_P (node2
->loc
)
2197 && REG_P (node
->loc
)
2198 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2199 || rtx_equal_p (node2
->loc
, node
->loc
)))
2201 if (node2
->init
< node
->init
)
2202 node2
->init
= node
->init
;
2208 dstp
= unshare_variable (set
, dstp
, dst
,
2209 VAR_INIT_STATUS_UNKNOWN
);
2210 dst
= (variable
)*dstp
;
2215 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2218 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2223 /* The most common case, much simpler, no qsort is needed. */
2224 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2225 dst
->var_part
[k
].loc_chain
= dstnode
;
2226 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2228 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2229 if (!((REG_P (dstnode
->loc
)
2230 && REG_P (node
->loc
)
2231 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2232 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2234 location_chain new_node
;
2236 /* Copy the location from SRC. */
2237 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2238 new_node
->loc
= node
->loc
;
2239 new_node
->init
= node
->init
;
2240 if (!node
->set_src
|| MEM_P (node
->set_src
))
2241 new_node
->set_src
= NULL
;
2243 new_node
->set_src
= node
->set_src
;
2244 node2
->next
= new_node
;
2251 if (src_l
+ dst_l
> vui_allocated
)
2253 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2254 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2259 /* Fill in the locations from DST. */
2260 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2261 node
= node
->next
, jj
++)
2264 vui
[jj
].pos_dst
= jj
;
2266 /* Pos plus value larger than a sum of 2 valid positions. */
2267 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2270 /* Fill in the locations from SRC. */
2272 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2273 node
= node
->next
, ii
++)
2275 /* Find location from NODE. */
2276 for (jj
= 0; jj
< dst_l
; jj
++)
2278 if ((REG_P (vui
[jj
].lc
->loc
)
2279 && REG_P (node
->loc
)
2280 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2281 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2283 vui
[jj
].pos
= jj
+ ii
;
2287 if (jj
>= dst_l
) /* The location has not been found. */
2289 location_chain new_node
;
2291 /* Copy the location from SRC. */
2292 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2293 new_node
->loc
= node
->loc
;
2294 new_node
->init
= node
->init
;
2295 if (!node
->set_src
|| MEM_P (node
->set_src
))
2296 new_node
->set_src
= NULL
;
2298 new_node
->set_src
= node
->set_src
;
2299 vui
[n
].lc
= new_node
;
2300 vui
[n
].pos_dst
= src_l
+ dst_l
;
2301 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
              /* Special case of a still very common case.  For dst_l == 2
                 all entries dst_l ... n-1 are already sorted, with
                 vui[i].pos == i + src_l + dst_l for i >= dst_l.  */
2311 if (vui
[0].pos
> vui
[1].pos
)
2313 /* Order should be 1, 0, 2... */
2314 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
2315 vui
[1].lc
->next
= vui
[0].lc
;
2318 vui
[0].lc
->next
= vui
[2].lc
;
2319 vui
[n
- 1].lc
->next
= NULL
;
2322 vui
[0].lc
->next
= NULL
;
2327 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2328 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
2330 /* Order should be 0, 2, 1, 3... */
2331 vui
[0].lc
->next
= vui
[2].lc
;
2332 vui
[2].lc
->next
= vui
[1].lc
;
2335 vui
[1].lc
->next
= vui
[3].lc
;
2336 vui
[n
- 1].lc
->next
= NULL
;
2339 vui
[1].lc
->next
= NULL
;
2344 /* Order should be 0, 1, 2... */
2346 vui
[n
- 1].lc
->next
= NULL
;
2349 for (; ii
< n
; ii
++)
2350 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2354 qsort (vui
, n
, sizeof (struct variable_union_info
),
2355 variable_union_info_cmp_pos
);
2357 /* Reconnect the nodes in sorted order. */
2358 for (ii
= 1; ii
< n
; ii
++)
2359 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2360 vui
[n
- 1].lc
->next
= NULL
;
2361 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2364 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2369 else if ((i
>= 0 && j
>= 0
2370 && src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2373 dst
->var_part
[k
] = dst
->var_part
[j
];
2376 else if ((i
>= 0 && j
>= 0
2377 && src
->var_part
[i
].offset
> dst
->var_part
[j
].offset
)
2380 location_chain
*nextp
;
2382 /* Copy the chain from SRC. */
2383 nextp
= &dst
->var_part
[k
].loc_chain
;
2384 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2386 location_chain new_lc
;
2388 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
2389 new_lc
->next
= NULL
;
2390 new_lc
->init
= node
->init
;
2391 if (!node
->set_src
|| MEM_P (node
->set_src
))
2392 new_lc
->set_src
= NULL
;
2394 new_lc
->set_src
= node
->set_src
;
2395 new_lc
->loc
= node
->loc
;
2398 nextp
= &new_lc
->next
;
2401 dst
->var_part
[k
].offset
= src
->var_part
[i
].offset
;
2404 dst
->var_part
[k
].cur_loc
= NULL
;
2407 if (flag_var_tracking_uninit
)
2408 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
2410 location_chain node
, node2
;
2411 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2412 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
2413 if (rtx_equal_p (node
->loc
, node2
->loc
))
2415 if (node
->init
> node2
->init
)
2416 node2
->init
= node
->init
;
2420 /* Continue traversing the hash table. */
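/* Illustrative sketch only (not part of the pass): the "sorted union"
   priority scheme used by variable_union above, shown on plain integer
   arrays instead of location chains.  All names below (union_entry,
   sorted_union, ...) are made up for this example.  A location present
   in both inputs gets priority pos_in_dst + pos_in_src; one present in
   only a single input gets src_len + dst_len in place of the missing
   position, so it sorts after every common location while the newest
   (lowest-position) entries stay at the front.  */

#include <stdlib.h>

struct union_entry
{
  int loc;              /* Stand-in for a tracked location.  */
  int priority;         /* Sum of positions in the two input chains.  */
};

static int
union_entry_cmp (const void *a, const void *b)
{
  return ((const struct union_entry *) a)->priority
         - ((const struct union_entry *) b)->priority;
}

/* Merge DST[0..DST_LEN) and SRC[0..SRC_LEN) into OUT and return the
   number of entries written.  */
static int
sorted_union (const int *dst, int dst_len, const int *src, int src_len,
              struct union_entry *out)
{
  int n = 0, i, j;

  for (j = 0; j < dst_len; j++)
    {
      out[n].loc = dst[j];
      out[n].priority = j + src_len + dst_len; /* Assume not in SRC yet.  */
      n++;
    }
  for (i = 0; i < src_len; i++)
    {
      for (j = 0; j < dst_len; j++)
        if (src[i] == dst[j])
          {
            out[j].priority = i + j;    /* Present in both chains.  */
            break;
          }
      if (j == dst_len)
        {
          out[n].loc = src[i];
          out[n].priority = i + src_len + dst_len;
          n++;
        }
    }
  qsort (out, n, sizeof (struct union_entry), union_entry_cmp);
  return n;
}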
/* Compute union of dataflow sets SRC and DST and store it to DST.  */

dataflow_set_union (dataflow_set *dst, dataflow_set *src)
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_union (&dst->regs[i], src->regs[i]);

  if (dst->vars == empty_shared_hash)
      shared_hash_destroy (dst->vars);
      dst->vars = shared_hash_copy (src->vars);

  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
    variable_union (var, dst);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
/* Whether the value is in changed_variables hash table.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether the decl is in changed_variables hash table.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
/* Record that DV has been added into resp. removed from changed_variables
   hashtable.  */

set_dv_changed (decl_or_value dv, bool newv)
  if (dv_is_value_p (dv))
    VALUE_CHANGED (dv_as_value (dv)) = newv;
  else
    DECL_CHANGED (dv_as_decl (dv)) = newv;

/* Return true if DV is present in changed_variables hash table.  */

dv_changed_p (decl_or_value dv)
  return (dv_is_value_p (dv)
          ? VALUE_CHANGED (dv_as_value (dv))
          : DECL_CHANGED (dv_as_decl (dv)));
/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  */

static location_chain
find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2487 location_chain node
;
2488 enum rtx_code loc_code
;
2493 #ifdef ENABLE_CHECKING
2494 gcc_assert (dv_onepart_p (var
->dv
));
2497 if (!var
->n_var_parts
)
2500 #ifdef ENABLE_CHECKING
2501 gcc_assert (var
->var_part
[0].offset
== 0);
2504 loc_code
= GET_CODE (loc
);
2505 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2507 if (GET_CODE (node
->loc
) != loc_code
)
2509 if (GET_CODE (node
->loc
) != VALUE
)
2512 else if (loc
== node
->loc
)
2514 else if (loc_code
!= VALUE
)
2516 if (rtx_equal_p (loc
, node
->loc
))
2520 if (!VALUE_RECURSED_INTO (node
->loc
))
2522 decl_or_value dv
= dv_from_value (node
->loc
);
2523 variable var
= (variable
)
2524 htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
2528 location_chain where
;
2529 VALUE_RECURSED_INTO (node
->loc
) = true;
2530 if ((where
= find_loc_in_1pdv (loc
, var
, vars
)))
2532 VALUE_RECURSED_INTO (node
->loc
) = false;
2535 VALUE_RECURSED_INTO (node
->loc
) = false;
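/* Illustrative sketch only (not part of the pass): the cycle-avoiding
   recursion used by find_loc_in_1pdv above, on simplified stand-in
   types.  A value is marked while its own list is being searched (the
   same role VALUE_RECURSED_INTO plays), so chains of values that refer
   back to each other cannot cause infinite recursion.  All names below
   are invented for this example.  */

#include <stddef.h>

struct sketch_value;

struct sketch_loc
{
  int loc;                      /* A plain location.  */
  struct sketch_value *value;   /* Non-NULL if this entry is a value.  */
  struct sketch_loc *next;
};

struct sketch_value
{
  int visiting;                 /* Recursion guard.  */
  struct sketch_loc *locs;
};

static struct sketch_loc *
find_loc_recursive (int loc, struct sketch_loc *chain)
{
  struct sketch_loc *node, *found;

  for (node = chain; node; node = node->next)
    {
      if (!node->value)
        {
          if (node->loc == loc)
            return node;
        }
      else if (!node->value->visiting)
        {
          /* Mark, search the value's own locations, then unmark.  */
          node->value->visiting = 1;
          found = find_loc_recursive (loc, node->value->locs);
          node->value->visiting = 0;
          if (found)
            return found;
        }
    }
  return NULL;
}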
/* Hash table iteration argument passed to variable_merge.  */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;
  /* The set that we're iterating in.  */
  dataflow_set *cur;
  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;
  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
/* Insert LOC in *DNODE, if it's not there yet.  The list must be in
   loc_cmp order, and it is maintained as such.  */

insert_into_intersection (location_chain *nodep, rtx loc,
                          enum var_init_status status)
2563 location_chain node
;
2566 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
2567 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
2569 node
->init
= MIN (node
->init
, status
);
2575 node
= (location_chain
) pool_alloc (loc_chain_pool
);
2578 node
->set_src
= NULL
;
2579 node
->init
= status
;
2580 node
->next
= *nodep
;
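/* Illustrative sketch only (not part of the pass): an ordered insert of
   the kind insert_into_intersection performs above, keeping a singly
   linked list sorted by a comparator and merging the init status of an
   entry that is already present.  The types and names are simplified
   stand-ins invented for this example.  */

#include <stdlib.h>

struct ordered_node
{
  int loc;
  int init;
  struct ordered_node *next;
};

static void
ordered_insert (struct ordered_node **nodep, int loc, int init)
{
  struct ordered_node *node;

  for (node = *nodep; node; nodep = &node->next, node = *nodep)
    {
      if (node->loc == loc)
        {
          /* Already present: keep the weaker initialization status.  */
          if (init < node->init)
            node->init = init;
          return;
        }
      if (node->loc > loc)
        break;          /* Insert before the first larger entry.  */
    }

  node = (struct ordered_node *) malloc (sizeof *node);
  node->loc = loc;
  node->init = init;
  node->next = *nodep;
  *nodep = node;
}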
/* Insert in DEST the intersection of the locations present in both
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
   DSM->dst.  */

intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
                      location_chain s1node, variable s2var)
2593 dataflow_set
*s1set
= dsm
->cur
;
2594 dataflow_set
*s2set
= dsm
->src
;
2595 location_chain found
;
2599 location_chain s2node
;
2601 #ifdef ENABLE_CHECKING
2602 gcc_assert (dv_onepart_p (s2var
->dv
));
2605 if (s2var
->n_var_parts
)
2607 #ifdef ENABLE_CHECKING
2608 gcc_assert (s2var
->var_part
[0].offset
== 0);
2610 s2node
= s2var
->var_part
[0].loc_chain
;
2612 for (; s1node
&& s2node
;
2613 s1node
= s1node
->next
, s2node
= s2node
->next
)
2614 if (s1node
->loc
!= s2node
->loc
)
2616 else if (s1node
->loc
== val
)
2619 insert_into_intersection (dest
, s1node
->loc
,
2620 MIN (s1node
->init
, s2node
->init
));
2624 for (; s1node
; s1node
= s1node
->next
)
2626 if (s1node
->loc
== val
)
2629 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
2630 shared_hash_htab (s2set
->vars
))))
2632 insert_into_intersection (dest
, s1node
->loc
,
2633 MIN (s1node
->init
, found
->init
));
2637 if (GET_CODE (s1node
->loc
) == VALUE
2638 && !VALUE_RECURSED_INTO (s1node
->loc
))
2640 decl_or_value dv
= dv_from_value (s1node
->loc
);
2641 variable svar
= shared_hash_find (s1set
->vars
, dv
);
2644 if (svar
->n_var_parts
== 1)
2646 VALUE_RECURSED_INTO (s1node
->loc
) = true;
2647 intersect_loc_chains (val
, dest
, dsm
,
2648 svar
->var_part
[0].loc_chain
,
2650 VALUE_RECURSED_INTO (s1node
->loc
) = false;
/* ??? if the location is equivalent to any location in src,
   searched recursively

     add to dst the values needed to represent the equivalence

     telling whether locations S is equivalent to another dv's
     location list:

       for each location D in the list

         if S and D satisfy rtx_equal_p, then it is present

         else if D is a value, recurse without cycles

         else if S and D have the same CODE and MODE

           for each operand oS and the corresponding oD

             if oS and oD are not equivalent, then S and D are not equivalent

             else if they are RTX vectors

               if any vector oS element is not equivalent to its respective oD,
               then S and D are not equivalent  */
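/* Illustrative sketch only (not part of the pass): the structural
   equivalence test outlined in the comment above, on a simplified
   expression tree.  Leaves compare by payload; inner nodes must agree
   on their code and then on every operand, recursively.  The types and
   names are invented for this example; the real test also has to guard
   against cycles through values, as elsewhere in this file.  */

struct equiv_node
{
  int code;                     /* Kind of node; 0 means a leaf.  */
  int leaf;                     /* Leaf payload.  */
  int n_ops;
  struct equiv_node **ops;
};

static int
equiv_p (const struct equiv_node *s, const struct equiv_node *d)
{
  int i;

  if (s == d)
    return 1;
  if (s->code != d->code)
    return 0;
  if (s->code == 0)
    return s->leaf == d->leaf;
  if (s->n_ops != d->n_ops)
    return 0;
  for (i = 0; i < s->n_ops; i++)
    if (!equiv_p (s->ops[i], d->ops[i]))
      return 0;                 /* One differing operand suffices.  */
  return 1;
}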
/* Return -1 if X should be before Y in a location list for a 1-part
   variable, 1 if Y should be before X, and 0 if they're equivalent
   and should not appear in the list.  */

loc_cmp (rtx x, rtx y)
2694 RTX_CODE code
= GET_CODE (x
);
2704 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2705 if (REGNO (x
) == REGNO (y
))
2707 else if (REGNO (x
) < REGNO (y
))
2720 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2721 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
2727 if (GET_CODE (x
) == VALUE
)
2729 if (GET_CODE (y
) != VALUE
)
2731 /* Don't assert the modes are the same, that is true only
2732 when not recursing. (subreg:QI (value:SI 1:1) 0)
2733 and (subreg:QI (value:DI 2:2) 0) can be compared,
2734 even when the modes are different. */
2735 if (canon_value_cmp (x
, y
))
2741 if (GET_CODE (y
) == VALUE
)
2744 if (GET_CODE (x
) == GET_CODE (y
))
2745 /* Compare operands below. */;
2746 else if (GET_CODE (x
) < GET_CODE (y
))
2751 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2753 if (GET_CODE (x
) == DEBUG_EXPR
)
2755 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2756 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
2758 #ifdef ENABLE_CHECKING
2759 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2760 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
2765 fmt
= GET_RTX_FORMAT (code
);
2766 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2770 if (XWINT (x
, i
) == XWINT (y
, i
))
2772 else if (XWINT (x
, i
) < XWINT (y
, i
))
2779 if (XINT (x
, i
) == XINT (y
, i
))
2781 else if (XINT (x
, i
) < XINT (y
, i
))
2788 /* Compare the vector length first. */
2789 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
2790 /* Compare the vectors elements. */;
2791 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
2796 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2797 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
2798 XVECEXP (y
, i
, j
))))
2803 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
2809 if (XSTR (x
, i
) == XSTR (y
, i
))
2815 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
      /* These are just backpointers, so they don't matter.  */

      /* It is believed that rtx's at this level will never
         contain anything but integers and other rtx's,
         except for within LABEL_REFs and SYMBOL_REFs.  */
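/* Illustrative sketch only (not part of the pass): the shape of the
   total order loc_cmp above defines.  Nodes are ordered first by their
   code, then lexicographically by their operands, so the first operand
   that differs decides, and 0 means the two are interchangeable.  The
   types and names are simplified stand-ins invented for this example.  */

struct order_node
{
  int code;                     /* Kind of node.  */
  int n_ops;
  struct order_node **ops;
};

static int
order_cmp (const struct order_node *x, const struct order_node *y)
{
  int i, r;

  if (x == y)
    return 0;
  if (x->code != y->code)
    return x->code < y->code ? -1 : 1;
  if (x->n_ops != y->n_ops)
    return x->n_ops < y->n_ops ? -1 : 1;
  for (i = 0; i < x->n_ops; i++)
    if ((r = order_cmp (x->ops[i], y->ops[i])) != 0)
      return r;                 /* First differing operand decides.  */
  return 0;
}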
/* If decl or value DVP refers to VALUE from *LOC, add backlinks
   from VALUE to DVP.  */

add_value_chain (rtx *loc, void *dvp)
2846 decl_or_value dv
, ldv
;
2847 value_chain vc
, nvc
;
2850 if (GET_CODE (*loc
) == VALUE
)
2851 ldv
= dv_from_value (*loc
);
2852 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
2853 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
2857 if (dv_as_opaque (ldv
) == dvp
)
2860 dv
= (decl_or_value
) dvp
;
2861 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
2865 vc
= (value_chain
) pool_alloc (value_chain_pool
);
2869 *slot
= (void *) vc
;
2873 for (vc
= ((value_chain
) *slot
)->next
; vc
; vc
= vc
->next
)
2874 if (dv_as_opaque (vc
->dv
) == dv_as_opaque (dv
))
2882 vc
= (value_chain
) *slot
;
2883 nvc
= (value_chain
) pool_alloc (value_chain_pool
);
2885 nvc
->next
= vc
->next
;
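/* Illustrative sketch only (not part of the pass): the refcounted
   backlink bookkeeping that add_value_chain above maintains.  Each
   referenced value keeps a list of (referrer, refcount) records, so a
   referrer seen again only bumps its counter instead of growing the
   list.  The types and names are invented for this example.  */

#include <stdlib.h>

struct backlink
{
  void *referrer;
  int refcount;
  struct backlink *next;
};

static void
add_backlink (struct backlink **chain, void *referrer)
{
  struct backlink *bl;

  for (bl = *chain; bl; bl = bl->next)
    if (bl->referrer == referrer)
      {
        bl->refcount++;
        return;
      }

  bl = (struct backlink *) malloc (sizeof *bl);
  bl->referrer = referrer;
  bl->refcount = 1;
  bl->next = *chain;
  *chain = bl;
}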
/* If decl or value DVP refers to VALUEs from within LOC, add backlinks
   from those VALUEs to DVP.  */

add_value_chains (decl_or_value dv, rtx loc)
2897 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
2899 add_value_chain (&loc
, dv_as_opaque (dv
));
2905 loc
= XEXP (loc
, 0);
2906 for_each_rtx (&loc
, add_value_chain
, dv_as_opaque (dv
));
/* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
   VALUEs to DV.  At the same time get rid of ASM_OPERANDS from the locs
   list, as that is something we can never express in .debug_info and it
   can prevent reverse ops from being used.  */

add_cselib_value_chains (decl_or_value dv)
2917 struct elt_loc_list
**l
;
2919 for (l
= &CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; *l
;)
2920 if (GET_CODE ((*l
)->loc
) == ASM_OPERANDS
)
2924 for_each_rtx (&(*l
)->loc
, add_value_chain
, dv_as_opaque (dv
));
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
   from VALUE to DVP.  */

remove_value_chain (rtx *loc, void *dvp)
2935 decl_or_value dv
, ldv
;
2939 if (GET_CODE (*loc
) == VALUE
)
2940 ldv
= dv_from_value (*loc
);
2941 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
2942 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
2946 if (dv_as_opaque (ldv
) == dvp
)
2949 dv
= (decl_or_value
) dvp
;
2950 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
2952 for (vc
= (value_chain
) *slot
; vc
->next
; vc
= vc
->next
)
2953 if (dv_as_opaque (vc
->next
->dv
) == dv_as_opaque (dv
))
2955 value_chain dvc
= vc
->next
;
2956 gcc_assert (dvc
->refcount
> 0);
2957 if (--dvc
->refcount
== 0)
2959 vc
->next
= dvc
->next
;
2960 pool_free (value_chain_pool
, dvc
);
2961 if (vc
->next
== NULL
&& vc
== (value_chain
) *slot
)
2963 pool_free (value_chain_pool
, vc
);
2964 htab_clear_slot (value_chains
, slot
);
/* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
   from those VALUEs to DVP.  */

remove_value_chains (decl_or_value dv, rtx loc)
2978 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
2980 remove_value_chain (&loc
, dv_as_opaque (dv
));
2986 loc
= XEXP (loc
, 0);
2987 for_each_rtx (&loc
, remove_value_chain
, dv_as_opaque (dv
));
/* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
   VALUEs to DV.  */

remove_cselib_value_chains (decl_or_value dv)
2997 struct elt_loc_list
*l
;
2999 for (l
= CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; l
; l
= l
->next
)
3000 for_each_rtx (&l
->loc
, remove_value_chain
, dv_as_opaque (dv
));
/* Check the order of entries in one-part variables.  */

canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3008 variable var
= (variable
) *slot
;
3009 decl_or_value dv
= var
->dv
;
3010 location_chain node
, next
;
3012 #ifdef ENABLE_RTL_CHECKING
3014 for (i
= 0; i
< var
->n_var_parts
; i
++)
3015 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3016 gcc_assert (!var
->cur_loc_changed
&& !var
->in_changed_variables
);
3019 if (!dv_onepart_p (dv
))
3022 gcc_assert (var
->n_var_parts
== 1);
3023 node
= var
->var_part
[0].loc_chain
;
3026 while ((next
= node
->next
))
3028 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
   more likely to be chosen as canonical for an equivalence set.
   Ensure less likely values can reach more likely neighbors, making
   the connections bidirectional.  */

canonicalize_values_mark (void **slot, void *data)
3044 dataflow_set
*set
= (dataflow_set
*)data
;
3045 variable var
= (variable
) *slot
;
3046 decl_or_value dv
= var
->dv
;
3048 location_chain node
;
3050 if (!dv_is_value_p (dv
))
3053 gcc_assert (var
->n_var_parts
== 1);
3055 val
= dv_as_value (dv
);
3057 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3058 if (GET_CODE (node
->loc
) == VALUE
)
3060 if (canon_value_cmp (node
->loc
, val
))
3061 VALUE_RECURSED_INTO (val
) = true;
3064 decl_or_value odv
= dv_from_value (node
->loc
);
3065 void **oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3067 oslot
= set_slot_part (set
, val
, oslot
, odv
, 0,
3068 node
->init
, NULL_RTX
);
3070 VALUE_RECURSED_INTO (node
->loc
) = true;
/* Remove redundant entries from equivalence lists in onepart
   variables, canonicalizing equivalence sets into star shapes.  */

canonicalize_values_star (void **slot, void *data)
3083 dataflow_set
*set
= (dataflow_set
*)data
;
3084 variable var
= (variable
) *slot
;
3085 decl_or_value dv
= var
->dv
;
3086 location_chain node
;
3093 if (!dv_onepart_p (dv
))
3096 gcc_assert (var
->n_var_parts
== 1);
3098 if (dv_is_value_p (dv
))
3100 cval
= dv_as_value (dv
);
3101 if (!VALUE_RECURSED_INTO (cval
))
3103 VALUE_RECURSED_INTO (cval
) = false;
3113 gcc_assert (var
->n_var_parts
== 1);
3115 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3116 if (GET_CODE (node
->loc
) == VALUE
)
3119 if (VALUE_RECURSED_INTO (node
->loc
))
3121 if (canon_value_cmp (node
->loc
, cval
))
3130 if (!has_marks
|| dv_is_decl_p (dv
))
3133 /* Keep it marked so that we revisit it, either after visiting a
3134 child node, or after visiting a new parent that might be
3136 VALUE_RECURSED_INTO (val
) = true;
3138 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3139 if (GET_CODE (node
->loc
) == VALUE
3140 && VALUE_RECURSED_INTO (node
->loc
))
3144 VALUE_RECURSED_INTO (cval
) = false;
3145 dv
= dv_from_value (cval
);
3146 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3149 gcc_assert (dv_is_decl_p (var
->dv
));
3150 /* The canonical value was reset and dropped.
3152 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3155 var
= (variable
)*slot
;
3156 gcc_assert (dv_is_value_p (var
->dv
));
3157 if (var
->n_var_parts
== 0)
3159 gcc_assert (var
->n_var_parts
== 1);
3163 VALUE_RECURSED_INTO (val
) = false;
3168 /* Push values to the canonical one. */
3169 cdv
= dv_from_value (cval
);
3170 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3172 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3173 if (node
->loc
!= cval
)
3175 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3176 node
->init
, NULL_RTX
);
3177 if (GET_CODE (node
->loc
) == VALUE
)
3179 decl_or_value ndv
= dv_from_value (node
->loc
);
3181 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3184 if (canon_value_cmp (node
->loc
, val
))
3186 /* If it could have been a local minimum, it's not any more,
3187 since it's now neighbor to cval, so it may have to push
3188 to it. Conversely, if it wouldn't have prevailed over
3189 val, then whatever mark it has is fine: if it was to
3190 push, it will now push to a more canonical node, but if
3191 it wasn't, then it has already pushed any values it might
3193 VALUE_RECURSED_INTO (node
->loc
) = true;
3194 /* Make sure we visit node->loc by ensuring we cval is
3196 VALUE_RECURSED_INTO (cval
) = true;
3198 else if (!VALUE_RECURSED_INTO (node
->loc
))
3199 /* If we have no need to "recurse" into this node, it's
3200 already "canonicalized", so drop the link to the old
3202 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3204 else if (GET_CODE (node
->loc
) == REG
)
3206 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3208 /* Change an existing attribute referring to dv so that it
3209 refers to cdv, removing any duplicate this might
3210 introduce, and checking that no previous duplicates
3211 existed, all in a single pass. */
3215 if (list
->offset
== 0
3216 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3217 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3224 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3227 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3232 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3234 *listp
= list
->next
;
3235 pool_free (attrs_pool
, list
);
3240 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3243 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3245 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3250 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3252 *listp
= list
->next
;
3253 pool_free (attrs_pool
, list
);
3258 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3267 if (list
->offset
== 0
3268 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3269 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3279 cslot
= set_slot_part (set
, val
, cslot
, cdv
, 0,
3280 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3282 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3284 /* Variable may have been unshared. */
3285 var
= (variable
)*slot
;
3286 gcc_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3287 && var
->var_part
[0].loc_chain
->next
== NULL
);
3289 if (VALUE_RECURSED_INTO (cval
))
3290 goto restart_with_cval
;
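/* Illustrative sketch only (not part of the pass): the "star shape"
   canonicalize_values_star above works toward, shown as plain
   union-find with path compression over integer ids.  Every member of
   an equivalence class ends up pointing directly at one canonical
   representative instead of at arbitrary other members; here the
   smallest id is canonical, standing in for canon_value_cmp.  All
   names are invented for this example.  */

static int
find_canonical (int *parent, int v)
{
  while (parent[v] != v)
    {
      parent[v] = parent[parent[v]];    /* Compress toward the root.  */
      v = parent[v];
    }
  return v;
}

static void
make_equivalent (int *parent, int a, int b)
{
  a = find_canonical (parent, a);
  b = find_canonical (parent, b);
  if (a == b)
    return;
  if (a < b)
    parent[b] = a;              /* Smaller id wins as canonical.  */
  else
    parent[a] = b;
}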
/* Bind one-part variables to the canonical value in an equivalence
   set.  Not doing this causes dataflow convergence failure in rare
   circumstances, see PR42873.  Unfortunately we can't do this
   efficiently as part of canonicalize_values_star, since we may not
   have determined or even seen the canonical value of a set when we
   get to a variable that references another member of the set.  */

canonicalize_vars_star (void **slot, void *data)
3305 dataflow_set
*set
= (dataflow_set
*)data
;
3306 variable var
= (variable
) *slot
;
3307 decl_or_value dv
= var
->dv
;
3308 location_chain node
;
3313 location_chain cnode
;
3315 if (!dv_onepart_p (dv
) || dv_is_value_p (dv
))
3318 gcc_assert (var
->n_var_parts
== 1);
3320 node
= var
->var_part
[0].loc_chain
;
3322 if (GET_CODE (node
->loc
) != VALUE
)
3325 gcc_assert (!node
->next
);
3328 /* Push values to the canonical one. */
3329 cdv
= dv_from_value (cval
);
3330 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3333 cvar
= (variable
)*cslot
;
3334 gcc_assert (cvar
->n_var_parts
== 1);
3336 cnode
= cvar
->var_part
[0].loc_chain
;
  /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
     that are not "more canonical" than it.  */
3340 if (GET_CODE (cnode
->loc
) != VALUE
3341 || !canon_value_cmp (cnode
->loc
, cval
))
3344 /* CVAL was found to be non-canonical. Change the variable to point
3345 to the canonical VALUE. */
3346 gcc_assert (!cnode
->next
);
3349 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3350 node
->init
, node
->set_src
);
3351 slot
= clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
   corresponding entry in DSM->src.  Multi-part variables are combined
   with variable_union, whereas onepart dvs are combined with
   intersection.  */

variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3364 dataflow_set
*dst
= dsm
->dst
;
3366 variable s2var
, dvar
= NULL
;
3367 decl_or_value dv
= s1var
->dv
;
3368 bool onepart
= dv_onepart_p (dv
);
3371 location_chain node
, *nodep
;
  /* If the incoming onepart variable has an empty location list, then
     the intersection will be just as empty.  For other variables,
     it's always union.  */
  gcc_assert (s1var->n_var_parts
              && s1var->var_part[0].loc_chain);
3380 return variable_union (s1var
, dst
);
3382 gcc_assert (s1var
->n_var_parts
== 1
3383 && s1var
->var_part
[0].offset
== 0);
3385 dvhash
= dv_htab_hash (dv
);
3386 if (dv_is_value_p (dv
))
3387 val
= dv_as_value (dv
);
3391 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3394 dst_can_be_shared
= false;
3398 dsm
->src_onepart_cnt
--;
3399 gcc_assert (s2var
->var_part
[0].loc_chain
3400 && s2var
->n_var_parts
== 1
3401 && s2var
->var_part
[0].offset
== 0);
3403 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3406 dvar
= (variable
)*dstslot
;
3407 gcc_assert (dvar
->refcount
== 1
3408 && dvar
->n_var_parts
== 1
3409 && dvar
->var_part
[0].offset
== 0);
3410 nodep
= &dvar
->var_part
[0].loc_chain
;
3418 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3420 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3422 *dstslot
= dvar
= s2var
;
3427 dst_can_be_shared
= false;
3429 intersect_loc_chains (val
, nodep
, dsm
,
3430 s1var
->var_part
[0].loc_chain
, s2var
);
3436 dvar
= (variable
) pool_alloc (dv_pool (dv
));
3439 dvar
->n_var_parts
= 1;
3440 dvar
->cur_loc_changed
= false;
3441 dvar
->in_changed_variables
= false;
3442 dvar
->var_part
[0].offset
= 0;
3443 dvar
->var_part
[0].loc_chain
= node
;
3444 dvar
->var_part
[0].cur_loc
= NULL
;
3447 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
3449 gcc_assert (!*dstslot
);
3457 nodep
= &dvar
->var_part
[0].loc_chain
;
3458 while ((node
= *nodep
))
3460 location_chain
*nextp
= &node
->next
;
3462 if (GET_CODE (node
->loc
) == REG
)
3466 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
3467 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
3468 && dv_is_value_p (list
->dv
))
3472 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
3474 /* If this value became canonical for another value that had
3475 this register, we want to leave it alone. */
3476 else if (dv_as_value (list
->dv
) != val
)
3478 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
3480 node
->init
, NULL_RTX
);
3481 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
3483 /* Since nextp points into the removed node, we can't
3484 use it. The pointer to the next node moved to nodep.
3485 However, if the variable we're walking is unshared
3486 during our walk, we'll keep walking the location list
3487 of the previously-shared variable, in which case the
3488 node won't have been removed, and we'll want to skip
3489 it. That's why we test *nodep here. */
3495 /* Canonicalization puts registers first, so we don't have to
3501 if (dvar
!= (variable
)*dstslot
)
3502 dvar
= (variable
)*dstslot
;
3503 nodep
= &dvar
->var_part
[0].loc_chain
;
3507 /* Mark all referenced nodes for canonicalization, and make sure
3508 we have mutual equivalence links. */
3509 VALUE_RECURSED_INTO (val
) = true;
3510 for (node
= *nodep
; node
; node
= node
->next
)
3511 if (GET_CODE (node
->loc
) == VALUE
)
3513 VALUE_RECURSED_INTO (node
->loc
) = true;
3514 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
3515 node
->init
, NULL
, INSERT
);
3518 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3519 gcc_assert (*dstslot
== dvar
);
3520 canonicalize_values_star (dstslot
, dst
);
3521 #ifdef ENABLE_CHECKING
3523 == shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
));
3525 dvar
= (variable
)*dstslot
;
3529 bool has_value
= false, has_other
= false;
3531 /* If we have one value and anything else, we're going to
3532 canonicalize this, so make sure all values have an entry in
3533 the table and are marked for canonicalization. */
3534 for (node
= *nodep
; node
; node
= node
->next
)
3536 if (GET_CODE (node
->loc
) == VALUE
)
3538 /* If this was marked during register canonicalization,
3539 we know we have to canonicalize values. */
3554 if (has_value
&& has_other
)
3556 for (node
= *nodep
; node
; node
= node
->next
)
3558 if (GET_CODE (node
->loc
) == VALUE
)
3560 decl_or_value dv
= dv_from_value (node
->loc
);
3563 if (shared_hash_shared (dst
->vars
))
3564 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
3566 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
3570 variable var
= (variable
) pool_alloc (dv_pool (dv
));
3573 var
->n_var_parts
= 1;
3574 var
->cur_loc_changed
= false;
3575 var
->in_changed_variables
= false;
3576 var
->var_part
[0].offset
= 0;
3577 var
->var_part
[0].loc_chain
= NULL
;
3578 var
->var_part
[0].cur_loc
= NULL
;
3582 VALUE_RECURSED_INTO (node
->loc
) = true;
3586 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3587 gcc_assert (*dstslot
== dvar
);
3588 canonicalize_values_star (dstslot
, dst
);
3589 #ifdef ENABLE_CHECKING
3591 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
3594 dvar
= (variable
)*dstslot
;
3598 if (!onepart_variable_different_p (dvar
, s2var
))
3600 variable_htab_free (dvar
);
3601 *dstslot
= dvar
= s2var
;
3604 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
3606 variable_htab_free (dvar
);
3607 *dstslot
= dvar
= s1var
;
3609 dst_can_be_shared
= false;
3612 dst_can_be_shared
= false;
/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
   multi-part variable.  Unions of multi-part variables and
   intersections of one-part ones will be handled in
   variable_merge_over_cur().  */

variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3625 dataflow_set
*dst
= dsm
->dst
;
3626 decl_or_value dv
= s2var
->dv
;
3627 bool onepart
= dv_onepart_p (dv
);
3631 void **dstp
= shared_hash_find_slot (dst
->vars
, dv
);
3637 dsm
->src_onepart_cnt
++;
/* Combine dataflow set information from SRC2 into DST, using PDST
   to carry over information across passes.  */

dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3647 dataflow_set cur
= *dst
;
3648 dataflow_set
*src1
= &cur
;
3649 struct dfset_merge dsm
;
3651 size_t src1_elems
, src2_elems
;
3655 src1_elems
= htab_elements (shared_hash_htab (src1
->vars
));
3656 src2_elems
= htab_elements (shared_hash_htab (src2
->vars
));
3657 dataflow_set_init (dst
);
3658 dst
->stack_adjust
= cur
.stack_adjust
;
3659 shared_hash_destroy (dst
->vars
);
3660 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
3661 dst
->vars
->refcount
= 1;
3663 = htab_create (MAX (src1_elems
, src2_elems
), variable_htab_hash
,
3664 variable_htab_eq
, variable_htab_free
);
3666 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3667 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
3672 dsm
.src_onepart_cnt
= 0;
3674 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm
.src
->vars
), var
, variable
, hi
)
3675 variable_merge_over_src (var
, &dsm
);
3676 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm
.cur
->vars
), var
, variable
, hi
)
3677 variable_merge_over_cur (var
, &dsm
);
3679 if (dsm
.src_onepart_cnt
)
3680 dst_can_be_shared
= false;
3682 dataflow_set_destroy (src1
);
/* Mark register equivalences.  */

dataflow_set_equiv_regs (dataflow_set *set)
3693 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3695 rtx canon
[NUM_MACHINE_MODES
];
3697 memset (canon
, 0, sizeof (canon
));
3699 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3700 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
3702 rtx val
= dv_as_value (list
->dv
);
3703 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
3706 if (canon_value_cmp (val
, cval
))
3710 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3711 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3713 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3718 if (dv_is_value_p (list
->dv
))
3720 rtx val
= dv_as_value (list
->dv
);
3725 VALUE_RECURSED_INTO (val
) = true;
3726 set_variable_part (set
, val
, dv_from_value (cval
), 0,
3727 VAR_INIT_STATUS_INITIALIZED
,
3731 VALUE_RECURSED_INTO (cval
) = true;
3732 set_variable_part (set
, cval
, list
->dv
, 0,
3733 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
3736 for (listp
= &set
->regs
[i
]; (list
= *listp
);
3737 listp
= list
? &list
->next
: listp
)
3738 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3740 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3746 if (dv_is_value_p (list
->dv
))
3748 rtx val
= dv_as_value (list
->dv
);
3749 if (!VALUE_RECURSED_INTO (val
))
3753 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
3754 canonicalize_values_star (slot
, set
);
/* Remove any redundant values in the location list of VAR, which must
   be unshared and 1-part.  */

remove_duplicate_values (variable var)
3767 location_chain node
, *nodep
;
3769 gcc_assert (dv_onepart_p (var
->dv
));
3770 gcc_assert (var
->n_var_parts
== 1);
3771 gcc_assert (var
->refcount
== 1);
3773 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
3775 if (GET_CODE (node
->loc
) == VALUE
)
3777 if (VALUE_RECURSED_INTO (node
->loc
))
3779 /* Remove duplicate value node. */
3780 *nodep
= node
->next
;
3781 pool_free (loc_chain_pool
, node
);
3785 VALUE_RECURSED_INTO (node
->loc
) = true;
3787 nodep
= &node
->next
;
3790 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3791 if (GET_CODE (node
->loc
) == VALUE
)
3793 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
3794 VALUE_RECURSED_INTO (node
->loc
) = false;
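/* Illustrative sketch only (not part of the pass): the two-pass mark
   scheme remove_duplicate_values above relies on.  The first pass
   deletes any entry whose value is already marked and marks the rest;
   the second pass clears the marks again so the bits can be reused.
   The types and names are simplified stand-ins for this example.  */

#include <stdlib.h>

struct marked_node
{
  int *mark;                    /* Shared mark bit of this entry's value.  */
  struct marked_node *next;
};

static void
remove_marked_duplicates (struct marked_node **nodep)
{
  struct marked_node *node;

  for (node = *nodep; node; node = *nodep)
    {
      if (*node->mark)
        {
          *nodep = node->next;  /* Duplicate: unlink and free it.  */
          free (node);
          continue;
        }
      *node->mark = 1;
      nodep = &node->next;
    }
}

static void
clear_marks (struct marked_node *node)
{
  for (; node; node = node->next)
    *node->mark = 0;
}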
/* Hash table iteration argument passed to variable_post_merge.  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  */
  dataflow_set **permp;
};

/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.  */

variable_post_merge_new_vals (void **slot, void *info)
3815 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
3816 dataflow_set
*set
= dfpm
->set
;
3817 variable var
= (variable
)*slot
;
3818 location_chain node
;
3820 if (!dv_onepart_p (var
->dv
) || !var
->n_var_parts
)
3823 gcc_assert (var
->n_var_parts
== 1);
3825 if (dv_is_decl_p (var
->dv
))
3827 bool check_dupes
= false;
3830 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3832 if (GET_CODE (node
->loc
) == VALUE
)
3833 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
3834 else if (GET_CODE (node
->loc
) == REG
)
3836 attrs att
, *attp
, *curp
= NULL
;
3838 if (var
->refcount
!= 1)
3840 slot
= unshare_variable (set
, slot
, var
,
3841 VAR_INIT_STATUS_INITIALIZED
);
3842 var
= (variable
)*slot
;
3846 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
3848 if (att
->offset
== 0
3849 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3851 if (dv_is_value_p (att
->dv
))
3853 rtx cval
= dv_as_value (att
->dv
);
3858 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
3866 if ((*curp
)->offset
== 0
3867 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
3868 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
3871 curp
= &(*curp
)->next
;
3882 *dfpm
->permp
= XNEW (dataflow_set
);
3883 dataflow_set_init (*dfpm
->permp
);
3886 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
3887 att
; att
= att
->next
)
3888 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3890 gcc_assert (att
->offset
== 0
3891 && dv_is_value_p (att
->dv
));
3892 val_reset (set
, att
->dv
);
3899 cval
= dv_as_value (cdv
);
3903 /* Create a unique value to hold this register,
3904 that ought to be found and reused in
3905 subsequent rounds. */
3907 gcc_assert (!cselib_lookup (node
->loc
,
3908 GET_MODE (node
->loc
), 0));
3909 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1);
3910 cselib_preserve_value (v
);
3911 cselib_invalidate_rtx (node
->loc
);
3913 cdv
= dv_from_value (cval
);
3916 "Created new value %u:%u for reg %i\n",
3917 v
->uid
, v
->hash
, REGNO (node
->loc
));
3920 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
3921 VAR_INIT_STATUS_INITIALIZED
,
3922 cdv
, 0, NULL
, INSERT
);
3928 /* Remove attribute referring to the decl, which now
3929 uses the value for the register, already existing or
3930 to be added when we bring perm in. */
3933 pool_free (attrs_pool
, att
);
3938 remove_duplicate_values (var
);
/* Reset values in the permanent set that are not associated with the
   chosen expression.  */

variable_post_merge_perm_vals (void **pslot, void *info)
3950 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
3951 dataflow_set
*set
= dfpm
->set
;
3952 variable pvar
= (variable
)*pslot
, var
;
3953 location_chain pnode
;
3957 gcc_assert (dv_is_value_p (pvar
->dv
)
3958 && pvar
->n_var_parts
== 1);
3959 pnode
= pvar
->var_part
[0].loc_chain
;
3962 && REG_P (pnode
->loc
));
3966 var
= shared_hash_find (set
->vars
, dv
);
3969 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
3971 val_reset (set
, dv
);
3974 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
3975 if (att
->offset
== 0
3976 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
3977 && dv_is_value_p (att
->dv
))
3980 /* If there is a value associated with this register already, create
3982 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
3984 rtx cval
= dv_as_value (att
->dv
);
3985 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
3986 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
3991 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
3993 variable_union (pvar
, set
);
/* Just checking stuff and registering register attributes for
   now.  */

dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4005 struct dfset_post_merge dfpm
;
4010 htab_traverse (shared_hash_htab (set
->vars
), variable_post_merge_new_vals
,
4013 htab_traverse (shared_hash_htab ((*permp
)->vars
),
4014 variable_post_merge_perm_vals
, &dfpm
);
4015 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_values_star
, set
);
4016 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_vars_star
, set
);
/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  */

static location_chain
find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4026 location_chain node
;
4029 location_chain where
= NULL
;
4034 gcc_assert (GET_CODE (val
) == VALUE
4035 && !VALUE_RECURSED_INTO (val
));
4037 dv
= dv_from_value (val
);
4038 var
= (variable
) htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
4043 gcc_assert (dv_onepart_p (var
->dv
));
4045 if (!var
->n_var_parts
)
4048 gcc_assert (var
->var_part
[0].offset
== 0);
4050 VALUE_RECURSED_INTO (val
) = true;
4052 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
4053 if (MEM_P (node
->loc
) && MEM_EXPR (node
->loc
) == expr
4054 && MEM_OFFSET (node
->loc
) == 0)
4059 else if (GET_CODE (node
->loc
) == VALUE
4060 && !VALUE_RECURSED_INTO (node
->loc
)
4061 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4064 VALUE_RECURSED_INTO (val
) = false;
/* Return TRUE if the value of MEM may vary across a call.  */

mem_dies_at_call (rtx mem)
  tree expr = MEM_EXPR (mem);
4080 decl
= get_base_address (expr
);
4088 return (may_be_aliased (decl
)
4089 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
/* Remove all MEMs from the location list of a hash table entry for a
   one-part variable, except those whose MEM attributes map back to
   the variable itself, directly or within a VALUE.  */

dataflow_set_preserve_mem_locs (void **slot, void *data)
4099 dataflow_set
*set
= (dataflow_set
*) data
;
4100 variable var
= (variable
) *slot
;
4102 if (dv_is_decl_p (var
->dv
) && dv_onepart_p (var
->dv
))
4104 tree decl
= dv_as_decl (var
->dv
);
4105 location_chain loc
, *locp
;
4106 bool changed
= false;
4108 if (!var
->n_var_parts
)
4111 gcc_assert (var
->n_var_parts
== 1);
4113 if (shared_var_p (var
, set
->vars
))
4115 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
          /* We want to remove dying MEMs that don't refer to DECL.  */
4119 if (GET_CODE (loc
->loc
) == MEM
4120 && (MEM_EXPR (loc
->loc
) != decl
4121 || MEM_OFFSET (loc
->loc
))
4122 && !mem_dies_at_call (loc
->loc
))
4124 /* We want to move here MEMs that do refer to DECL. */
4125 else if (GET_CODE (loc
->loc
) == VALUE
4126 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4127 shared_hash_htab (set
->vars
)))
4134 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4135 var
= (variable
)*slot
;
4136 gcc_assert (var
->n_var_parts
== 1);
4139 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4142 rtx old_loc
= loc
->loc
;
4143 if (GET_CODE (old_loc
) == VALUE
)
4145 location_chain mem_node
4146 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4147 shared_hash_htab (set
->vars
));
4149 /* ??? This picks up only one out of multiple MEMs that
4150 refer to the same variable. Do we ever need to be
4151 concerned about dealing with more than one, or, given
4152 that they should all map to the same variable
4153 location, their addresses will have been merged and
4154 they will be regarded as equivalent? */
4157 loc
->loc
= mem_node
->loc
;
4158 loc
->set_src
= mem_node
->set_src
;
4159 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4163 if (GET_CODE (loc
->loc
) != MEM
4164 || (MEM_EXPR (loc
->loc
) == decl
4165 && MEM_OFFSET (loc
->loc
) == 0)
4166 || !mem_dies_at_call (loc
->loc
))
4168 if (old_loc
!= loc
->loc
&& emit_notes
)
4170 if (old_loc
== var
->var_part
[0].cur_loc
)
4173 var
->var_part
[0].cur_loc
= NULL
;
4174 var
->cur_loc_changed
= true;
4176 add_value_chains (var
->dv
, loc
->loc
);
4177 remove_value_chains (var
->dv
, old_loc
);
4185 remove_value_chains (var
->dv
, old_loc
);
4186 if (old_loc
== var
->var_part
[0].cur_loc
)
4189 var
->var_part
[0].cur_loc
= NULL
;
4190 var
->cur_loc_changed
= true;
4194 pool_free (loc_chain_pool
, loc
);
4197 if (!var
->var_part
[0].loc_chain
)
4203 variable_was_changed (var
, set
);
/* Remove all MEMs from the location list of a hash table entry for a
   value.  */

dataflow_set_remove_mem_locs (void **slot, void *data)
4215 dataflow_set
*set
= (dataflow_set
*) data
;
4216 variable var
= (variable
) *slot
;
4218 if (dv_is_value_p (var
->dv
))
4220 location_chain loc
, *locp
;
4221 bool changed
= false;
4223 gcc_assert (var
->n_var_parts
== 1);
4225 if (shared_var_p (var
, set
->vars
))
4227 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4228 if (GET_CODE (loc
->loc
) == MEM
4229 && mem_dies_at_call (loc
->loc
))
4235 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4236 var
= (variable
)*slot
;
4237 gcc_assert (var
->n_var_parts
== 1);
4240 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4243 if (GET_CODE (loc
->loc
) != MEM
4244 || !mem_dies_at_call (loc
->loc
))
4251 remove_value_chains (var
->dv
, loc
->loc
);
          /* If we have deleted the location which was last emitted
             we have to emit a new location, so add the variable to the
             set of changed variables.  */
4256 if (var
->var_part
[0].cur_loc
== loc
->loc
)
4259 var
->var_part
[0].cur_loc
= NULL
;
4260 var
->cur_loc_changed
= true;
4262 pool_free (loc_chain_pool
, loc
);
4265 if (!var
->var_part
[0].loc_chain
)
4271 variable_was_changed (var
, set
);
/* Remove all variable-location information about call-clobbered
   registers, as well as associations between MEMs and VALUEs.  */

dataflow_set_clear_at_call (dataflow_set *set)
4285 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
4286 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, r
))
4287 var_regno_delete (set
, r
);
4289 if (MAY_HAVE_DEBUG_INSNS
)
4291 set
->traversed_vars
= set
->vars
;
4292 htab_traverse (shared_hash_htab (set
->vars
),
4293 dataflow_set_preserve_mem_locs
, set
);
4294 set
->traversed_vars
= set
->vars
;
4295 htab_traverse (shared_hash_htab (set
->vars
), dataflow_set_remove_mem_locs
,
4297 set
->traversed_vars
= NULL
;
4302 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4304 location_chain lc1
, lc2
;
4306 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4308 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4310 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4312 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4315 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
/* Return true if one-part variables VAR1 and VAR2 are different.
   They must be in canonical order.  */

onepart_variable_different_p (variable var1, variable var2)
4330 location_chain lc1
, lc2
;
4335 gcc_assert (var1
->n_var_parts
== 1
4336 && var2
->n_var_parts
== 1);
4338 lc1
= var1
->var_part
[0].loc_chain
;
4339 lc2
= var2
->var_part
[0].loc_chain
;
4341 gcc_assert (lc1
&& lc2
);
4345 if (loc_cmp (lc1
->loc
, lc2
->loc
))
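/* Illustrative sketch only (not part of the pass): why the canonical
   order matters for onepart_variable_different_p above.  Because both
   chains are kept sorted the same way, inequality can be detected with
   a single parallel walk instead of a quadratic search.  The types and
   names are invented for this example.  */

struct canon_node
{
  int loc;
  struct canon_node *next;
};

static int
canon_chains_different_p (struct canon_node *a, struct canon_node *b)
{
  for (; a && b; a = a->next, b = b->next)
    if (a->loc != b->loc)
      return 1;
  return a != b;                /* Chains of different length differ too.  */
}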
/* Return true if variables VAR1 and VAR2 are different.  */

variable_different_p (variable var1, variable var2)
4364 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4367 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4369 if (var1
->var_part
[i
].offset
!= var2
->var_part
[i
].offset
)
4371 /* One-part values have locations in a canonical order. */
4372 if (i
== 0 && var1
->var_part
[i
].offset
== 0 && dv_onepart_p (var1
->dv
))
4374 gcc_assert (var1
->n_var_parts
== 1
4375 && dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
));
4376 return onepart_variable_different_p (var1
, var2
);
4378 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4380 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  */

dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4394 if (old_set
->vars
== new_set
->vars
)
4397 if (htab_elements (shared_hash_htab (old_set
->vars
))
4398 != htab_elements (shared_hash_htab (new_set
->vars
)))
4401 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set
->vars
), var1
, variable
, hi
)
4403 htab_t htab
= shared_hash_htab (new_set
->vars
);
4404 variable var2
= (variable
) htab_find_with_hash (htab
, var1
->dv
,
4405 dv_htab_hash (var1
->dv
));
4408 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4410 fprintf (dump_file
, "dataflow difference found: removal of:\n");
4416 if (variable_different_p (var1
, var2
))
4418 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4420 fprintf (dump_file
, "dataflow difference found: "
4421 "old and new follow:\n");
  /* No need to traverse the second hashtab.  If both have the same
     number of elements and all entries of the second one were found
     in the first one, then the second can't have any extra entries.  */
/* Free the contents of dataflow set SET.  */

dataflow_set_destroy (dataflow_set *set)
4442 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4443 attrs_list_clear (&set
->regs
[i
]);
4445 shared_hash_destroy (set
->vars
);
/* Return true if RTL X contains a SYMBOL_REF.  */

contains_symbol_ref (rtx x)
4461 code
= GET_CODE (x
);
4462 if (code
== SYMBOL_REF
)
4465 fmt
= GET_RTX_FORMAT (code
);
4466 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4470 if (contains_symbol_ref (XEXP (x
, i
)))
4473 else if (fmt
[i
] == 'E')
4476 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4477 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
/* Shall EXPR be tracked?  */

track_expr_p (tree expr, bool need_rtl)
4493 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
4494 return DECL_RTL_SET_P (expr
);
4496 /* If EXPR is not a parameter or a variable do not track it. */
4497 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
4500 /* It also must have a name... */
4501 if (!DECL_NAME (expr
) && need_rtl
)
4504 /* ... and a RTL assigned to it. */
4505 decl_rtl
= DECL_RTL_IF_SET (expr
);
4506 if (!decl_rtl
&& need_rtl
)
  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
4513 if (DECL_DEBUG_EXPR_IS_FROM (realdecl
))
4515 realdecl
= DECL_DEBUG_EXPR (realdecl
);
4516 if (realdecl
== NULL_TREE
)
4518 else if (!DECL_P (realdecl
))
4520 if (handled_component_p (realdecl
))
4522 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
4524 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
4526 if (!DECL_P (innerdecl
)
4527 || DECL_IGNORED_P (innerdecl
)
4528 || TREE_STATIC (innerdecl
)
4530 || bitpos
+ bitsize
> 256
4531 || bitsize
!= maxsize
)
  /* Do not track EXPR if its REALDECL should be ignored for debugging
     purposes.  */
4543 if (DECL_IGNORED_P (realdecl
))
  /* Do not track global variables until we are able to emit correct location
     list for them.  */
4548 if (TREE_STATIC (realdecl
))
4551 /* When the EXPR is a DECL for alias of some variable (see example)
4552 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4553 DECL_RTL contains SYMBOL_REF.
4556 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4559 if (decl_rtl
&& MEM_P (decl_rtl
)
4560 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
4563 /* If RTX is a memory it should not be very large (because it would be
4564 an array or struct). */
4565 if (decl_rtl
&& MEM_P (decl_rtl
))
4567 /* Do not track structures and arrays. */
4568 if (GET_MODE (decl_rtl
) == BLKmode
4569 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
4571 if (MEM_SIZE (decl_rtl
)
4572 && INTVAL (MEM_SIZE (decl_rtl
)) > MAX_VAR_PARTS
)
4576 DECL_CHANGED (expr
) = 0;
4577 DECL_CHANGED (realdecl
) = 0;
/* Determine whether a given LOC refers to the same variable part as
   EXPR+OFFSET.  */

same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4588 HOST_WIDE_INT offset2
;
4590 if (! DECL_P (expr
))
4595 expr2
= REG_EXPR (loc
);
4596 offset2
= REG_OFFSET (loc
);
4598 else if (MEM_P (loc
))
4600 expr2
= MEM_EXPR (loc
);
4601 offset2
= INT_MEM_OFFSET (loc
);
4606 if (! expr2
|| ! DECL_P (expr2
))
4609 expr
= var_debug_decl (expr
);
4610 expr2
= var_debug_decl (expr2
);
4612 return (expr
== expr2
&& offset
== offset2
);
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
             enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4629 enum machine_mode mode
;
4631 if (expr
== NULL
|| !track_expr_p (expr
, true))
4634 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4635 whole subreg, but only the old inner part is really relevant. */
4636 mode
= GET_MODE (loc
);
4637 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
4639 enum machine_mode pseudo_mode
;
4641 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
4642 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
4644 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
4649 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4650 Do the same if we are storing to a register and EXPR occupies
4651 the whole of register LOC; in that case, the whole of EXPR is
4652 being changed. We exclude complex modes from the second case
4653 because the real and imaginary parts are represented as separate
4654 pseudo registers, even if the whole complex value fits into one
4656 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
4658 && !COMPLEX_MODE_P (DECL_MODE (expr
))
4659 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
4660 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
4662 mode
= DECL_MODE (expr
);
4666 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
4672 *offset_out
= offset
;
/* Return the MODE lowpart of LOC, or null if LOC is not something we
   want to track.  When returning nonnull, make sure that the attributes
   on the returned value are updated.  */

var_lowpart (enum machine_mode mode, rtx loc)
4683 unsigned int offset
, reg_offset
, regno
;
4685 if (!REG_P (loc
) && !MEM_P (loc
))
4688 if (GET_MODE (loc
) == mode
)
4691 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
4694 return adjust_address_nv (loc
, mode
, offset
);
4696 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
4697 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
4699 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it.  */
static rtx cfa_base_rtx;

/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
/* Find a VALUE corresponding to X.  */

static inline cselib_val *
find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
      /* This is called after uses are set up and before stores are
         processed by cselib, so it's safe to look up srcs, but not
         dsts.  So we look up expressions that appear in srcs or in
         dest expressions, but we search the sets array for dests of
         stores.  */
4740 for (i
= 0; i
< cui
->n_sets
; i
++)
4741 if (cui
->sets
[i
].dest
== x
)
4742 return cui
->sets
[i
].src_elt
;
4745 return cselib_lookup (x
, mode
, 0);
/* Helper function to get mode of MEM's address.  */

static inline enum machine_mode
get_address_mode (rtx mem)
4756 enum machine_mode mode
= GET_MODE (XEXP (mem
, 0));
4757 if (mode
!= VOIDmode
)
4759 return targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (mem
));
/* Replace all registers and addresses in an expression with VALUE
   expressions that map back to them, unless the expression is a
   register.  If no mapping is or can be performed, returns NULL.  */

replace_expr_with_values (rtx loc)
4771 else if (MEM_P (loc
))
4773 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
4774 get_address_mode (loc
), 0);
4776 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
4781 return cselib_subst_to_values (loc
);
/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4792 if (cui
&& cui
->sets
)
4794 if (GET_CODE (loc
) == VAR_LOCATION
)
4796 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
4798 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
4799 if (! VAR_LOC_UNKNOWN_P (ploc
))
4801 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1);
4803 /* ??? flag_float_store and volatile mems are never
4804 given values, but we could in theory use them for
4806 gcc_assert (val
|| 1);
4814 if (REG_P (loc
) || MEM_P (loc
))
4817 *modep
= GET_MODE (loc
);
4821 || (find_use_val (loc
, GET_MODE (loc
), cui
)
4822 && cselib_lookup (XEXP (loc
, 0),
4823 get_address_mode (loc
), 0)))
4828 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
4830 if (val
&& !cselib_preserved_value_p (val
))
4838 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
4840 if (loc
== cfa_base_rtx
)
4842 expr
= REG_EXPR (loc
);
4845 return MO_USE_NO_VAR
;
4846 else if (target_for_debug_bind (var_debug_decl (expr
)))
4848 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
4849 false, modep
, NULL
))
4852 return MO_USE_NO_VAR
;
4854 else if (MEM_P (loc
))
4856 expr
= MEM_EXPR (loc
);
4860 else if (target_for_debug_bind (var_debug_decl (expr
)))
4862 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
4863 false, modep
, NULL
))
/* Log to OUT information about micro-operation MOPT involving X in
   INSN of BB.  */

static void
log_op_type (rtx x, basic_block bb, rtx insn,
             enum micro_operation_type mopt, FILE *out)
4879 fprintf (out
, "bb %i op %i insn %i %s ",
4880 bb
->index
, VEC_length (micro_operation
, VTI (bb
)->mos
),
4881 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
4882 print_inline_rtx (out
, x
, 2);
/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
/* Whether the location is a CONCAT of the MO_VAL_SET expression and
   a reverse operation that should be handled afterwards.  */
#define VAL_EXPR_HAS_REVERSE(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
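/* Illustrative sketch only (not part of the pass itself): these accessors
   reuse spare rtx flag bits on the CONCATs built by add_uses/add_stores,
   roughly like

     rtx c = gen_rtx_CONCAT (mode, val->val_rtx, loc);
     VAL_HOLDS_TRACK_EXPR (c) = 1;
     ...
     if (VAL_NEEDS_RESOLUTION (c))
       ... map the location back to incoming VALUEs ...

   RTL_FLAG_CHECK1 merely asserts that the rtx really is a CONCAT.  */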
/* All preserved VALUEs.  */

static VEC (rtx, heap) *preserved_values;

/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  cselib_preserve_value (val);
  VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
}
/* Helper function for MO_VAL_LOC handling.  Return non-zero if *X
   contains any rtx that is not suitable for CONST use and has not
   been replaced by a VALUE.  */

static int
non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
4930 switch (GET_CODE (*x
))
4941 return !MEM_READONLY_P (*x
);
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  INSN is the instruction which LOC is part of.  */

static int
add_uses (rtx *ploc, void *data)
4954 enum machine_mode mode
= VOIDmode
;
4955 struct count_use_info
*cui
= (struct count_use_info
*)data
;
4956 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
4958 if (type
!= MO_CLOBBER
)
4960 basic_block bb
= cui
->bb
;
4964 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
4965 mo
.insn
= cui
->insn
;
4967 if (type
== MO_VAL_LOC
)
4970 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
4973 gcc_assert (cui
->sets
);
4976 && !REG_P (XEXP (vloc
, 0))
4977 && !MEM_P (XEXP (vloc
, 0))
4978 && (GET_CODE (XEXP (vloc
, 0)) != PLUS
4979 || XEXP (XEXP (vloc
, 0), 0) != cfa_base_rtx
4980 || !CONST_INT_P (XEXP (XEXP (vloc
, 0), 1))))
4983 enum machine_mode address_mode
= get_address_mode (mloc
);
4985 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0);
4987 if (val
&& !cselib_preserved_value_p (val
))
4989 micro_operation moa
;
4990 preserve_value (val
);
4991 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
4992 moa
.type
= MO_VAL_USE
;
4993 moa
.insn
= cui
->insn
;
4994 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
4995 val
->val_rtx
, mloc
);
4996 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4997 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
4998 moa
.type
, dump_file
);
4999 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5003 if (CONSTANT_P (vloc
)
5004 && (GET_CODE (vloc
) != CONST
5005 || for_each_rtx (&vloc
, non_suitable_const
, NULL
)))
5006 /* For constants don't look up any value. */;
5007 else if (!VAR_LOC_UNKNOWN_P (vloc
)
5008 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5010 enum machine_mode mode2
;
5011 enum micro_operation_type type2
;
5012 rtx nloc
= replace_expr_with_values (vloc
);
5016 oloc
= shallow_copy_rtx (oloc
);
5017 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5020 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5022 type2
= use_type (vloc
, 0, &mode2
);
5024 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5025 || type2
== MO_CLOBBER
);
5027 if (type2
== MO_CLOBBER
5028 && !cselib_preserved_value_p (val
))
5030 VAL_NEEDS_RESOLUTION (oloc
) = 1;
5031 preserve_value (val
);
5034 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5036 oloc
= shallow_copy_rtx (oloc
);
5037 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5042 else if (type
== MO_VAL_USE
)
5044 enum machine_mode mode2
= VOIDmode
;
5045 enum micro_operation_type type2
;
5046 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5047 rtx vloc
, oloc
= loc
, nloc
;
5049 gcc_assert (cui
->sets
);
5052 && !REG_P (XEXP (oloc
, 0))
5053 && !MEM_P (XEXP (oloc
, 0))
5054 && (GET_CODE (XEXP (oloc
, 0)) != PLUS
5055 || XEXP (XEXP (oloc
, 0), 0) != cfa_base_rtx
5056 || !CONST_INT_P (XEXP (XEXP (oloc
, 0), 1))))
5059 enum machine_mode address_mode
= get_address_mode (mloc
);
5061 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0);
5063 if (val
&& !cselib_preserved_value_p (val
))
5065 micro_operation moa
;
5066 preserve_value (val
);
5067 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
5068 moa
.type
= MO_VAL_USE
;
5069 moa
.insn
= cui
->insn
;
5070 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
5071 val
->val_rtx
, mloc
);
5072 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5073 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5074 moa
.type
, dump_file
);
5075 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5079 type2
= use_type (loc
, 0, &mode2
);
5081 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5082 || type2
== MO_CLOBBER
);
5084 if (type2
== MO_USE
)
5085 vloc
= var_lowpart (mode2
, loc
);
          /* The loc of a MO_VAL_USE may have two forms:

             (concat val src): val is at src, a value-based
             representation.

             (concat (concat val use) src): same as above, with use as
             the MO_USE tracked value, if it differs from src.  */
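          /* Purely illustrative example (value and register numbers are
             invented): a use of bx whose cselib VALUE is 7 would be
             logged in the first form as

               (concat (value:SI 7) (reg:SI 3 bx))

             while a tracked MEM use whose value-based form differs from
             the tracked lowpart would use the nested second form.  */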
5099 nloc
= replace_expr_with_values (loc
);
5104 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5106 oloc
= val
->val_rtx
;
5108 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5110 if (type2
== MO_USE
)
5111 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5112 if (!cselib_preserved_value_p (val
))
5114 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5115 preserve_value (val
);
5119 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5121 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5122 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5123 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  for_each_rtx (x, add_uses, cui);
}
/* Attempt to reverse the EXPR operation in the debug info.  Say for
   reg1 = reg2 + 6, even when reg2 is no longer live we can express
   its value as VAL - 6.  */
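/* Illustrative sketch (register and value numbers invented): for an insn

     (set (reg:SI 2) (plus:SI (reg:SI 1) (const_int 6)))

   with VAL being the VALUE recorded for reg 2, the CONCAT returned below
   pairs the preserved VALUE of reg 1 with an expression that recomputes
   it, roughly

     (concat:SI (value:SI <reg1>) (plus:SI VAL (const_int -6)))

   so reg 1's contents stay expressible after reg 1 itself dies.  The
   exact rtx shape depends on simplify_gen_binary.  */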
static rtx
reverse_op (rtx val, const_rtx expr)
5148 if (GET_CODE (expr
) != SET
)
5151 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5154 src
= SET_SRC (expr
);
5155 switch (GET_CODE (src
))
5169 if (!REG_P (XEXP (src
, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src
)))
5172 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0);
5173 if (!v
|| !cselib_preserved_value_p (v
))
5176 switch (GET_CODE (src
))
5180 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5182 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5186 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
5198 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5200 arg
= XEXP (src
, 1);
5201 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5203 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5204 if (arg
== NULL_RTX
)
5206 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5209 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5211 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5212 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5213 breaks a lot of routines during var-tracking. */
5214 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
5220 return gen_rtx_CONCAT (GET_MODE (v
->val_rtx
), v
->val_rtx
, ret
);
/* Add stores (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
   CUIP->insn is the instruction which LOC is part of.  */

static void
add_stores (rtx loc, const_rtx expr, void *cuip)
5230 enum machine_mode mode
= VOIDmode
, mode2
;
5231 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5232 basic_block bb
= cui
->bb
;
5234 rtx oloc
= loc
, nloc
, src
= NULL
;
5235 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5236 bool track_p
= false;
5238 bool resolve
, preserve
;
5241 if (type
== MO_CLOBBER
)
5248 gcc_assert (loc
!= cfa_base_rtx
);
5249 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5250 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5251 || GET_CODE (expr
) == CLOBBER
)
5253 mo
.type
= MO_CLOBBER
;
5258 if (GET_CODE (expr
) == SET
&& SET_DEST (expr
) == loc
)
5259 src
= var_lowpart (mode2
, SET_SRC (expr
));
5260 loc
= var_lowpart (mode2
, loc
);
5269 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5270 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5277 mo
.insn
= cui
->insn
;
5279 else if (MEM_P (loc
)
5280 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5283 if (MEM_P (loc
) && type
== MO_VAL_SET
5284 && !REG_P (XEXP (loc
, 0))
5285 && !MEM_P (XEXP (loc
, 0))
5286 && (GET_CODE (XEXP (loc
, 0)) != PLUS
5287 || XEXP (XEXP (loc
, 0), 0) != cfa_base_rtx
5288 || !CONST_INT_P (XEXP (XEXP (loc
, 0), 1))))
5291 enum machine_mode address_mode
= get_address_mode (mloc
);
5292 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5295 if (val
&& !cselib_preserved_value_p (val
))
5297 preserve_value (val
);
5298 mo
.type
= MO_VAL_USE
;
5299 mloc
= cselib_subst_to_values (XEXP (mloc
, 0));
5300 mo
.u
.loc
= gen_rtx_CONCAT (address_mode
, val
->val_rtx
, mloc
);
5301 mo
.insn
= cui
->insn
;
5302 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5303 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
,
5304 mo
.type
, dump_file
);
5305 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5309 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5311 mo
.type
= MO_CLOBBER
;
5312 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5316 if (GET_CODE (expr
) == SET
&& SET_DEST (expr
) == loc
)
5317 src
= var_lowpart (mode2
, SET_SRC (expr
));
5318 loc
= var_lowpart (mode2
, loc
);
5327 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5328 if (same_variable_part_p (SET_SRC (xexpr
),
5330 INT_MEM_OFFSET (loc
)))
5337 mo
.insn
= cui
->insn
;
5342 if (type
!= MO_VAL_SET
)
5343 goto log_and_return
;
5345 v
= find_use_val (oloc
, mode
, cui
);
5348 goto log_and_return
;
5350 resolve
= preserve
= !cselib_preserved_value_p (v
);
5352 nloc
= replace_expr_with_values (oloc
);
5356 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
5358 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0);
5360 gcc_assert (oval
!= v
);
5361 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
5363 if (!cselib_preserved_value_p (oval
))
5365 micro_operation moa
;
5367 preserve_value (oval
);
5369 moa
.type
= MO_VAL_USE
;
5370 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
5371 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
5372 moa
.insn
= cui
->insn
;
5374 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5375 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5376 moa
.type
, dump_file
);
5377 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5382 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
5384 nloc
= replace_expr_with_values (SET_SRC (expr
));
5386 /* Avoid the mode mismatch between oexpr and expr. */
5387 if (!nloc
&& mode
!= mode2
)
5389 nloc
= SET_SRC (expr
);
5390 gcc_assert (oloc
== SET_DEST (expr
));
5394 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
5397 if (oloc
== SET_DEST (mo
.u
.loc
))
5398 /* No point in duplicating. */
5400 if (!REG_P (SET_SRC (mo
.u
.loc
)))
5406 if (GET_CODE (mo
.u
.loc
) == SET
5407 && oloc
== SET_DEST (mo
.u
.loc
))
5408 /* No point in duplicating. */
5414 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
5416 if (mo
.u
.loc
!= oloc
)
5417 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
      /* The loc of a MO_VAL_SET may have various forms:

         (concat val dst): dst now holds val

         (concat val (set dst src)): dst now holds val, copied from src

         (concat (concat val dstv) dst): dst now holds val; dstv is dst
         after replacing mems and non-top-level regs with values.

         (concat (concat val dstv) (set dst src)): dst now holds val,
         copied from src.  dstv is a value-based representation of dst, if
         it differs from dst.  If resolution is needed, src is a REG, and
         its mode is the same as that of val.

         (concat (concat val (set dstv srcv)) (set dst src)): src
         copied to dst, holding val.  dstv and srcv are value-based
         representations of dst and src, respectively.  */
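      /* Purely illustrative instance of the second form above (registers
         and value numbers invented): a copy "bx = ax" whose destination
         holds VALUE 7 could be logged as

           (concat (value:SI 7) (set (reg:SI 3 bx) (reg:SI 0 ax)))  */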
5439 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
5441 reverse
= reverse_op (v
->val_rtx
, expr
);
5444 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, reverse
);
5445 VAL_EXPR_HAS_REVERSE (loc
) = 1;
5452 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
5455 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
5458 if (mo
.type
== MO_CLOBBER
)
5459 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
5460 if (mo
.type
== MO_COPY
)
5461 VAL_EXPR_IS_COPIED (loc
) = 1;
5463 mo
.type
= MO_VAL_SET
;
5466 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5467 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5468 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5471 /* Callback for cselib_record_sets_hook, that records as micro
5472 operations uses and stores in an insn after cselib_record_sets has
5473 analyzed the sets in an insn, but before it modifies the stored
5474 values in the internal tables, unless cselib_record_sets doesn't
5475 call it directly (perhaps because we're not doing cselib in the
5476 first place, in which case sets and n_sets will be 0). */
5479 add_with_sets (rtx insn
, struct cselib_set
*sets
, int n_sets
)
5481 basic_block bb
= BLOCK_FOR_INSN (insn
);
5483 struct count_use_info cui
;
5484 micro_operation
*mos
;
5486 cselib_hook_called
= true;
5491 cui
.n_sets
= n_sets
;
5493 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
5494 cui
.store_p
= false;
5495 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
5496 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5497 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
      /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
         MO_VAL_LOC last.  */
5503 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
5505 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
5517 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5520 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
5522 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
5540 mo
.u
.loc
= NULL_RTX
;
5542 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5543 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
5544 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5547 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
5548 /* This will record NEXT_INSN (insn), such that we can
5549 insert notes before it without worrying about any
5550 notes that MO_USEs might emit after the insn. */
5552 note_stores (PATTERN (insn
), add_stores
, &cui
);
5553 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5554 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
5556 /* Order the MO_VAL_USEs first (note_stores does nothing
5557 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5558 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5561 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
5563 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
5575 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5578 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
5580 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
5593 static enum var_init_status
5594 find_src_status (dataflow_set
*in
, rtx src
)
5596 tree decl
= NULL_TREE
;
5597 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
5599 if (! flag_var_tracking_uninit
)
5600 status
= VAR_INIT_STATUS_INITIALIZED
;
5602 if (src
&& REG_P (src
))
5603 decl
= var_debug_decl (REG_EXPR (src
));
5604 else if (src
&& MEM_P (src
))
5605 decl
= var_debug_decl (MEM_EXPR (src
));
5608 status
= get_init_value (in
, src
, dv_from_decl (decl
));
5613 /* SRC is the source of an assignment. Use SET to try to find what
5614 was ultimately assigned to SRC. Return that value if known,
5615 otherwise return SRC itself. */
5618 find_src_set_src (dataflow_set
*set
, rtx src
)
5620 tree decl
= NULL_TREE
; /* The variable being copied around. */
5621 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
5623 location_chain nextp
;
5627 if (src
&& REG_P (src
))
5628 decl
= var_debug_decl (REG_EXPR (src
));
5629 else if (src
&& MEM_P (src
))
5630 decl
= var_debug_decl (MEM_EXPR (src
));
5634 decl_or_value dv
= dv_from_decl (decl
);
5636 var
= shared_hash_find (set
->vars
, dv
);
5640 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
5641 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
5642 nextp
= nextp
->next
)
5643 if (rtx_equal_p (nextp
->loc
, src
))
5645 set_src
= nextp
->set_src
;
5655 /* Compute the changes of variable locations in the basic block BB. */
5658 compute_bb_dataflow (basic_block bb
)
5661 micro_operation
*mo
;
5663 dataflow_set old_out
;
5664 dataflow_set
*in
= &VTI (bb
)->in
;
5665 dataflow_set
*out
= &VTI (bb
)->out
;
5667 dataflow_set_init (&old_out
);
5668 dataflow_set_copy (&old_out
, out
);
5669 dataflow_set_copy (out
, in
);
5671 for (i
= 0; VEC_iterate (micro_operation
, VTI (bb
)->mos
, i
, mo
); i
++)
5673 rtx insn
= mo
->insn
;
5678 dataflow_set_clear_at_call (out
);
5683 rtx loc
= mo
->u
.loc
;
5686 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
5687 else if (MEM_P (loc
))
5688 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
5694 rtx loc
= mo
->u
.loc
;
5698 if (GET_CODE (loc
) == CONCAT
)
5700 val
= XEXP (loc
, 0);
5701 vloc
= XEXP (loc
, 1);
5709 var
= PAT_VAR_LOCATION_DECL (vloc
);
5711 clobber_variable_part (out
, NULL_RTX
,
5712 dv_from_decl (var
), 0, NULL_RTX
);
5715 if (VAL_NEEDS_RESOLUTION (loc
))
5716 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
5717 set_variable_part (out
, val
, dv_from_decl (var
), 0,
5718 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
5721 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
5722 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
5723 dv_from_decl (var
), 0,
5724 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
5731 rtx loc
= mo
->u
.loc
;
5732 rtx val
, vloc
, uloc
;
5734 vloc
= uloc
= XEXP (loc
, 1);
5735 val
= XEXP (loc
, 0);
5737 if (GET_CODE (val
) == CONCAT
)
5739 uloc
= XEXP (val
, 1);
5740 val
= XEXP (val
, 0);
5743 if (VAL_NEEDS_RESOLUTION (loc
))
5744 val_resolve (out
, val
, vloc
, insn
);
5746 val_store (out
, val
, uloc
, insn
, false);
5748 if (VAL_HOLDS_TRACK_EXPR (loc
))
5750 if (GET_CODE (uloc
) == REG
)
5751 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
5753 else if (GET_CODE (uloc
) == MEM
)
5754 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
5762 rtx loc
= mo
->u
.loc
;
5763 rtx val
, vloc
, uloc
, reverse
= NULL_RTX
;
5766 if (VAL_EXPR_HAS_REVERSE (loc
))
5768 reverse
= XEXP (loc
, 1);
5769 vloc
= XEXP (loc
, 0);
5771 uloc
= XEXP (vloc
, 1);
5772 val
= XEXP (vloc
, 0);
5775 if (GET_CODE (val
) == CONCAT
)
5777 vloc
= XEXP (val
, 1);
5778 val
= XEXP (val
, 0);
5781 if (GET_CODE (vloc
) == SET
)
5783 rtx vsrc
= SET_SRC (vloc
);
5785 gcc_assert (val
!= vsrc
);
5786 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
5788 vloc
= SET_DEST (vloc
);
5790 if (VAL_NEEDS_RESOLUTION (loc
))
5791 val_resolve (out
, val
, vsrc
, insn
);
5793 else if (VAL_NEEDS_RESOLUTION (loc
))
5795 gcc_assert (GET_CODE (uloc
) == SET
5796 && GET_CODE (SET_SRC (uloc
)) == REG
);
5797 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
5800 if (VAL_HOLDS_TRACK_EXPR (loc
))
5802 if (VAL_EXPR_IS_CLOBBERED (loc
))
5805 var_reg_delete (out
, uloc
, true);
5806 else if (MEM_P (uloc
))
5807 var_mem_delete (out
, uloc
, true);
5811 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
5813 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
5815 if (GET_CODE (uloc
) == SET
)
5817 set_src
= SET_SRC (uloc
);
5818 uloc
= SET_DEST (uloc
);
5823 if (flag_var_tracking_uninit
)
5825 status
= find_src_status (in
, set_src
);
5827 if (status
== VAR_INIT_STATUS_UNKNOWN
)
5828 status
= find_src_status (out
, set_src
);
5831 set_src
= find_src_set_src (in
, set_src
);
5835 var_reg_delete_and_set (out
, uloc
, !copied_p
,
5837 else if (MEM_P (uloc
))
5838 var_mem_delete_and_set (out
, uloc
, !copied_p
,
5842 else if (REG_P (uloc
))
5843 var_regno_delete (out
, REGNO (uloc
));
5845 val_store (out
, val
, vloc
, insn
, true);
5848 val_store (out
, XEXP (reverse
, 0), XEXP (reverse
, 1),
5855 rtx loc
= mo
->u
.loc
;
5858 if (GET_CODE (loc
) == SET
)
5860 set_src
= SET_SRC (loc
);
5861 loc
= SET_DEST (loc
);
5865 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
5867 else if (MEM_P (loc
))
5868 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
5875 rtx loc
= mo
->u
.loc
;
5876 enum var_init_status src_status
;
5879 if (GET_CODE (loc
) == SET
)
5881 set_src
= SET_SRC (loc
);
5882 loc
= SET_DEST (loc
);
5885 if (! flag_var_tracking_uninit
)
5886 src_status
= VAR_INIT_STATUS_INITIALIZED
;
5889 src_status
= find_src_status (in
, set_src
);
5891 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
5892 src_status
= find_src_status (out
, set_src
);
5895 set_src
= find_src_set_src (in
, set_src
);
5898 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
5899 else if (MEM_P (loc
))
5900 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
5906 rtx loc
= mo
->u
.loc
;
5909 var_reg_delete (out
, loc
, false);
5910 else if (MEM_P (loc
))
5911 var_mem_delete (out
, loc
, false);
5917 rtx loc
= mo
->u
.loc
;
5920 var_reg_delete (out
, loc
, true);
5921 else if (MEM_P (loc
))
5922 var_mem_delete (out
, loc
, true);
5927 out
->stack_adjust
+= mo
->u
.adjust
;
5932 if (MAY_HAVE_DEBUG_INSNS
)
5934 dataflow_set_equiv_regs (out
);
5935 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_mark
,
5937 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_star
,
5940 htab_traverse (shared_hash_htab (out
->vars
),
5941 canonicalize_loc_order_check
, out
);
5944 changed
= dataflow_set_different (&old_out
, out
);
5945 dataflow_set_destroy (&old_out
);
5949 /* Find the locations of variables in the whole function. */
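/* Rough sketch of the iteration scheme used below (illustrative, not a
   literal excerpt): blocks are kept in two fibonacci heaps keyed by the
   reverse-completion order computed above; "worklist" drives the current
   pass and "pending" collects blocks whose IN set may still change:

     while (!fibheap_empty (pending))
       {
         swap worklist/pending (and the in_worklist/in_pending bitmaps);
         while (!fibheap_empty (worklist))
           {
             bb = fibheap_extract_min (worklist);
             recompute IN (bb) from the predecessors' OUT sets;
             if (compute_bb_dataflow (bb))
               reschedule bb's successors on worklist or pending;
           }
       }
*/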
5952 vt_find_locations (void)
5954 fibheap_t worklist
, pending
, fibheap_swap
;
5955 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
5962 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
5963 bool success
= true;
5965 /* Compute reverse completion order of depth first search of the CFG
5966 so that the data-flow runs faster. */
5967 rc_order
= XNEWVEC (int, n_basic_blocks
- NUM_FIXED_BLOCKS
);
5968 bb_order
= XNEWVEC (int, last_basic_block
);
5969 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
5970 for (i
= 0; i
< n_basic_blocks
- NUM_FIXED_BLOCKS
; i
++)
5971 bb_order
[rc_order
[i
]] = i
;
5974 worklist
= fibheap_new ();
5975 pending
= fibheap_new ();
5976 visited
= sbitmap_alloc (last_basic_block
);
5977 in_worklist
= sbitmap_alloc (last_basic_block
);
5978 in_pending
= sbitmap_alloc (last_basic_block
);
5979 sbitmap_zero (in_worklist
);
5982 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
5983 sbitmap_ones (in_pending
);
5985 while (success
&& !fibheap_empty (pending
))
5987 fibheap_swap
= pending
;
5989 worklist
= fibheap_swap
;
5990 sbitmap_swap
= in_pending
;
5991 in_pending
= in_worklist
;
5992 in_worklist
= sbitmap_swap
;
5994 sbitmap_zero (visited
);
5996 while (!fibheap_empty (worklist
))
5998 bb
= (basic_block
) fibheap_extract_min (worklist
);
5999 RESET_BIT (in_worklist
, bb
->index
);
6000 if (!TEST_BIT (visited
, bb
->index
))
6004 int oldinsz
, oldoutsz
;
6006 SET_BIT (visited
, bb
->index
);
6008 if (VTI (bb
)->in
.vars
)
6011 -= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
6012 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
6014 = htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
));
6016 = htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
));
6019 oldinsz
= oldoutsz
= 0;
6021 if (MAY_HAVE_DEBUG_INSNS
)
6023 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
6024 bool first
= true, adjust
= false;
6026 /* Calculate the IN set as the intersection of
6027 predecessor OUT sets. */
6029 dataflow_set_clear (in
);
6030 dst_can_be_shared
= true;
6032 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6033 if (!VTI (e
->src
)->flooded
)
6034 gcc_assert (bb_order
[bb
->index
]
6035 <= bb_order
[e
->src
->index
]);
6038 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
6039 first_out
= &VTI (e
->src
)->out
;
6044 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
6050 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
                  /* Merge and merge_adjust should keep entries in
                     canonical order.  */
6054 htab_traverse (shared_hash_htab (in
->vars
),
6055 canonicalize_loc_order_check
,
6058 if (dst_can_be_shared
)
6060 shared_hash_destroy (in
->vars
);
6061 in
->vars
= shared_hash_copy (first_out
->vars
);
6065 VTI (bb
)->flooded
= true;
              /* Calculate the IN set as the union of predecessor OUT sets.  */
6070 dataflow_set_clear (&VTI (bb
)->in
);
6071 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6072 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
6075 changed
= compute_bb_dataflow (bb
);
6076 htabsz
+= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
6077 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
6079 if (htabmax
&& htabsz
> htabmax
)
6081 if (MAY_HAVE_DEBUG_INSNS
)
6082 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
6083 "variable tracking size limit exceeded with "
6084 "-fvar-tracking-assignments, retrying without");
6086 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
6087 "variable tracking size limit exceeded");
6094 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6096 if (e
->dest
== EXIT_BLOCK_PTR
)
6099 if (TEST_BIT (visited
, e
->dest
->index
))
6101 if (!TEST_BIT (in_pending
, e
->dest
->index
))
6103 /* Send E->DEST to next round. */
6104 SET_BIT (in_pending
, e
->dest
->index
);
6105 fibheap_insert (pending
,
6106 bb_order
[e
->dest
->index
],
6110 else if (!TEST_BIT (in_worklist
, e
->dest
->index
))
6112 /* Add E->DEST to current round. */
6113 SET_BIT (in_worklist
, e
->dest
->index
);
6114 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
6122 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6124 (int)htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
)),
6126 (int)htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
)),
6128 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
6130 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6132 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
6133 dump_dataflow_set (&VTI (bb
)->in
);
6134 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
6135 dump_dataflow_set (&VTI (bb
)->out
);
6141 if (success
&& MAY_HAVE_DEBUG_INSNS
)
6143 gcc_assert (VTI (bb
)->flooded
);
6146 fibheap_delete (worklist
);
6147 fibheap_delete (pending
);
6148 sbitmap_free (visited
);
6149 sbitmap_free (in_worklist
);
6150 sbitmap_free (in_pending
);
6155 /* Print the content of the LIST to dump file. */
6158 dump_attrs_list (attrs list
)
6160 for (; list
; list
= list
->next
)
6162 if (dv_is_decl_p (list
->dv
))
6163 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
6165 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
6166 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
6168 fprintf (dump_file
, "\n");
6171 /* Print the information about variable *SLOT to dump file. */
6174 dump_var_slot (void **slot
, void *data ATTRIBUTE_UNUSED
)
6176 variable var
= (variable
) *slot
;
6180 /* Continue traversing the hash table. */
6184 /* Print the information about variable VAR to dump file. */
6187 dump_var (variable var
)
6190 location_chain node
;
6192 if (dv_is_decl_p (var
->dv
))
6194 const_tree decl
= dv_as_decl (var
->dv
);
6196 if (DECL_NAME (decl
))
6198 fprintf (dump_file
, " name: %s",
6199 IDENTIFIER_POINTER (DECL_NAME (decl
)));
6200 if (dump_flags
& TDF_UID
)
6201 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
6203 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
6204 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
6206 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
6207 fprintf (dump_file
, "\n");
6211 fputc (' ', dump_file
);
6212 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
6215 for (i
= 0; i
< var
->n_var_parts
; i
++)
6217 fprintf (dump_file
, " offset %ld\n",
6218 (long) var
->var_part
[i
].offset
);
6219 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
6221 fprintf (dump_file
, " ");
6222 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
6223 fprintf (dump_file
, "[uninit]");
6224 print_rtl_single (dump_file
, node
->loc
);
6229 /* Print the information about variables from hash table VARS to dump file. */
6232 dump_vars (htab_t vars
)
6234 if (htab_elements (vars
) > 0)
6236 fprintf (dump_file
, "Variables:\n");
6237 htab_traverse (vars
, dump_var_slot
, NULL
);
6241 /* Print the dataflow set SET to dump file. */
6244 dump_dataflow_set (dataflow_set
*set
)
6248 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
6250 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6254 fprintf (dump_file
, "Reg %d:", i
);
6255 dump_attrs_list (set
->regs
[i
]);
6258 dump_vars (shared_hash_htab (set
->vars
));
6259 fprintf (dump_file
, "\n");
6262 /* Print the IN and OUT sets for each basic block to dump file. */
6265 dump_dataflow_sets (void)
6271 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
6272 fprintf (dump_file
, "IN:\n");
6273 dump_dataflow_set (&VTI (bb
)->in
);
6274 fprintf (dump_file
, "OUT:\n");
6275 dump_dataflow_set (&VTI (bb
)->out
);
6279 /* Add variable VAR to the hash table of changed variables and
6280 if it has no locations delete it from SET's hash table. */
6283 variable_was_changed (variable var
, dataflow_set
*set
)
6285 hashval_t hash
= dv_htab_hash (var
->dv
);
6290 bool old_cur_loc_changed
= false;
6292 /* Remember this decl or VALUE has been added to changed_variables. */
6293 set_dv_changed (var
->dv
, true);
6295 slot
= htab_find_slot_with_hash (changed_variables
,
6301 variable old_var
= (variable
) *slot
;
6302 gcc_assert (old_var
->in_changed_variables
);
6303 old_var
->in_changed_variables
= false;
6304 old_cur_loc_changed
= old_var
->cur_loc_changed
;
6305 variable_htab_free (*slot
);
6307 if (set
&& var
->n_var_parts
== 0)
6311 empty_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
6312 empty_var
->dv
= var
->dv
;
6313 empty_var
->refcount
= 1;
6314 empty_var
->n_var_parts
= 0;
6315 empty_var
->cur_loc_changed
= true;
6316 empty_var
->in_changed_variables
= true;
6323 var
->in_changed_variables
= true;
      /* If, while processing one uop, a variable is deleted
         and then re-added, we need to assume it has changed.  */
6326 if (old_cur_loc_changed
)
6327 var
->cur_loc_changed
= true;
6334 if (var
->n_var_parts
== 0)
6339 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
6342 if (shared_hash_shared (set
->vars
))
6343 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
6345 htab_clear_slot (shared_hash_htab (set
->vars
), slot
);
6351 /* Look for the index in VAR->var_part corresponding to OFFSET.
6352 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6353 referenced int will be set to the index that the part has or should
6354 have, if it should be inserted. */
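/* Worked example (offsets invented, behavior as described above): with
   var->var_part[].offset equal to { 0, 4, 8 }, looking up offset 4
   returns index 1, while looking up offset 6 returns -1 and, if
   INSERTION_POINT is non-NULL, stores 2 there, since a part at offset 6
   would have to be inserted between the parts at offsets 4 and 8.  */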
6357 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
6358 int *insertion_point
)
6362 /* Find the location part. */
6364 high
= var
->n_var_parts
;
6367 pos
= (low
+ high
) / 2;
6368 if (var
->var_part
[pos
].offset
< offset
)
6375 if (insertion_point
)
6376 *insertion_point
= pos
;
6378 if (pos
< var
->n_var_parts
&& var
->var_part
[pos
].offset
== offset
)
6385 set_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6386 decl_or_value dv
, HOST_WIDE_INT offset
,
6387 enum var_init_status initialized
, rtx set_src
)
6390 location_chain node
, next
;
6391 location_chain
*nextp
;
6393 bool onepart
= dv_onepart_p (dv
);
6395 gcc_assert (offset
== 0 || !onepart
);
6396 gcc_assert (loc
!= dv_as_opaque (dv
));
6398 var
= (variable
) *slot
;
6400 if (! flag_var_tracking_uninit
)
6401 initialized
= VAR_INIT_STATUS_INITIALIZED
;
6405 /* Create new variable information. */
6406 var
= (variable
) pool_alloc (dv_pool (dv
));
6409 var
->n_var_parts
= 1;
6410 var
->cur_loc_changed
= false;
6411 var
->in_changed_variables
= false;
6412 var
->var_part
[0].offset
= offset
;
6413 var
->var_part
[0].loc_chain
= NULL
;
6414 var
->var_part
[0].cur_loc
= NULL
;
6417 nextp
= &var
->var_part
[0].loc_chain
;
6423 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
6427 if (GET_CODE (loc
) == VALUE
)
6429 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6430 nextp
= &node
->next
)
6431 if (GET_CODE (node
->loc
) == VALUE
)
6433 if (node
->loc
== loc
)
6438 if (canon_value_cmp (node
->loc
, loc
))
6446 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
6454 else if (REG_P (loc
))
6456 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6457 nextp
= &node
->next
)
6458 if (REG_P (node
->loc
))
6460 if (REGNO (node
->loc
) < REGNO (loc
))
6464 if (REGNO (node
->loc
) == REGNO (loc
))
6477 else if (MEM_P (loc
))
6479 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6480 nextp
= &node
->next
)
6481 if (REG_P (node
->loc
))
6483 else if (MEM_P (node
->loc
))
6485 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
6497 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6498 nextp
= &node
->next
)
6499 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
6507 if (shared_var_p (var
, set
->vars
))
6509 slot
= unshare_variable (set
, slot
, var
, initialized
);
6510 var
= (variable
)*slot
;
6511 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
6512 nextp
= &(*nextp
)->next
)
6514 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
6521 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
6523 pos
= find_variable_location_part (var
, offset
, &inspos
);
6527 node
= var
->var_part
[pos
].loc_chain
;
6530 && ((REG_P (node
->loc
) && REG_P (loc
)
6531 && REGNO (node
->loc
) == REGNO (loc
))
6532 || rtx_equal_p (node
->loc
, loc
)))
          /* LOC is at the beginning of the chain so we have nothing
             to do.  */
->init
< initialized
)
6537 node
->init
= initialized
;
6538 if (set_src
!= NULL
)
6539 node
->set_src
= set_src
;
6545 /* We have to make a copy of a shared variable. */
6546 if (shared_var_p (var
, set
->vars
))
6548 slot
= unshare_variable (set
, slot
, var
, initialized
);
6549 var
= (variable
)*slot
;
      /* We have not found the location part, a new one will be created.  */
6557 /* We have to make a copy of the shared variable. */
6558 if (shared_var_p (var
, set
->vars
))
6560 slot
= unshare_variable (set
, slot
, var
, initialized
);
6561 var
= (variable
)*slot
;
      /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
         thus there are at most MAX_VAR_PARTS different offsets.  */
6566 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
6567 && (!var
->n_var_parts
|| !dv_onepart_p (var
->dv
)));
      /* We have to move the elements of the array starting at index
         inspos to the next position.  */
6571 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
6572 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
6575 var
->var_part
[pos
].offset
= offset
;
6576 var
->var_part
[pos
].loc_chain
= NULL
;
6577 var
->var_part
[pos
].cur_loc
= NULL
;
6580 /* Delete the location from the list. */
6581 nextp
= &var
->var_part
[pos
].loc_chain
;
6582 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
6585 if ((REG_P (node
->loc
) && REG_P (loc
)
6586 && REGNO (node
->loc
) == REGNO (loc
))
6587 || rtx_equal_p (node
->loc
, loc
))
6589 /* Save these values, to assign to the new node, before
6590 deleting this one. */
6591 if (node
->init
> initialized
)
6592 initialized
= node
->init
;
6593 if (node
->set_src
!= NULL
&& set_src
== NULL
)
6594 set_src
= node
->set_src
;
6595 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
6597 var
->var_part
[pos
].cur_loc
= NULL
;
6598 var
->cur_loc_changed
= true;
6600 pool_free (loc_chain_pool
, node
);
6605 nextp
= &node
->next
;
6608 nextp
= &var
->var_part
[pos
].loc_chain
;
6611 /* Add the location to the beginning. */
6612 node
= (location_chain
) pool_alloc (loc_chain_pool
);
6614 node
->init
= initialized
;
6615 node
->set_src
= set_src
;
6616 node
->next
= *nextp
;
6619 if (onepart
&& emit_notes
)
6620 add_value_chains (var
->dv
, loc
);
  /* If no location was emitted, do so.  */
6623 if (var
->var_part
[pos
].cur_loc
== NULL
)
6624 variable_was_changed (var
, set
);
6629 /* Set the part of variable's location in the dataflow set SET. The
6630 variable part is specified by variable's declaration in DV and
6631 offset OFFSET and the part's location by LOC. IOPT should be
6632 NO_INSERT if the variable is known to be in SET already and the
6633 variable hash table must not be resized, and INSERT otherwise. */
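/* Typical call, as a hedged sketch (arguments vary per caller): to note
   that DECL's part at offset 0 is now located in LOC,

     set_variable_part (out, loc, dv_from_decl (decl), 0,
                        VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
*/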
6636 set_variable_part (dataflow_set
*set
, rtx loc
,
6637 decl_or_value dv
, HOST_WIDE_INT offset
,
6638 enum var_init_status initialized
, rtx set_src
,
6639 enum insert_option iopt
)
6643 if (iopt
== NO_INSERT
)
6644 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6647 slot
= shared_hash_find_slot (set
->vars
, dv
);
6649 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
6651 slot
= set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
6654 /* Remove all recorded register locations for the given variable part
6655 from dataflow set SET, except for those that are identical to loc.
6656 The variable part is specified by variable's declaration or value
6657 DV and offset OFFSET. */
6660 clobber_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6661 HOST_WIDE_INT offset
, rtx set_src
)
6663 variable var
= (variable
) *slot
;
6664 int pos
= find_variable_location_part (var
, offset
, NULL
);
6668 location_chain node
, next
;
6670 /* Remove the register locations from the dataflow set. */
6671 next
= var
->var_part
[pos
].loc_chain
;
6672 for (node
= next
; node
; node
= next
)
6675 if (node
->loc
!= loc
6676 && (!flag_var_tracking_uninit
6679 || !rtx_equal_p (set_src
, node
->set_src
)))
6681 if (REG_P (node
->loc
))
              /* Remove the variable part from the register's
                 list, but preserve any other variable parts
                 that might be regarded as live in that same
                 register.  */
6690 anextp
= &set
->regs
[REGNO (node
->loc
)];
6691 for (anode
= *anextp
; anode
; anode
= anext
)
6693 anext
= anode
->next
;
6694 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
6695 && anode
->offset
== offset
)
6697 pool_free (attrs_pool
, anode
);
6701 anextp
= &anode
->next
;
6705 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
6713 /* Remove all recorded register locations for the given variable part
6714 from dataflow set SET, except for those that are identical to loc.
6715 The variable part is specified by variable's declaration or value
6716 DV and offset OFFSET. */
6719 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
6720 HOST_WIDE_INT offset
, rtx set_src
)
6724 if (!dv_as_opaque (dv
)
6725 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
6728 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6732 slot
= clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
6735 /* Delete the part of variable's location from dataflow set SET. The
6736 variable part is specified by its SET->vars slot SLOT and offset
6737 OFFSET and the part's location by LOC. */
6740 delete_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6741 HOST_WIDE_INT offset
)
6743 variable var
= (variable
) *slot
;
6744 int pos
= find_variable_location_part (var
, offset
, NULL
);
6748 location_chain node
, next
;
6749 location_chain
*nextp
;
6752 if (shared_var_p (var
, set
->vars
))
6754 /* If the variable contains the location part we have to
6755 make a copy of the variable. */
6756 for (node
= var
->var_part
[pos
].loc_chain
; node
;
6759 if ((REG_P (node
->loc
) && REG_P (loc
)
6760 && REGNO (node
->loc
) == REGNO (loc
))
6761 || rtx_equal_p (node
->loc
, loc
))
6763 slot
= unshare_variable (set
, slot
, var
,
6764 VAR_INIT_STATUS_UNKNOWN
);
6765 var
= (variable
)*slot
;
6771 /* Delete the location part. */
6773 nextp
= &var
->var_part
[pos
].loc_chain
;
6774 for (node
= *nextp
; node
; node
= next
)
6777 if ((REG_P (node
->loc
) && REG_P (loc
)
6778 && REGNO (node
->loc
) == REGNO (loc
))
6779 || rtx_equal_p (node
->loc
, loc
))
6781 if (emit_notes
&& pos
== 0 && dv_onepart_p (var
->dv
))
6782 remove_value_chains (var
->dv
, node
->loc
);
            /* If we have deleted the location which was last emitted
               we have to emit a new location so add the variable to the
               set of changed variables.  */
6786 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
6789 var
->var_part
[pos
].cur_loc
= NULL
;
6790 var
->cur_loc_changed
= true;
6792 pool_free (loc_chain_pool
, node
);
6797 nextp
= &node
->next
;
6800 if (var
->var_part
[pos
].loc_chain
== NULL
)
6805 var
->cur_loc_changed
= true;
6806 while (pos
< var
->n_var_parts
)
6808 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
6813 variable_was_changed (var
, set
);
6819 /* Delete the part of variable's location from dataflow set SET. The
6820 variable part is specified by variable's declaration or value DV
6821 and offset OFFSET and the part's location by LOC. */
6824 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
6825 HOST_WIDE_INT offset
)
6827 void **slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
6831 slot
= delete_slot_part (set
, loc
, slot
, offset
);
6834 /* Structure for passing some other parameters to function
6835 vt_expand_loc_callback. */
6836 struct expand_loc_callback_data
6838 /* The variables and values active at this point. */
6841 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6842 Non-NULL should be returned if vt_expand_loc would return
6843 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6844 computed and cur_loc recomputed when possible (but just once
6845 per emit_notes_for_changes call). */
6848 /* True if expansion of subexpressions had to recompute some
6849 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6850 whose cur_loc has been already recomputed during current
6851 emit_notes_for_changes call. */
6852 bool cur_loc_changed
;
6855 /* Callback for cselib_expand_value, that looks for expressions
6856 holding the value in the var-tracking hash tables. Return X for
6857 standard processing, anything else is to be used as-is. */
6860 vt_expand_loc_callback (rtx x
, bitmap regs
, int max_depth
, void *data
)
6862 struct expand_loc_callback_data
*elcd
6863 = (struct expand_loc_callback_data
*) data
;
6864 bool dummy
= elcd
->dummy
;
6865 bool cur_loc_changed
= elcd
->cur_loc_changed
;
6869 rtx result
, subreg
, xret
;
6871 switch (GET_CODE (x
))
6876 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
6878 vt_expand_loc_callback
, data
))
6884 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
6886 vt_expand_loc_callback
, data
);
6891 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
6892 GET_MODE (SUBREG_REG (x
)),
6895 /* Invalid SUBREGs are ok in debug info. ??? We could try
6896 alternate expansions for the VALUE as well. */
6898 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
6903 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
6908 dv
= dv_from_value (x
);
6916 if (VALUE_RECURSED_INTO (x
))
6919 var
= (variable
) htab_find_with_hash (elcd
->vars
, dv
, dv_htab_hash (dv
));
6923 if (dummy
&& dv_changed_p (dv
))
6924 elcd
->cur_loc_changed
= true;
6928 if (var
->n_var_parts
== 0)
6931 elcd
->cur_loc_changed
= true;
6935 gcc_assert (var
->n_var_parts
== 1);
6937 VALUE_RECURSED_INTO (x
) = true;
6940 if (var
->var_part
[0].cur_loc
)
6944 if (cselib_dummy_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
6946 vt_expand_loc_callback
, data
))
6950 result
= cselib_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
6952 vt_expand_loc_callback
, data
);
6954 set_dv_changed (dv
, false);
6956 if (!result
&& dv_changed_p (dv
))
6958 set_dv_changed (dv
, false);
6959 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
6960 if (loc
->loc
== var
->var_part
[0].cur_loc
)
6964 elcd
->cur_loc_changed
= cur_loc_changed
;
6965 if (cselib_dummy_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
6966 vt_expand_loc_callback
,
6975 result
= cselib_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
6976 vt_expand_loc_callback
, data
);
6980 if (dummy
&& (result
|| var
->var_part
[0].cur_loc
))
6981 var
->cur_loc_changed
= true;
6982 var
->var_part
[0].cur_loc
= loc
? loc
->loc
: NULL_RTX
;
6986 if (var
->cur_loc_changed
)
6987 elcd
->cur_loc_changed
= true;
6988 else if (!result
&& var
->var_part
[0].cur_loc
== NULL_RTX
)
6989 elcd
->cur_loc_changed
= cur_loc_changed
;
6992 VALUE_RECURSED_INTO (x
) = false;
/* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
   tables.  */

static rtx
vt_expand_loc (rtx loc, htab_t vars)
7005 struct expand_loc_callback_data data
;
7007 if (!MAY_HAVE_DEBUG_INSNS
)
7012 data
.cur_loc_changed
= false;
7013 loc
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, 5,
7014 vt_expand_loc_callback
, &data
);
7016 if (loc
&& MEM_P (loc
))
7017 loc
= targetm
.delegitimize_address (loc
);
/* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
   would succeed or not), without actually allocating new rtxes.  */
7025 vt_expand_loc_dummy (rtx loc
, htab_t vars
, bool *pcur_loc_changed
)
7027 struct expand_loc_callback_data data
;
7030 gcc_assert (MAY_HAVE_DEBUG_INSNS
);
7033 data
.cur_loc_changed
= false;
7034 ret
= cselib_dummy_expand_value_rtx_cb (loc
, scratch_regs
, 5,
7035 vt_expand_loc_callback
, &data
);
7036 *pcur_loc_changed
= data
.cur_loc_changed
;
7040 #ifdef ENABLE_RTL_CHECKING
7041 /* Used to verify that cur_loc_changed updating is safe. */
7042 static struct pointer_map_t
*emitted_notes
;
7045 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7046 additional parameters: WHERE specifies whether the note shall be emitted
7047 before or after instruction INSN. */
7050 emit_note_insn_var_location (void **varp
, void *data
)
7052 variable var
= (variable
) *varp
;
7053 rtx insn
= ((emit_note_data
*)data
)->insn
;
7054 enum emit_note_where where
= ((emit_note_data
*)data
)->where
;
7055 htab_t vars
= ((emit_note_data
*)data
)->vars
;
7057 int i
, j
, n_var_parts
;
7059 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
7060 HOST_WIDE_INT last_limit
;
7061 tree type_size_unit
;
7062 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
7063 rtx loc
[MAX_VAR_PARTS
];
7067 if (dv_is_value_p (var
->dv
))
7068 goto value_or_debug_decl
;
7070 decl
= dv_as_decl (var
->dv
);
7072 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7073 goto value_or_debug_decl
;
7078 if (!MAY_HAVE_DEBUG_INSNS
)
7080 for (i
= 0; i
< var
->n_var_parts
; i
++)
7081 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
7083 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
7084 var
->cur_loc_changed
= true;
7086 if (var
->n_var_parts
== 0)
7087 var
->cur_loc_changed
= true;
7089 #ifndef ENABLE_RTL_CHECKING
7090 if (!var
->cur_loc_changed
)
7093 for (i
= 0; i
< var
->n_var_parts
; i
++)
7095 enum machine_mode mode
, wider_mode
;
7098 if (last_limit
< var
->var_part
[i
].offset
)
7103 else if (last_limit
> var
->var_part
[i
].offset
)
7105 offsets
[n_var_parts
] = var
->var_part
[i
].offset
;
7106 if (!var
->var_part
[i
].cur_loc
)
7111 loc2
= vt_expand_loc (var
->var_part
[i
].cur_loc
, vars
);
7117 loc
[n_var_parts
] = loc2
;
7118 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
7119 if (mode
== VOIDmode
&& dv_onepart_p (var
->dv
))
7120 mode
= DECL_MODE (decl
);
7121 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7122 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
7124 initialized
= lc
->init
;
7128 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7130 /* Attempt to merge adjacent registers or memory. */
7131 wider_mode
= GET_MODE_WIDER_MODE (mode
);
7132 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
7133 if (last_limit
<= var
->var_part
[j
].offset
)
7135 if (j
< var
->n_var_parts
7136 && wider_mode
!= VOIDmode
7137 && var
->var_part
[j
].cur_loc
7138 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
7139 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
7140 && last_limit
== var
->var_part
[j
].offset
7141 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
))
7142 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
7146 if (REG_P (loc
[n_var_parts
])
7147 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
7148 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
7149 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
7152 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
7153 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
7155 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
7156 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
7159 if (!REG_P (new_loc
)
7160 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
7163 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
7166 else if (MEM_P (loc
[n_var_parts
])
7167 && GET_CODE (XEXP (loc2
, 0)) == PLUS
7168 && REG_P (XEXP (XEXP (loc2
, 0), 0))
7169 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
7171 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
7172 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
7173 XEXP (XEXP (loc2
, 0), 0))
7174 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
7175 == GET_MODE_SIZE (mode
))
7176 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
7177 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7178 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
7179 XEXP (XEXP (loc2
, 0), 0))
7180 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7181 + GET_MODE_SIZE (mode
)
7182 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
7183 new_loc
= adjust_address_nv (loc
[n_var_parts
],
7189 loc
[n_var_parts
] = new_loc
;
7191 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7197 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7198 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
7201 if (! flag_var_tracking_uninit
)
7202 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7206 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
,
7208 else if (n_var_parts
== 1)
7212 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
7213 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
7217 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
,
7220 else if (n_var_parts
)
7224 for (i
= 0; i
< n_var_parts
; i
++)
7226 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
7228 parallel
= gen_rtx_PARALLEL (VOIDmode
,
7229 gen_rtvec_v (n_var_parts
, loc
));
7230 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
7231 parallel
, (int) initialized
);
7234 #ifdef ENABLE_RTL_CHECKING
7237 void **note_slot
= pointer_map_insert (emitted_notes
, decl
);
7238 rtx pnote
= (rtx
) *note_slot
;
7239 if (!var
->cur_loc_changed
&& (pnote
|| PAT_VAR_LOCATION_LOC (note_vl
)))
7242 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote
),
7243 PAT_VAR_LOCATION_LOC (note_vl
)));
7245 *note_slot
= (void *) note_vl
;
7247 if (!var
->cur_loc_changed
)
7251 if (where
!= EMIT_NOTE_BEFORE_INSN
)
7253 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
7254 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
7255 NOTE_DURING_CALL_P (note
) = true;
7258 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
7259 NOTE_VAR_LOCATION (note
) = note_vl
;
7262 set_dv_changed (var
->dv
, false);
7263 var
->cur_loc_changed
= false;
7264 gcc_assert (var
->in_changed_variables
);
7265 var
->in_changed_variables
= false;
7266 htab_clear_slot (changed_variables
, varp
);
7268 /* Continue traversing the hash table. */
7271 value_or_debug_decl
:
7272 if (dv_changed_p (var
->dv
) && var
->n_var_parts
)
7275 bool cur_loc_changed
;
7277 if (var
->var_part
[0].cur_loc
7278 && vt_expand_loc_dummy (var
->var_part
[0].cur_loc
, vars
,
7281 for (lc
= var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7282 if (lc
->loc
!= var
->var_part
[0].cur_loc
7283 && vt_expand_loc_dummy (lc
->loc
, vars
, &cur_loc_changed
))
7285 var
->var_part
[0].cur_loc
= lc
? lc
->loc
: NULL_RTX
;
DEF_VEC_P (variable);
DEF_VEC_ALLOC_P (variable, heap);

/* Stack of variable_def pointers that need processing with
   check_changed_vars_2.  */

static VEC (variable, heap) *changed_variables_stack;

/* VALUEs with no variables that need set_dv_changed (val, false)
   called before check_changed_vars_3.  */

static VEC (rtx, heap) *changed_values_stack;
7303 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7306 check_changed_vars_0 (decl_or_value dv
, htab_t htab
)
7309 = (value_chain
) htab_find_with_hash (value_chains
, dv
, dv_htab_hash (dv
));
7313 for (vc
= vc
->next
; vc
; vc
= vc
->next
)
7314 if (!dv_changed_p (vc
->dv
))
7317 = (variable
) htab_find_with_hash (htab
, vc
->dv
,
7318 dv_htab_hash (vc
->dv
));
7321 set_dv_changed (vc
->dv
, true);
7322 VEC_safe_push (variable
, heap
, changed_variables_stack
, vcvar
);
7324 else if (dv_is_value_p (vc
->dv
))
7326 set_dv_changed (vc
->dv
, true);
7327 VEC_safe_push (rtx
, heap
, changed_values_stack
,
7328 dv_as_value (vc
->dv
));
7329 check_changed_vars_0 (vc
->dv
, htab
);
7334 /* Populate changed_variables_stack with variable_def pointers
7335 that need variable_was_changed called on them. */
7338 check_changed_vars_1 (void **slot
, void *data
)
7340 variable var
= (variable
) *slot
;
7341 htab_t htab
= (htab_t
) data
;
7343 if (dv_is_value_p (var
->dv
)
7344 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7345 check_changed_vars_0 (var
->dv
, htab
);
7349 /* Add VAR to changed_variables and also for VALUEs add recursively
7350 all DVs that aren't in changed_variables yet but reference the
7351 VALUE from its loc_chain. */
7354 check_changed_vars_2 (variable var
, htab_t htab
)
7356 variable_was_changed (var
, NULL
);
7357 if (dv_is_value_p (var
->dv
)
7358 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7359 check_changed_vars_0 (var
->dv
, htab
);
/* For each changed decl (except DEBUG_EXPR_DECLs) recompute
   cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
   it needs and are also in changed variables) and track whether
   cur_loc (or anything it uses to compute location) had to change
   during the current emit_notes_for_changes call.  */

static int
check_changed_vars_3 (void **slot, void *data)
{
  variable var = (variable) *slot;
  htab_t vars = (htab_t) data;
  int i;
  location_chain lc;
  bool cur_loc_changed;

  if (dv_is_value_p (var->dv)
      || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
    return 1;

  for (i = 0; i < var->n_var_parts; i++)
    {
      if (var->var_part[i].cur_loc
          && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
                                  &cur_loc_changed))
        {
          if (cur_loc_changed)
            var->cur_loc_changed = true;
          continue;
        }
      for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
        if (lc->loc != var->var_part[i].cur_loc
            && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
          break;
      if (lc || var->var_part[i].cur_loc)
        var->cur_loc_changed = true;
      var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
    }
  if (var->n_var_parts == 0)
    var->cur_loc_changed = true;
  return 1;
}
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
   CHANGED_VARIABLES and delete this chain.  WHERE specifies whether the notes
   shall be emitted before or after instruction INSN.  */

static void
emit_notes_for_changes (rtx insn, enum emit_note_where where,
                        shared_hash vars)
{
  emit_note_data data;
  htab_t htab = shared_hash_htab (vars);

  if (!htab_elements (changed_variables))
    return;

  if (MAY_HAVE_DEBUG_INSNS)
    {
      /* Unfortunately this has to be done in two steps, because
         we can't traverse a hashtab into which we are inserting
         through variable_was_changed.  */
      htab_traverse (changed_variables, check_changed_vars_1, htab);
      while (VEC_length (variable, changed_variables_stack) > 0)
        check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
                              htab);
      while (VEC_length (rtx, changed_values_stack) > 0)
        set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
                        false);
      htab_traverse (changed_variables, check_changed_vars_3, htab);
    }

  data.insn = insn;
  data.where = where;
  data.vars = htab;

  htab_traverse (changed_variables, emit_note_insn_var_location, &data);
}
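
/* Illustrative sketch, not part of GCC and compiled out: the two-phase dance
   above (check_changed_vars_1 fills changed_variables_stack, then the stack
   is drained by check_changed_vars_2) works around the rule that a hash table
   must not be grown while htab_traverse is walking it.  The stand-alone
   analogue below shows the same "record during the scan, mutate afterwards"
   pattern over a hypothetical fixed-size table.  */
#if 0
#define TABLE_SIZE 64

static int table[TABLE_SIZE];          /* the structure being traversed */
static int worklist[TABLE_SIZE];       /* deferred updates */
static int worklist_len;

static void
process_table (void)
{
  int i;

  /* Phase 1: traverse and only record which slots need work.  */
  for (i = 0; i < TABLE_SIZE; i++)
    if (table[i] < 0)
      worklist[worklist_len++] = i;

  /* Phase 2: perform the updates outside of the traversal.  */
  while (worklist_len > 0)
    table[worklist[--worklist_len]] = 0;
}
#endif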
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
   same variable in hash table DATA or is not there at all.  */

static int
emit_notes_for_differences_1 (void **slot, void *data)
{
  htab_t new_vars = (htab_t) data;
  variable old_var, new_var;

  old_var = (variable) *slot;
  new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
                                            dv_htab_hash (old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  */
      variable empty_var;

      empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
      empty_var->dv = old_var->dv;
      empty_var->refcount = 0;
      empty_var->n_var_parts = 0;
      empty_var->cur_loc_changed = false;
      empty_var->in_changed_variables = false;
      if (dv_onepart_p (old_var->dv))
        {
          location_chain lc;

          gcc_assert (old_var->n_var_parts == 1);
          for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
            remove_value_chains (old_var->dv, lc->loc);
        }
      variable_was_changed (empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }
  if (variable_different_p (old_var, new_var))
    {
      if (dv_onepart_p (old_var->dv))
        {
          location_chain lc1, lc2;

          gcc_assert (old_var->n_var_parts == 1
                      && new_var->n_var_parts == 1);
          lc1 = old_var->var_part[0].loc_chain;
          lc2 = new_var->var_part[0].loc_chain;
          while (lc1
                 && lc2
                 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
                     || rtx_equal_p (lc1->loc, lc2->loc)))
            {
              lc1 = lc1->next;
              lc2 = lc2->next;
            }
          for (; lc2; lc2 = lc2->next)
            add_value_chains (old_var->dv, lc2->loc);
          for (; lc1; lc1 = lc1->next)
            remove_value_chains (old_var->dv, lc1->loc);
        }
      variable_was_changed (new_var, NULL);
    }
  /* Update cur_loc.  */
  if (old_var != new_var)
    {
      int i;

      for (i = 0; i < new_var->n_var_parts; i++)
        {
          new_var->var_part[i].cur_loc = NULL;
          if (old_var->n_var_parts != new_var->n_var_parts
              || old_var->var_part[i].offset != new_var->var_part[i].offset)
            new_var->cur_loc_changed = true;
          else if (old_var->var_part[i].cur_loc != NULL)
            {
              location_chain lc;
              rtx cur_loc = old_var->var_part[i].cur_loc;

              for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
                if (lc->loc == cur_loc
                    || rtx_equal_p (cur_loc, lc->loc))
                  {
                    new_var->var_part[i].cur_loc = lc->loc;
                    break;
                  }
              if (lc == NULL)
                new_var->cur_loc_changed = true;
            }
        }
    }

  /* Continue traversing the hash table.  */
  return 1;
}
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
   table DATA.  */

static int
emit_notes_for_differences_2 (void **slot, void *data)
{
  htab_t old_vars = (htab_t) data;
  variable old_var, new_var;

  new_var = (variable) *slot;
  old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
                                            dv_htab_hash (new_var->dv));
  if (!old_var)
    {
      int i;

      /* Variable has appeared.  */
      if (dv_onepart_p (new_var->dv))
        {
          location_chain lc;

          gcc_assert (new_var->n_var_parts == 1);
          for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
            add_value_chains (new_var->dv, lc->loc);
        }
      for (i = 0; i < new_var->n_var_parts; i++)
        new_var->var_part[i].cur_loc = NULL;
      variable_was_changed (new_var, NULL);
    }

  /* Continue traversing the hash table.  */
  return 1;
}
/* Emit notes before INSN for differences between dataflow sets OLD_SET and
   NEW_SET.  */

static void
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
                            dataflow_set *new_set)
{
  htab_traverse (shared_hash_htab (old_set->vars),
                 emit_notes_for_differences_1,
                 shared_hash_htab (new_set->vars));
  htab_traverse (shared_hash_htab (new_set->vars),
                 emit_notes_for_differences_2,
                 shared_hash_htab (old_set->vars));
  emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
}
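
/* Illustrative sketch, not part of GCC and compiled out: the two traversals
   above are symmetric.  Walking OLD_SET against NEW_SET finds variables that
   changed or disappeared; walking NEW_SET against OLD_SET finds variables
   that appeared.  The stand-alone analogue below diffs two plain integer
   arrays the same way; note_disappeared and note_appeared are hypothetical
   callbacks.  */
#if 0
extern void note_disappeared (int);
extern void note_appeared (int);

static void
diff_sets (const int *old_set, int n_old, const int *new_set, int n_new)
{
  int i, j;

  /* Pass 1: entries present in OLD_SET but not in NEW_SET disappeared.  */
  for (i = 0; i < n_old; i++)
    {
      for (j = 0; j < n_new; j++)
        if (old_set[i] == new_set[j])
          break;
      if (j == n_new)
        note_disappeared (old_set[i]);
    }

  /* Pass 2: entries present in NEW_SET but not in OLD_SET appeared.  */
  for (j = 0; j < n_new; j++)
    {
      for (i = 0; i < n_old; i++)
        if (new_set[j] == old_set[i])
          break;
      if (i == n_old)
        note_appeared (new_set[j]);
    }
}
#endif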
/* Emit the notes for changes of location parts in the basic block BB.  */

static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
  unsigned int i;
  micro_operation *mo;

  dataflow_set_clear (set);
  dataflow_set_copy (set, &VTI (bb)->in);

  for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
    {
      rtx insn = mo->insn;

      switch (mo->type)
        {
          case MO_CALL:
            dataflow_set_clear_at_call (set);
            emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
            break;

          case MO_USE:
            {
              rtx loc = mo->u.loc;

              if (REG_P (loc))
                var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
              else
                var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);

              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
            }
            break;

          case MO_VAL_LOC:
            {
              rtx loc = mo->u.loc;
              rtx val, vloc;
              tree var;

              if (GET_CODE (loc) == CONCAT)
                {
                  val = XEXP (loc, 0);
                  vloc = XEXP (loc, 1);
                }
              else
                {
                  val = NULL_RTX;
                  vloc = loc;
                }

              var = PAT_VAR_LOCATION_DECL (vloc);

              clobber_variable_part (set, NULL_RTX,
                                     dv_from_decl (var), 0, NULL_RTX);
              if (val)
                {
                  if (VAL_NEEDS_RESOLUTION (loc))
                    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
                  set_variable_part (set, val, dv_from_decl (var), 0,
                                     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
                                     INSERT);
                }
              else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
                set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
                                   dv_from_decl (var), 0,
                                   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
                                   INSERT);

              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
            }
            break;

          case MO_VAL_USE:
            {
              rtx loc = mo->u.loc;
              rtx val, vloc, uloc;

              vloc = uloc = XEXP (loc, 1);
              val = XEXP (loc, 0);

              if (GET_CODE (val) == CONCAT)
                {
                  uloc = XEXP (val, 1);
                  val = XEXP (val, 0);
                }

              if (VAL_NEEDS_RESOLUTION (loc))
                val_resolve (set, val, vloc, insn);
              else
                val_store (set, val, uloc, insn, false);

              if (VAL_HOLDS_TRACK_EXPR (loc))
                {
                  if (GET_CODE (uloc) == REG)
                    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
                                 NULL);
                  else if (GET_CODE (uloc) == MEM)
                    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
                                 NULL);
                }

              emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
            }
            break;

          case MO_VAL_SET:
            {
              rtx loc = mo->u.loc;
              rtx val, vloc, uloc, reverse = NULL_RTX;

              vloc = loc;
              if (VAL_EXPR_HAS_REVERSE (loc))
                {
                  reverse = XEXP (loc, 1);
                  vloc = XEXP (loc, 0);
                }
              uloc = XEXP (vloc, 1);
              val = XEXP (vloc, 0);
              vloc = uloc;

              if (GET_CODE (val) == CONCAT)
                {
                  vloc = XEXP (val, 1);
                  val = XEXP (val, 0);
                }

              if (GET_CODE (vloc) == SET)
                {
                  rtx vsrc = SET_SRC (vloc);

                  gcc_assert (val != vsrc);
                  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

                  vloc = SET_DEST (vloc);

                  if (VAL_NEEDS_RESOLUTION (loc))
                    val_resolve (set, val, vsrc, insn);
                }
              else if (VAL_NEEDS_RESOLUTION (loc))
                {
                  gcc_assert (GET_CODE (uloc) == SET
                              && GET_CODE (SET_SRC (uloc)) == REG);
                  val_resolve (set, val, SET_SRC (uloc), insn);
                }

              if (VAL_HOLDS_TRACK_EXPR (loc))
                {
                  if (VAL_EXPR_IS_CLOBBERED (loc))
                    {
                      if (REG_P (uloc))
                        var_reg_delete (set, uloc, true);
                      else if (MEM_P (uloc))
                        var_mem_delete (set, uloc, true);
                    }
                  else
                    {
                      bool copied_p = VAL_EXPR_IS_COPIED (loc);
                      rtx set_src = NULL;
                      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

                      if (GET_CODE (uloc) == SET)
                        {
                          set_src = SET_SRC (uloc);
                          uloc = SET_DEST (uloc);
                        }

                      if (copied_p)
                        {
                          status = find_src_status (set, set_src);

                          set_src = find_src_set_src (set, set_src);
                        }

                      if (REG_P (uloc))
                        var_reg_delete_and_set (set, uloc, !copied_p,
                                                status, set_src);
                      else if (MEM_P (uloc))
                        var_mem_delete_and_set (set, uloc, !copied_p,
                                                status, set_src);
                    }
                }
              else if (REG_P (uloc))
                var_regno_delete (set, REGNO (uloc));

              val_store (set, val, vloc, insn, true);

              if (reverse)
                val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
                           insn, false);

              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
                                      set->vars);
            }
            break;

          case MO_SET:
            {
              rtx loc = mo->u.loc;
              rtx set_src = NULL;

              if (GET_CODE (loc) == SET)
                {
                  set_src = SET_SRC (loc);
                  loc = SET_DEST (loc);
                }

              if (REG_P (loc))
                var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
                                        set_src);
              else
                var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
                                        set_src);

              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
                                      set->vars);
            }
            break;

          case MO_COPY:
            {
              rtx loc = mo->u.loc;
              enum var_init_status src_status;
              rtx set_src = NULL;

              if (GET_CODE (loc) == SET)
                {
                  set_src = SET_SRC (loc);
                  loc = SET_DEST (loc);
                }

              src_status = find_src_status (set, set_src);
              set_src = find_src_set_src (set, set_src);

              if (REG_P (loc))
                var_reg_delete_and_set (set, loc, false, src_status, set_src);
              else
                var_mem_delete_and_set (set, loc, false, src_status, set_src);

              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
                                      set->vars);
            }
            break;

          case MO_USE_NO_VAR:
            {
              rtx loc = mo->u.loc;

              if (REG_P (loc))
                var_reg_delete (set, loc, false);
              else
                var_mem_delete (set, loc, false);

              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
            }
            break;

          case MO_CLOBBER:
            {
              rtx loc = mo->u.loc;

              if (REG_P (loc))
                var_reg_delete (set, loc, true);
              else
                var_mem_delete (set, loc, true);

              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
                                      set->vars);
            }
            break;

          case MO_ADJUST:
            set->stack_adjust += mo->u.adjust;
            break;
        }
    }
}
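
/* Illustrative sketch, not part of GCC and compiled out: emit_notes_in_bb
   above replays the basic block's vector of micro operations in order,
   updating the dataflow set after each one and emitting notes at the recorded
   insn.  The stand-alone analogue below reduces that dispatch shape to a
   hypothetical three-operation machine over a single integer of state.  */
#if 0
enum toy_op_type { TOY_ADJUST, TOY_SET, TOY_CLOBBER };

struct toy_op
{
  enum toy_op_type type;
  int arg;
};

static void
replay_ops (const struct toy_op *ops, int n_ops, int *state)
{
  int i;

  for (i = 0; i < n_ops; i++)
    switch (ops[i].type)
      {
      case TOY_ADJUST:          /* like MO_ADJUST: accumulate an offset */
        *state += ops[i].arg;
        break;
      case TOY_SET:             /* like MO_SET: overwrite the tracked value */
        *state = ops[i].arg;
        break;
      case TOY_CLOBBER:         /* like MO_CLOBBER: forget the tracked value */
        *state = 0;
        break;
      }
}
#endif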
/* Emit notes for the whole function.  */

static void
vt_emit_notes (void)
{
  basic_block bb;
  dataflow_set cur;

#ifdef ENABLE_RTL_CHECKING
  emitted_notes = pointer_map_create ();
#endif
  gcc_assert (!htab_elements (changed_variables));

  /* Free memory occupied by the out hash tables, as they aren't used
     anymore.  */
  FOR_EACH_BB (bb)
    dataflow_set_clear (&VTI (bb)->out);

  /* Enable emitting notes by functions (mainly by set_variable_part and
     delete_variable_part).  */
  emit_notes = true;

  if (MAY_HAVE_DEBUG_INSNS)
    {
      unsigned int i;
      rtx val;

      for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
        add_cselib_value_chains (dv_from_value (val));
      changed_variables_stack = VEC_alloc (variable, heap, 40);
      changed_values_stack = VEC_alloc (rtx, heap, 40);
    }

  dataflow_set_init (&cur);

  FOR_EACH_BB (bb)
    {
      /* Emit the notes for changes of variable locations between two
         subsequent basic blocks.  */
      emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);

      /* Emit the notes for the changes in the basic block itself.  */
      emit_notes_in_bb (bb, &cur);

      /* Free memory occupied by the in hash table, we won't need it
         again.  */
      dataflow_set_clear (&VTI (bb)->in);
    }
#ifdef ENABLE_CHECKING
  htab_traverse (shared_hash_htab (cur.vars),
                 emit_notes_for_differences_1,
                 shared_hash_htab (empty_shared_hash));
  if (MAY_HAVE_DEBUG_INSNS)
    {
      unsigned int i;
      rtx val;

      for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
        remove_cselib_value_chains (dv_from_value (val));
      gcc_assert (htab_elements (value_chains) == 0);
    }
#endif
  dataflow_set_destroy (&cur);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      VEC_free (variable, heap, changed_variables_stack);
      VEC_free (rtx, heap, changed_values_stack);
    }

#ifdef ENABLE_RTL_CHECKING
  pointer_map_destroy (emitted_notes);
#endif
  emit_notes = false;
}
/* If there is a declaration and offset associated with register/memory RTL
   assign declaration to *DECLP and offset to *OFFSETP, and return true.  */

static bool
vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
{
  if (REG_P (rtl))
    {
      if (REG_ATTRS (rtl))
        {
          *declp = REG_EXPR (rtl);
          *offsetp = REG_OFFSET (rtl);
          return true;
        }
    }
  else if (MEM_P (rtl))
    {
      if (MEM_ATTRS (rtl))
        {
          *declp = MEM_EXPR (rtl);
          *offsetp = INT_MEM_OFFSET (rtl);
          return true;
        }
    }
  return false;
}
/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */

static void
vt_add_function_parameters (void)
{
  tree parm;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm; parm = TREE_CHAIN (parm))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (parm);
      rtx incoming = DECL_INCOMING_RTL (parm);
      tree decl;
      enum machine_mode mode;
      HOST_WIDE_INT offset;
      dataflow_set *out;
      decl_or_value dv;

      if (TREE_CODE (parm) != PARM_DECL)
        continue;

      if (!DECL_NAME (parm))
        continue;

      if (!decl_rtl || !incoming)
        continue;

      if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
        continue;

      if (!vt_get_decl_and_offset (incoming, &decl, &offset))
        {
          if (REG_P (incoming) || MEM_P (incoming))
            {
              /* This means argument is passed by invisible reference.  */
              offset = 0;
              decl = parm;
              incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
            }
          else
            {
              if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
                continue;
              offset += byte_lowpart_offset (GET_MODE (incoming),
                                             GET_MODE (decl_rtl));
            }
        }

      if (!decl)
        continue;

      if (parm != decl)
        {
          /* Assume that DECL_RTL was a pseudo that got spilled to
             memory.  The spill slot sharing code will force the
             memory to reference spill_slot_decl (%sfp), so we don't
             match above.  That's ok, the pseudo must have referenced
             the entire parameter, so just reset OFFSET.  */
          gcc_assert (decl == get_spill_slot_decl (false));
          offset = 0;
        }

      if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
        continue;

      out = &VTI (ENTRY_BLOCK_PTR)->out;

      dv = dv_from_decl (parm);

      if (target_for_debug_bind (parm)
          /* We can't deal with these right now, because this kind of
             variable is single-part.  ??? We could handle parallels
             that describe multiple locations for the same single
             value, but ATM we don't.  */
          && GET_CODE (incoming) != PARALLEL)
        {
          cselib_val *val;

          /* ??? We shouldn't ever hit this, but it may happen because
             arguments passed by invisible reference aren't dealt with
             above: incoming-rtl will have Pmode rather than the
             expected mode for the type.  */
          if (offset)
            continue;

          val = cselib_lookup (var_lowpart (mode, incoming), mode, true);

          /* ??? Float-typed values in memory are not handled by
             cselib.  */
          if (val)
            {
              preserve_value (val);
              set_variable_part (out, val->val_rtx, dv, offset,
                                 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
              dv = dv_from_value (val->val_rtx);
            }
        }

      if (REG_P (incoming))
        {
          incoming = var_lowpart (mode, incoming);
          gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
          attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
                             incoming);
          set_variable_part (out, incoming, dv, offset,
                             VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
        }
      else if (MEM_P (incoming))
        {
          incoming = var_lowpart (mode, incoming);
          set_variable_part (out, incoming, dv, offset,
                             VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
        }
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_preserve_only_values ();
      cselib_reset_table (cselib_get_next_uid ());
    }
}
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

static bool
fp_setter (rtx insn)
{
  rtx pat = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
        pat = XEXP (expr, 0);
    }
  if (GET_CODE (pat) == SET)
    return SET_DEST (pat) == hard_frame_pointer_rtx;
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
        if (GET_CODE (XVECEXP (pat, 0, i)) == SET
            && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
          return true;
    }
  return false;
}
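
/* Illustrative note, not part of GCC: on targets whose prologue copies the
   stack pointer into the frame pointer (e.g. x86), the insn fp_setter looks
   for is, roughly, a frame-related

     (set (reg hard_frame_pointer) (reg stack_pointer))

   either as the whole pattern, as one element of a PARALLEL, or described by
   a REG_FRAME_RELATED_EXPR note instead of the pattern itself.  */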
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
#else
  cfa_base_rtx = arg_pointer_rtx;
#endif
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
                                 get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val);
  var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
                    VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
                    0, NULL_RTX, INSERT);
}
/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb, prologue_bb = NULL;
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));

  attrs_pool = create_alloc_pool ("attrs_def pool",
                                  sizeof (struct attrs_def), 1024);
  var_pool = create_alloc_pool ("variable_def pool",
                                sizeof (struct variable_def)
                                + (MAX_VAR_PARTS - 1)
                                * sizeof (((variable)NULL)->var_part[0]), 64);
  loc_chain_pool = create_alloc_pool ("location_chain_def pool",
                                      sizeof (struct location_chain_def),
                                      1024);
  shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
                                        sizeof (struct shared_hash_def), 256);
  empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab
    = htab_create (1, variable_htab_hash, variable_htab_eq,
                   variable_htab_free);
  changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
                                   variable_htab_free);
  if (MAY_HAVE_DEBUG_INSNS)
    {
      value_chain_pool = create_alloc_pool ("value_chain_def pool",
                                            sizeof (struct value_chain_def),
                                            1024);
      value_chains = htab_create (32, value_chain_htab_hash,
                                  value_chain_htab_eq, NULL);
    }

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB (bb)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      valvar_pool = create_alloc_pool ("small variable_def pool",
                                       sizeof (struct variable_def), 256);
      preserved_values = VEC_alloc (rtx, heap, 256);
    }
  else
    {
      scratch_regs = NULL;
      valvar_pool = NULL;
    }

  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
        return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            elim = XEXP (elim, 0);
          if (elim == stack_pointer_rtx)
            vt_init_cfa_base ();
        }
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            {
              fp_cfa_offset -= INTVAL (XEXP (elim, 1));
              elim = XEXP (elim, 0);
            }
          if (elim != hard_frame_pointer_rtx)
            fp_cfa_offset = -1;
          else
            prologue_bb = single_succ (ENTRY_BLOCK_PTR);
        }
    }

  hard_frame_pointer_adjustment = -1;

  FOR_EACH_BB (bb)
    {
      rtx insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_record_sets_hook = add_with_sets;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "first value: %i\n",
                     cselib_get_next_uid ());
        }

      first_bb = bb;
      for (;;)
        {
          edge e;
          if (bb->next_bb == EXIT_BLOCK_PTR
              || ! single_pred_p (bb->next_bb))
            break;
          e = find_edge (bb, bb->next_bb);
          if (! e || (e->flags & EDGE_FALLTHRU) == 0)
            break;
          bb = bb->next_bb;
        }
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
        {
          HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
          VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
          for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
               insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                {
                  if (!frame_pointer_needed)
                    {
                      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
                      if (pre)
                        {
                          micro_operation mo;
                          mo.type = MO_ADJUST;
                          mo.u.adjust = pre;
                          mo.insn = insn;
                          if (dump_file && (dump_flags & TDF_DETAILS))
                            log_op_type (PATTERN (insn), bb, insn,
                                         MO_ADJUST, dump_file);
                          VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
                                         &mo);
                          VTI (bb)->out.stack_adjust += pre;
                        }
                    }

                  cselib_hook_called = false;
                  adjust_insn (bb, insn);
                  if (MAY_HAVE_DEBUG_INSNS)
                    {
                      cselib_process_insn (insn);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          print_rtl_single (dump_file, insn);
                          dump_cselib_table (dump_file);
                        }
                    }
                  if (!cselib_hook_called)
                    add_with_sets (insn, 0, 0);
                  cancel_changes (0);

                  if (!frame_pointer_needed && post)
                    {
                      micro_operation mo;
                      mo.type = MO_ADJUST;
                      mo.u.adjust = post;
                      mo.insn = insn;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        log_op_type (PATTERN (insn), bb, insn,
                                     MO_ADJUST, dump_file);
                      VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
                                     &mo);
                      VTI (bb)->out.stack_adjust += post;
                    }

                  if (bb == prologue_bb
                      && hard_frame_pointer_adjustment == -1
                      && RTX_FRAME_RELATED_P (insn)
                      && fp_setter (insn))
                    {
                      vt_init_cfa_base ();
                      hard_frame_pointer_adjustment = fp_cfa_offset;
                    }
                }
            }
          gcc_assert (offset == VTI (bb)->out.stack_adjust);
        }

      bb = last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_preserve_only_values ();
          cselib_reset_table (cselib_get_next_uid ());
          cselib_record_sets_hook = NULL;
        }
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR)->flooded = true;
  vt_add_function_parameters ();
  cfa_base_rtx = NULL_RTX;
  return true;
}
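
/* Illustrative sketch, not part of GCC and compiled out: the first_bb/last_bb
   scan above extends each group of basic blocks as long as consecutive blocks
   are joined only by a fall-through edge, so that one cselib session can
   cover the whole run.  The stand-alone analogue below groups consecutive
   indices by a predicate; falls_through and process_run are hypothetical.  */
#if 0
extern int falls_through (int i);                 /* i falls through to i + 1 */
extern void process_run (int first, int last);

static void
for_each_run (int n)
{
  int i = 0;

  while (i < n)
    {
      int first = i;

      /* Extend the run while the next element is reached by fall-through.  */
      while (i + 1 < n && falls_through (i))
        i++;

      process_run (first, i);
      i++;
    }
}
#endif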
/* Get rid of all debug insns from the insn stream.  */

static void
delete_debug_insns (void)
{
  basic_block bb;
  rtx insn, next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
        if (DEBUG_INSN_P (insn))
          delete_insn (insn);
    }
}
/* Run a fast, BB-local only version of var tracking, to take care of
   information that we don't do global analysis on, such that not all
   information is lost.  If SKIPPED holds, we're skipping the global
   pass entirely, so we should try to use information it would have
   handled as well.  */

static void
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
  /* ??? Just skip it all for now.  */
  delete_debug_insns ();
}
/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      VEC_free (micro_operation, heap, VTI (bb)->mos);
    }

  FOR_ALL_BB (bb)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
        {
          dataflow_set_destroy (VTI (bb)->permp);
          XDELETE (VTI (bb)->permp);
        }
    }
  free_aux_for_blocks ();
  htab_delete (empty_shared_hash->htab);
  htab_delete (changed_variables);
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      htab_delete (value_chains);
      free_alloc_pool (value_chain_pool);
      free_alloc_pool (valvar_pool);
      VEC_free (rtx, heap, preserved_values);
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
/* The entry point to variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  if (flag_var_tracking_assignments < 0)
    {
      delete_debug_insns ();
      flag_var_tracking_assignments = 0;
    }

  if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_flow_info (dump_file, dump_flags);
    }

  vt_emit_notes ();

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}
unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}

static bool
gate_handle_var_tracking (void)
{
  return (flag_var_tracking);
}
struct rtl_opt_pass pass_variable_tracking =
{
 {
  RTL_PASS,
  "vartrack",                           /* name */
  gate_handle_var_tracking,             /* gate */
  variable_tracking_main,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_VAR_TRACKING,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};