1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < clobber < set < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effectively deleting the appropriate variable parts when we set or clobber the
54 register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
100 #include "insn-config.h"
103 #include "alloc-pool.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
/* Compile-time assertion via array size: the declared size is 1 when the
   codes match and -1 (a hard compile error) when they do not, so the
   assumption above is checked on every build without any runtime cost. */
extern char check_value_val
[(int) VALUE
== (int) IDENTIFIER_NODE
? 1 : -1];
127 /* Type of micro operation. */
128 enum micro_operation_type
130 MO_USE
, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR
,/* Use location which is not associated with a variable
132 or the variable is not trackable. */
133 MO_VAL_USE
, /* Use location which is associated with a value. */
134 MO_VAL_LOC
, /* Use location which appears in a debug insn. */
135 MO_VAL_SET
, /* Set location associated with a value. */
136 MO_SET
, /* Set location. */
137 MO_COPY
, /* Copy the same portion of a variable from one
138 location to another. */
139 MO_CLOBBER
, /* Clobber location. */
140 MO_CALL
, /* Call insn. */
141 MO_ADJUST
/* Adjust stack pointer. */
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name
[] = {
159 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
164 EMIT_NOTE_BEFORE_INSN
,
165 EMIT_NOTE_AFTER_INSN
,
166 EMIT_NOTE_AFTER_CALL_INSN
169 /* Structure holding information about micro operation. */
170 typedef struct micro_operation_def
172 /* Type of micro operation. */
173 enum micro_operation_type type
;
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust
;
196 DEF_VEC_O(micro_operation
);
197 DEF_VEC_ALLOC_O(micro_operation
,heap
);
199 /* A declaration of a variable, or an RTL value being handled like a
201 typedef void *decl_or_value
;
203 /* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
207 /* The instruction which the note will be emitted before/after. */
210 /* Where the note will be emitted (before/after insn)? */
211 enum emit_note_where where
;
213 /* The variables and values active at this point. */
217 /* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 chain is the best data structure. */
221 typedef struct attrs_def
223 /* Pointer to next member of the list. */
224 struct attrs_def
*next
;
226 /* The rtx of register. */
229 /* The declaration corresponding to LOC. */
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset
;
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must be first unshared before modified. */
238 typedef struct shared_hash_def
240 /* Reference count. */
243 /* Actual hash table. */
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust
;
253 /* Attributes for registers (lists of attrs). */
254 attrs regs
[FIRST_PSEUDO_REGISTER
];
256 /* Variable locations. */
259 /* Vars that is being traversed. */
260 shared_hash traversed_vars
;
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
267 /* The vector of micro operations. */
268 VEC(micro_operation
, heap
) *mos
;
270 /* The IN and OUT set for dataflow analysis. */
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
280 /* Has the block been visited in DFS? */
283 /* Has the block been flooded in VTA? */
286 } *variable_tracking_info
;
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
291 /* Next element in the chain. */
292 struct location_chain_def
*next
;
294 /* The location (REG, MEM or VALUE). */
297 /* The "value" stored in this location. */
301 enum var_init_status init
;
304 /* Structure describing one part of variable. */
305 typedef struct variable_part_def
307 /* Chain of locations of the part. */
308 location_chain loc_chain
;
310 /* Location which was last emitted to location list. */
313 /* The offset in the variable. */
314 HOST_WIDE_INT offset
;
317 /* Maximum number of location parts. */
/* Bounds the number of variable_part entries a tracked variable may have;
   var_pool (below) allocates struct variable_def with MAX_VAR_PARTS entries
   even though the struct declares var_part[1] as a trailing array. */
318 #define MAX_VAR_PARTS 16
320 /* Structure describing where the variable is located. */
321 typedef struct variable_def
323 /* The declaration of the variable, or an RTL value being handled
324 like a declaration. */
327 /* Reference count. */
330 /* Number of variable parts. */
333 /* True if this variable changed (any of its) cur_loc fields
334 during the current emit_notes_for_changes resp.
335 emit_notes_for_differences call. */
336 bool cur_loc_changed
;
338 /* True if this variable_def struct is currently in the
339 changed_variables hash table. */
340 bool in_changed_variables
;
342 /* The variable parts. */
343 variable_part var_part
[1];
345 typedef const struct variable_def
*const_variable
;
347 /* Structure for chaining backlinks from referenced VALUEs to
348 DVs that are referencing them. */
349 typedef struct value_chain_def
351 /* Next value_chain entry. */
352 struct value_chain_def
*next
;
354 /* The declaration of the variable, or an RTL value
355 being handled like a declaration, whose var_parts[0].loc_chain
356 references the VALUE owning this value_chain. */
359 /* Reference count. */
362 typedef const struct value_chain_def
*const_value_chain
;
364 /* Pointer to the BB's information specific to variable tracking pass. */
/* The pass stashes its per-basic-block data in the block's aux field;
   VTI casts it back to variable_tracking_info. */
365 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
367 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
/* NOTE(review): because MEM is evaluated twice, do not pass an argument
   with side effects. */
368 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
370 /* Alloc pool for struct attrs_def. */
371 static alloc_pool attrs_pool
;
373 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
374 static alloc_pool var_pool
;
376 /* Alloc pool for struct variable_def with a single var_part entry. */
377 static alloc_pool valvar_pool
;
379 /* Alloc pool for struct location_chain_def. */
380 static alloc_pool loc_chain_pool
;
382 /* Alloc pool for struct shared_hash_def. */
383 static alloc_pool shared_hash_pool
;
385 /* Alloc pool for struct value_chain_def. */
386 static alloc_pool value_chain_pool
;
388 /* Changed variables, notes will be emitted for them. */
389 static htab_t changed_variables
;
391 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
392 static htab_t value_chains
;
394 /* Shall notes be emitted? */
395 static bool emit_notes
;
397 /* Empty shared hashtable. */
398 static shared_hash empty_shared_hash
;
400 /* Scratch register bitmap used by cselib_expand_value_rtx. */
401 static bitmap scratch_regs
= NULL
;
403 typedef struct GTY(()) parm_reg
{
408 DEF_VEC_O(parm_reg_t
);
409 DEF_VEC_ALLOC_O(parm_reg_t
, gc
);
411 /* Vector of windowed parameter registers, if any. */
412 static VEC(parm_reg_t
, gc
) *windowed_parm_regs
= NULL
;
414 /* Variable used to tell whether cselib_process_insn called our hook. */
415 static bool cselib_hook_called
;
417 /* Local function prototypes. */
418 static void stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
420 static void insn_stack_adjust_offset_pre_post (rtx
, HOST_WIDE_INT
*,
422 static bool vt_stack_adjustments (void);
423 static void note_register_arguments (rtx
);
424 static hashval_t
variable_htab_hash (const void *);
425 static int variable_htab_eq (const void *, const void *);
426 static void variable_htab_free (void *);
428 static void init_attrs_list_set (attrs
*);
429 static void attrs_list_clear (attrs
*);
430 static attrs
attrs_list_member (attrs
, decl_or_value
, HOST_WIDE_INT
);
431 static void attrs_list_insert (attrs
*, decl_or_value
, HOST_WIDE_INT
, rtx
);
432 static void attrs_list_copy (attrs
*, attrs
);
433 static void attrs_list_union (attrs
*, attrs
);
435 static void **unshare_variable (dataflow_set
*set
, void **slot
, variable var
,
436 enum var_init_status
);
437 static void vars_copy (htab_t
, htab_t
);
438 static tree
var_debug_decl (tree
);
439 static void var_reg_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
440 static void var_reg_delete_and_set (dataflow_set
*, rtx
, bool,
441 enum var_init_status
, rtx
);
442 static void var_reg_delete (dataflow_set
*, rtx
, bool);
443 static void var_regno_delete (dataflow_set
*, int);
444 static void var_mem_set (dataflow_set
*, rtx
, enum var_init_status
, rtx
);
445 static void var_mem_delete_and_set (dataflow_set
*, rtx
, bool,
446 enum var_init_status
, rtx
);
447 static void var_mem_delete (dataflow_set
*, rtx
, bool);
449 static void dataflow_set_init (dataflow_set
*);
450 static void dataflow_set_clear (dataflow_set
*);
451 static void dataflow_set_copy (dataflow_set
*, dataflow_set
*);
452 static int variable_union_info_cmp_pos (const void *, const void *);
453 static void dataflow_set_union (dataflow_set
*, dataflow_set
*);
454 static location_chain
find_loc_in_1pdv (rtx
, variable
, htab_t
);
455 static bool canon_value_cmp (rtx
, rtx
);
456 static int loc_cmp (rtx
, rtx
);
457 static bool variable_part_different_p (variable_part
*, variable_part
*);
458 static bool onepart_variable_different_p (variable
, variable
);
459 static bool variable_different_p (variable
, variable
);
460 static bool dataflow_set_different (dataflow_set
*, dataflow_set
*);
461 static void dataflow_set_destroy (dataflow_set
*);
463 static bool contains_symbol_ref (rtx
);
464 static bool track_expr_p (tree
, bool);
465 static bool same_variable_part_p (rtx
, tree
, HOST_WIDE_INT
);
466 static int add_uses (rtx
*, void *);
467 static void add_uses_1 (rtx
*, void *);
468 static void add_stores (rtx
, const_rtx
, void *);
469 static bool compute_bb_dataflow (basic_block
);
470 static bool vt_find_locations (void);
472 static void dump_attrs_list (attrs
);
473 static int dump_var_slot (void **, void *);
474 static void dump_var (variable
);
475 static void dump_vars (htab_t
);
476 static void dump_dataflow_set (dataflow_set
*);
477 static void dump_dataflow_sets (void);
479 static void variable_was_changed (variable
, dataflow_set
*);
480 static void **set_slot_part (dataflow_set
*, rtx
, void **,
481 decl_or_value
, HOST_WIDE_INT
,
482 enum var_init_status
, rtx
);
483 static void set_variable_part (dataflow_set
*, rtx
,
484 decl_or_value
, HOST_WIDE_INT
,
485 enum var_init_status
, rtx
, enum insert_option
);
486 static void **clobber_slot_part (dataflow_set
*, rtx
,
487 void **, HOST_WIDE_INT
, rtx
);
488 static void clobber_variable_part (dataflow_set
*, rtx
,
489 decl_or_value
, HOST_WIDE_INT
, rtx
);
490 static void **delete_slot_part (dataflow_set
*, rtx
, void **, HOST_WIDE_INT
);
491 static void delete_variable_part (dataflow_set
*, rtx
,
492 decl_or_value
, HOST_WIDE_INT
);
493 static int emit_note_insn_var_location (void **, void *);
494 static void emit_notes_for_changes (rtx
, enum emit_note_where
, shared_hash
);
495 static int emit_notes_for_differences_1 (void **, void *);
496 static int emit_notes_for_differences_2 (void **, void *);
497 static void emit_notes_for_differences (rtx
, dataflow_set
*, dataflow_set
*);
498 static void emit_notes_in_bb (basic_block
, dataflow_set
*);
499 static void vt_emit_notes (void);
501 static bool vt_get_decl_and_offset (rtx
, tree
*, HOST_WIDE_INT
*);
502 static void vt_add_function_parameters (void);
503 static bool vt_initialize (void);
504 static void vt_finalize (void);
506 /* Given a SET, calculate the amount of stack adjustment it contains
507 PRE- and POST-modifying stack pointer.
508 This function is similar to stack_adjust_offset. */
511 stack_adjust_offset_pre_post (rtx pattern
, HOST_WIDE_INT
*pre
,
514 rtx src
= SET_SRC (pattern
);
515 rtx dest
= SET_DEST (pattern
);
518 if (dest
== stack_pointer_rtx
)
520 /* (set (reg sp) (plus (reg sp) (const_int))) */
521 code
= GET_CODE (src
);
522 if (! (code
== PLUS
|| code
== MINUS
)
523 || XEXP (src
, 0) != stack_pointer_rtx
524 || !CONST_INT_P (XEXP (src
, 1)))
528 *post
+= INTVAL (XEXP (src
, 1));
530 *post
-= INTVAL (XEXP (src
, 1));
532 else if (MEM_P (dest
))
534 /* (set (mem (pre_dec (reg sp))) (foo)) */
535 src
= XEXP (dest
, 0);
536 code
= GET_CODE (src
);
542 if (XEXP (src
, 0) == stack_pointer_rtx
)
544 rtx val
= XEXP (XEXP (src
, 1), 1);
545 /* We handle only adjustments by constant amount. */
546 gcc_assert (GET_CODE (XEXP (src
, 1)) == PLUS
&&
549 if (code
== PRE_MODIFY
)
550 *pre
-= INTVAL (val
);
552 *post
-= INTVAL (val
);
558 if (XEXP (src
, 0) == stack_pointer_rtx
)
560 *pre
+= GET_MODE_SIZE (GET_MODE (dest
));
566 if (XEXP (src
, 0) == stack_pointer_rtx
)
568 *post
+= GET_MODE_SIZE (GET_MODE (dest
));
574 if (XEXP (src
, 0) == stack_pointer_rtx
)
576 *pre
-= GET_MODE_SIZE (GET_MODE (dest
));
582 if (XEXP (src
, 0) == stack_pointer_rtx
)
584 *post
-= GET_MODE_SIZE (GET_MODE (dest
));
595 /* Given an INSN, calculate the amount of stack adjustment it contains
596 PRE- and POST-modifying stack pointer. */
599 insn_stack_adjust_offset_pre_post (rtx insn
, HOST_WIDE_INT
*pre
,
607 pattern
= PATTERN (insn
);
608 if (RTX_FRAME_RELATED_P (insn
))
610 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
612 pattern
= XEXP (expr
, 0);
615 if (GET_CODE (pattern
) == SET
)
616 stack_adjust_offset_pre_post (pattern
, pre
, post
);
617 else if (GET_CODE (pattern
) == PARALLEL
618 || GET_CODE (pattern
) == SEQUENCE
)
622 /* There may be stack adjustments inside compound insns. Search
624 for ( i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
625 if (GET_CODE (XVECEXP (pattern
, 0, i
)) == SET
)
626 stack_adjust_offset_pre_post (XVECEXP (pattern
, 0, i
), pre
, post
);
630 /* Compute stack adjustments for all blocks by traversing DFS tree.
631 Return true when the adjustments on all incoming edges are consistent.
632 Heavily borrowed from pre_and_rev_post_order_compute. */
635 vt_stack_adjustments (void)
637 edge_iterator
*stack
;
640 /* Initialize entry block. */
641 VTI (ENTRY_BLOCK_PTR
)->visited
= true;
642 VTI (ENTRY_BLOCK_PTR
)->in
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
643 VTI (ENTRY_BLOCK_PTR
)->out
.stack_adjust
= INCOMING_FRAME_SP_OFFSET
;
645 /* Allocate stack for back-tracking up CFG. */
646 stack
= XNEWVEC (edge_iterator
, n_basic_blocks
+ 1);
649 /* Push the first edge on to the stack. */
650 stack
[sp
++] = ei_start (ENTRY_BLOCK_PTR
->succs
);
658 /* Look at the edge on the top of the stack. */
660 src
= ei_edge (ei
)->src
;
661 dest
= ei_edge (ei
)->dest
;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest
)->visited
)
667 HOST_WIDE_INT pre
, post
, offset
;
668 VTI (dest
)->visited
= true;
669 VTI (dest
)->in
.stack_adjust
= offset
= VTI (src
)->out
.stack_adjust
;
671 if (dest
!= EXIT_BLOCK_PTR
)
672 for (insn
= BB_HEAD (dest
);
673 insn
!= NEXT_INSN (BB_END (dest
));
674 insn
= NEXT_INSN (insn
))
678 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
679 offset
+= pre
+ post
;
682 note_register_arguments (insn
);
685 VTI (dest
)->out
.stack_adjust
= offset
;
687 if (EDGE_COUNT (dest
->succs
) > 0)
688 /* Since the DEST node has been visited for the first
689 time, check its successors. */
690 stack
[sp
++] = ei_start (dest
->succs
);
694 /* Check whether the adjustments on the edges are the same. */
695 if (VTI (dest
)->in
.stack_adjust
!= VTI (src
)->out
.stack_adjust
)
701 if (! ei_one_before_end_p (ei
))
702 /* Go to the next edge. */
703 ei_next (&stack
[sp
- 1]);
705 /* Return to previous level if there are no more edges. */
714 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
715 hard_frame_pointer_rtx is being mapped to it and offset for it. */
716 static rtx cfa_base_rtx
;
717 static HOST_WIDE_INT cfa_base_offset
;
719 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
720 or hard_frame_pointer_rtx. */
723 compute_cfa_pointer (HOST_WIDE_INT adjustment
)
725 return plus_constant (cfa_base_rtx
, adjustment
+ cfa_base_offset
);
728 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
729 or -1 if the replacement shouldn't be done. */
730 static HOST_WIDE_INT hard_frame_pointer_adjustment
= -1;
732 /* Data for adjust_mems callback. */
734 struct adjust_mem_data
737 enum machine_mode mem_mode
;
738 HOST_WIDE_INT stack_adjust
;
742 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
743 transformation of wider mode arithmetics to narrower mode,
744 -1 if it is suitable and subexpressions shouldn't be
745 traversed and 0 if it is suitable and subexpressions should
746 be traversed. Called through for_each_rtx. */
749 use_narrower_mode_test (rtx
*loc
, void *data
)
751 rtx subreg
= (rtx
) data
;
753 if (CONSTANT_P (*loc
))
755 switch (GET_CODE (*loc
))
758 if (cselib_lookup (*loc
, GET_MODE (SUBREG_REG (subreg
)), 0, VOIDmode
))
760 if (!validate_subreg (GET_MODE (subreg
), GET_MODE (*loc
),
761 *loc
, subreg_lowpart_offset (GET_MODE (subreg
),
770 if (for_each_rtx (&XEXP (*loc
, 0), use_narrower_mode_test
, data
))
779 /* Transform X into narrower mode MODE from wider mode WMODE. */
782 use_narrower_mode (rtx x
, enum machine_mode mode
, enum machine_mode wmode
)
786 return lowpart_subreg (mode
, x
, wmode
);
787 switch (GET_CODE (x
))
790 return lowpart_subreg (mode
, x
, wmode
);
794 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
795 op1
= use_narrower_mode (XEXP (x
, 1), mode
, wmode
);
796 return simplify_gen_binary (GET_CODE (x
), mode
, op0
, op1
);
798 op0
= use_narrower_mode (XEXP (x
, 0), mode
, wmode
);
799 return simplify_gen_binary (ASHIFT
, mode
, op0
, XEXP (x
, 1));
805 /* Helper function for adjusting used MEMs. */
808 adjust_mems (rtx loc
, const_rtx old_rtx
, void *data
)
810 struct adjust_mem_data
*amd
= (struct adjust_mem_data
*) data
;
811 rtx mem
, addr
= loc
, tem
;
812 enum machine_mode mem_mode_save
;
814 switch (GET_CODE (loc
))
817 /* Don't do any sp or fp replacements outside of MEM addresses
819 if (amd
->mem_mode
== VOIDmode
&& amd
->store
)
821 if (loc
== stack_pointer_rtx
822 && !frame_pointer_needed
824 return compute_cfa_pointer (amd
->stack_adjust
);
825 else if (loc
== hard_frame_pointer_rtx
826 && frame_pointer_needed
827 && hard_frame_pointer_adjustment
!= -1
829 return compute_cfa_pointer (hard_frame_pointer_adjustment
);
830 gcc_checking_assert (loc
!= virtual_incoming_args_rtx
);
836 mem
= targetm
.delegitimize_address (mem
);
837 if (mem
!= loc
&& !MEM_P (mem
))
838 return simplify_replace_fn_rtx (mem
, old_rtx
, adjust_mems
, data
);
841 addr
= XEXP (mem
, 0);
842 mem_mode_save
= amd
->mem_mode
;
843 amd
->mem_mode
= GET_MODE (mem
);
844 store_save
= amd
->store
;
846 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
847 amd
->store
= store_save
;
848 amd
->mem_mode
= mem_mode_save
;
850 addr
= targetm
.delegitimize_address (addr
);
851 if (addr
!= XEXP (mem
, 0))
852 mem
= replace_equiv_address_nv (mem
, addr
);
854 mem
= avoid_constant_pool_reference (mem
);
858 addr
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
859 GEN_INT (GET_CODE (loc
) == PRE_INC
860 ? GET_MODE_SIZE (amd
->mem_mode
)
861 : -GET_MODE_SIZE (amd
->mem_mode
)));
865 addr
= XEXP (loc
, 0);
866 gcc_assert (amd
->mem_mode
!= VOIDmode
&& amd
->mem_mode
!= BLKmode
);
867 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
868 tem
= gen_rtx_PLUS (GET_MODE (loc
), XEXP (loc
, 0),
869 GEN_INT ((GET_CODE (loc
) == PRE_INC
870 || GET_CODE (loc
) == POST_INC
)
871 ? GET_MODE_SIZE (amd
->mem_mode
)
872 : -GET_MODE_SIZE (amd
->mem_mode
)));
873 amd
->side_effects
= alloc_EXPR_LIST (0,
874 gen_rtx_SET (VOIDmode
,
880 addr
= XEXP (loc
, 1);
883 addr
= XEXP (loc
, 0);
884 gcc_assert (amd
->mem_mode
!= VOIDmode
);
885 addr
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
886 amd
->side_effects
= alloc_EXPR_LIST (0,
887 gen_rtx_SET (VOIDmode
,
893 /* First try without delegitimization of whole MEMs and
894 avoid_constant_pool_reference, which is more likely to succeed. */
895 store_save
= amd
->store
;
897 addr
= simplify_replace_fn_rtx (SUBREG_REG (loc
), old_rtx
, adjust_mems
,
899 amd
->store
= store_save
;
900 mem
= simplify_replace_fn_rtx (addr
, old_rtx
, adjust_mems
, data
);
901 if (mem
== SUBREG_REG (loc
))
906 tem
= simplify_gen_subreg (GET_MODE (loc
), mem
,
907 GET_MODE (SUBREG_REG (loc
)),
911 tem
= simplify_gen_subreg (GET_MODE (loc
), addr
,
912 GET_MODE (SUBREG_REG (loc
)),
915 tem
= gen_rtx_raw_SUBREG (GET_MODE (loc
), addr
, SUBREG_BYTE (loc
));
917 if (MAY_HAVE_DEBUG_INSNS
918 && GET_CODE (tem
) == SUBREG
919 && (GET_CODE (SUBREG_REG (tem
)) == PLUS
920 || GET_CODE (SUBREG_REG (tem
)) == MINUS
921 || GET_CODE (SUBREG_REG (tem
)) == MULT
922 || GET_CODE (SUBREG_REG (tem
)) == ASHIFT
)
923 && GET_MODE_CLASS (GET_MODE (tem
)) == MODE_INT
924 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem
))) == MODE_INT
925 && GET_MODE_SIZE (GET_MODE (tem
))
926 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem
)))
927 && subreg_lowpart_p (tem
)
928 && !for_each_rtx (&SUBREG_REG (tem
), use_narrower_mode_test
, tem
))
929 return use_narrower_mode (SUBREG_REG (tem
), GET_MODE (tem
),
930 GET_MODE (SUBREG_REG (tem
)));
933 /* Don't do any replacements in second and following
934 ASM_OPERANDS of inline-asm with multiple sets.
935 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
936 and ASM_OPERANDS_LABEL_VEC need to be equal between
937 all the ASM_OPERANDs in the insn and adjust_insn will
939 if (ASM_OPERANDS_OUTPUT_IDX (loc
) != 0)
948 /* Helper function for replacement of uses. */
951 adjust_mem_uses (rtx
*x
, void *data
)
953 rtx new_x
= simplify_replace_fn_rtx (*x
, NULL_RTX
, adjust_mems
, data
);
955 validate_change (NULL_RTX
, x
, new_x
, true);
958 /* Helper function for replacement of stores. */
961 adjust_mem_stores (rtx loc
, const_rtx expr
, void *data
)
965 rtx new_dest
= simplify_replace_fn_rtx (SET_DEST (expr
), NULL_RTX
,
967 if (new_dest
!= SET_DEST (expr
))
969 rtx xexpr
= CONST_CAST_RTX (expr
);
970 validate_change (NULL_RTX
, &SET_DEST (xexpr
), new_dest
, true);
975 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
976 replace them with their value in the insn and add the side-effects
977 as other sets to the insn. */
980 adjust_insn (basic_block bb
, rtx insn
)
982 struct adjust_mem_data amd
;
985 #ifdef HAVE_window_save
986 /* If the target machine has an explicit window save instruction, the
987 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
988 if (RTX_FRAME_RELATED_P (insn
)
989 && find_reg_note (insn
, REG_CFA_WINDOW_SAVE
, NULL_RTX
))
991 unsigned int i
, nregs
= VEC_length(parm_reg_t
, windowed_parm_regs
);
992 rtx rtl
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nregs
* 2));
995 FOR_EACH_VEC_ELT (parm_reg_t
, windowed_parm_regs
, i
, p
)
997 XVECEXP (rtl
, 0, i
* 2)
998 = gen_rtx_SET (VOIDmode
, p
->incoming
, p
->outgoing
);
999 /* Do not clobber the attached DECL, but only the REG. */
1000 XVECEXP (rtl
, 0, i
* 2 + 1)
1001 = gen_rtx_CLOBBER (GET_MODE (p
->outgoing
),
1002 gen_raw_REG (GET_MODE (p
->outgoing
),
1003 REGNO (p
->outgoing
)));
1006 validate_change (NULL_RTX
, &PATTERN (insn
), rtl
, true);
1011 amd
.mem_mode
= VOIDmode
;
1012 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
1013 amd
.side_effects
= NULL_RTX
;
1016 note_stores (PATTERN (insn
), adjust_mem_stores
, &amd
);
1019 if (GET_CODE (PATTERN (insn
)) == PARALLEL
1020 && asm_noperands (PATTERN (insn
)) > 0
1021 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1026 /* inline-asm with multiple sets is tiny bit more complicated,
1027 because the 3 vectors in ASM_OPERANDS need to be shared between
1028 all ASM_OPERANDS in the instruction. adjust_mems will
1029 not touch ASM_OPERANDS other than the first one, asm_noperands
1030 test above needs to be called before that (otherwise it would fail)
1031 and afterwards this code fixes it up. */
1032 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1033 body
= PATTERN (insn
);
1034 set0
= XVECEXP (body
, 0, 0);
1035 gcc_checking_assert (GET_CODE (set0
) == SET
1036 && GET_CODE (SET_SRC (set0
)) == ASM_OPERANDS
1037 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0
)) == 0);
1038 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
1039 if (GET_CODE (XVECEXP (body
, 0, i
)) != SET
)
1043 set
= XVECEXP (body
, 0, i
);
1044 gcc_checking_assert (GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
1045 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set
))
1047 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set
))
1048 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
))
1049 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set
))
1050 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
))
1051 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set
))
1052 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
)))
1054 rtx newsrc
= shallow_copy_rtx (SET_SRC (set
));
1055 ASM_OPERANDS_INPUT_VEC (newsrc
)
1056 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0
));
1057 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc
)
1058 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0
));
1059 ASM_OPERANDS_LABEL_VEC (newsrc
)
1060 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0
));
1061 validate_change (NULL_RTX
, &SET_SRC (set
), newsrc
, true);
1066 note_uses (&PATTERN (insn
), adjust_mem_uses
, &amd
);
1068 /* For read-only MEMs containing some constant, prefer those
1070 set
= single_set (insn
);
1071 if (set
&& MEM_P (SET_SRC (set
)) && MEM_READONLY_P (SET_SRC (set
)))
1073 rtx note
= find_reg_equal_equiv_note (insn
);
1075 if (note
&& CONSTANT_P (XEXP (note
, 0)))
1076 validate_change (NULL_RTX
, &SET_SRC (set
), XEXP (note
, 0), true);
1079 if (amd
.side_effects
)
1081 rtx
*pat
, new_pat
, s
;
1084 pat
= &PATTERN (insn
);
1085 if (GET_CODE (*pat
) == COND_EXEC
)
1086 pat
= &COND_EXEC_CODE (*pat
);
1087 if (GET_CODE (*pat
) == PARALLEL
)
1088 oldn
= XVECLEN (*pat
, 0);
1091 for (s
= amd
.side_effects
, newn
= 0; s
; newn
++)
1093 new_pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (oldn
+ newn
));
1094 if (GET_CODE (*pat
) == PARALLEL
)
1095 for (i
= 0; i
< oldn
; i
++)
1096 XVECEXP (new_pat
, 0, i
) = XVECEXP (*pat
, 0, i
);
1098 XVECEXP (new_pat
, 0, 0) = *pat
;
1099 for (s
= amd
.side_effects
, i
= oldn
; i
< oldn
+ newn
; i
++, s
= XEXP (s
, 1))
1100 XVECEXP (new_pat
, 0, i
) = XEXP (s
, 0);
1101 free_EXPR_LIST_list (&amd
.side_effects
);
1102 validate_change (NULL_RTX
, pat
, new_pat
, true);
1106 /* Return true if a decl_or_value DV is a DECL or NULL. */
1108 dv_is_decl_p (decl_or_value dv
)
1110 return !dv
|| (int) TREE_CODE ((tree
) dv
) != (int) VALUE
;
1113 /* Return true if a decl_or_value is a VALUE rtl. */
1115 dv_is_value_p (decl_or_value dv
)
1117 return dv
&& !dv_is_decl_p (dv
);
1120 /* Return the decl in the decl_or_value. */
1122 dv_as_decl (decl_or_value dv
)
1124 gcc_checking_assert (dv_is_decl_p (dv
));
1128 /* Return the value in the decl_or_value. */
1130 dv_as_value (decl_or_value dv
)
1132 gcc_checking_assert (dv_is_value_p (dv
));
1136 /* Return the opaque pointer in the decl_or_value. */
1137 static inline void *
1138 dv_as_opaque (decl_or_value dv
)
1143 /* Return true if a decl_or_value must not have more than one variable
1146 dv_onepart_p (decl_or_value dv
)
1150 if (!MAY_HAVE_DEBUG_INSNS
)
1153 if (dv_is_value_p (dv
))
1156 decl
= dv_as_decl (dv
);
1161 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
1164 return (target_for_debug_bind (decl
) != NULL_TREE
);
1167 /* Return the variable pool to be used for dv, depending on whether it
1168 can have multiple parts or not. */
1169 static inline alloc_pool
1170 dv_pool (decl_or_value dv
)
1172 return dv_onepart_p (dv
) ? valvar_pool
: var_pool
;
1175 /* Build a decl_or_value out of a decl. */
1176 static inline decl_or_value
1177 dv_from_decl (tree decl
)
1181 gcc_checking_assert (dv_is_decl_p (dv
));
1185 /* Build a decl_or_value out of a value. */
1186 static inline decl_or_value
1187 dv_from_value (rtx value
)
1191 gcc_checking_assert (dv_is_value_p (dv
));
1195 extern void debug_dv (decl_or_value dv
);
1198 debug_dv (decl_or_value dv
)
1200 if (dv_is_value_p (dv
))
1201 debug_rtx (dv_as_value (dv
));
1203 debug_generic_stmt (dv_as_decl (dv
));
1206 typedef unsigned int dvuid
;
1208 /* Return the uid of DV. */
1211 dv_uid (decl_or_value dv
)
1213 if (dv_is_value_p (dv
))
1214 return CSELIB_VAL_PTR (dv_as_value (dv
))->uid
;
1216 return DECL_UID (dv_as_decl (dv
));
1219 /* Compute the hash from the uid. */
1221 static inline hashval_t
1222 dv_uid2hash (dvuid uid
)
1227 /* The hash function for a mask table in a shared_htab chain. */
1229 static inline hashval_t
1230 dv_htab_hash (decl_or_value dv
)
1232 return dv_uid2hash (dv_uid (dv
));
1235 /* The hash function for variable_htab, computes the hash value
1236 from the declaration of variable X. */
1239 variable_htab_hash (const void *x
)
1241 const_variable
const v
= (const_variable
) x
;
1243 return dv_htab_hash (v
->dv
);
1246 /* Compare the declaration of variable X with declaration Y. */
1249 variable_htab_eq (const void *x
, const void *y
)
1251 const_variable
const v
= (const_variable
) x
;
1252 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1254 return (dv_as_opaque (v
->dv
) == dv_as_opaque (dv
));
1257 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1260 variable_htab_free (void *elem
)
1263 variable var
= (variable
) elem
;
1264 location_chain node
, next
;
1266 gcc_checking_assert (var
->refcount
> 0);
1269 if (var
->refcount
> 0)
1272 for (i
= 0; i
< var
->n_var_parts
; i
++)
1274 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= next
)
1277 pool_free (loc_chain_pool
, node
);
1279 var
->var_part
[i
].loc_chain
= NULL
;
1281 pool_free (dv_pool (var
->dv
), var
);
1284 /* The hash function for value_chains htab, computes the hash value
1288 value_chain_htab_hash (const void *x
)
1290 const_value_chain
const v
= (const_value_chain
) x
;
1292 return dv_htab_hash (v
->dv
);
1295 /* Compare the VALUE X with VALUE Y. */
1298 value_chain_htab_eq (const void *x
, const void *y
)
1300 const_value_chain
const v
= (const_value_chain
) x
;
1301 decl_or_value dv
= CONST_CAST2 (decl_or_value
, const void *, y
);
1303 return dv_as_opaque (v
->dv
) == dv_as_opaque (dv
);
1306 /* Initialize the set (array) SET of attrs to empty lists. */
1309 init_attrs_list_set (attrs
*set
)
1313 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1317 /* Make the list *LISTP empty. */
1320 attrs_list_clear (attrs
*listp
)
1324 for (list
= *listp
; list
; list
= next
)
1327 pool_free (attrs_pool
, list
);
1332 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1335 attrs_list_member (attrs list
, decl_or_value dv
, HOST_WIDE_INT offset
)
1337 for (; list
; list
= list
->next
)
1338 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
) && list
->offset
== offset
)
1343 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1346 attrs_list_insert (attrs
*listp
, decl_or_value dv
,
1347 HOST_WIDE_INT offset
, rtx loc
)
1351 list
= (attrs
) pool_alloc (attrs_pool
);
1354 list
->offset
= offset
;
1355 list
->next
= *listp
;
1359 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1362 attrs_list_copy (attrs
*dstp
, attrs src
)
1366 attrs_list_clear (dstp
);
1367 for (; src
; src
= src
->next
)
1369 n
= (attrs
) pool_alloc (attrs_pool
);
1372 n
->offset
= src
->offset
;
1378 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1381 attrs_list_union (attrs
*dstp
, attrs src
)
1383 for (; src
; src
= src
->next
)
1385 if (!attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1386 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1390 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1394 attrs_list_mpdv_union (attrs
*dstp
, attrs src
, attrs src2
)
1396 gcc_assert (!*dstp
);
1397 for (; src
; src
= src
->next
)
1399 if (!dv_onepart_p (src
->dv
))
1400 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1402 for (src
= src2
; src
; src
= src
->next
)
1404 if (!dv_onepart_p (src
->dv
)
1405 && !attrs_list_member (*dstp
, src
->dv
, src
->offset
))
1406 attrs_list_insert (dstp
, src
->dv
, src
->offset
, src
->loc
);
1410 /* Shared hashtable support. */
1412 /* Return true if VARS is shared. */
1415 shared_hash_shared (shared_hash vars
)
1417 return vars
->refcount
> 1;
1420 /* Return the hash table for VARS. */
1422 static inline htab_t
1423 shared_hash_htab (shared_hash vars
)
1428 /* Return true if VAR is shared, or maybe because VARS is shared. */
1431 shared_var_p (variable var
, shared_hash vars
)
1433 /* Don't count an entry in the changed_variables table as a duplicate. */
1434 return ((var
->refcount
> 1 + (int) var
->in_changed_variables
)
1435 || shared_hash_shared (vars
));
1438 /* Copy variables into a new hash table. */
1441 shared_hash_unshare (shared_hash vars
)
1443 shared_hash new_vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
1444 gcc_assert (vars
->refcount
> 1);
1445 new_vars
->refcount
= 1;
1447 = htab_create (htab_elements (vars
->htab
) + 3, variable_htab_hash
,
1448 variable_htab_eq
, variable_htab_free
);
1449 vars_copy (new_vars
->htab
, vars
->htab
);
1454 /* Increment reference counter on VARS and return it. */
1456 static inline shared_hash
1457 shared_hash_copy (shared_hash vars
)
1463 /* Decrement reference counter and destroy hash table if not shared
1467 shared_hash_destroy (shared_hash vars
)
1469 gcc_checking_assert (vars
->refcount
> 0);
1470 if (--vars
->refcount
== 0)
1472 htab_delete (vars
->htab
);
1473 pool_free (shared_hash_pool
, vars
);
1477 /* Unshare *PVARS if shared and return slot for DV. If INS is
1478 INSERT, insert it if not already present. */
1480 static inline void **
1481 shared_hash_find_slot_unshare_1 (shared_hash
*pvars
, decl_or_value dv
,
1482 hashval_t dvhash
, enum insert_option ins
)
1484 if (shared_hash_shared (*pvars
))
1485 *pvars
= shared_hash_unshare (*pvars
);
1486 return htab_find_slot_with_hash (shared_hash_htab (*pvars
), dv
, dvhash
, ins
);
1489 static inline void **
1490 shared_hash_find_slot_unshare (shared_hash
*pvars
, decl_or_value dv
,
1491 enum insert_option ins
)
1493 return shared_hash_find_slot_unshare_1 (pvars
, dv
, dv_htab_hash (dv
), ins
);
1496 /* Return slot for DV, if it is already present in the hash table.
1497 If it is not present, insert it only VARS is not shared, otherwise
1500 static inline void **
1501 shared_hash_find_slot_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1503 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1504 shared_hash_shared (vars
)
1505 ? NO_INSERT
: INSERT
);
1508 static inline void **
1509 shared_hash_find_slot (shared_hash vars
, decl_or_value dv
)
1511 return shared_hash_find_slot_1 (vars
, dv
, dv_htab_hash (dv
));
1514 /* Return slot for DV only if it is already present in the hash table. */
1516 static inline void **
1517 shared_hash_find_slot_noinsert_1 (shared_hash vars
, decl_or_value dv
,
1520 return htab_find_slot_with_hash (shared_hash_htab (vars
), dv
, dvhash
,
1524 static inline void **
1525 shared_hash_find_slot_noinsert (shared_hash vars
, decl_or_value dv
)
1527 return shared_hash_find_slot_noinsert_1 (vars
, dv
, dv_htab_hash (dv
));
1530 /* Return variable for DV or NULL if not already present in the hash
1533 static inline variable
1534 shared_hash_find_1 (shared_hash vars
, decl_or_value dv
, hashval_t dvhash
)
1536 return (variable
) htab_find_with_hash (shared_hash_htab (vars
), dv
, dvhash
);
1539 static inline variable
1540 shared_hash_find (shared_hash vars
, decl_or_value dv
)
1542 return shared_hash_find_1 (vars
, dv
, dv_htab_hash (dv
));
1545 /* Return true if TVAL is better than CVAL as a canonival value. We
1546 choose lowest-numbered VALUEs, using the RTX address as a
1547 tie-breaker. The idea is to arrange them into a star topology,
1548 such that all of them are at most one step away from the canonical
1549 value, and the canonical value has backlinks to all of them, in
1550 addition to all the actual locations. We don't enforce this
1551 topology throughout the entire dataflow analysis, though.
1555 canon_value_cmp (rtx tval
, rtx cval
)
1558 || CSELIB_VAL_PTR (tval
)->uid
< CSELIB_VAL_PTR (cval
)->uid
;
1561 static bool dst_can_be_shared
;
1563 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1566 unshare_variable (dataflow_set
*set
, void **slot
, variable var
,
1567 enum var_init_status initialized
)
1572 new_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
1573 new_var
->dv
= var
->dv
;
1574 new_var
->refcount
= 1;
1576 new_var
->n_var_parts
= var
->n_var_parts
;
1577 new_var
->cur_loc_changed
= var
->cur_loc_changed
;
1578 var
->cur_loc_changed
= false;
1579 new_var
->in_changed_variables
= false;
1581 if (! flag_var_tracking_uninit
)
1582 initialized
= VAR_INIT_STATUS_INITIALIZED
;
1584 for (i
= 0; i
< var
->n_var_parts
; i
++)
1586 location_chain node
;
1587 location_chain
*nextp
;
1589 new_var
->var_part
[i
].offset
= var
->var_part
[i
].offset
;
1590 nextp
= &new_var
->var_part
[i
].loc_chain
;
1591 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
1593 location_chain new_lc
;
1595 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
1596 new_lc
->next
= NULL
;
1597 if (node
->init
> initialized
)
1598 new_lc
->init
= node
->init
;
1600 new_lc
->init
= initialized
;
1601 if (node
->set_src
&& !(MEM_P (node
->set_src
)))
1602 new_lc
->set_src
= node
->set_src
;
1604 new_lc
->set_src
= NULL
;
1605 new_lc
->loc
= node
->loc
;
1608 nextp
= &new_lc
->next
;
1611 new_var
->var_part
[i
].cur_loc
= var
->var_part
[i
].cur_loc
;
1614 dst_can_be_shared
= false;
1615 if (shared_hash_shared (set
->vars
))
1616 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
, NO_INSERT
);
1617 else if (set
->traversed_vars
&& set
->vars
!= set
->traversed_vars
)
1618 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
1620 if (var
->in_changed_variables
)
1623 = htab_find_slot_with_hash (changed_variables
, var
->dv
,
1624 dv_htab_hash (var
->dv
), NO_INSERT
);
1625 gcc_assert (*cslot
== (void *) var
);
1626 var
->in_changed_variables
= false;
1627 variable_htab_free (var
);
1629 new_var
->in_changed_variables
= true;
1634 /* Copy all variables from hash table SRC to hash table DST. */
1637 vars_copy (htab_t dst
, htab_t src
)
1642 FOR_EACH_HTAB_ELEMENT (src
, var
, variable
, hi
)
1646 dstp
= htab_find_slot_with_hash (dst
, var
->dv
,
1647 dv_htab_hash (var
->dv
),
1653 /* Map a decl to its main debug decl. */
1656 var_debug_decl (tree decl
)
1658 if (decl
&& DECL_P (decl
)
1659 && DECL_DEBUG_EXPR_IS_FROM (decl
))
1661 tree debugdecl
= DECL_DEBUG_EXPR (decl
);
1662 if (debugdecl
&& DECL_P (debugdecl
))
1669 /* Set the register LOC to contain DV, OFFSET. */
1672 var_reg_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1673 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1674 enum insert_option iopt
)
1677 bool decl_p
= dv_is_decl_p (dv
);
1680 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1682 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
1683 if (dv_as_opaque (node
->dv
) == dv_as_opaque (dv
)
1684 && node
->offset
== offset
)
1687 attrs_list_insert (&set
->regs
[REGNO (loc
)], dv
, offset
, loc
);
1688 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1691 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1694 var_reg_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1697 tree decl
= REG_EXPR (loc
);
1698 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1700 var_reg_decl_set (set
, loc
, initialized
,
1701 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1704 static enum var_init_status
1705 get_init_value (dataflow_set
*set
, rtx loc
, decl_or_value dv
)
1709 enum var_init_status ret_val
= VAR_INIT_STATUS_UNKNOWN
;
1711 if (! flag_var_tracking_uninit
)
1712 return VAR_INIT_STATUS_INITIALIZED
;
1714 var
= shared_hash_find (set
->vars
, dv
);
1717 for (i
= 0; i
< var
->n_var_parts
&& ret_val
== VAR_INIT_STATUS_UNKNOWN
; i
++)
1719 location_chain nextp
;
1720 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
; nextp
= nextp
->next
)
1721 if (rtx_equal_p (nextp
->loc
, loc
))
1723 ret_val
= nextp
->init
;
1732 /* Delete current content of register LOC in dataflow set SET and set
1733 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1734 MODIFY is true, any other live copies of the same variable part are
1735 also deleted from the dataflow set, otherwise the variable part is
1736 assumed to be copied from another location holding the same
1740 var_reg_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1741 enum var_init_status initialized
, rtx set_src
)
1743 tree decl
= REG_EXPR (loc
);
1744 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1748 decl
= var_debug_decl (decl
);
1750 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1751 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1753 nextp
= &set
->regs
[REGNO (loc
)];
1754 for (node
= *nextp
; node
; node
= next
)
1757 if (dv_as_opaque (node
->dv
) != decl
|| node
->offset
!= offset
)
1759 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1760 pool_free (attrs_pool
, node
);
1766 nextp
= &node
->next
;
1770 clobber_variable_part (set
, loc
, dv_from_decl (decl
), offset
, set_src
);
1771 var_reg_set (set
, loc
, initialized
, set_src
);
1774 /* Delete the association of register LOC in dataflow set SET with any
1775 variables that aren't onepart. If CLOBBER is true, also delete any
1776 other live copies of the same variable part, and delete the
1777 association with onepart dvs too. */
1780 var_reg_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1782 attrs
*nextp
= &set
->regs
[REGNO (loc
)];
1787 tree decl
= REG_EXPR (loc
);
1788 HOST_WIDE_INT offset
= REG_OFFSET (loc
);
1790 decl
= var_debug_decl (decl
);
1792 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1795 for (node
= *nextp
; node
; node
= next
)
1798 if (clobber
|| !dv_onepart_p (node
->dv
))
1800 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1801 pool_free (attrs_pool
, node
);
1805 nextp
= &node
->next
;
1809 /* Delete content of register with number REGNO in dataflow set SET. */
1812 var_regno_delete (dataflow_set
*set
, int regno
)
1814 attrs
*reg
= &set
->regs
[regno
];
1817 for (node
= *reg
; node
; node
= next
)
1820 delete_variable_part (set
, node
->loc
, node
->dv
, node
->offset
);
1821 pool_free (attrs_pool
, node
);
1826 /* Set the location of DV, OFFSET as the MEM LOC. */
1829 var_mem_decl_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1830 decl_or_value dv
, HOST_WIDE_INT offset
, rtx set_src
,
1831 enum insert_option iopt
)
1833 if (dv_is_decl_p (dv
))
1834 dv
= dv_from_decl (var_debug_decl (dv_as_decl (dv
)));
1836 set_variable_part (set
, loc
, dv
, offset
, initialized
, set_src
, iopt
);
1839 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1841 Adjust the address first if it is stack pointer based. */
1844 var_mem_set (dataflow_set
*set
, rtx loc
, enum var_init_status initialized
,
1847 tree decl
= MEM_EXPR (loc
);
1848 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1850 var_mem_decl_set (set
, loc
, initialized
,
1851 dv_from_decl (decl
), offset
, set_src
, INSERT
);
1854 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1855 dataflow set SET to LOC. If MODIFY is true, any other live copies
1856 of the same variable part are also deleted from the dataflow set,
1857 otherwise the variable part is assumed to be copied from another
1858 location holding the same part.
1859 Adjust the address first if it is stack pointer based. */
1862 var_mem_delete_and_set (dataflow_set
*set
, rtx loc
, bool modify
,
1863 enum var_init_status initialized
, rtx set_src
)
1865 tree decl
= MEM_EXPR (loc
);
1866 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1868 decl
= var_debug_decl (decl
);
1870 if (initialized
== VAR_INIT_STATUS_UNKNOWN
)
1871 initialized
= get_init_value (set
, loc
, dv_from_decl (decl
));
1874 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, set_src
);
1875 var_mem_set (set
, loc
, initialized
, set_src
);
1878 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1879 true, also delete any other live copies of the same variable part.
1880 Adjust the address first if it is stack pointer based. */
1883 var_mem_delete (dataflow_set
*set
, rtx loc
, bool clobber
)
1885 tree decl
= MEM_EXPR (loc
);
1886 HOST_WIDE_INT offset
= INT_MEM_OFFSET (loc
);
1888 decl
= var_debug_decl (decl
);
1890 clobber_variable_part (set
, NULL
, dv_from_decl (decl
), offset
, NULL
);
1891 delete_variable_part (set
, loc
, dv_from_decl (decl
), offset
);
1894 /* Bind a value to a location it was just stored in. If MODIFIED
1895 holds, assume the location was modified, detaching it from any
1896 values bound to it. */
1899 val_store (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
, bool modified
)
1901 cselib_val
*v
= CSELIB_VAL_PTR (val
);
1903 gcc_assert (cselib_preserved_value_p (v
));
1907 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
1908 print_inline_rtx (dump_file
, val
, 0);
1909 fprintf (dump_file
, " stored in ");
1910 print_inline_rtx (dump_file
, loc
, 0);
1913 struct elt_loc_list
*l
;
1914 for (l
= v
->locs
; l
; l
= l
->next
)
1916 fprintf (dump_file
, "\n%i: ", INSN_UID (l
->setting_insn
));
1917 print_inline_rtx (dump_file
, l
->loc
, 0);
1920 fprintf (dump_file
, "\n");
1926 var_regno_delete (set
, REGNO (loc
));
1927 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1928 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1930 else if (MEM_P (loc
))
1931 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
1932 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
1934 set_variable_part (set
, loc
, dv_from_value (val
), 0,
1935 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
1938 /* Reset this node, detaching all its equivalences. Return the slot
1939 in the variable hash table that holds dv, if there is one. */
1942 val_reset (dataflow_set
*set
, decl_or_value dv
)
1944 variable var
= shared_hash_find (set
->vars
, dv
) ;
1945 location_chain node
;
1948 if (!var
|| !var
->n_var_parts
)
1951 gcc_assert (var
->n_var_parts
== 1);
1954 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1955 if (GET_CODE (node
->loc
) == VALUE
1956 && canon_value_cmp (node
->loc
, cval
))
1959 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1960 if (GET_CODE (node
->loc
) == VALUE
&& cval
!= node
->loc
)
1962 /* Redirect the equivalence link to the new canonical
1963 value, or simply remove it if it would point at
1966 set_variable_part (set
, cval
, dv_from_value (node
->loc
),
1967 0, node
->init
, node
->set_src
, NO_INSERT
);
1968 delete_variable_part (set
, dv_as_value (dv
),
1969 dv_from_value (node
->loc
), 0);
1974 decl_or_value cdv
= dv_from_value (cval
);
1976 /* Keep the remaining values connected, accummulating links
1977 in the canonical value. */
1978 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
1980 if (node
->loc
== cval
)
1982 else if (GET_CODE (node
->loc
) == REG
)
1983 var_reg_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1984 node
->set_src
, NO_INSERT
);
1985 else if (GET_CODE (node
->loc
) == MEM
)
1986 var_mem_decl_set (set
, node
->loc
, node
->init
, cdv
, 0,
1987 node
->set_src
, NO_INSERT
);
1989 set_variable_part (set
, node
->loc
, cdv
, 0,
1990 node
->init
, node
->set_src
, NO_INSERT
);
1994 /* We remove this last, to make sure that the canonical value is not
1995 removed to the point of requiring reinsertion. */
1997 delete_variable_part (set
, dv_as_value (dv
), dv_from_value (cval
), 0);
1999 clobber_variable_part (set
, NULL
, dv
, 0, NULL
);
2001 /* ??? Should we make sure there aren't other available values or
2002 variables whose values involve this one other than by
2003 equivalence? E.g., at the very least we should reset MEMs, those
2004 shouldn't be too hard to find cselib-looking up the value as an
2005 address, then locating the resulting value in our own hash
2009 /* Find the values in a given location and map the val to another
2010 value, if it is unique, or add the location as one holding the
2014 val_resolve (dataflow_set
*set
, rtx val
, rtx loc
, rtx insn
)
2016 decl_or_value dv
= dv_from_value (val
);
2018 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2021 fprintf (dump_file
, "%i: ", INSN_UID (insn
));
2023 fprintf (dump_file
, "head: ");
2024 print_inline_rtx (dump_file
, val
, 0);
2025 fputs (" is at ", dump_file
);
2026 print_inline_rtx (dump_file
, loc
, 0);
2027 fputc ('\n', dump_file
);
2030 val_reset (set
, dv
);
2034 attrs node
, found
= NULL
;
2036 for (node
= set
->regs
[REGNO (loc
)]; node
; node
= node
->next
)
2037 if (dv_is_value_p (node
->dv
)
2038 && GET_MODE (dv_as_value (node
->dv
)) == GET_MODE (loc
))
2042 /* Map incoming equivalences. ??? Wouldn't it be nice if
2043 we just started sharing the location lists? Maybe a
2044 circular list ending at the value itself or some
2046 set_variable_part (set
, dv_as_value (node
->dv
),
2047 dv_from_value (val
), node
->offset
,
2048 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2049 set_variable_part (set
, val
, node
->dv
, node
->offset
,
2050 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2053 /* If we didn't find any equivalence, we need to remember that
2054 this value is held in the named register. */
2056 var_reg_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2057 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2059 else if (MEM_P (loc
))
2060 /* ??? Merge equivalent MEMs. */
2061 var_mem_decl_set (set
, loc
, VAR_INIT_STATUS_INITIALIZED
,
2062 dv_from_value (val
), 0, NULL_RTX
, INSERT
);
2064 /* ??? Merge equivalent expressions. */
2065 set_variable_part (set
, loc
, dv_from_value (val
), 0,
2066 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
, INSERT
);
2069 /* Initialize dataflow set SET to be empty.
2070 VARS_SIZE is the initial size of hash table VARS. */
2073 dataflow_set_init (dataflow_set
*set
)
2075 init_attrs_list_set (set
->regs
);
2076 set
->vars
= shared_hash_copy (empty_shared_hash
);
2077 set
->stack_adjust
= 0;
2078 set
->traversed_vars
= NULL
;
2081 /* Delete the contents of dataflow set SET. */
2084 dataflow_set_clear (dataflow_set
*set
)
2088 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2089 attrs_list_clear (&set
->regs
[i
]);
2091 shared_hash_destroy (set
->vars
);
2092 set
->vars
= shared_hash_copy (empty_shared_hash
);
2095 /* Copy the contents of dataflow set SRC to DST. */
2098 dataflow_set_copy (dataflow_set
*dst
, dataflow_set
*src
)
2102 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2103 attrs_list_copy (&dst
->regs
[i
], src
->regs
[i
]);
2105 shared_hash_destroy (dst
->vars
);
2106 dst
->vars
= shared_hash_copy (src
->vars
);
2107 dst
->stack_adjust
= src
->stack_adjust
;
2110 /* Information for merging lists of locations for a given offset of variable.
2112 struct variable_union_info
2114 /* Node of the location chain. */
2117 /* The sum of positions in the input chains. */
2120 /* The position in the chain of DST dataflow set. */
2124 /* Buffer for location list sorting and its allocated size. */
2125 static struct variable_union_info
*vui_vec
;
2126 static int vui_allocated
;
2128 /* Compare function for qsort, order the structures by POS element. */
2131 variable_union_info_cmp_pos (const void *n1
, const void *n2
)
2133 const struct variable_union_info
*const i1
=
2134 (const struct variable_union_info
*) n1
;
2135 const struct variable_union_info
*const i2
=
2136 ( const struct variable_union_info
*) n2
;
2138 if (i1
->pos
!= i2
->pos
)
2139 return i1
->pos
- i2
->pos
;
2141 return (i1
->pos_dst
- i2
->pos_dst
);
2144 /* Compute union of location parts of variable *SLOT and the same variable
2145 from hash table DATA. Compute "sorted" union of the location chains
2146 for common offsets, i.e. the locations of a variable part are sorted by
2147 a priority where the priority is the sum of the positions in the 2 chains
2148 (if a location is only in one list the position in the second list is
2149 defined to be larger than the length of the chains).
2150 When we are updating the location parts the newest location is in the
2151 beginning of the chain, so when we do the described "sorted" union
2152 we keep the newest locations in the beginning. */
2155 variable_union (variable src
, dataflow_set
*set
)
2161 dstp
= shared_hash_find_slot (set
->vars
, src
->dv
);
2162 if (!dstp
|| !*dstp
)
2166 dst_can_be_shared
= false;
2168 dstp
= shared_hash_find_slot_unshare (&set
->vars
, src
->dv
, INSERT
);
2172 /* Continue traversing the hash table. */
2176 dst
= (variable
) *dstp
;
2178 gcc_assert (src
->n_var_parts
);
2180 /* We can combine one-part variables very efficiently, because their
2181 entries are in canonical order. */
2182 if (dv_onepart_p (src
->dv
))
2184 location_chain
*nodep
, dnode
, snode
;
2186 gcc_assert (src
->n_var_parts
== 1
2187 && dst
->n_var_parts
== 1);
2189 snode
= src
->var_part
[0].loc_chain
;
2192 restart_onepart_unshared
:
2193 nodep
= &dst
->var_part
[0].loc_chain
;
2199 int r
= dnode
? loc_cmp (dnode
->loc
, snode
->loc
) : 1;
2203 location_chain nnode
;
2205 if (shared_var_p (dst
, set
->vars
))
2207 dstp
= unshare_variable (set
, dstp
, dst
,
2208 VAR_INIT_STATUS_INITIALIZED
);
2209 dst
= (variable
)*dstp
;
2210 goto restart_onepart_unshared
;
2213 *nodep
= nnode
= (location_chain
) pool_alloc (loc_chain_pool
);
2214 nnode
->loc
= snode
->loc
;
2215 nnode
->init
= snode
->init
;
2216 if (!snode
->set_src
|| MEM_P (snode
->set_src
))
2217 nnode
->set_src
= NULL
;
2219 nnode
->set_src
= snode
->set_src
;
2220 nnode
->next
= dnode
;
2224 gcc_checking_assert (rtx_equal_p (dnode
->loc
, snode
->loc
));
2227 snode
= snode
->next
;
2229 nodep
= &dnode
->next
;
2236 /* Count the number of location parts, result is K. */
2237 for (i
= 0, j
= 0, k
= 0;
2238 i
< src
->n_var_parts
&& j
< dst
->n_var_parts
; k
++)
2240 if (src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2245 else if (src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2250 k
+= src
->n_var_parts
- i
;
2251 k
+= dst
->n_var_parts
- j
;
2253 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2254 thus there are at most MAX_VAR_PARTS different offsets. */
2255 gcc_assert (dv_onepart_p (dst
->dv
) ? k
== 1 : k
<= MAX_VAR_PARTS
);
2257 if (dst
->n_var_parts
!= k
&& shared_var_p (dst
, set
->vars
))
2259 dstp
= unshare_variable (set
, dstp
, dst
, VAR_INIT_STATUS_UNKNOWN
);
2260 dst
= (variable
)*dstp
;
2263 i
= src
->n_var_parts
- 1;
2264 j
= dst
->n_var_parts
- 1;
2265 dst
->n_var_parts
= k
;
2267 for (k
--; k
>= 0; k
--)
2269 location_chain node
, node2
;
2271 if (i
>= 0 && j
>= 0
2272 && src
->var_part
[i
].offset
== dst
->var_part
[j
].offset
)
2274 /* Compute the "sorted" union of the chains, i.e. the locations which
2275 are in both chains go first, they are sorted by the sum of
2276 positions in the chains. */
2279 struct variable_union_info
*vui
;
2281 /* If DST is shared compare the location chains.
2282 If they are different we will modify the chain in DST with
2283 high probability so make a copy of DST. */
2284 if (shared_var_p (dst
, set
->vars
))
2286 for (node
= src
->var_part
[i
].loc_chain
,
2287 node2
= dst
->var_part
[j
].loc_chain
; node
&& node2
;
2288 node
= node
->next
, node2
= node2
->next
)
2290 if (!((REG_P (node2
->loc
)
2291 && REG_P (node
->loc
)
2292 && REGNO (node2
->loc
) == REGNO (node
->loc
))
2293 || rtx_equal_p (node2
->loc
, node
->loc
)))
2295 if (node2
->init
< node
->init
)
2296 node2
->init
= node
->init
;
2302 dstp
= unshare_variable (set
, dstp
, dst
,
2303 VAR_INIT_STATUS_UNKNOWN
);
2304 dst
= (variable
)*dstp
;
2309 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2312 for (node
= dst
->var_part
[j
].loc_chain
; node
; node
= node
->next
)
2317 /* The most common case, much simpler, no qsort is needed. */
2318 location_chain dstnode
= dst
->var_part
[j
].loc_chain
;
2319 dst
->var_part
[k
].loc_chain
= dstnode
;
2320 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2322 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2323 if (!((REG_P (dstnode
->loc
)
2324 && REG_P (node
->loc
)
2325 && REGNO (dstnode
->loc
) == REGNO (node
->loc
))
2326 || rtx_equal_p (dstnode
->loc
, node
->loc
)))
2328 location_chain new_node
;
2330 /* Copy the location from SRC. */
2331 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2332 new_node
->loc
= node
->loc
;
2333 new_node
->init
= node
->init
;
2334 if (!node
->set_src
|| MEM_P (node
->set_src
))
2335 new_node
->set_src
= NULL
;
2337 new_node
->set_src
= node
->set_src
;
2338 node2
->next
= new_node
;
2345 if (src_l
+ dst_l
> vui_allocated
)
2347 vui_allocated
= MAX (vui_allocated
* 2, src_l
+ dst_l
);
2348 vui_vec
= XRESIZEVEC (struct variable_union_info
, vui_vec
,
2353 /* Fill in the locations from DST. */
2354 for (node
= dst
->var_part
[j
].loc_chain
, jj
= 0; node
;
2355 node
= node
->next
, jj
++)
2358 vui
[jj
].pos_dst
= jj
;
2360 /* Pos plus value larger than a sum of 2 valid positions. */
2361 vui
[jj
].pos
= jj
+ src_l
+ dst_l
;
2364 /* Fill in the locations from SRC. */
2366 for (node
= src
->var_part
[i
].loc_chain
, ii
= 0; node
;
2367 node
= node
->next
, ii
++)
2369 /* Find location from NODE. */
2370 for (jj
= 0; jj
< dst_l
; jj
++)
2372 if ((REG_P (vui
[jj
].lc
->loc
)
2373 && REG_P (node
->loc
)
2374 && REGNO (vui
[jj
].lc
->loc
) == REGNO (node
->loc
))
2375 || rtx_equal_p (vui
[jj
].lc
->loc
, node
->loc
))
2377 vui
[jj
].pos
= jj
+ ii
;
2381 if (jj
>= dst_l
) /* The location has not been found. */
2383 location_chain new_node
;
2385 /* Copy the location from SRC. */
2386 new_node
= (location_chain
) pool_alloc (loc_chain_pool
);
2387 new_node
->loc
= node
->loc
;
2388 new_node
->init
= node
->init
;
2389 if (!node
->set_src
|| MEM_P (node
->set_src
))
2390 new_node
->set_src
= NULL
;
2392 new_node
->set_src
= node
->set_src
;
2393 vui
[n
].lc
= new_node
;
2394 vui
[n
].pos_dst
= src_l
+ dst_l
;
2395 vui
[n
].pos
= ii
+ src_l
+ dst_l
;
2402 /* Special case still very common case. For dst_l == 2
2403 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2404 vui[i].pos == i + src_l + dst_l. */
2405 if (vui
[0].pos
> vui
[1].pos
)
2407 /* Order should be 1, 0, 2... */
2408 dst
->var_part
[k
].loc_chain
= vui
[1].lc
;
2409 vui
[1].lc
->next
= vui
[0].lc
;
2412 vui
[0].lc
->next
= vui
[2].lc
;
2413 vui
[n
- 1].lc
->next
= NULL
;
2416 vui
[0].lc
->next
= NULL
;
2421 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2422 if (n
>= 3 && vui
[2].pos
< vui
[1].pos
)
2424 /* Order should be 0, 2, 1, 3... */
2425 vui
[0].lc
->next
= vui
[2].lc
;
2426 vui
[2].lc
->next
= vui
[1].lc
;
2429 vui
[1].lc
->next
= vui
[3].lc
;
2430 vui
[n
- 1].lc
->next
= NULL
;
2433 vui
[1].lc
->next
= NULL
;
2438 /* Order should be 0, 1, 2... */
2440 vui
[n
- 1].lc
->next
= NULL
;
2443 for (; ii
< n
; ii
++)
2444 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2448 qsort (vui
, n
, sizeof (struct variable_union_info
),
2449 variable_union_info_cmp_pos
);
2451 /* Reconnect the nodes in sorted order. */
2452 for (ii
= 1; ii
< n
; ii
++)
2453 vui
[ii
- 1].lc
->next
= vui
[ii
].lc
;
2454 vui
[n
- 1].lc
->next
= NULL
;
2455 dst
->var_part
[k
].loc_chain
= vui
[0].lc
;
2458 dst
->var_part
[k
].offset
= dst
->var_part
[j
].offset
;
2463 else if ((i
>= 0 && j
>= 0
2464 && src
->var_part
[i
].offset
< dst
->var_part
[j
].offset
)
2467 dst
->var_part
[k
] = dst
->var_part
[j
];
2470 else if ((i
>= 0 && j
>= 0
2471 && src
->var_part
[i
].offset
> dst
->var_part
[j
].offset
)
2474 location_chain
*nextp
;
2476 /* Copy the chain from SRC. */
2477 nextp
= &dst
->var_part
[k
].loc_chain
;
2478 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2480 location_chain new_lc
;
2482 new_lc
= (location_chain
) pool_alloc (loc_chain_pool
);
2483 new_lc
->next
= NULL
;
2484 new_lc
->init
= node
->init
;
2485 if (!node
->set_src
|| MEM_P (node
->set_src
))
2486 new_lc
->set_src
= NULL
;
2488 new_lc
->set_src
= node
->set_src
;
2489 new_lc
->loc
= node
->loc
;
2492 nextp
= &new_lc
->next
;
2495 dst
->var_part
[k
].offset
= src
->var_part
[i
].offset
;
2498 dst
->var_part
[k
].cur_loc
= NULL
;
2501 if (flag_var_tracking_uninit
)
2502 for (i
= 0; i
< src
->n_var_parts
&& i
< dst
->n_var_parts
; i
++)
2504 location_chain node
, node2
;
2505 for (node
= src
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
2506 for (node2
= dst
->var_part
[i
].loc_chain
; node2
; node2
= node2
->next
)
2507 if (rtx_equal_p (node
->loc
, node2
->loc
))
2509 if (node
->init
> node2
->init
)
2510 node2
->init
= node
->init
;
2514 /* Continue traversing the hash table. */
2518 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2521 dataflow_set_union (dataflow_set
*dst
, dataflow_set
*src
)
2525 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2526 attrs_list_union (&dst
->regs
[i
], src
->regs
[i
]);
2528 if (dst
->vars
== empty_shared_hash
)
2530 shared_hash_destroy (dst
->vars
);
2531 dst
->vars
= shared_hash_copy (src
->vars
);
2538 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src
->vars
), var
, variable
, hi
)
2539 variable_union (var
, dst
);
2543 /* Whether the value is currently being expanded. */
2544 #define VALUE_RECURSED_INTO(x) \
2545 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2546 /* Whether the value is in changed_variables hash table. */
2547 #define VALUE_CHANGED(x) \
2548 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2549 /* Whether the decl is in changed_variables hash table. */
2550 #define DECL_CHANGED(x) TREE_VISITED (x)
2552 /* Record that DV has been added into resp. removed from changed_variables
2556 set_dv_changed (decl_or_value dv
, bool newv
)
2558 if (dv_is_value_p (dv
))
2559 VALUE_CHANGED (dv_as_value (dv
)) = newv
;
2561 DECL_CHANGED (dv_as_decl (dv
)) = newv
;
2564 /* Return true if DV is present in changed_variables hash table. */
2567 dv_changed_p (decl_or_value dv
)
2569 return (dv_is_value_p (dv
)
2570 ? VALUE_CHANGED (dv_as_value (dv
))
2571 : DECL_CHANGED (dv_as_decl (dv
)));
2574 /* Return a location list node whose loc is rtx_equal to LOC, in the
2575 location list of a one-part variable or value VAR, or in that of
2576 any values recursively mentioned in the location lists. VARS must
2577 be in star-canonical form. */
2579 static location_chain
2580 find_loc_in_1pdv (rtx loc
, variable var
, htab_t vars
)
2582 location_chain node
;
2583 enum rtx_code loc_code
;
2588 gcc_checking_assert (dv_onepart_p (var
->dv
));
2590 if (!var
->n_var_parts
)
2593 gcc_checking_assert (var
->var_part
[0].offset
== 0);
2594 gcc_checking_assert (loc
!= dv_as_opaque (var
->dv
));
2596 loc_code
= GET_CODE (loc
);
2597 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
2602 if (GET_CODE (node
->loc
) != loc_code
)
2604 if (GET_CODE (node
->loc
) != VALUE
)
2607 else if (loc
== node
->loc
)
2609 else if (loc_code
!= VALUE
)
2611 if (rtx_equal_p (loc
, node
->loc
))
2616 /* Since we're in star-canonical form, we don't need to visit
2617 non-canonical nodes: one-part variables and non-canonical
2618 values would only point back to the canonical node. */
2619 if (dv_is_value_p (var
->dv
)
2620 && !canon_value_cmp (node
->loc
, dv_as_value (var
->dv
)))
2622 /* Skip all subsequent VALUEs. */
2623 while (node
->next
&& GET_CODE (node
->next
->loc
) == VALUE
)
2626 gcc_checking_assert (!canon_value_cmp (node
->loc
,
2627 dv_as_value (var
->dv
)));
2628 if (loc
== node
->loc
)
2634 gcc_checking_assert (node
== var
->var_part
[0].loc_chain
);
2635 gcc_checking_assert (!node
->next
);
2637 dv
= dv_from_value (node
->loc
);
2638 rvar
= (variable
) htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
2639 return find_loc_in_1pdv (loc
, rvar
, vars
);
2645 /* Hash table iteration argument passed to variable_merge. */
2648 /* The set in which the merge is to be inserted. */
2650 /* The set that we're iterating in. */
2652 /* The set that may contain the other dv we are to merge with. */
2654 /* Number of onepart dvs in src. */
2655 int src_onepart_cnt
;
2658 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2659 loc_cmp order, and it is maintained as such. */
2662 insert_into_intersection (location_chain
*nodep
, rtx loc
,
2663 enum var_init_status status
)
2665 location_chain node
;
2668 for (node
= *nodep
; node
; nodep
= &node
->next
, node
= *nodep
)
2669 if ((r
= loc_cmp (node
->loc
, loc
)) == 0)
2671 node
->init
= MIN (node
->init
, status
);
2677 node
= (location_chain
) pool_alloc (loc_chain_pool
);
2680 node
->set_src
= NULL
;
2681 node
->init
= status
;
2682 node
->next
= *nodep
;
2686 /* Insert in DEST the intersection the locations present in both
2687 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2688 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2692 intersect_loc_chains (rtx val
, location_chain
*dest
, struct dfset_merge
*dsm
,
2693 location_chain s1node
, variable s2var
)
2695 dataflow_set
*s1set
= dsm
->cur
;
2696 dataflow_set
*s2set
= dsm
->src
;
2697 location_chain found
;
2701 location_chain s2node
;
2703 gcc_checking_assert (dv_onepart_p (s2var
->dv
));
2705 if (s2var
->n_var_parts
)
2707 gcc_checking_assert (s2var
->var_part
[0].offset
== 0);
2708 s2node
= s2var
->var_part
[0].loc_chain
;
2710 for (; s1node
&& s2node
;
2711 s1node
= s1node
->next
, s2node
= s2node
->next
)
2712 if (s1node
->loc
!= s2node
->loc
)
2714 else if (s1node
->loc
== val
)
2717 insert_into_intersection (dest
, s1node
->loc
,
2718 MIN (s1node
->init
, s2node
->init
));
2722 for (; s1node
; s1node
= s1node
->next
)
2724 if (s1node
->loc
== val
)
2727 if ((found
= find_loc_in_1pdv (s1node
->loc
, s2var
,
2728 shared_hash_htab (s2set
->vars
))))
2730 insert_into_intersection (dest
, s1node
->loc
,
2731 MIN (s1node
->init
, found
->init
));
2735 if (GET_CODE (s1node
->loc
) == VALUE
2736 && !VALUE_RECURSED_INTO (s1node
->loc
))
2738 decl_or_value dv
= dv_from_value (s1node
->loc
);
2739 variable svar
= shared_hash_find (s1set
->vars
, dv
);
2742 if (svar
->n_var_parts
== 1)
2744 VALUE_RECURSED_INTO (s1node
->loc
) = true;
2745 intersect_loc_chains (val
, dest
, dsm
,
2746 svar
->var_part
[0].loc_chain
,
2748 VALUE_RECURSED_INTO (s1node
->loc
) = false;
2753 /* ??? if the location is equivalent to any location in src,
2754 searched recursively
2756 add to dst the values needed to represent the equivalence
2758 telling whether locations S is equivalent to another dv's
2761 for each location D in the list
2763 if S and D satisfy rtx_equal_p, then it is present
2765 else if D is a value, recurse without cycles
2767 else if S and D have the same CODE and MODE
2769 for each operand oS and the corresponding oD
2771 if oS and oD are not equivalent, then S an D are not equivalent
2773 else if they are RTX vectors
2775 if any vector oS element is not equivalent to its respective oD,
2776 then S and D are not equivalent
2784 /* Return -1 if X should be before Y in a location list for a 1-part
2785 variable, 1 if Y should be before X, and 0 if they're equivalent
2786 and should not appear in the list. */
2789 loc_cmp (rtx x
, rtx y
)
2792 RTX_CODE code
= GET_CODE (x
);
2802 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2803 if (REGNO (x
) == REGNO (y
))
2805 else if (REGNO (x
) < REGNO (y
))
2818 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2819 return loc_cmp (XEXP (x
, 0), XEXP (y
, 0));
2825 if (GET_CODE (x
) == VALUE
)
2827 if (GET_CODE (y
) != VALUE
)
2829 /* Don't assert the modes are the same, that is true only
2830 when not recursing. (subreg:QI (value:SI 1:1) 0)
2831 and (subreg:QI (value:DI 2:2) 0) can be compared,
2832 even when the modes are different. */
2833 if (canon_value_cmp (x
, y
))
2839 if (GET_CODE (y
) == VALUE
)
2842 if (GET_CODE (x
) == GET_CODE (y
))
2843 /* Compare operands below. */;
2844 else if (GET_CODE (x
) < GET_CODE (y
))
2849 gcc_assert (GET_MODE (x
) == GET_MODE (y
));
2851 if (GET_CODE (x
) == DEBUG_EXPR
)
2853 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2854 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)))
2856 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x
))
2857 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y
)));
2861 fmt
= GET_RTX_FORMAT (code
);
2862 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2866 if (XWINT (x
, i
) == XWINT (y
, i
))
2868 else if (XWINT (x
, i
) < XWINT (y
, i
))
2875 if (XINT (x
, i
) == XINT (y
, i
))
2877 else if (XINT (x
, i
) < XINT (y
, i
))
2884 /* Compare the vector length first. */
2885 if (XVECLEN (x
, i
) == XVECLEN (y
, i
))
2886 /* Compare the vectors elements. */;
2887 else if (XVECLEN (x
, i
) < XVECLEN (y
, i
))
2892 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2893 if ((r
= loc_cmp (XVECEXP (x
, i
, j
),
2894 XVECEXP (y
, i
, j
))))
2899 if ((r
= loc_cmp (XEXP (x
, i
), XEXP (y
, i
))))
2905 if (XSTR (x
, i
) == XSTR (y
, i
))
2911 if ((r
= strcmp (XSTR (x
, i
), XSTR (y
, i
))) == 0)
2919 /* These are just backpointers, so they don't matter. */
2926 /* It is believed that rtx's at this level will never
2927 contain anything but integers and other rtx's,
2928 except for within LABEL_REFs and SYMBOL_REFs. */
2936 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2937 from VALUE to DVP. */
2940 add_value_chain (rtx
*loc
, void *dvp
)
2942 decl_or_value dv
, ldv
;
2943 value_chain vc
, nvc
;
2946 if (GET_CODE (*loc
) == VALUE
)
2947 ldv
= dv_from_value (*loc
);
2948 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
2949 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
2953 if (dv_as_opaque (ldv
) == dvp
)
2956 dv
= (decl_or_value
) dvp
;
2957 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
2961 vc
= (value_chain
) pool_alloc (value_chain_pool
);
2965 *slot
= (void *) vc
;
2969 for (vc
= ((value_chain
) *slot
)->next
; vc
; vc
= vc
->next
)
2970 if (dv_as_opaque (vc
->dv
) == dv_as_opaque (dv
))
2978 vc
= (value_chain
) *slot
;
2979 nvc
= (value_chain
) pool_alloc (value_chain_pool
);
2981 nvc
->next
= vc
->next
;
2987 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2988 from those VALUEs to DVP. */
2991 add_value_chains (decl_or_value dv
, rtx loc
)
2993 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
2995 add_value_chain (&loc
, dv_as_opaque (dv
));
3001 loc
= XEXP (loc
, 0);
3002 for_each_rtx (&loc
, add_value_chain
, dv_as_opaque (dv
));
3005 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
3006 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
3007 that is something we never can express in .debug_info and can prevent
3008 reverse ops from being used. */
3011 add_cselib_value_chains (decl_or_value dv
)
3013 struct elt_loc_list
**l
;
3015 for (l
= &CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; *l
;)
3016 if (GET_CODE ((*l
)->loc
) == ASM_OPERANDS
)
3020 for_each_rtx (&(*l
)->loc
, add_value_chain
, dv_as_opaque (dv
));
3025 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
3026 from VALUE to DVP. */
3029 remove_value_chain (rtx
*loc
, void *dvp
)
3031 decl_or_value dv
, ldv
;
3035 if (GET_CODE (*loc
) == VALUE
)
3036 ldv
= dv_from_value (*loc
);
3037 else if (GET_CODE (*loc
) == DEBUG_EXPR
)
3038 ldv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc
));
3042 if (dv_as_opaque (ldv
) == dvp
)
3045 dv
= (decl_or_value
) dvp
;
3046 slot
= htab_find_slot_with_hash (value_chains
, ldv
, dv_htab_hash (ldv
),
3048 for (vc
= (value_chain
) *slot
; vc
->next
; vc
= vc
->next
)
3049 if (dv_as_opaque (vc
->next
->dv
) == dv_as_opaque (dv
))
3051 value_chain dvc
= vc
->next
;
3052 gcc_assert (dvc
->refcount
> 0);
3053 if (--dvc
->refcount
== 0)
3055 vc
->next
= dvc
->next
;
3056 pool_free (value_chain_pool
, dvc
);
3057 if (vc
->next
== NULL
&& vc
== (value_chain
) *slot
)
3059 pool_free (value_chain_pool
, vc
);
3060 htab_clear_slot (value_chains
, slot
);
3068 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3069 from those VALUEs to DVP. */
3072 remove_value_chains (decl_or_value dv
, rtx loc
)
3074 if (GET_CODE (loc
) == VALUE
|| GET_CODE (loc
) == DEBUG_EXPR
)
3076 remove_value_chain (&loc
, dv_as_opaque (dv
));
3082 loc
= XEXP (loc
, 0);
3083 for_each_rtx (&loc
, remove_value_chain
, dv_as_opaque (dv
));
3087 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
3091 remove_cselib_value_chains (decl_or_value dv
)
3093 struct elt_loc_list
*l
;
3095 for (l
= CSELIB_VAL_PTR (dv_as_value (dv
))->locs
; l
; l
= l
->next
)
3096 for_each_rtx (&l
->loc
, remove_value_chain
, dv_as_opaque (dv
));
3099 /* Check the order of entries in one-part variables. */
3102 canonicalize_loc_order_check (void **slot
, void *data ATTRIBUTE_UNUSED
)
3104 variable var
= (variable
) *slot
;
3105 decl_or_value dv
= var
->dv
;
3106 location_chain node
, next
;
3108 #ifdef ENABLE_RTL_CHECKING
3110 for (i
= 0; i
< var
->n_var_parts
; i
++)
3111 gcc_assert (var
->var_part
[0].cur_loc
== NULL
);
3112 gcc_assert (!var
->cur_loc_changed
&& !var
->in_changed_variables
);
3115 if (!dv_onepart_p (dv
))
3118 gcc_assert (var
->n_var_parts
== 1);
3119 node
= var
->var_part
[0].loc_chain
;
3122 while ((next
= node
->next
))
3124 gcc_assert (loc_cmp (node
->loc
, next
->loc
) < 0);
3132 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3133 more likely to be chosen as canonical for an equivalence set.
3134 Ensure less likely values can reach more likely neighbors, making
3135 the connections bidirectional. */
3138 canonicalize_values_mark (void **slot
, void *data
)
3140 dataflow_set
*set
= (dataflow_set
*)data
;
3141 variable var
= (variable
) *slot
;
3142 decl_or_value dv
= var
->dv
;
3144 location_chain node
;
3146 if (!dv_is_value_p (dv
))
3149 gcc_checking_assert (var
->n_var_parts
== 1);
3151 val
= dv_as_value (dv
);
3153 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3154 if (GET_CODE (node
->loc
) == VALUE
)
3156 if (canon_value_cmp (node
->loc
, val
))
3157 VALUE_RECURSED_INTO (val
) = true;
3160 decl_or_value odv
= dv_from_value (node
->loc
);
3161 void **oslot
= shared_hash_find_slot_noinsert (set
->vars
, odv
);
3163 set_slot_part (set
, val
, oslot
, odv
, 0,
3164 node
->init
, NULL_RTX
);
3166 VALUE_RECURSED_INTO (node
->loc
) = true;
3173 /* Remove redundant entries from equivalence lists in onepart
3174 variables, canonicalizing equivalence sets into star shapes. */
3177 canonicalize_values_star (void **slot
, void *data
)
3179 dataflow_set
*set
= (dataflow_set
*)data
;
3180 variable var
= (variable
) *slot
;
3181 decl_or_value dv
= var
->dv
;
3182 location_chain node
;
3189 if (!dv_onepart_p (dv
))
3192 gcc_checking_assert (var
->n_var_parts
== 1);
3194 if (dv_is_value_p (dv
))
3196 cval
= dv_as_value (dv
);
3197 if (!VALUE_RECURSED_INTO (cval
))
3199 VALUE_RECURSED_INTO (cval
) = false;
3209 gcc_assert (var
->n_var_parts
== 1);
3211 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3212 if (GET_CODE (node
->loc
) == VALUE
)
3215 if (VALUE_RECURSED_INTO (node
->loc
))
3217 if (canon_value_cmp (node
->loc
, cval
))
3226 if (!has_marks
|| dv_is_decl_p (dv
))
3229 /* Keep it marked so that we revisit it, either after visiting a
3230 child node, or after visiting a new parent that might be
3232 VALUE_RECURSED_INTO (val
) = true;
3234 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3235 if (GET_CODE (node
->loc
) == VALUE
3236 && VALUE_RECURSED_INTO (node
->loc
))
3240 VALUE_RECURSED_INTO (cval
) = false;
3241 dv
= dv_from_value (cval
);
3242 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
3245 gcc_assert (dv_is_decl_p (var
->dv
));
3246 /* The canonical value was reset and dropped.
3248 clobber_variable_part (set
, NULL
, var
->dv
, 0, NULL
);
3251 var
= (variable
)*slot
;
3252 gcc_assert (dv_is_value_p (var
->dv
));
3253 if (var
->n_var_parts
== 0)
3255 gcc_assert (var
->n_var_parts
== 1);
3259 VALUE_RECURSED_INTO (val
) = false;
3264 /* Push values to the canonical one. */
3265 cdv
= dv_from_value (cval
);
3266 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3268 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3269 if (node
->loc
!= cval
)
3271 cslot
= set_slot_part (set
, node
->loc
, cslot
, cdv
, 0,
3272 node
->init
, NULL_RTX
);
3273 if (GET_CODE (node
->loc
) == VALUE
)
3275 decl_or_value ndv
= dv_from_value (node
->loc
);
3277 set_variable_part (set
, cval
, ndv
, 0, node
->init
, NULL_RTX
,
3280 if (canon_value_cmp (node
->loc
, val
))
3282 /* If it could have been a local minimum, it's not any more,
3283 since it's now neighbor to cval, so it may have to push
3284 to it. Conversely, if it wouldn't have prevailed over
3285 val, then whatever mark it has is fine: if it was to
3286 push, it will now push to a more canonical node, but if
3287 it wasn't, then it has already pushed any values it might
3289 VALUE_RECURSED_INTO (node
->loc
) = true;
3290 /* Make sure we visit node->loc by ensuring we cval is
3292 VALUE_RECURSED_INTO (cval
) = true;
3294 else if (!VALUE_RECURSED_INTO (node
->loc
))
3295 /* If we have no need to "recurse" into this node, it's
3296 already "canonicalized", so drop the link to the old
3298 clobber_variable_part (set
, cval
, ndv
, 0, NULL
);
3300 else if (GET_CODE (node
->loc
) == REG
)
3302 attrs list
= set
->regs
[REGNO (node
->loc
)], *listp
;
3304 /* Change an existing attribute referring to dv so that it
3305 refers to cdv, removing any duplicate this might
3306 introduce, and checking that no previous duplicates
3307 existed, all in a single pass. */
3311 if (list
->offset
== 0
3312 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3313 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3320 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3323 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3328 if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3330 *listp
= list
->next
;
3331 pool_free (attrs_pool
, list
);
3336 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (dv
));
3339 else if (dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
))
3341 for (listp
= &list
->next
; (list
= *listp
); listp
= &list
->next
)
3346 if (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
))
3348 *listp
= list
->next
;
3349 pool_free (attrs_pool
, list
);
3354 gcc_assert (dv_as_opaque (list
->dv
) != dv_as_opaque (cdv
));
3363 if (list
->offset
== 0
3364 && (dv_as_opaque (list
->dv
) == dv_as_opaque (dv
)
3365 || dv_as_opaque (list
->dv
) == dv_as_opaque (cdv
)))
3375 set_slot_part (set
, val
, cslot
, cdv
, 0,
3376 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
);
3378 slot
= clobber_slot_part (set
, cval
, slot
, 0, NULL
);
3380 /* Variable may have been unshared. */
3381 var
= (variable
)*slot
;
3382 gcc_checking_assert (var
->n_var_parts
&& var
->var_part
[0].loc_chain
->loc
== cval
3383 && var
->var_part
[0].loc_chain
->next
== NULL
);
3385 if (VALUE_RECURSED_INTO (cval
))
3386 goto restart_with_cval
;
3391 /* Bind one-part variables to the canonical value in an equivalence
3392 set. Not doing this causes dataflow convergence failure in rare
3393 circumstances, see PR42873. Unfortunately we can't do this
3394 efficiently as part of canonicalize_values_star, since we may not
3395 have determined or even seen the canonical value of a set when we
3396 get to a variable that references another member of the set. */
3399 canonicalize_vars_star (void **slot
, void *data
)
3401 dataflow_set
*set
= (dataflow_set
*)data
;
3402 variable var
= (variable
) *slot
;
3403 decl_or_value dv
= var
->dv
;
3404 location_chain node
;
3409 location_chain cnode
;
3411 if (!dv_onepart_p (dv
) || dv_is_value_p (dv
))
3414 gcc_assert (var
->n_var_parts
== 1);
3416 node
= var
->var_part
[0].loc_chain
;
3418 if (GET_CODE (node
->loc
) != VALUE
)
3421 gcc_assert (!node
->next
);
3424 /* Push values to the canonical one. */
3425 cdv
= dv_from_value (cval
);
3426 cslot
= shared_hash_find_slot_noinsert (set
->vars
, cdv
);
3429 cvar
= (variable
)*cslot
;
3430 gcc_assert (cvar
->n_var_parts
== 1);
3432 cnode
= cvar
->var_part
[0].loc_chain
;
3434 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3435 that are not “more canonical” than it. */
3436 if (GET_CODE (cnode
->loc
) != VALUE
3437 || !canon_value_cmp (cnode
->loc
, cval
))
3440 /* CVAL was found to be non-canonical. Change the variable to point
3441 to the canonical VALUE. */
3442 gcc_assert (!cnode
->next
);
3445 slot
= set_slot_part (set
, cval
, slot
, dv
, 0,
3446 node
->init
, node
->set_src
);
3447 clobber_slot_part (set
, cval
, slot
, 0, node
->set_src
);
3452 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3453 corresponding entry in DSM->src. Multi-part variables are combined
3454 with variable_union, whereas onepart dvs are combined with
3458 variable_merge_over_cur (variable s1var
, struct dfset_merge
*dsm
)
3460 dataflow_set
*dst
= dsm
->dst
;
3462 variable s2var
, dvar
= NULL
;
3463 decl_or_value dv
= s1var
->dv
;
3464 bool onepart
= dv_onepart_p (dv
);
3467 location_chain node
, *nodep
;
3469 /* If the incoming onepart variable has an empty location list, then
3470 the intersection will be just as empty. For other variables,
3471 it's always union. */
3472 gcc_checking_assert (s1var
->n_var_parts
3473 && s1var
->var_part
[0].loc_chain
);
3476 return variable_union (s1var
, dst
);
3478 gcc_checking_assert (s1var
->n_var_parts
== 1
3479 && s1var
->var_part
[0].offset
== 0);
3481 dvhash
= dv_htab_hash (dv
);
3482 if (dv_is_value_p (dv
))
3483 val
= dv_as_value (dv
);
3487 s2var
= shared_hash_find_1 (dsm
->src
->vars
, dv
, dvhash
);
3490 dst_can_be_shared
= false;
3494 dsm
->src_onepart_cnt
--;
3495 gcc_assert (s2var
->var_part
[0].loc_chain
3496 && s2var
->n_var_parts
== 1
3497 && s2var
->var_part
[0].offset
== 0);
3499 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3502 dvar
= (variable
)*dstslot
;
3503 gcc_assert (dvar
->refcount
== 1
3504 && dvar
->n_var_parts
== 1
3505 && dvar
->var_part
[0].offset
== 0);
3506 nodep
= &dvar
->var_part
[0].loc_chain
;
3514 if (!dstslot
&& !onepart_variable_different_p (s1var
, s2var
))
3516 dstslot
= shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
,
3518 *dstslot
= dvar
= s2var
;
3523 dst_can_be_shared
= false;
3525 intersect_loc_chains (val
, nodep
, dsm
,
3526 s1var
->var_part
[0].loc_chain
, s2var
);
3532 dvar
= (variable
) pool_alloc (dv_pool (dv
));
3535 dvar
->n_var_parts
= 1;
3536 dvar
->cur_loc_changed
= false;
3537 dvar
->in_changed_variables
= false;
3538 dvar
->var_part
[0].offset
= 0;
3539 dvar
->var_part
[0].loc_chain
= node
;
3540 dvar
->var_part
[0].cur_loc
= NULL
;
3543 = shared_hash_find_slot_unshare_1 (&dst
->vars
, dv
, dvhash
,
3545 gcc_assert (!*dstslot
);
3553 nodep
= &dvar
->var_part
[0].loc_chain
;
3554 while ((node
= *nodep
))
3556 location_chain
*nextp
= &node
->next
;
3558 if (GET_CODE (node
->loc
) == REG
)
3562 for (list
= dst
->regs
[REGNO (node
->loc
)]; list
; list
= list
->next
)
3563 if (GET_MODE (node
->loc
) == GET_MODE (list
->loc
)
3564 && dv_is_value_p (list
->dv
))
3568 attrs_list_insert (&dst
->regs
[REGNO (node
->loc
)],
3570 /* If this value became canonical for another value that had
3571 this register, we want to leave it alone. */
3572 else if (dv_as_value (list
->dv
) != val
)
3574 dstslot
= set_slot_part (dst
, dv_as_value (list
->dv
),
3576 node
->init
, NULL_RTX
);
3577 dstslot
= delete_slot_part (dst
, node
->loc
, dstslot
, 0);
3579 /* Since nextp points into the removed node, we can't
3580 use it. The pointer to the next node moved to nodep.
3581 However, if the variable we're walking is unshared
3582 during our walk, we'll keep walking the location list
3583 of the previously-shared variable, in which case the
3584 node won't have been removed, and we'll want to skip
3585 it. That's why we test *nodep here. */
3591 /* Canonicalization puts registers first, so we don't have to
3597 if (dvar
!= (variable
)*dstslot
)
3598 dvar
= (variable
)*dstslot
;
3599 nodep
= &dvar
->var_part
[0].loc_chain
;
3603 /* Mark all referenced nodes for canonicalization, and make sure
3604 we have mutual equivalence links. */
3605 VALUE_RECURSED_INTO (val
) = true;
3606 for (node
= *nodep
; node
; node
= node
->next
)
3607 if (GET_CODE (node
->loc
) == VALUE
)
3609 VALUE_RECURSED_INTO (node
->loc
) = true;
3610 set_variable_part (dst
, val
, dv_from_value (node
->loc
), 0,
3611 node
->init
, NULL
, INSERT
);
3614 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3615 gcc_assert (*dstslot
== dvar
);
3616 canonicalize_values_star (dstslot
, dst
);
3617 gcc_checking_assert (dstslot
3618 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
3620 dvar
= (variable
)*dstslot
;
3624 bool has_value
= false, has_other
= false;
3626 /* If we have one value and anything else, we're going to
3627 canonicalize this, so make sure all values have an entry in
3628 the table and are marked for canonicalization. */
3629 for (node
= *nodep
; node
; node
= node
->next
)
3631 if (GET_CODE (node
->loc
) == VALUE
)
3633 /* If this was marked during register canonicalization,
3634 we know we have to canonicalize values. */
3649 if (has_value
&& has_other
)
3651 for (node
= *nodep
; node
; node
= node
->next
)
3653 if (GET_CODE (node
->loc
) == VALUE
)
3655 decl_or_value dv
= dv_from_value (node
->loc
);
3658 if (shared_hash_shared (dst
->vars
))
3659 slot
= shared_hash_find_slot_noinsert (dst
->vars
, dv
);
3661 slot
= shared_hash_find_slot_unshare (&dst
->vars
, dv
,
3665 variable var
= (variable
) pool_alloc (dv_pool (dv
));
3668 var
->n_var_parts
= 1;
3669 var
->cur_loc_changed
= false;
3670 var
->in_changed_variables
= false;
3671 var
->var_part
[0].offset
= 0;
3672 var
->var_part
[0].loc_chain
= NULL
;
3673 var
->var_part
[0].cur_loc
= NULL
;
3677 VALUE_RECURSED_INTO (node
->loc
) = true;
3681 dstslot
= shared_hash_find_slot_noinsert_1 (dst
->vars
, dv
, dvhash
);
3682 gcc_assert (*dstslot
== dvar
);
3683 canonicalize_values_star (dstslot
, dst
);
3684 gcc_checking_assert (dstslot
3685 == shared_hash_find_slot_noinsert_1 (dst
->vars
,
3687 dvar
= (variable
)*dstslot
;
3691 if (!onepart_variable_different_p (dvar
, s2var
))
3693 variable_htab_free (dvar
);
3694 *dstslot
= dvar
= s2var
;
3697 else if (s2var
!= s1var
&& !onepart_variable_different_p (dvar
, s1var
))
3699 variable_htab_free (dvar
);
3700 *dstslot
= dvar
= s1var
;
3702 dst_can_be_shared
= false;
3705 dst_can_be_shared
= false;
3710 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3711 multi-part variable. Unions of multi-part variables and
3712 intersections of one-part ones will be handled in
3713 variable_merge_over_cur(). */
3716 variable_merge_over_src (variable s2var
, struct dfset_merge
*dsm
)
3718 dataflow_set
*dst
= dsm
->dst
;
3719 decl_or_value dv
= s2var
->dv
;
3720 bool onepart
= dv_onepart_p (dv
);
3724 void **dstp
= shared_hash_find_slot (dst
->vars
, dv
);
3730 dsm
->src_onepart_cnt
++;
3734 /* Combine dataflow set information from SRC2 into DST, using PDST
3735 to carry over information across passes. */
3738 dataflow_set_merge (dataflow_set
*dst
, dataflow_set
*src2
)
3740 dataflow_set cur
= *dst
;
3741 dataflow_set
*src1
= &cur
;
3742 struct dfset_merge dsm
;
3744 size_t src1_elems
, src2_elems
;
3748 src1_elems
= htab_elements (shared_hash_htab (src1
->vars
));
3749 src2_elems
= htab_elements (shared_hash_htab (src2
->vars
));
3750 dataflow_set_init (dst
);
3751 dst
->stack_adjust
= cur
.stack_adjust
;
3752 shared_hash_destroy (dst
->vars
);
3753 dst
->vars
= (shared_hash
) pool_alloc (shared_hash_pool
);
3754 dst
->vars
->refcount
= 1;
3756 = htab_create (MAX (src1_elems
, src2_elems
), variable_htab_hash
,
3757 variable_htab_eq
, variable_htab_free
);
3759 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3760 attrs_list_mpdv_union (&dst
->regs
[i
], src1
->regs
[i
], src2
->regs
[i
]);
3765 dsm
.src_onepart_cnt
= 0;
3767 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm
.src
->vars
), var
, variable
, hi
)
3768 variable_merge_over_src (var
, &dsm
);
3769 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm
.cur
->vars
), var
, variable
, hi
)
3770 variable_merge_over_cur (var
, &dsm
);
3772 if (dsm
.src_onepart_cnt
)
3773 dst_can_be_shared
= false;
3775 dataflow_set_destroy (src1
);
3778 /* Mark register equivalences. */
3781 dataflow_set_equiv_regs (dataflow_set
*set
)
3786 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3788 rtx canon
[NUM_MACHINE_MODES
];
3790 /* If the list is empty or one entry, no need to canonicalize
3792 if (set
->regs
[i
] == NULL
|| set
->regs
[i
]->next
== NULL
)
3795 memset (canon
, 0, sizeof (canon
));
3797 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3798 if (list
->offset
== 0 && dv_is_value_p (list
->dv
))
3800 rtx val
= dv_as_value (list
->dv
);
3801 rtx
*cvalp
= &canon
[(int)GET_MODE (val
)];
3804 if (canon_value_cmp (val
, cval
))
3808 for (list
= set
->regs
[i
]; list
; list
= list
->next
)
3809 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3811 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3816 if (dv_is_value_p (list
->dv
))
3818 rtx val
= dv_as_value (list
->dv
);
3823 VALUE_RECURSED_INTO (val
) = true;
3824 set_variable_part (set
, val
, dv_from_value (cval
), 0,
3825 VAR_INIT_STATUS_INITIALIZED
,
3829 VALUE_RECURSED_INTO (cval
) = true;
3830 set_variable_part (set
, cval
, list
->dv
, 0,
3831 VAR_INIT_STATUS_INITIALIZED
, NULL
, NO_INSERT
);
3834 for (listp
= &set
->regs
[i
]; (list
= *listp
);
3835 listp
= list
? &list
->next
: listp
)
3836 if (list
->offset
== 0 && dv_onepart_p (list
->dv
))
3838 rtx cval
= canon
[(int)GET_MODE (list
->loc
)];
3844 if (dv_is_value_p (list
->dv
))
3846 rtx val
= dv_as_value (list
->dv
);
3847 if (!VALUE_RECURSED_INTO (val
))
3851 slot
= shared_hash_find_slot_noinsert (set
->vars
, list
->dv
);
3852 canonicalize_values_star (slot
, set
);
3859 /* Remove any redundant values in the location list of VAR, which must
3860 be unshared and 1-part. */
3863 remove_duplicate_values (variable var
)
3865 location_chain node
, *nodep
;
3867 gcc_assert (dv_onepart_p (var
->dv
));
3868 gcc_assert (var
->n_var_parts
== 1);
3869 gcc_assert (var
->refcount
== 1);
3871 for (nodep
= &var
->var_part
[0].loc_chain
; (node
= *nodep
); )
3873 if (GET_CODE (node
->loc
) == VALUE
)
3875 if (VALUE_RECURSED_INTO (node
->loc
))
3877 /* Remove duplicate value node. */
3878 *nodep
= node
->next
;
3879 pool_free (loc_chain_pool
, node
);
3883 VALUE_RECURSED_INTO (node
->loc
) = true;
3885 nodep
= &node
->next
;
3888 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3889 if (GET_CODE (node
->loc
) == VALUE
)
3891 gcc_assert (VALUE_RECURSED_INTO (node
->loc
));
3892 VALUE_RECURSED_INTO (node
->loc
) = false;
3897 /* Hash table iteration argument passed to variable_post_merge. */
3898 struct dfset_post_merge
3900 /* The new input set for the current block. */
3902 /* Pointer to the permanent input set for the current block, or
3904 dataflow_set
**permp
;
3907 /* Create values for incoming expressions associated with one-part
3908 variables that don't have value numbers for them. */
3911 variable_post_merge_new_vals (void **slot
, void *info
)
3913 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
3914 dataflow_set
*set
= dfpm
->set
;
3915 variable var
= (variable
)*slot
;
3916 location_chain node
;
3918 if (!dv_onepart_p (var
->dv
) || !var
->n_var_parts
)
3921 gcc_assert (var
->n_var_parts
== 1);
3923 if (dv_is_decl_p (var
->dv
))
3925 bool check_dupes
= false;
3928 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
3930 if (GET_CODE (node
->loc
) == VALUE
)
3931 gcc_assert (!VALUE_RECURSED_INTO (node
->loc
));
3932 else if (GET_CODE (node
->loc
) == REG
)
3934 attrs att
, *attp
, *curp
= NULL
;
3936 if (var
->refcount
!= 1)
3938 slot
= unshare_variable (set
, slot
, var
,
3939 VAR_INIT_STATUS_INITIALIZED
);
3940 var
= (variable
)*slot
;
3944 for (attp
= &set
->regs
[REGNO (node
->loc
)]; (att
= *attp
);
3946 if (att
->offset
== 0
3947 && GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3949 if (dv_is_value_p (att
->dv
))
3951 rtx cval
= dv_as_value (att
->dv
);
3956 else if (dv_as_opaque (att
->dv
) == dv_as_opaque (var
->dv
))
3964 if ((*curp
)->offset
== 0
3965 && GET_MODE ((*curp
)->loc
) == GET_MODE (node
->loc
)
3966 && dv_as_opaque ((*curp
)->dv
) == dv_as_opaque (var
->dv
))
3969 curp
= &(*curp
)->next
;
3980 *dfpm
->permp
= XNEW (dataflow_set
);
3981 dataflow_set_init (*dfpm
->permp
);
3984 for (att
= (*dfpm
->permp
)->regs
[REGNO (node
->loc
)];
3985 att
; att
= att
->next
)
3986 if (GET_MODE (att
->loc
) == GET_MODE (node
->loc
))
3988 gcc_assert (att
->offset
== 0
3989 && dv_is_value_p (att
->dv
));
3990 val_reset (set
, att
->dv
);
3997 cval
= dv_as_value (cdv
);
4001 /* Create a unique value to hold this register,
4002 that ought to be found and reused in
4003 subsequent rounds. */
4005 gcc_assert (!cselib_lookup (node
->loc
,
4006 GET_MODE (node
->loc
), 0,
4008 v
= cselib_lookup (node
->loc
, GET_MODE (node
->loc
), 1,
4010 cselib_preserve_value (v
);
4011 cselib_invalidate_rtx (node
->loc
);
4013 cdv
= dv_from_value (cval
);
4016 "Created new value %u:%u for reg %i\n",
4017 v
->uid
, v
->hash
, REGNO (node
->loc
));
4020 var_reg_decl_set (*dfpm
->permp
, node
->loc
,
4021 VAR_INIT_STATUS_INITIALIZED
,
4022 cdv
, 0, NULL
, INSERT
);
4028 /* Remove attribute referring to the decl, which now
4029 uses the value for the register, already existing or
4030 to be added when we bring perm in. */
4033 pool_free (attrs_pool
, att
);
4038 remove_duplicate_values (var
);
4044 /* Reset values in the permanent set that are not associated with the
4045 chosen expression. */
/* NOTE(review): htab_traverse callback (void **slot, void *info ->
   struct dfset_post_merge *).  The listing below is a partial extract:
   numbered gaps mark elided source lines (braces, declarations of DV
   and ATT, early returns), so the visible fragments do not form a
   complete body.  */
4048 variable_post_merge_perm_vals (void **pslot
, void *info
)
4050 struct dfset_post_merge
*dfpm
= (struct dfset_post_merge
*)info
;
4051 dataflow_set
*set
= dfpm
->set
;
4052 variable pvar
= (variable
)*pslot
, var
;
4053 location_chain pnode
;
/* PVAR must be a one-part VALUE whose single location (PNODE) is a
   register -- asserted here and at line 4062.  */
4057 gcc_assert (dv_is_value_p (pvar
->dv
)
4058 && pvar
->n_var_parts
== 1);
4059 pnode
= pvar
->var_part
[0].loc_chain
;
4062 && REG_P (pnode
->loc
));
/* Look up the merged set's entry for DV (declared in an elided line;
   presumably pvar->dv -- confirm against full source).  */
4066 var
= shared_hash_find (set
->vars
, dv
);
4069 /* Although variable_post_merge_new_vals may have made decls
4070 non-star-canonical, values that pre-existed in canonical form
4071 remain canonical, and newly-created values reference a single
4072 REG, so they are canonical as well. Since VAR has the
4073 location list for a VALUE, using find_loc_in_1pdv for it is
4074 fine, since VALUEs don't map back to DECLs. */
4075 if (find_loc_in_1pdv (pnode
->loc
, var
, shared_hash_htab (set
->vars
)))
4077 val_reset (set
, dv
);
/* Scan the attribute list of PNODE's register for an entry that holds
   a VALUE of the same mode at offset 0.  */
4080 for (att
= set
->regs
[REGNO (pnode
->loc
)]; att
; att
= att
->next
)
4081 if (att
->offset
== 0
4082 && GET_MODE (att
->loc
) == GET_MODE (pnode
->loc
)
4083 && dv_is_value_p (att
->dv
))
4086 /* If there is a value associated with this register already, create
4088 if (att
&& dv_as_value (att
->dv
) != dv_as_value (dv
))
/* Record a two-way equivalence between the pre-existing register
   VALUE (CVAL) and DV.  */
4090 rtx cval
= dv_as_value (att
->dv
);
4091 set_variable_part (set
, cval
, dv
, 0, pnode
->init
, NULL
, INSERT
);
4092 set_variable_part (set
, dv_as_value (dv
), att
->dv
, 0, pnode
->init
,
/* Otherwise (elided branch) register DV itself on the register's
   attribute list and union PVAR's locations into the set.  */
4097 attrs_list_insert (&set
->regs
[REGNO (pnode
->loc
)],
4099 variable_union (pvar
, set
);
4105 /* Just checking stuff and registering register attributes for
/* NOTE(review): driver run after dataflow_set_merge.  It makes four
   htab_traverse passes over the merged set: create values for one-part
   decls (variable_post_merge_new_vals), reconcile against the permanent
   set (*PERMP) if it exists, then star-canonicalize values and vars.
   Elided lines include the body braces, dfpm initialization and the
   trailing arguments of the first traverse call.  */
4109 dataflow_post_merge_adjust (dataflow_set
*set
, dataflow_set
**permp
)
4111 struct dfset_post_merge dfpm
;
4116 htab_traverse (shared_hash_htab (set
->vars
), variable_post_merge_new_vals
,
4119 htab_traverse (shared_hash_htab ((*permp
)->vars
),
4120 variable_post_merge_perm_vals
, &dfpm
);
4121 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_values_star
, set
);
4122 htab_traverse (shared_hash_htab (set
->vars
), canonicalize_vars_star
, set
);
4125 /* Return a node whose loc is a MEM that refers to EXPR in the
4126 location list of a one-part variable or value VAR, or in that of
4127 any values recursively mentioned in the location lists. */
4129 static location_chain
4130 find_mem_expr_in_1pdv (tree expr
, rtx val
, htab_t vars
)
4132 location_chain node
;
4135 location_chain where
= NULL
;
/* VAL must be a VALUE rtx not yet visited; VALUE_RECURSED_INTO is the
   cycle-breaking marker for the recursion below.  */
4140 gcc_assert (GET_CODE (val
) == VALUE
4141 && !VALUE_RECURSED_INTO (val
));
4143 dv
= dv_from_value (val
);
4144 var
= (variable
) htab_find_with_hash (vars
, dv
, dv_htab_hash (dv
));
4149 gcc_assert (dv_onepart_p (var
->dv
));
4151 if (!var
->n_var_parts
)
4154 gcc_assert (var
->var_part
[0].offset
== 0);
/* Mark VAL while walking its chain so mutually-referencing VALUEs do
   not recurse forever; cleared again at line 4171 before returning.  */
4156 VALUE_RECURSED_INTO (val
) = true;
4158 for (node
= var
->var_part
[0].loc_chain
; node
; node
= node
->next
)
/* A MEM whose MEM_EXPR is EXPR at offset 0 is a direct hit (the
   elided lines presumably record it in WHERE and break).  */
4159 if (MEM_P (node
->loc
)
4160 && MEM_EXPR (node
->loc
) == expr
4161 && INT_MEM_OFFSET (node
->loc
) == 0)
/* Otherwise recurse into nested, unvisited VALUEs.  */
4166 else if (GET_CODE (node
->loc
) == VALUE
4167 && !VALUE_RECURSED_INTO (node
->loc
)
4168 && (where
= find_mem_expr_in_1pdv (expr
, node
->loc
, vars
)))
4171 VALUE_RECURSED_INTO (val
) = false;
4176 /* Return TRUE if the value of MEM may vary across a call. */
/* NOTE(review): elided lines include the return type, body braces and
   the early-exit checks between MEM_EXPR and get_base_address
   (presumably returning true when EXPR or DECL is unavailable --
   confirm against full source).  */
4179 mem_dies_at_call (rtx mem
)
4181 tree expr
= MEM_EXPR (mem
);
4187 decl
= get_base_address (expr
);
/* A MEM survives a call only if its base decl cannot be aliased and is
   not a writable global.  */
4195 return (may_be_aliased (decl
)
4196 || (!TREE_READONLY (decl
) && is_global_var (decl
)));
4199 /* Remove all MEMs from the location list of a hash table entry for a
4200 one-part variable, except those whose MEM attributes map back to
4201 the variable itself, directly or within a VALUE. */
4204 dataflow_set_preserve_mem_locs (void **slot
, void *data
)
4206 dataflow_set
*set
= (dataflow_set
*) data
;
4207 variable var
= (variable
) *slot
;
4209 if (dv_is_decl_p (var
->dv
) && dv_onepart_p (var
->dv
))
4211 tree decl
= dv_as_decl (var
->dv
);
4212 location_chain loc
, *locp
;
4213 bool changed
= false;
4215 if (!var
->n_var_parts
)
4218 gcc_assert (var
->n_var_parts
== 1);
4220 if (shared_var_p (var
, set
->vars
))
4222 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4224 /* We want to remove dying MEMs that doesn't refer to DECL. */
4225 if (GET_CODE (loc
->loc
) == MEM
4226 && (MEM_EXPR (loc
->loc
) != decl
4227 || INT_MEM_OFFSET (loc
->loc
) != 0)
4228 && !mem_dies_at_call (loc
->loc
))
4230 /* We want to move here MEMs that do refer to DECL. */
4231 else if (GET_CODE (loc
->loc
) == VALUE
4232 && find_mem_expr_in_1pdv (decl
, loc
->loc
,
4233 shared_hash_htab (set
->vars
)))
4240 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4241 var
= (variable
)*slot
;
4242 gcc_assert (var
->n_var_parts
== 1);
4245 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4248 rtx old_loc
= loc
->loc
;
4249 if (GET_CODE (old_loc
) == VALUE
)
4251 location_chain mem_node
4252 = find_mem_expr_in_1pdv (decl
, loc
->loc
,
4253 shared_hash_htab (set
->vars
));
4255 /* ??? This picks up only one out of multiple MEMs that
4256 refer to the same variable. Do we ever need to be
4257 concerned about dealing with more than one, or, given
4258 that they should all map to the same variable
4259 location, their addresses will have been merged and
4260 they will be regarded as equivalent? */
4263 loc
->loc
= mem_node
->loc
;
4264 loc
->set_src
= mem_node
->set_src
;
4265 loc
->init
= MIN (loc
->init
, mem_node
->init
);
4269 if (GET_CODE (loc
->loc
) != MEM
4270 || (MEM_EXPR (loc
->loc
) == decl
4271 && INT_MEM_OFFSET (loc
->loc
) == 0)
4272 || !mem_dies_at_call (loc
->loc
))
4274 if (old_loc
!= loc
->loc
&& emit_notes
)
4276 if (old_loc
== var
->var_part
[0].cur_loc
)
4279 var
->var_part
[0].cur_loc
= NULL
;
4280 var
->cur_loc_changed
= true;
4282 add_value_chains (var
->dv
, loc
->loc
);
4283 remove_value_chains (var
->dv
, old_loc
);
4291 remove_value_chains (var
->dv
, old_loc
);
4292 if (old_loc
== var
->var_part
[0].cur_loc
)
4295 var
->var_part
[0].cur_loc
= NULL
;
4296 var
->cur_loc_changed
= true;
4300 pool_free (loc_chain_pool
, loc
);
4303 if (!var
->var_part
[0].loc_chain
)
4309 variable_was_changed (var
, set
);
4315 /* Remove all MEMs from the location list of a hash table entry for a
/* NOTE(review): htab_traverse callback (void **slot, void *data ->
   dataflow_set *).  For onepart VALUE entries it deletes every MEM
   location that mem_dies_at_call says is clobbered by a call,
   unsharing the variable first if needed.  Numbered gaps mark elided
   source lines.  */
4319 dataflow_set_remove_mem_locs (void **slot
, void *data
)
4321 dataflow_set
*set
= (dataflow_set
*) data
;
4322 variable var
= (variable
) *slot
;
4324 if (dv_is_value_p (var
->dv
))
4326 location_chain loc
, *locp
;
4327 bool changed
= false;
4329 gcc_assert (var
->n_var_parts
== 1);
/* If VAR is shared, first scan to see whether any location actually
   needs removal before paying for unshare_variable.  */
4331 if (shared_var_p (var
, set
->vars
))
4333 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
4334 if (GET_CODE (loc
->loc
) == MEM
4335 && mem_dies_at_call (loc
->loc
))
4341 slot
= unshare_variable (set
, slot
, var
, VAR_INIT_STATUS_UNKNOWN
);
4342 var
= (variable
)*slot
;
4343 gcc_assert (var
->n_var_parts
== 1);
/* Filter the location chain in place through LOCP.  */
4346 for (locp
= &var
->var_part
[0].loc_chain
, loc
= *locp
;
4349 if (GET_CODE (loc
->loc
) != MEM
4350 || !mem_dies_at_call (loc
->loc
))
4357 remove_value_chains (var
->dv
, loc
->loc
);
4359 /* If we have deleted the location which was last emitted
4360 we have to emit new location so add the variable to set
4361 of changed variables. */
4362 if (var
->var_part
[0].cur_loc
== loc
->loc
)
4365 var
->var_part
[0].cur_loc
= NULL
;
4366 var
->cur_loc_changed
= true;
4368 pool_free (loc_chain_pool
, loc
);
/* An emptied chain (elided handling) and any change feed into
   variable_was_changed so notes get re-emitted.  */
4371 if (!var
->var_part
[0].loc_chain
)
4377 variable_was_changed (var
, set
);
4383 /* Remove all variable-location information about call-clobbered
4384 registers, as well as associations between MEMs and VALUEs. */
4387 dataflow_set_clear_at_call (dataflow_set
*set
)
/* Drop every hard register the target says a call invalidates.  */
4391 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; r
++)
4392 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, r
))
4393 var_regno_delete (set
, r
);
/* With debug insns, additionally scrub call-clobbered MEM locations:
   first preserve MEMs that map back to their own decl, then remove the
   rest.  traversed_vars is set around each pass so the callbacks can
   tell shared hash tables apart.  */
4395 if (MAY_HAVE_DEBUG_INSNS
)
4397 set
->traversed_vars
= set
->vars
;
4398 htab_traverse (shared_hash_htab (set
->vars
),
4399 dataflow_set_preserve_mem_locs
, set
);
4400 set
->traversed_vars
= set
->vars
;
4401 htab_traverse (shared_hash_htab (set
->vars
), dataflow_set_remove_mem_locs
,
4403 set
->traversed_vars
= NULL
;
/* Return true if variable part VP1 differs from VP2 -- i.e. some
   location of VP1 has no counterpart in VP2.  NOTE(review): the
   function header comment, return type, body braces and the result
   returns are elided from this extract.  */
4408 variable_part_different_p (variable_part
*vp1
, variable_part
*vp2
)
4410 location_chain lc1
, lc2
;
/* For each location in VP1, look for a matching location in VP2:
   same REGNO for registers, rtx_equal_p otherwise.  */
4412 for (lc1
= vp1
->loc_chain
; lc1
; lc1
= lc1
->next
)
4414 for (lc2
= vp2
->loc_chain
; lc2
; lc2
= lc2
->next
)
4416 if (REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
4418 if (REGNO (lc1
->loc
) == REGNO (lc2
->loc
))
4421 if (rtx_equal_p (lc1
->loc
, lc2
->loc
))
4430 /* Return true if one-part variables VAR1 and VAR2 are different.
4431 They must be in canonical order. */
4434 onepart_variable_different_p (variable var1
, variable var2
)
4436 location_chain lc1
, lc2
;
4441 gcc_assert (var1
->n_var_parts
== 1
4442 && var2
->n_var_parts
== 1);
4444 lc1
= var1
->var_part
[0].loc_chain
;
4445 lc2
= var2
->var_part
[0].loc_chain
;
4447 gcc_assert (lc1
&& lc2
);
/* Because both chains are canonically ordered, a single parallel walk
   with loc_cmp suffices; any mismatch means the variables differ.
   The loop structure and the final return are elided here.  */
4451 if (loc_cmp (lc1
->loc
, lc2
->loc
))
4460 /* Return true if variables VAR1 and VAR2 are different. */
4463 variable_different_p (variable var1
, variable var2
)
/* Quick structural checks first: different part counts or different
   per-part offsets mean the variables differ.  Elided lines include
   the VAR1 == VAR2 shortcut and the return statements.  */
4470 if (var1
->n_var_parts
!= var2
->n_var_parts
)
4473 for (i
= 0; i
< var1
->n_var_parts
; i
++)
4475 if (var1
->var_part
[i
].offset
!= var2
->var_part
[i
].offset
)
4477 /* One-part values have locations in a canonical order. */
4478 if (i
== 0 && var1
->var_part
[i
].offset
== 0 && dv_onepart_p (var1
->dv
))
4480 gcc_assert (var1
->n_var_parts
== 1
4481 && dv_as_opaque (var1
->dv
) == dv_as_opaque (var2
->dv
));
4482 return onepart_variable_different_p (var1
, var2
);
/* Multi-part case: location chains are unordered, so compare both
   directions.  */
4484 if (variable_part_different_p (&var1
->var_part
[i
], &var2
->var_part
[i
]))
4486 if (variable_part_different_p (&var2
->var_part
[i
], &var1
->var_part
[i
]))
4492 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4495 dataflow_set_different (dataflow_set
*old_set
, dataflow_set
*new_set
)
/* Shared hash pointer equality is the cheapest "no change" proof.  */
4500 if (old_set
->vars
== new_set
->vars
)
/* Different element counts imply a difference without any lookups.  */
4503 if (htab_elements (shared_hash_htab (old_set
->vars
))
4504 != htab_elements (shared_hash_htab (new_set
->vars
)))
/* Otherwise every entry of OLD_SET must exist in NEW_SET and compare
   equal via variable_different_p; the dump_file branches report what
   changed when detailed dumping is on.  */
4507 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set
->vars
), var1
, variable
, hi
)
4509 htab_t htab
= shared_hash_htab (new_set
->vars
);
4510 variable var2
= (variable
) htab_find_with_hash (htab
, var1
->dv
,
4511 dv_htab_hash (var1
->dv
));
4514 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4516 fprintf (dump_file
, "dataflow difference found: removal of:\n");
4522 if (variable_different_p (var1
, var2
))
4524 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4526 fprintf (dump_file
, "dataflow difference found: "
4527 "old and new follow:\n");
4535 /* No need to traverse the second hashtab, if both have the same number
4536 of elements and the second one had all entries found in the first one,
4537 then it can't have any extra entries. */
4541 /* Free the contents of dataflow set SET. */
4544 dataflow_set_destroy (dataflow_set
*set
)
/* Release every hard register's attribute list, then drop the (shared)
   variable hash.  The elided lines presumably NULL out set->vars --
   confirm against full source.  */
4548 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4549 attrs_list_clear (&set
->regs
[i
]);
4551 shared_hash_destroy (set
->vars
);
4555 /* Return true if RTL X contains a SYMBOL_REF. */
/* NOTE(review): classic recursive rtx walk over GET_RTX_FORMAT; the
   return type, null check, format-char 'e' test and return statements
   are elided from this extract.  */
4558 contains_symbol_ref (rtx x
)
4567 code
= GET_CODE (x
);
4568 if (code
== SYMBOL_REF
)
4571 fmt
= GET_RTX_FORMAT (code
);
4572 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
/* 'e' operands recurse directly, 'E' vectors recurse element-wise.  */
4576 if (contains_symbol_ref (XEXP (x
, i
)))
4579 else if (fmt
[i
] == 'E')
4582 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4583 if (contains_symbol_ref (XVECEXP (x
, i
, j
)))
4591 /* Shall EXPR be tracked? */
/* NOTE(review): central filter deciding whether a decl gets location
   tracking.  NEED_RTL demands a name and DECL_RTL.  Numbered gaps mark
   elided lines (return statements, REALDECL initialization, the
   innerdecl lookup arguments), so fragments below are not contiguous.  */
4594 track_expr_p (tree expr
, bool need_rtl
)
4599 if (TREE_CODE (expr
) == DEBUG_EXPR_DECL
)
4600 return DECL_RTL_SET_P (expr
);
4602 /* If EXPR is not a parameter or a variable do not track it. */
4603 if (TREE_CODE (expr
) != VAR_DECL
&& TREE_CODE (expr
) != PARM_DECL
)
4606 /* It also must have a name... */
4607 if (!DECL_NAME (expr
) && need_rtl
)
4610 /* ... and a RTL assigned to it. */
4611 decl_rtl
= DECL_RTL_IF_SET (expr
);
4612 if (!decl_rtl
&& need_rtl
)
4615 /* If this expression is really a debug alias of some other declaration, we
4616 don't need to track this expression if the ultimate declaration is
4619 if (DECL_DEBUG_EXPR_IS_FROM (realdecl
))
4621 realdecl
= DECL_DEBUG_EXPR (realdecl
);
4622 if (realdecl
== NULL_TREE
)
4624 else if (!DECL_P (realdecl
))
/* Debug expr that is a component reference: accept only a small,
   exactly-sized piece (<= 256 bits per the check below) of a real,
   trackable inner decl.  */
4626 if (handled_component_p (realdecl
))
4628 HOST_WIDE_INT bitsize
, bitpos
, maxsize
;
4630 = get_ref_base_and_extent (realdecl
, &bitpos
, &bitsize
,
4632 if (!DECL_P (innerdecl
)
4633 || DECL_IGNORED_P (innerdecl
)
4634 || TREE_STATIC (innerdecl
)
4636 || bitpos
+ bitsize
> 256
4637 || bitsize
!= maxsize
)
4647 /* Do not track EXPR if REALDECL it should be ignored for debugging
4649 if (DECL_IGNORED_P (realdecl
))
4652 /* Do not track global variables until we are able to emit correct location
4654 if (TREE_STATIC (realdecl
))
4657 /* When the EXPR is a DECL for alias of some variable (see example)
4658 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4659 DECL_RTL contains SYMBOL_REF.
4662 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4665 if (decl_rtl
&& MEM_P (decl_rtl
)
4666 && contains_symbol_ref (XEXP (decl_rtl
, 0)))
4669 /* If RTX is a memory it should not be very large (because it would be
4670 an array or struct). */
4671 if (decl_rtl
&& MEM_P (decl_rtl
))
4673 /* Do not track structures and arrays. */
4674 if (GET_MODE (decl_rtl
) == BLKmode
4675 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl
)))
4677 if (MEM_SIZE_KNOWN_P (decl_rtl
)
4678 && MEM_SIZE (decl_rtl
) > MAX_VAR_PARTS
)
/* Passed all filters: clear the changed flags before tracking.  */
4682 DECL_CHANGED (expr
) = 0;
4683 DECL_CHANGED (realdecl
) = 0;
4687 /* Determine whether a given LOC refers to the same variable part as
/* NOTE(review): the REG_P (loc) branch guard and the "else return
   false" fall-through between the REG and MEM cases are elided.  */
4691 same_variable_part_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
)
4694 HOST_WIDE_INT offset2
;
4696 if (! DECL_P (expr
))
/* REG case: decl/offset come from the register's attributes.  */
4701 expr2
= REG_EXPR (loc
);
4702 offset2
= REG_OFFSET (loc
);
/* MEM case: decl/offset come from the MEM attributes instead.  */
4704 else if (MEM_P (loc
))
4706 expr2
= MEM_EXPR (loc
);
4707 offset2
= INT_MEM_OFFSET (loc
);
4712 if (! expr2
|| ! DECL_P (expr2
))
/* Compare through debug aliases so an alias and its target count as
   the same variable.  */
4715 expr
= var_debug_decl (expr
);
4716 expr2
= var_debug_decl (expr2
);
4718 return (expr
== expr2
&& offset
== offset2
);
4721 /* LOC is a REG or MEM that we would like to track if possible.
4722 If EXPR is null, we don't know what expression LOC refers to,
4723 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4724 LOC is an lvalue register.
4726 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4727 is something we can track. When returning true, store the mode of
4728 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4729 from EXPR in *OFFSET_OUT (if nonnull). */
4732 track_loc_p (rtx loc
, tree expr
, HOST_WIDE_INT offset
, bool store_reg_p
,
4733 enum machine_mode
*mode_out
, HOST_WIDE_INT
*offset_out
)
4735 enum machine_mode mode
;
4737 if (expr
== NULL
|| !track_expr_p (expr
, true))
4740 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4741 whole subreg, but only the old inner part is really relevant. */
4742 mode
= GET_MODE (loc
);
4743 if (REG_P (loc
) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc
)))
4745 enum machine_mode pseudo_mode
;
4747 pseudo_mode
= PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc
));
/* Narrow a paradoxical hard-reg view back down to the pseudo's own
   mode, adjusting OFFSET to the lowpart.  */
4748 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (pseudo_mode
))
4750 offset
+= byte_lowpart_offset (pseudo_mode
, mode
);
4755 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4756 Do the same if we are storing to a register and EXPR occupies
4757 the whole of register LOC; in that case, the whole of EXPR is
4758 being changed. We exclude complex modes from the second case
4759 because the real and imaginary parts are represented as separate
4760 pseudo registers, even if the whole complex value fits into one
4762 if ((GET_MODE_SIZE (mode
) > GET_MODE_SIZE (DECL_MODE (expr
))
4764 && !COMPLEX_MODE_P (DECL_MODE (expr
))
4765 && hard_regno_nregs
[REGNO (loc
)][DECL_MODE (expr
)] == 1))
4766 && offset
+ byte_lowpart_offset (DECL_MODE (expr
), mode
) == 0)
4768 mode
= DECL_MODE (expr
);
/* A part outside [0, MAX_VAR_PARTS) cannot be represented.  */
4772 if (offset
< 0 || offset
>= MAX_VAR_PARTS
)
4778 *offset_out
= offset
;
4782 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4783 want to track. When returning nonnull, make sure that the attributes
4784 on the returned value are updated. */
4787 var_lowpart (enum machine_mode mode
, rtx loc
)
4789 unsigned int offset
, reg_offset
, regno
;
/* Only REGs and MEMs have lowparts we can track.  */
4791 if (!REG_P (loc
) && !MEM_P (loc
))
/* Same mode: LOC is already the lowpart (elided: return loc).  */
4794 if (GET_MODE (loc
) == mode
)
4797 offset
= byte_lowpart_offset (mode
, GET_MODE (loc
));
/* MEM case (guard elided): shift the address by the byte offset.  */
4800 return adjust_address_nv (loc
, mode
, offset
);
/* REG case: translate the subreg byte offset into a hard regno.  */
4802 reg_offset
= subreg_lowpart_offset (mode
, GET_MODE (loc
));
4803 regno
= REGNO (loc
) + subreg_regno_offset (REGNO (loc
), GET_MODE (loc
),
4805 return gen_rtx_REG_offset (loc
, mode
, regno
, offset
);
4808 /* Carry information about uses and stores while walking rtx. */
4810 struct count_use_info
/* NOTE(review): the member declarations for the insn, the basic block,
   n_sets and the store flag are elided from this extract -- only SETS
   is visible.  */
4812 /* The insn where the RTX is. */
4815 /* The basic block where insn is. */
4818 /* The array of n_sets sets in the insn, as determined by cselib. */
4819 struct cselib_set
*sets
;
4822 /* True if we're counting stores, false otherwise. */
4826 /* Find a VALUE corresponding to X. */
4828 static inline cselib_val
*
4829 find_use_val (rtx x
, enum machine_mode mode
, struct count_use_info
*cui
)
4835 /* This is called after uses are set up and before stores are
4836 processed by cselib, so it's safe to look up srcs, but not
4837 dsts. So we look up expressions that appear in srcs or in
4838 dest expressions, but we search the sets array for dests of
4842 /* Some targets represent memset and memcpy patterns
4843 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4844 (set (mem:BLK ...) (const_int ...)) or
4845 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4846 in that case, otherwise we end up with mode mismatches. */
4847 if (mode
== BLKmode
&& MEM_P (x
))
/* Store context: X must match a recorded set destination; use the
   value cselib computed for its source.  */
4849 for (i
= 0; i
< cui
->n_sets
; i
++)
4850 if (cui
->sets
[i
].dest
== x
)
4851 return cui
->sets
[i
].src_elt
;
/* Use context: a plain (non-creating) cselib lookup is safe.  */
4854 return cselib_lookup (x
, mode
, 0, VOIDmode
);
4860 /* Helper function to get mode of MEM's address. */
4862 static inline enum machine_mode
4863 get_address_mode (rtx mem
)
/* Prefer the mode carried by the address rtx itself...  */
4865 enum machine_mode mode
= GET_MODE (XEXP (mem
, 0));
4866 if (mode
!= VOIDmode
)
/* ...falling back to the target's mode for MEM's address space
   (e.g. when the address is a VOIDmode constant).  */
4868 return targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (mem
));
4871 /* Replace all registers and addresses in an expression with VALUE
4872 expressions that map back to them, unless the expression is a
4873 register. If no mapping is or can be performed, returns NULL. */
4876 replace_expr_with_values (rtx loc
)
/* Bare registers (and ENTRY_VALUEs) are left alone -- elided line
   presumably returns NULL here.  */
4878 if (REG_P (loc
) || GET_CODE (loc
) == ENTRY_VALUE
)
4880 else if (MEM_P (loc
))
/* MEM: canonicalize only the address through its cselib value.  */
4882 cselib_val
*addr
= cselib_lookup (XEXP (loc
, 0),
4883 get_address_mode (loc
), 0,
4886 return replace_equiv_address_nv (loc
, addr
->val_rtx
);
/* Anything else: substitute values throughout the expression.  */
4891 return cselib_subst_to_values (loc
, VOIDmode
);
4894 /* Determine what kind of micro operation to choose for a USE. Return
4895 MO_CLOBBER if no micro operation is to be generated. */
/* NOTE(review): heavily elided extract.  With CUI->sets (value
   tracking active) the function classifies VAR_LOCATIONs, REGs and
   MEMs into MO_VAL_LOC / MO_VAL_USE / MO_VAL_SET-related kinds; the
   actual return statements for most branches are missing here.  */
4897 static enum micro_operation_type
4898 use_type (rtx loc
, struct count_use_info
*cui
, enum machine_mode
*modep
)
4902 if (cui
&& cui
->sets
)
4904 if (GET_CODE (loc
) == VAR_LOCATION
)
4906 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc
), false))
4908 rtx ploc
= PAT_VAR_LOCATION_LOC (loc
);
4909 if (! VAR_LOC_UNKNOWN_P (ploc
))
/* Creating lookup (3rd arg 1): ensure the bound location has a
   VALUE so the note can refer to it.  */
4911 cselib_val
*val
= cselib_lookup (ploc
, GET_MODE (loc
), 1,
4914 /* ??? flag_float_store and volatile mems are never
4915 given values, but we could in theory use them for
4917 gcc_assert (val
|| 1);
4925 if (REG_P (loc
) || MEM_P (loc
))
4928 *modep
= GET_MODE (loc
);
4932 || (find_use_val (loc
, GET_MODE (loc
), cui
)
4933 && cselib_lookup (XEXP (loc
, 0),
4934 get_address_mode (loc
), 0,
4940 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
4942 if (val
&& !cselib_preserved_value_p (val
))
/* Below here: the non-value-tracking classification.  Only hard
   registers are expected (asserted), and the CFA base register is
   never tracked as a variable.  */
4950 gcc_assert (REGNO (loc
) < FIRST_PSEUDO_REGISTER
);
4952 if (loc
== cfa_base_rtx
)
4954 expr
= REG_EXPR (loc
);
4957 return MO_USE_NO_VAR
;
4958 else if (target_for_debug_bind (var_debug_decl (expr
)))
4960 else if (track_loc_p (loc
, expr
, REG_OFFSET (loc
),
4961 false, modep
, NULL
))
4964 return MO_USE_NO_VAR
;
4966 else if (MEM_P (loc
))
4968 expr
= MEM_EXPR (loc
);
4972 else if (target_for_debug_bind (var_debug_decl (expr
)))
4974 else if (track_loc_p (loc
, expr
, INT_MEM_OFFSET (loc
),
4975 false, modep
, NULL
))
4984 /* Log to OUT information about micro-operation MOPT involving X in
/* Dump-file helper: prints "bb N op N insn N KIND" followed by the rtx
   X.  The op index is the current length of this block's micro-op
   vector, i.e. the position the operation is about to occupy.  */
4988 log_op_type (rtx x
, basic_block bb
, rtx insn
,
4989 enum micro_operation_type mopt
, FILE *out
)
4991 fprintf (out
, "bb %i op %i insn %i %s ",
4992 bb
->index
, VEC_length (micro_operation
, VTI (bb
)->mos
),
4993 INSN_UID (insn
), micro_operation_type_name
[mopt
]);
4994 print_inline_rtx (out
, x
, 2);
/* The five macros below repurpose otherwise-unused RTL flag bits
   (volatil, used, jump, unchanging, return_val) on MO_VAL_* CONCAT
   rtxes; RTL_FLAG_CHECK1 enforces that they are only applied to
   CONCATs.  */
4998 /* Tell whether the CONCAT used to holds a VALUE and its location
4999 needs value resolution, i.e., an attempt of mapping the location
5000 back to other incoming values. */
5001 #define VAL_NEEDS_RESOLUTION(x) \
5002 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5003 /* Whether the location in the CONCAT is a tracked expression, that
5004 should also be handled like a MO_USE. */
5005 #define VAL_HOLDS_TRACK_EXPR(x) \
5006 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5007 /* Whether the location in the CONCAT should be handled like a MO_COPY
5009 #define VAL_EXPR_IS_COPIED(x) \
5010 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5011 /* Whether the location in the CONCAT should be handled like a
5012 MO_CLOBBER as well. */
5013 #define VAL_EXPR_IS_CLOBBERED(x) \
5014 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5015 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
5016 a reverse operation that should be handled afterwards. */
5017 #define VAL_EXPR_HAS_REVERSE(x) \
5018 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
5020 /* All preserved VALUEs. */
/* Filled by preserve_value below; consumed later by vt_emit_notes.  */
5021 static VEC (rtx
, heap
) *preserved_values
;
5023 /* Registers used in the current function for passing parameters. */
/* Tested in add_stores to keep argument registers' sets around.  */
5024 static HARD_REG_SET argument_reg_set
;
5026 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5029 preserve_value (cselib_val
*val
)
/* Mark the value preserved in cselib, then record its VALUE rtx so
   note emission can iterate over all of them.  */
5031 cselib_preserve_value (val
);
5032 VEC_safe_push (rtx
, heap
, preserved_values
, val
->val_rtx
);
5035 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5036 any rtxes not suitable for CONST use not replaced by VALUEs
/* for_each_rtx callback; the switch's case labels and the other
   return statements are elided from this extract -- only the MEM arm
   (read-only MEMs are acceptable in a CONST) is visible.  */
5040 non_suitable_const (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
5045 switch (GET_CODE (*x
))
5056 return !MEM_READONLY_P (*x
);
5062 /* Add uses (register and memory references) LOC which will be tracked
5063 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5066 add_uses (rtx
*ploc
, void *data
)
5069 enum machine_mode mode
= VOIDmode
;
5070 struct count_use_info
*cui
= (struct count_use_info
*)data
;
5071 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5073 if (type
!= MO_CLOBBER
)
5075 basic_block bb
= cui
->bb
;
5079 mo
.u
.loc
= type
== MO_USE
? var_lowpart (mode
, loc
) : loc
;
5080 mo
.insn
= cui
->insn
;
5082 if (type
== MO_VAL_LOC
)
5085 rtx vloc
= PAT_VAR_LOCATION_LOC (oloc
);
5088 gcc_assert (cui
->sets
);
5091 && !REG_P (XEXP (vloc
, 0))
5092 && !MEM_P (XEXP (vloc
, 0))
5093 && GET_CODE (XEXP (vloc
, 0)) != ENTRY_VALUE
5094 && (GET_CODE (XEXP (vloc
, 0)) != PLUS
5095 || XEXP (XEXP (vloc
, 0), 0) != cfa_base_rtx
5096 || !CONST_INT_P (XEXP (XEXP (vloc
, 0), 1))))
5099 enum machine_mode address_mode
= get_address_mode (mloc
);
5101 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5104 if (val
&& !cselib_preserved_value_p (val
))
5106 micro_operation moa
;
5107 preserve_value (val
);
5108 mloc
= cselib_subst_to_values (XEXP (mloc
, 0),
5110 moa
.type
= MO_VAL_USE
;
5111 moa
.insn
= cui
->insn
;
5112 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
5113 val
->val_rtx
, mloc
);
5114 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5115 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5116 moa
.type
, dump_file
);
5117 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5121 if (CONSTANT_P (vloc
)
5122 && (GET_CODE (vloc
) != CONST
5123 || for_each_rtx (&vloc
, non_suitable_const
, NULL
)))
5124 /* For constants don't look up any value. */;
5125 else if (!VAR_LOC_UNKNOWN_P (vloc
)
5126 && (val
= find_use_val (vloc
, GET_MODE (oloc
), cui
)))
5128 enum machine_mode mode2
;
5129 enum micro_operation_type type2
;
5130 rtx nloc
= replace_expr_with_values (vloc
);
5134 oloc
= shallow_copy_rtx (oloc
);
5135 PAT_VAR_LOCATION_LOC (oloc
) = nloc
;
5138 oloc
= gen_rtx_CONCAT (mode
, val
->val_rtx
, oloc
);
5140 type2
= use_type (vloc
, 0, &mode2
);
5142 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5143 || type2
== MO_CLOBBER
);
5145 if (type2
== MO_CLOBBER
5146 && !cselib_preserved_value_p (val
))
5148 VAL_NEEDS_RESOLUTION (oloc
) = 1;
5149 preserve_value (val
);
5152 else if (!VAR_LOC_UNKNOWN_P (vloc
))
5154 oloc
= shallow_copy_rtx (oloc
);
5155 PAT_VAR_LOCATION_LOC (oloc
) = gen_rtx_UNKNOWN_VAR_LOC ();
5160 else if (type
== MO_VAL_USE
)
5162 enum machine_mode mode2
= VOIDmode
;
5163 enum micro_operation_type type2
;
5164 cselib_val
*val
= find_use_val (loc
, GET_MODE (loc
), cui
);
5165 rtx vloc
, oloc
= loc
, nloc
;
5167 gcc_assert (cui
->sets
);
5170 && !REG_P (XEXP (oloc
, 0))
5171 && !MEM_P (XEXP (oloc
, 0))
5172 && GET_CODE (XEXP (oloc
, 0)) != ENTRY_VALUE
5173 && (GET_CODE (XEXP (oloc
, 0)) != PLUS
5174 || XEXP (XEXP (oloc
, 0), 0) != cfa_base_rtx
5175 || !CONST_INT_P (XEXP (XEXP (oloc
, 0), 1))))
5178 enum machine_mode address_mode
= get_address_mode (mloc
);
5180 = cselib_lookup (XEXP (mloc
, 0), address_mode
, 0,
5183 if (val
&& !cselib_preserved_value_p (val
))
5185 micro_operation moa
;
5186 preserve_value (val
);
5187 mloc
= cselib_subst_to_values (XEXP (mloc
, 0),
5189 moa
.type
= MO_VAL_USE
;
5190 moa
.insn
= cui
->insn
;
5191 moa
.u
.loc
= gen_rtx_CONCAT (address_mode
,
5192 val
->val_rtx
, mloc
);
5193 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5194 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5195 moa
.type
, dump_file
);
5196 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5200 type2
= use_type (loc
, 0, &mode2
);
5202 gcc_assert (type2
== MO_USE
|| type2
== MO_USE_NO_VAR
5203 || type2
== MO_CLOBBER
);
5205 if (type2
== MO_USE
)
5206 vloc
= var_lowpart (mode2
, loc
);
5210 /* The loc of a MO_VAL_USE may have two forms:
5212 (concat val src): val is at src, a value-based
5215 (concat (concat val use) src): same as above, with use as
5216 the MO_USE tracked value, if it differs from src.
5220 nloc
= replace_expr_with_values (loc
);
5225 oloc
= gen_rtx_CONCAT (mode2
, val
->val_rtx
, vloc
);
5227 oloc
= val
->val_rtx
;
5229 mo
.u
.loc
= gen_rtx_CONCAT (mode
, oloc
, nloc
);
5231 if (type2
== MO_USE
)
5232 VAL_HOLDS_TRACK_EXPR (mo
.u
.loc
) = 1;
5233 if (!cselib_preserved_value_p (val
))
5235 VAL_NEEDS_RESOLUTION (mo
.u
.loc
) = 1;
5236 preserve_value (val
);
5240 gcc_assert (type
== MO_USE
|| type
== MO_USE_NO_VAR
);
5242 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5243 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5244 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5250 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
/* Thin wrapper: walks every sub-rtx of *X, invoking add_uses on each,
   with CUI (a struct count_use_info *) as the callback data.  */
5253 add_uses_1 (rtx
*x
, void *cui
)
5255 for_each_rtx (x
, add_uses
, cui
);
/* Cap on expression depth when expanding values for debug info.  */
5258 #define EXPR_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5260 /* Attempt to reverse the EXPR operation in the debug info. Say for
5261 reg1 = reg2 + 6 even when reg2 is no longer live we
5262 can express its value as VAL - 6. */
/* NOTE(review): elided lines include the declarations of SRC/ARG/RET/
   V/CODE, the case labels of both switches and several returns; the
   fragments below are not a contiguous body.  */
5265 reverse_op (rtx val
, const_rtx expr
)
/* Only simple register SETs of matching mode can be reversed.  */
5271 if (GET_CODE (expr
) != SET
)
5274 if (!REG_P (SET_DEST (expr
)) || GET_MODE (val
) != GET_MODE (SET_DEST (expr
)))
5277 src
= SET_SRC (expr
);
5278 switch (GET_CODE (src
))
5285 if (!REG_P (XEXP (src
, 0)))
5290 if (!REG_P (XEXP (src
, 0)) && !MEM_P (XEXP (src
, 0)))
5297 if (!SCALAR_INT_MODE_P (GET_MODE (src
)) || XEXP (src
, 0) == cfa_base_rtx
)
/* The operand being reconstructed must already have a preserved
   cselib value, otherwise there is nothing to attach the reverse
   expression to.  */
5300 v
= cselib_lookup (XEXP (src
, 0), GET_MODE (XEXP (src
, 0)), 0, VOIDmode
);
5301 if (!v
|| !cselib_preserved_value_p (v
))
5304 switch (GET_CODE (src
))
/* Unary case (e.g. NOT/NEG): the same operation applied to VAL
   recovers the operand.  */
5308 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5310 ret
= gen_rtx_fmt_e (GET_CODE (src
), GET_MODE (val
), val
);
5314 ret
= gen_lowpart_SUBREG (GET_MODE (v
->val_rtx
), val
);
/* Binary case: invert with a constant (or SYMBOL_REF) second operand,
   expanding ARG through cselib if it is not constant yet.  */
5326 if (GET_MODE (v
->val_rtx
) != GET_MODE (val
))
5328 arg
= XEXP (src
, 1);
5329 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5331 arg
= cselib_expand_value_rtx (arg
, scratch_regs
, 5);
5332 if (arg
== NULL_RTX
)
5334 if (!CONST_INT_P (arg
) && GET_CODE (arg
) != SYMBOL_REF
)
5337 ret
= simplify_gen_binary (code
, GET_MODE (val
), val
, arg
);
5339 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5340 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5341 breaks a lot of routines during var-tracking. */
5342 ret
= gen_rtx_fmt_ee (PLUS
, GET_MODE (val
), val
, const0_rtx
);
/* Package the target value and the reverse expression as a CONCAT for
   MO_VAL_SET processing (see VAL_EXPR_HAS_REVERSE).  */
5348 return gen_rtx_CONCAT (GET_MODE (v
->val_rtx
), v
->val_rtx
, ret
);
5351 /* Add stores (register and memory references) LOC which will be tracked
5352 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5353 CUIP->insn is instruction which the LOC is part of. */
5356 add_stores (rtx loc
, const_rtx expr
, void *cuip
)
5358 enum machine_mode mode
= VOIDmode
, mode2
;
5359 struct count_use_info
*cui
= (struct count_use_info
*)cuip
;
5360 basic_block bb
= cui
->bb
;
5362 rtx oloc
= loc
, nloc
, src
= NULL
;
5363 enum micro_operation_type type
= use_type (loc
, cui
, &mode
);
5364 bool track_p
= false;
5366 bool resolve
, preserve
;
5369 if (type
== MO_CLOBBER
)
5376 gcc_assert (loc
!= cfa_base_rtx
);
5377 if ((GET_CODE (expr
) == CLOBBER
&& type
!= MO_VAL_SET
)
5378 || !(track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5379 || GET_CODE (expr
) == CLOBBER
)
5381 mo
.type
= MO_CLOBBER
;
5383 if (GET_CODE (expr
) == SET
5384 && SET_DEST (expr
) == loc
5385 && REGNO (loc
) < FIRST_PSEUDO_REGISTER
5386 && TEST_HARD_REG_BIT (argument_reg_set
, REGNO (loc
))
5387 && find_use_val (loc
, mode
, cui
)
5388 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5390 gcc_checking_assert (type
== MO_VAL_SET
);
5391 mo
.u
.loc
= gen_rtx_SET (VOIDmode
, loc
, SET_SRC (expr
));
5396 if (GET_CODE (expr
) == SET
5397 && SET_DEST (expr
) == loc
5398 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5399 src
= var_lowpart (mode2
, SET_SRC (expr
));
5400 loc
= var_lowpart (mode2
, loc
);
5409 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5410 if (same_variable_part_p (src
, REG_EXPR (loc
), REG_OFFSET (loc
)))
5417 mo
.insn
= cui
->insn
;
5419 else if (MEM_P (loc
)
5420 && ((track_p
= use_type (loc
, NULL
, &mode2
) == MO_USE
)
5423 if (MEM_P (loc
) && type
== MO_VAL_SET
5424 && !REG_P (XEXP (loc
, 0))
5425 && !MEM_P (XEXP (loc
, 0))
5426 && GET_CODE (XEXP (loc
, 0)) != ENTRY_VALUE
5427 && (GET_CODE (XEXP (loc
, 0)) != PLUS
5428 || XEXP (XEXP (loc
, 0), 0) != cfa_base_rtx
5429 || !CONST_INT_P (XEXP (XEXP (loc
, 0), 1))))
5432 enum machine_mode address_mode
= get_address_mode (mloc
);
5433 cselib_val
*val
= cselib_lookup (XEXP (mloc
, 0),
5437 if (val
&& !cselib_preserved_value_p (val
))
5439 preserve_value (val
);
5440 mo
.type
= MO_VAL_USE
;
5441 mloc
= cselib_subst_to_values (XEXP (mloc
, 0),
5443 mo
.u
.loc
= gen_rtx_CONCAT (address_mode
, val
->val_rtx
, mloc
);
5444 mo
.insn
= cui
->insn
;
5445 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5446 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
,
5447 mo
.type
, dump_file
);
5448 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5452 if (GET_CODE (expr
) == CLOBBER
|| !track_p
)
5454 mo
.type
= MO_CLOBBER
;
5455 mo
.u
.loc
= track_p
? var_lowpart (mode2
, loc
) : loc
;
5459 if (GET_CODE (expr
) == SET
5460 && SET_DEST (expr
) == loc
5461 && GET_CODE (SET_SRC (expr
)) != ASM_OPERANDS
)
5462 src
= var_lowpart (mode2
, SET_SRC (expr
));
5463 loc
= var_lowpart (mode2
, loc
);
5472 rtx xexpr
= gen_rtx_SET (VOIDmode
, loc
, src
);
5473 if (same_variable_part_p (SET_SRC (xexpr
),
5475 INT_MEM_OFFSET (loc
)))
5482 mo
.insn
= cui
->insn
;
5487 if (type
!= MO_VAL_SET
)
5488 goto log_and_return
;
5490 v
= find_use_val (oloc
, mode
, cui
);
5493 goto log_and_return
;
5495 resolve
= preserve
= !cselib_preserved_value_p (v
);
5497 nloc
= replace_expr_with_values (oloc
);
5501 if (GET_CODE (PATTERN (cui
->insn
)) == COND_EXEC
)
5503 cselib_val
*oval
= cselib_lookup (oloc
, GET_MODE (oloc
), 0, VOIDmode
);
5505 gcc_assert (oval
!= v
);
5506 gcc_assert (REG_P (oloc
) || MEM_P (oloc
));
5508 if (!cselib_preserved_value_p (oval
))
5510 micro_operation moa
;
5512 preserve_value (oval
);
5514 moa
.type
= MO_VAL_USE
;
5515 moa
.u
.loc
= gen_rtx_CONCAT (mode
, oval
->val_rtx
, oloc
);
5516 VAL_NEEDS_RESOLUTION (moa
.u
.loc
) = 1;
5517 moa
.insn
= cui
->insn
;
5519 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5520 log_op_type (moa
.u
.loc
, cui
->bb
, cui
->insn
,
5521 moa
.type
, dump_file
);
5522 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &moa
);
5527 else if (resolve
&& GET_CODE (mo
.u
.loc
) == SET
)
5529 nloc
= replace_expr_with_values (SET_SRC (expr
));
5531 /* Avoid the mode mismatch between oexpr and expr. */
5532 if (!nloc
&& mode
!= mode2
)
5534 nloc
= SET_SRC (expr
);
5535 gcc_assert (oloc
== SET_DEST (expr
));
5539 oloc
= gen_rtx_SET (GET_MODE (mo
.u
.loc
), oloc
, nloc
);
5542 if (oloc
== SET_DEST (mo
.u
.loc
))
5543 /* No point in duplicating. */
5545 if (!REG_P (SET_SRC (mo
.u
.loc
)))
5551 if (GET_CODE (mo
.u
.loc
) == SET
5552 && oloc
== SET_DEST (mo
.u
.loc
))
5553 /* No point in duplicating. */
5559 loc
= gen_rtx_CONCAT (mode
, v
->val_rtx
, oloc
);
5561 if (mo
.u
.loc
!= oloc
)
5562 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, mo
.u
.loc
);
5564 /* The loc of a MO_VAL_SET may have various forms:
5566 (concat val dst): dst now holds val
5568 (concat val (set dst src)): dst now holds val, copied from src
5570 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5571 after replacing mems and non-top-level regs with values.
5573 (concat (concat val dstv) (set dst src)): dst now holds val,
5574 copied from src. dstv is a value-based representation of dst, if
5575 it differs from dst. If resolution is needed, src is a REG, and
5576 its mode is the same as that of val.
5578 (concat (concat val (set dstv srcv)) (set dst src)): src
5579 copied to dst, holding val. dstv and srcv are value-based
5580 representations of dst and src, respectively.
5584 if (GET_CODE (PATTERN (cui
->insn
)) != COND_EXEC
)
5586 reverse
= reverse_op (v
->val_rtx
, expr
);
5589 loc
= gen_rtx_CONCAT (GET_MODE (mo
.u
.loc
), loc
, reverse
);
5590 VAL_EXPR_HAS_REVERSE (loc
) = 1;
5597 VAL_HOLDS_TRACK_EXPR (loc
) = 1;
5600 VAL_NEEDS_RESOLUTION (loc
) = resolve
;
5603 if (mo
.type
== MO_CLOBBER
)
5604 VAL_EXPR_IS_CLOBBERED (loc
) = 1;
5605 if (mo
.type
== MO_COPY
)
5606 VAL_EXPR_IS_COPIED (loc
) = 1;
5608 mo
.type
= MO_VAL_SET
;
5611 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5612 log_op_type (mo
.u
.loc
, cui
->bb
, cui
->insn
, mo
.type
, dump_file
);
5613 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
5616 /* Arguments to the call. */
5617 static rtx call_arguments
;
5619 /* Compute call_arguments. */
5622 prepare_call_arguments (basic_block bb
, rtx insn
)
5625 rtx prev
, cur
, next
;
5626 rtx call
= PATTERN (insn
);
5627 rtx this_arg
= NULL_RTX
;
5628 tree type
= NULL_TREE
, t
, fndecl
= NULL_TREE
;
5629 tree obj_type_ref
= NULL_TREE
;
5630 CUMULATIVE_ARGS args_so_far_v
;
5631 cumulative_args_t args_so_far
;
5633 memset (&args_so_far_v
, 0, sizeof (args_so_far_v
));
5634 args_so_far
= pack_cumulative_args (&args_so_far_v
);
5635 if (GET_CODE (call
) == PARALLEL
)
5636 call
= XVECEXP (call
, 0, 0);
5637 if (GET_CODE (call
) == SET
)
5638 call
= SET_SRC (call
);
5639 if (GET_CODE (call
) == CALL
&& MEM_P (XEXP (call
, 0)))
5641 if (GET_CODE (XEXP (XEXP (call
, 0), 0)) == SYMBOL_REF
)
5643 rtx symbol
= XEXP (XEXP (call
, 0), 0);
5644 if (SYMBOL_REF_DECL (symbol
))
5645 fndecl
= SYMBOL_REF_DECL (symbol
);
5647 if (fndecl
== NULL_TREE
)
5648 fndecl
= MEM_EXPR (XEXP (call
, 0));
5650 && TREE_CODE (TREE_TYPE (fndecl
)) != FUNCTION_TYPE
5651 && TREE_CODE (TREE_TYPE (fndecl
)) != METHOD_TYPE
)
5653 if (fndecl
&& TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5654 type
= TREE_TYPE (fndecl
);
5655 if (fndecl
&& TREE_CODE (fndecl
) != FUNCTION_DECL
)
5657 if (TREE_CODE (fndecl
) == INDIRECT_REF
5658 && TREE_CODE (TREE_OPERAND (fndecl
, 0)) == OBJ_TYPE_REF
)
5659 obj_type_ref
= TREE_OPERAND (fndecl
, 0);
5664 for (t
= TYPE_ARG_TYPES (type
); t
&& t
!= void_list_node
;
5666 if (TREE_CODE (TREE_VALUE (t
)) == REFERENCE_TYPE
5667 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t
))))
5669 if ((t
== NULL
|| t
== void_list_node
) && obj_type_ref
== NULL_TREE
)
5673 int nargs ATTRIBUTE_UNUSED
= list_length (TYPE_ARG_TYPES (type
));
5674 link
= CALL_INSN_FUNCTION_USAGE (insn
);
5675 #ifndef PCC_STATIC_STRUCT_RETURN
5676 if (aggregate_value_p (TREE_TYPE (type
), type
)
5677 && targetm
.calls
.struct_value_rtx (type
, 0) == 0)
5679 tree struct_addr
= build_pointer_type (TREE_TYPE (type
));
5680 enum machine_mode mode
= TYPE_MODE (struct_addr
);
5682 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
5684 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
5686 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
5688 if (reg
== NULL_RTX
)
5690 for (; link
; link
= XEXP (link
, 1))
5691 if (GET_CODE (XEXP (link
, 0)) == USE
5692 && MEM_P (XEXP (XEXP (link
, 0), 0)))
5694 link
= XEXP (link
, 1);
5701 INIT_CUMULATIVE_ARGS (args_so_far_v
, type
, NULL_RTX
, fndecl
,
5703 if (obj_type_ref
&& TYPE_ARG_TYPES (type
) != void_list_node
)
5705 enum machine_mode mode
;
5706 t
= TYPE_ARG_TYPES (type
);
5707 mode
= TYPE_MODE (TREE_VALUE (t
));
5708 this_arg
= targetm
.calls
.function_arg (args_so_far
, mode
,
5709 TREE_VALUE (t
), true);
5710 if (this_arg
&& !REG_P (this_arg
))
5711 this_arg
= NULL_RTX
;
5712 else if (this_arg
== NULL_RTX
)
5714 for (; link
; link
= XEXP (link
, 1))
5715 if (GET_CODE (XEXP (link
, 0)) == USE
5716 && MEM_P (XEXP (XEXP (link
, 0), 0)))
5718 this_arg
= XEXP (XEXP (link
, 0), 0);
5726 t
= type
? TYPE_ARG_TYPES (type
) : NULL_TREE
;
5728 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
5729 if (GET_CODE (XEXP (link
, 0)) == USE
)
5731 rtx item
= NULL_RTX
;
5732 x
= XEXP (XEXP (link
, 0), 0);
5735 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
5736 if (val
&& cselib_preserved_value_p (val
))
5737 item
= gen_rtx_CONCAT (GET_MODE (x
), x
, val
->val_rtx
);
5738 else if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
5740 enum machine_mode mode
= GET_MODE (x
);
5742 while ((mode
= GET_MODE_WIDER_MODE (mode
)) != VOIDmode
5743 && GET_MODE_BITSIZE (mode
) <= BITS_PER_WORD
)
5745 rtx reg
= simplify_subreg (mode
, x
, GET_MODE (x
), 0);
5747 if (reg
== NULL_RTX
|| !REG_P (reg
))
5749 val
= cselib_lookup (reg
, mode
, 0, VOIDmode
);
5750 if (val
&& cselib_preserved_value_p (val
))
5752 item
= gen_rtx_CONCAT (GET_MODE (x
), x
,
5753 lowpart_subreg (GET_MODE (x
),
5766 if (!frame_pointer_needed
)
5768 struct adjust_mem_data amd
;
5769 amd
.mem_mode
= VOIDmode
;
5770 amd
.stack_adjust
= -VTI (bb
)->out
.stack_adjust
;
5771 amd
.side_effects
= NULL_RTX
;
5773 mem
= simplify_replace_fn_rtx (mem
, NULL_RTX
, adjust_mems
,
5775 gcc_assert (amd
.side_effects
== NULL_RTX
);
5777 val
= cselib_lookup (mem
, GET_MODE (mem
), 0, VOIDmode
);
5778 if (val
&& cselib_preserved_value_p (val
))
5779 item
= gen_rtx_CONCAT (GET_MODE (x
), copy_rtx (x
), val
->val_rtx
);
5780 else if (GET_MODE_CLASS (GET_MODE (mem
)) != MODE_INT
)
5782 /* For non-integer stack argument see also if they weren't
5783 initialized by integers. */
5784 enum machine_mode imode
= int_mode_for_mode (GET_MODE (mem
));
5785 if (imode
!= GET_MODE (mem
) && imode
!= BLKmode
)
5787 val
= cselib_lookup (adjust_address_nv (mem
, imode
, 0),
5788 imode
, 0, VOIDmode
);
5789 if (val
&& cselib_preserved_value_p (val
))
5790 item
= gen_rtx_CONCAT (GET_MODE (x
), copy_rtx (x
),
5791 lowpart_subreg (GET_MODE (x
),
5798 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
, call_arguments
);
5799 if (t
&& t
!= void_list_node
)
5801 tree argtype
= TREE_VALUE (t
);
5802 enum machine_mode mode
= TYPE_MODE (argtype
);
5804 if (pass_by_reference (&args_so_far_v
, mode
, argtype
, true))
5806 argtype
= build_pointer_type (argtype
);
5807 mode
= TYPE_MODE (argtype
);
5809 reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
5811 if (TREE_CODE (argtype
) == REFERENCE_TYPE
5812 && INTEGRAL_TYPE_P (TREE_TYPE (argtype
))
5815 && GET_MODE (reg
) == mode
5816 && GET_MODE_CLASS (mode
) == MODE_INT
5818 && REGNO (x
) == REGNO (reg
)
5819 && GET_MODE (x
) == mode
5822 enum machine_mode indmode
5823 = TYPE_MODE (TREE_TYPE (argtype
));
5824 rtx mem
= gen_rtx_MEM (indmode
, x
);
5825 cselib_val
*val
= cselib_lookup (mem
, indmode
, 0, VOIDmode
);
5826 if (val
&& cselib_preserved_value_p (val
))
5828 item
= gen_rtx_CONCAT (indmode
, mem
, val
->val_rtx
);
5829 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
5834 struct elt_loc_list
*l
;
5837 /* Try harder, when passing address of a constant
5838 pool integer it can be easily read back. */
5839 item
= XEXP (item
, 1);
5840 if (GET_CODE (item
) == SUBREG
)
5841 item
= SUBREG_REG (item
);
5842 gcc_assert (GET_CODE (item
) == VALUE
);
5843 val
= CSELIB_VAL_PTR (item
);
5844 for (l
= val
->locs
; l
; l
= l
->next
)
5845 if (GET_CODE (l
->loc
) == SYMBOL_REF
5846 && TREE_CONSTANT_POOL_ADDRESS_P (l
->loc
)
5847 && SYMBOL_REF_DECL (l
->loc
)
5848 && DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
)))
5850 initial
= DECL_INITIAL (SYMBOL_REF_DECL (l
->loc
));
5851 if (host_integerp (initial
, 0))
5853 item
= GEN_INT (tree_low_cst (initial
, 0));
5854 item
= gen_rtx_CONCAT (indmode
, mem
, item
);
5856 = gen_rtx_EXPR_LIST (VOIDmode
, item
,
5863 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
5869 /* Add debug arguments. */
5871 && TREE_CODE (fndecl
) == FUNCTION_DECL
5872 && DECL_HAS_DEBUG_ARGS_P (fndecl
))
5874 VEC(tree
, gc
) **debug_args
= decl_debug_args_lookup (fndecl
);
5879 for (ix
= 0; VEC_iterate (tree
, *debug_args
, ix
, param
); ix
+= 2)
5882 tree dtemp
= VEC_index (tree
, *debug_args
, ix
+ 1);
5883 enum machine_mode mode
= DECL_MODE (dtemp
);
5884 item
= gen_rtx_DEBUG_PARAMETER_REF (mode
, param
);
5885 item
= gen_rtx_CONCAT (mode
, item
, DECL_RTL (dtemp
));
5886 call_arguments
= gen_rtx_EXPR_LIST (VOIDmode
, item
,
5892 /* Reverse call_arguments chain. */
5894 for (cur
= call_arguments
; cur
; cur
= next
)
5896 next
= XEXP (cur
, 1);
5897 XEXP (cur
, 1) = prev
;
5900 call_arguments
= prev
;
5903 if (GET_CODE (x
) == PARALLEL
)
5904 x
= XVECEXP (x
, 0, 0);
5905 if (GET_CODE (x
) == SET
)
5907 if (GET_CODE (x
) == CALL
&& MEM_P (XEXP (x
, 0)))
5909 x
= XEXP (XEXP (x
, 0), 0);
5910 if (GET_CODE (x
) == SYMBOL_REF
)
5911 /* Don't record anything. */;
5912 else if (CONSTANT_P (x
))
5914 x
= gen_rtx_CONCAT (GET_MODE (x
) == VOIDmode
? Pmode
: GET_MODE (x
),
5917 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
5921 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0, VOIDmode
);
5922 if (val
&& cselib_preserved_value_p (val
))
5924 x
= gen_rtx_CONCAT (GET_MODE (x
), pc_rtx
, val
->val_rtx
);
5926 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
5932 enum machine_mode mode
5933 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref
)));
5934 rtx clobbered
= gen_rtx_MEM (mode
, this_arg
);
5936 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref
), 0);
5938 clobbered
= plus_constant (clobbered
, token
* GET_MODE_SIZE (mode
));
5939 clobbered
= gen_rtx_MEM (mode
, clobbered
);
5940 x
= gen_rtx_CONCAT (mode
, gen_rtx_CLOBBER (VOIDmode
, pc_rtx
), clobbered
);
5942 = gen_rtx_EXPR_LIST (VOIDmode
, x
, call_arguments
);
5946 /* Callback for cselib_record_sets_hook, that records as micro
5947 operations uses and stores in an insn after cselib_record_sets has
5948 analyzed the sets in an insn, but before it modifies the stored
5949 values in the internal tables, unless cselib_record_sets doesn't
5950 call it directly (perhaps because we're not doing cselib in the
5951 first place, in which case sets and n_sets will be 0). */
5954 add_with_sets (rtx insn
, struct cselib_set
*sets
, int n_sets
)
5956 basic_block bb
= BLOCK_FOR_INSN (insn
);
5958 struct count_use_info cui
;
5959 micro_operation
*mos
;
5961 cselib_hook_called
= true;
5966 cui
.n_sets
= n_sets
;
5968 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
5969 cui
.store_p
= false;
5970 note_uses (&PATTERN (insn
), add_uses_1
, &cui
);
5971 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5972 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
5974 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5978 while (n1
< n2
&& mos
[n1
].type
== MO_USE
)
5980 while (n1
< n2
&& mos
[n2
].type
!= MO_USE
)
5992 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
5995 while (n1
< n2
&& mos
[n1
].type
!= MO_VAL_LOC
)
5997 while (n1
< n2
&& mos
[n2
].type
== MO_VAL_LOC
)
6015 mo
.u
.loc
= call_arguments
;
6016 call_arguments
= NULL_RTX
;
6018 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6019 log_op_type (PATTERN (insn
), bb
, insn
, mo
.type
, dump_file
);
6020 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
, &mo
);
6023 n1
= VEC_length (micro_operation
, VTI (bb
)->mos
);
6024 /* This will record NEXT_INSN (insn), such that we can
6025 insert notes before it without worrying about any
6026 notes that MO_USEs might emit after the insn. */
6028 note_stores (PATTERN (insn
), add_stores
, &cui
);
6029 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
6030 mos
= VEC_address (micro_operation
, VTI (bb
)->mos
);
6032 /* Order the MO_VAL_USEs first (note_stores does nothing
6033 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6034 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6037 while (n1
< n2
&& mos
[n1
].type
== MO_VAL_USE
)
6039 while (n1
< n2
&& mos
[n2
].type
!= MO_VAL_USE
)
6051 n2
= VEC_length (micro_operation
, VTI (bb
)->mos
) - 1;
6054 while (n1
< n2
&& mos
[n1
].type
== MO_CLOBBER
)
6056 while (n1
< n2
&& mos
[n2
].type
!= MO_CLOBBER
)
6069 static enum var_init_status
6070 find_src_status (dataflow_set
*in
, rtx src
)
6072 tree decl
= NULL_TREE
;
6073 enum var_init_status status
= VAR_INIT_STATUS_UNINITIALIZED
;
6075 if (! flag_var_tracking_uninit
)
6076 status
= VAR_INIT_STATUS_INITIALIZED
;
6078 if (src
&& REG_P (src
))
6079 decl
= var_debug_decl (REG_EXPR (src
));
6080 else if (src
&& MEM_P (src
))
6081 decl
= var_debug_decl (MEM_EXPR (src
));
6084 status
= get_init_value (in
, src
, dv_from_decl (decl
));
6089 /* SRC is the source of an assignment. Use SET to try to find what
6090 was ultimately assigned to SRC. Return that value if known,
6091 otherwise return SRC itself. */
6094 find_src_set_src (dataflow_set
*set
, rtx src
)
6096 tree decl
= NULL_TREE
; /* The variable being copied around. */
6097 rtx set_src
= NULL_RTX
; /* The value for "decl" stored in "src". */
6099 location_chain nextp
;
6103 if (src
&& REG_P (src
))
6104 decl
= var_debug_decl (REG_EXPR (src
));
6105 else if (src
&& MEM_P (src
))
6106 decl
= var_debug_decl (MEM_EXPR (src
));
6110 decl_or_value dv
= dv_from_decl (decl
);
6112 var
= shared_hash_find (set
->vars
, dv
);
6116 for (i
= 0; i
< var
->n_var_parts
&& !found
; i
++)
6117 for (nextp
= var
->var_part
[i
].loc_chain
; nextp
&& !found
;
6118 nextp
= nextp
->next
)
6119 if (rtx_equal_p (nextp
->loc
, src
))
6121 set_src
= nextp
->set_src
;
6131 /* Compute the changes of variable locations in the basic block BB. */
6134 compute_bb_dataflow (basic_block bb
)
6137 micro_operation
*mo
;
6139 dataflow_set old_out
;
6140 dataflow_set
*in
= &VTI (bb
)->in
;
6141 dataflow_set
*out
= &VTI (bb
)->out
;
6143 dataflow_set_init (&old_out
);
6144 dataflow_set_copy (&old_out
, out
);
6145 dataflow_set_copy (out
, in
);
6147 FOR_EACH_VEC_ELT (micro_operation
, VTI (bb
)->mos
, i
, mo
)
6149 rtx insn
= mo
->insn
;
6154 dataflow_set_clear_at_call (out
);
6159 rtx loc
= mo
->u
.loc
;
6162 var_reg_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6163 else if (MEM_P (loc
))
6164 var_mem_set (out
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
6170 rtx loc
= mo
->u
.loc
;
6174 if (GET_CODE (loc
) == CONCAT
)
6176 val
= XEXP (loc
, 0);
6177 vloc
= XEXP (loc
, 1);
6185 var
= PAT_VAR_LOCATION_DECL (vloc
);
6187 clobber_variable_part (out
, NULL_RTX
,
6188 dv_from_decl (var
), 0, NULL_RTX
);
6191 if (VAL_NEEDS_RESOLUTION (loc
))
6192 val_resolve (out
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
6193 set_variable_part (out
, val
, dv_from_decl (var
), 0,
6194 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6197 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
6198 set_variable_part (out
, PAT_VAR_LOCATION_LOC (vloc
),
6199 dv_from_decl (var
), 0,
6200 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
6207 rtx loc
= mo
->u
.loc
;
6208 rtx val
, vloc
, uloc
;
6210 vloc
= uloc
= XEXP (loc
, 1);
6211 val
= XEXP (loc
, 0);
6213 if (GET_CODE (val
) == CONCAT
)
6215 uloc
= XEXP (val
, 1);
6216 val
= XEXP (val
, 0);
6219 if (VAL_NEEDS_RESOLUTION (loc
))
6220 val_resolve (out
, val
, vloc
, insn
);
6222 val_store (out
, val
, uloc
, insn
, false);
6224 if (VAL_HOLDS_TRACK_EXPR (loc
))
6226 if (GET_CODE (uloc
) == REG
)
6227 var_reg_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6229 else if (GET_CODE (uloc
) == MEM
)
6230 var_mem_set (out
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
6238 rtx loc
= mo
->u
.loc
;
6239 rtx val
, vloc
, uloc
, reverse
= NULL_RTX
;
6242 if (VAL_EXPR_HAS_REVERSE (loc
))
6244 reverse
= XEXP (loc
, 1);
6245 vloc
= XEXP (loc
, 0);
6247 uloc
= XEXP (vloc
, 1);
6248 val
= XEXP (vloc
, 0);
6251 if (GET_CODE (val
) == CONCAT
)
6253 vloc
= XEXP (val
, 1);
6254 val
= XEXP (val
, 0);
6257 if (GET_CODE (vloc
) == SET
)
6259 rtx vsrc
= SET_SRC (vloc
);
6261 gcc_assert (val
!= vsrc
);
6262 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
6264 vloc
= SET_DEST (vloc
);
6266 if (VAL_NEEDS_RESOLUTION (loc
))
6267 val_resolve (out
, val
, vsrc
, insn
);
6269 else if (VAL_NEEDS_RESOLUTION (loc
))
6271 gcc_assert (GET_CODE (uloc
) == SET
6272 && GET_CODE (SET_SRC (uloc
)) == REG
);
6273 val_resolve (out
, val
, SET_SRC (uloc
), insn
);
6276 if (VAL_HOLDS_TRACK_EXPR (loc
))
6278 if (VAL_EXPR_IS_CLOBBERED (loc
))
6281 var_reg_delete (out
, uloc
, true);
6282 else if (MEM_P (uloc
))
6283 var_mem_delete (out
, uloc
, true);
6287 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
6289 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
6291 if (GET_CODE (uloc
) == SET
)
6293 set_src
= SET_SRC (uloc
);
6294 uloc
= SET_DEST (uloc
);
6299 if (flag_var_tracking_uninit
)
6301 status
= find_src_status (in
, set_src
);
6303 if (status
== VAR_INIT_STATUS_UNKNOWN
)
6304 status
= find_src_status (out
, set_src
);
6307 set_src
= find_src_set_src (in
, set_src
);
6311 var_reg_delete_and_set (out
, uloc
, !copied_p
,
6313 else if (MEM_P (uloc
))
6314 var_mem_delete_and_set (out
, uloc
, !copied_p
,
6318 else if (REG_P (uloc
))
6319 var_regno_delete (out
, REGNO (uloc
));
6321 val_store (out
, val
, vloc
, insn
, true);
6324 val_store (out
, XEXP (reverse
, 0), XEXP (reverse
, 1),
6331 rtx loc
= mo
->u
.loc
;
6334 if (GET_CODE (loc
) == SET
)
6336 set_src
= SET_SRC (loc
);
6337 loc
= SET_DEST (loc
);
6341 var_reg_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6343 else if (MEM_P (loc
))
6344 var_mem_delete_and_set (out
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
6351 rtx loc
= mo
->u
.loc
;
6352 enum var_init_status src_status
;
6355 if (GET_CODE (loc
) == SET
)
6357 set_src
= SET_SRC (loc
);
6358 loc
= SET_DEST (loc
);
6361 if (! flag_var_tracking_uninit
)
6362 src_status
= VAR_INIT_STATUS_INITIALIZED
;
6365 src_status
= find_src_status (in
, set_src
);
6367 if (src_status
== VAR_INIT_STATUS_UNKNOWN
)
6368 src_status
= find_src_status (out
, set_src
);
6371 set_src
= find_src_set_src (in
, set_src
);
6374 var_reg_delete_and_set (out
, loc
, false, src_status
, set_src
);
6375 else if (MEM_P (loc
))
6376 var_mem_delete_and_set (out
, loc
, false, src_status
, set_src
);
6382 rtx loc
= mo
->u
.loc
;
6385 var_reg_delete (out
, loc
, false);
6386 else if (MEM_P (loc
))
6387 var_mem_delete (out
, loc
, false);
6393 rtx loc
= mo
->u
.loc
;
6396 var_reg_delete (out
, loc
, true);
6397 else if (MEM_P (loc
))
6398 var_mem_delete (out
, loc
, true);
6403 out
->stack_adjust
+= mo
->u
.adjust
;
6408 if (MAY_HAVE_DEBUG_INSNS
)
6410 dataflow_set_equiv_regs (out
);
6411 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_mark
,
6413 htab_traverse (shared_hash_htab (out
->vars
), canonicalize_values_star
,
6416 htab_traverse (shared_hash_htab (out
->vars
),
6417 canonicalize_loc_order_check
, out
);
6420 changed
= dataflow_set_different (&old_out
, out
);
6421 dataflow_set_destroy (&old_out
);
6425 /* Find the locations of variables in the whole function. */
6428 vt_find_locations (void)
6430 fibheap_t worklist
, pending
, fibheap_swap
;
6431 sbitmap visited
, in_worklist
, in_pending
, sbitmap_swap
;
6438 int htabmax
= PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE
);
6439 bool success
= true;
6441 timevar_push (TV_VAR_TRACKING_DATAFLOW
);
6442 /* Compute reverse completion order of depth first search of the CFG
6443 so that the data-flow runs faster. */
6444 rc_order
= XNEWVEC (int, n_basic_blocks
- NUM_FIXED_BLOCKS
);
6445 bb_order
= XNEWVEC (int, last_basic_block
);
6446 pre_and_rev_post_order_compute (NULL
, rc_order
, false);
6447 for (i
= 0; i
< n_basic_blocks
- NUM_FIXED_BLOCKS
; i
++)
6448 bb_order
[rc_order
[i
]] = i
;
6451 worklist
= fibheap_new ();
6452 pending
= fibheap_new ();
6453 visited
= sbitmap_alloc (last_basic_block
);
6454 in_worklist
= sbitmap_alloc (last_basic_block
);
6455 in_pending
= sbitmap_alloc (last_basic_block
);
6456 sbitmap_zero (in_worklist
);
6459 fibheap_insert (pending
, bb_order
[bb
->index
], bb
);
6460 sbitmap_ones (in_pending
);
6462 while (success
&& !fibheap_empty (pending
))
6464 fibheap_swap
= pending
;
6466 worklist
= fibheap_swap
;
6467 sbitmap_swap
= in_pending
;
6468 in_pending
= in_worklist
;
6469 in_worklist
= sbitmap_swap
;
6471 sbitmap_zero (visited
);
6473 while (!fibheap_empty (worklist
))
6475 bb
= (basic_block
) fibheap_extract_min (worklist
);
6476 RESET_BIT (in_worklist
, bb
->index
);
6477 gcc_assert (!TEST_BIT (visited
, bb
->index
));
6478 if (!TEST_BIT (visited
, bb
->index
))
6482 int oldinsz
, oldoutsz
;
6484 SET_BIT (visited
, bb
->index
);
6486 if (VTI (bb
)->in
.vars
)
6489 -= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
6490 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
6492 = htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
));
6494 = htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
));
6497 oldinsz
= oldoutsz
= 0;
6499 if (MAY_HAVE_DEBUG_INSNS
)
6501 dataflow_set
*in
= &VTI (bb
)->in
, *first_out
= NULL
;
6502 bool first
= true, adjust
= false;
6504 /* Calculate the IN set as the intersection of
6505 predecessor OUT sets. */
6507 dataflow_set_clear (in
);
6508 dst_can_be_shared
= true;
6510 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6511 if (!VTI (e
->src
)->flooded
)
6512 gcc_assert (bb_order
[bb
->index
]
6513 <= bb_order
[e
->src
->index
]);
6516 dataflow_set_copy (in
, &VTI (e
->src
)->out
);
6517 first_out
= &VTI (e
->src
)->out
;
6522 dataflow_set_merge (in
, &VTI (e
->src
)->out
);
6528 dataflow_post_merge_adjust (in
, &VTI (bb
)->permp
);
6530 /* Merge and merge_adjust should keep entries in
6532 htab_traverse (shared_hash_htab (in
->vars
),
6533 canonicalize_loc_order_check
,
6536 if (dst_can_be_shared
)
6538 shared_hash_destroy (in
->vars
);
6539 in
->vars
= shared_hash_copy (first_out
->vars
);
6543 VTI (bb
)->flooded
= true;
6547 /* Calculate the IN set as union of predecessor OUT sets. */
6548 dataflow_set_clear (&VTI (bb
)->in
);
6549 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6550 dataflow_set_union (&VTI (bb
)->in
, &VTI (e
->src
)->out
);
6553 changed
= compute_bb_dataflow (bb
);
6554 htabsz
+= (htab_size (shared_hash_htab (VTI (bb
)->in
.vars
))
6555 + htab_size (shared_hash_htab (VTI (bb
)->out
.vars
)));
6557 if (htabmax
&& htabsz
> htabmax
)
6559 if (MAY_HAVE_DEBUG_INSNS
)
6560 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
6561 "variable tracking size limit exceeded with "
6562 "-fvar-tracking-assignments, retrying without");
6564 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
6565 "variable tracking size limit exceeded");
6572 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6574 if (e
->dest
== EXIT_BLOCK_PTR
)
6577 if (TEST_BIT (visited
, e
->dest
->index
))
6579 if (!TEST_BIT (in_pending
, e
->dest
->index
))
6581 /* Send E->DEST to next round. */
6582 SET_BIT (in_pending
, e
->dest
->index
);
6583 fibheap_insert (pending
,
6584 bb_order
[e
->dest
->index
],
6588 else if (!TEST_BIT (in_worklist
, e
->dest
->index
))
6590 /* Add E->DEST to current round. */
6591 SET_BIT (in_worklist
, e
->dest
->index
);
6592 fibheap_insert (worklist
, bb_order
[e
->dest
->index
],
6600 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6602 (int)htab_elements (shared_hash_htab (VTI (bb
)->in
.vars
)),
6604 (int)htab_elements (shared_hash_htab (VTI (bb
)->out
.vars
)),
6606 (int)worklist
->nodes
, (int)pending
->nodes
, htabsz
);
6608 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6610 fprintf (dump_file
, "BB %i IN:\n", bb
->index
);
6611 dump_dataflow_set (&VTI (bb
)->in
);
6612 fprintf (dump_file
, "BB %i OUT:\n", bb
->index
);
6613 dump_dataflow_set (&VTI (bb
)->out
);
6619 if (success
&& MAY_HAVE_DEBUG_INSNS
)
6621 gcc_assert (VTI (bb
)->flooded
);
6624 fibheap_delete (worklist
);
6625 fibheap_delete (pending
);
6626 sbitmap_free (visited
);
6627 sbitmap_free (in_worklist
);
6628 sbitmap_free (in_pending
);
6630 timevar_pop (TV_VAR_TRACKING_DATAFLOW
);
6634 /* Print the content of the LIST to dump file. */
6637 dump_attrs_list (attrs list
)
6639 for (; list
; list
= list
->next
)
6641 if (dv_is_decl_p (list
->dv
))
6642 print_mem_expr (dump_file
, dv_as_decl (list
->dv
));
6644 print_rtl_single (dump_file
, dv_as_value (list
->dv
));
6645 fprintf (dump_file
, "+" HOST_WIDE_INT_PRINT_DEC
, list
->offset
);
6647 fprintf (dump_file
, "\n");
6650 /* Print the information about variable *SLOT to dump file. */
6653 dump_var_slot (void **slot
, void *data ATTRIBUTE_UNUSED
)
6655 variable var
= (variable
) *slot
;
6659 /* Continue traversing the hash table. */
6663 /* Print the information about variable VAR to dump file. */
6666 dump_var (variable var
)
6669 location_chain node
;
6671 if (dv_is_decl_p (var
->dv
))
6673 const_tree decl
= dv_as_decl (var
->dv
);
6675 if (DECL_NAME (decl
))
6677 fprintf (dump_file
, " name: %s",
6678 IDENTIFIER_POINTER (DECL_NAME (decl
)));
6679 if (dump_flags
& TDF_UID
)
6680 fprintf (dump_file
, "D.%u", DECL_UID (decl
));
6682 else if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
6683 fprintf (dump_file
, " name: D#%u", DEBUG_TEMP_UID (decl
));
6685 fprintf (dump_file
, " name: D.%u", DECL_UID (decl
));
6686 fprintf (dump_file
, "\n");
6690 fputc (' ', dump_file
);
6691 print_rtl_single (dump_file
, dv_as_value (var
->dv
));
6694 for (i
= 0; i
< var
->n_var_parts
; i
++)
6696 fprintf (dump_file
, " offset %ld\n",
6697 (long) var
->var_part
[i
].offset
);
6698 for (node
= var
->var_part
[i
].loc_chain
; node
; node
= node
->next
)
6700 fprintf (dump_file
, " ");
6701 if (node
->init
== VAR_INIT_STATUS_UNINITIALIZED
)
6702 fprintf (dump_file
, "[uninit]");
6703 print_rtl_single (dump_file
, node
->loc
);
6708 /* Print the information about variables from hash table VARS to dump file. */
6711 dump_vars (htab_t vars
)
6713 if (htab_elements (vars
) > 0)
6715 fprintf (dump_file
, "Variables:\n");
6716 htab_traverse (vars
, dump_var_slot
, NULL
);
6720 /* Print the dataflow set SET to dump file. */
6723 dump_dataflow_set (dataflow_set
*set
)
6727 fprintf (dump_file
, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC
"\n",
6729 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
6733 fprintf (dump_file
, "Reg %d:", i
);
6734 dump_attrs_list (set
->regs
[i
]);
6737 dump_vars (shared_hash_htab (set
->vars
));
6738 fprintf (dump_file
, "\n");
6741 /* Print the IN and OUT sets for each basic block to dump file. */
6744 dump_dataflow_sets (void)
6750 fprintf (dump_file
, "\nBasic block %d:\n", bb
->index
);
6751 fprintf (dump_file
, "IN:\n");
6752 dump_dataflow_set (&VTI (bb
)->in
);
6753 fprintf (dump_file
, "OUT:\n");
6754 dump_dataflow_set (&VTI (bb
)->out
);
6758 /* Add variable VAR to the hash table of changed variables and
6759 if it has no locations delete it from SET's hash table. */
6762 variable_was_changed (variable var
, dataflow_set
*set
)
6764 hashval_t hash
= dv_htab_hash (var
->dv
);
6769 bool old_cur_loc_changed
= false;
6771 /* Remember this decl or VALUE has been added to changed_variables. */
6772 set_dv_changed (var
->dv
, true);
6774 slot
= htab_find_slot_with_hash (changed_variables
,
6780 variable old_var
= (variable
) *slot
;
6781 gcc_assert (old_var
->in_changed_variables
);
6782 old_var
->in_changed_variables
= false;
6783 old_cur_loc_changed
= old_var
->cur_loc_changed
;
6784 variable_htab_free (*slot
);
6786 if (set
&& var
->n_var_parts
== 0)
6790 empty_var
= (variable
) pool_alloc (dv_pool (var
->dv
));
6791 empty_var
->dv
= var
->dv
;
6792 empty_var
->refcount
= 1;
6793 empty_var
->n_var_parts
= 0;
6794 empty_var
->cur_loc_changed
= true;
6795 empty_var
->in_changed_variables
= true;
6802 var
->in_changed_variables
= true;
6803 /* If within processing one uop a variable is deleted
6804 and then readded, we need to assume it has changed. */
6805 if (old_cur_loc_changed
)
6806 var
->cur_loc_changed
= true;
6813 if (var
->n_var_parts
== 0)
6818 slot
= shared_hash_find_slot_noinsert (set
->vars
, var
->dv
);
6821 if (shared_hash_shared (set
->vars
))
6822 slot
= shared_hash_find_slot_unshare (&set
->vars
, var
->dv
,
6824 htab_clear_slot (shared_hash_htab (set
->vars
), slot
);
6830 /* Look for the index in VAR->var_part corresponding to OFFSET.
6831 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6832 referenced int will be set to the index that the part has or should
6833 have, if it should be inserted. */
6836 find_variable_location_part (variable var
, HOST_WIDE_INT offset
,
6837 int *insertion_point
)
6841 /* Find the location part. */
6843 high
= var
->n_var_parts
;
6846 pos
= (low
+ high
) / 2;
6847 if (var
->var_part
[pos
].offset
< offset
)
6854 if (insertion_point
)
6855 *insertion_point
= pos
;
6857 if (pos
< var
->n_var_parts
&& var
->var_part
[pos
].offset
== offset
)
6864 set_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
6865 decl_or_value dv
, HOST_WIDE_INT offset
,
6866 enum var_init_status initialized
, rtx set_src
)
6869 location_chain node
, next
;
6870 location_chain
*nextp
;
6872 bool onepart
= dv_onepart_p (dv
);
6874 gcc_assert (offset
== 0 || !onepart
);
6875 gcc_assert (loc
!= dv_as_opaque (dv
));
6877 var
= (variable
) *slot
;
6879 if (! flag_var_tracking_uninit
)
6880 initialized
= VAR_INIT_STATUS_INITIALIZED
;
6884 /* Create new variable information. */
6885 var
= (variable
) pool_alloc (dv_pool (dv
));
6888 var
->n_var_parts
= 1;
6889 var
->cur_loc_changed
= false;
6890 var
->in_changed_variables
= false;
6891 var
->var_part
[0].offset
= offset
;
6892 var
->var_part
[0].loc_chain
= NULL
;
6893 var
->var_part
[0].cur_loc
= NULL
;
6896 nextp
= &var
->var_part
[0].loc_chain
;
6902 gcc_assert (dv_as_opaque (var
->dv
) == dv_as_opaque (dv
));
6906 if (GET_CODE (loc
) == VALUE
)
6908 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6909 nextp
= &node
->next
)
6910 if (GET_CODE (node
->loc
) == VALUE
)
6912 if (node
->loc
== loc
)
6917 if (canon_value_cmp (node
->loc
, loc
))
6925 else if (REG_P (node
->loc
) || MEM_P (node
->loc
))
6933 else if (REG_P (loc
))
6935 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6936 nextp
= &node
->next
)
6937 if (REG_P (node
->loc
))
6939 if (REGNO (node
->loc
) < REGNO (loc
))
6943 if (REGNO (node
->loc
) == REGNO (loc
))
6956 else if (MEM_P (loc
))
6958 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6959 nextp
= &node
->next
)
6960 if (REG_P (node
->loc
))
6962 else if (MEM_P (node
->loc
))
6964 if ((r
= loc_cmp (XEXP (node
->loc
, 0), XEXP (loc
, 0))) >= 0)
6976 for (nextp
= &var
->var_part
[0].loc_chain
; (node
= *nextp
);
6977 nextp
= &node
->next
)
6978 if ((r
= loc_cmp (node
->loc
, loc
)) >= 0)
6986 if (shared_var_p (var
, set
->vars
))
6988 slot
= unshare_variable (set
, slot
, var
, initialized
);
6989 var
= (variable
)*slot
;
6990 for (nextp
= &var
->var_part
[0].loc_chain
; c
;
6991 nextp
= &(*nextp
)->next
)
6993 gcc_assert ((!node
&& !*nextp
) || node
->loc
== (*nextp
)->loc
);
7000 gcc_assert (dv_as_decl (var
->dv
) == dv_as_decl (dv
));
7002 pos
= find_variable_location_part (var
, offset
, &inspos
);
7006 node
= var
->var_part
[pos
].loc_chain
;
7009 && ((REG_P (node
->loc
) && REG_P (loc
)
7010 && REGNO (node
->loc
) == REGNO (loc
))
7011 || rtx_equal_p (node
->loc
, loc
)))
7013 /* LOC is in the beginning of the chain so we have nothing
7015 if (node
->init
< initialized
)
7016 node
->init
= initialized
;
7017 if (set_src
!= NULL
)
7018 node
->set_src
= set_src
;
7024 /* We have to make a copy of a shared variable. */
7025 if (shared_var_p (var
, set
->vars
))
7027 slot
= unshare_variable (set
, slot
, var
, initialized
);
7028 var
= (variable
)*slot
;
7034 /* We have not found the location part, new one will be created. */
7036 /* We have to make a copy of the shared variable. */
7037 if (shared_var_p (var
, set
->vars
))
7039 slot
= unshare_variable (set
, slot
, var
, initialized
);
7040 var
= (variable
)*slot
;
7043 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7044 thus there are at most MAX_VAR_PARTS different offsets. */
7045 gcc_assert (var
->n_var_parts
< MAX_VAR_PARTS
7046 && (!var
->n_var_parts
|| !dv_onepart_p (var
->dv
)));
7048 /* We have to move the elements of array starting at index
7049 inspos to the next position. */
7050 for (pos
= var
->n_var_parts
; pos
> inspos
; pos
--)
7051 var
->var_part
[pos
] = var
->var_part
[pos
- 1];
7054 var
->var_part
[pos
].offset
= offset
;
7055 var
->var_part
[pos
].loc_chain
= NULL
;
7056 var
->var_part
[pos
].cur_loc
= NULL
;
7059 /* Delete the location from the list. */
7060 nextp
= &var
->var_part
[pos
].loc_chain
;
7061 for (node
= var
->var_part
[pos
].loc_chain
; node
; node
= next
)
7064 if ((REG_P (node
->loc
) && REG_P (loc
)
7065 && REGNO (node
->loc
) == REGNO (loc
))
7066 || rtx_equal_p (node
->loc
, loc
))
7068 /* Save these values, to assign to the new node, before
7069 deleting this one. */
7070 if (node
->init
> initialized
)
7071 initialized
= node
->init
;
7072 if (node
->set_src
!= NULL
&& set_src
== NULL
)
7073 set_src
= node
->set_src
;
7074 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7076 var
->var_part
[pos
].cur_loc
= NULL
;
7077 var
->cur_loc_changed
= true;
7079 pool_free (loc_chain_pool
, node
);
7084 nextp
= &node
->next
;
7087 nextp
= &var
->var_part
[pos
].loc_chain
;
7090 /* Add the location to the beginning. */
7091 node
= (location_chain
) pool_alloc (loc_chain_pool
);
7093 node
->init
= initialized
;
7094 node
->set_src
= set_src
;
7095 node
->next
= *nextp
;
7098 if (onepart
&& emit_notes
)
7099 add_value_chains (var
->dv
, loc
);
7101 /* If no location was emitted do so. */
7102 if (var
->var_part
[pos
].cur_loc
== NULL
)
7103 variable_was_changed (var
, set
);
7108 /* Set the part of variable's location in the dataflow set SET. The
7109 variable part is specified by variable's declaration in DV and
7110 offset OFFSET and the part's location by LOC. IOPT should be
7111 NO_INSERT if the variable is known to be in SET already and the
7112 variable hash table must not be resized, and INSERT otherwise. */
7115 set_variable_part (dataflow_set
*set
, rtx loc
,
7116 decl_or_value dv
, HOST_WIDE_INT offset
,
7117 enum var_init_status initialized
, rtx set_src
,
7118 enum insert_option iopt
)
7122 if (iopt
== NO_INSERT
)
7123 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7126 slot
= shared_hash_find_slot (set
->vars
, dv
);
7128 slot
= shared_hash_find_slot_unshare (&set
->vars
, dv
, iopt
);
7130 set_slot_part (set
, loc
, slot
, dv
, offset
, initialized
, set_src
);
7133 /* Remove all recorded register locations for the given variable part
7134 from dataflow set SET, except for those that are identical to loc.
7135 The variable part is specified by variable's declaration or value
7136 DV and offset OFFSET. */
7139 clobber_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
7140 HOST_WIDE_INT offset
, rtx set_src
)
7142 variable var
= (variable
) *slot
;
7143 int pos
= find_variable_location_part (var
, offset
, NULL
);
7147 location_chain node
, next
;
7149 /* Remove the register locations from the dataflow set. */
7150 next
= var
->var_part
[pos
].loc_chain
;
7151 for (node
= next
; node
; node
= next
)
7154 if (node
->loc
!= loc
7155 && (!flag_var_tracking_uninit
7158 || !rtx_equal_p (set_src
, node
->set_src
)))
7160 if (REG_P (node
->loc
))
7165 /* Remove the variable part from the register's
7166 list, but preserve any other variable parts
7167 that might be regarded as live in that same
7169 anextp
= &set
->regs
[REGNO (node
->loc
)];
7170 for (anode
= *anextp
; anode
; anode
= anext
)
7172 anext
= anode
->next
;
7173 if (dv_as_opaque (anode
->dv
) == dv_as_opaque (var
->dv
)
7174 && anode
->offset
== offset
)
7176 pool_free (attrs_pool
, anode
);
7180 anextp
= &anode
->next
;
7184 slot
= delete_slot_part (set
, node
->loc
, slot
, offset
);
7192 /* Remove all recorded register locations for the given variable part
7193 from dataflow set SET, except for those that are identical to loc.
7194 The variable part is specified by variable's declaration or value
7195 DV and offset OFFSET. */
7198 clobber_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7199 HOST_WIDE_INT offset
, rtx set_src
)
7203 if (!dv_as_opaque (dv
)
7204 || (!dv_is_value_p (dv
) && ! DECL_P (dv_as_decl (dv
))))
7207 slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7211 clobber_slot_part (set
, loc
, slot
, offset
, set_src
);
7214 /* Delete the part of variable's location from dataflow set SET. The
7215 variable part is specified by its SET->vars slot SLOT and offset
7216 OFFSET and the part's location by LOC. */
7219 delete_slot_part (dataflow_set
*set
, rtx loc
, void **slot
,
7220 HOST_WIDE_INT offset
)
7222 variable var
= (variable
) *slot
;
7223 int pos
= find_variable_location_part (var
, offset
, NULL
);
7227 location_chain node
, next
;
7228 location_chain
*nextp
;
7231 if (shared_var_p (var
, set
->vars
))
7233 /* If the variable contains the location part we have to
7234 make a copy of the variable. */
7235 for (node
= var
->var_part
[pos
].loc_chain
; node
;
7238 if ((REG_P (node
->loc
) && REG_P (loc
)
7239 && REGNO (node
->loc
) == REGNO (loc
))
7240 || rtx_equal_p (node
->loc
, loc
))
7242 slot
= unshare_variable (set
, slot
, var
,
7243 VAR_INIT_STATUS_UNKNOWN
);
7244 var
= (variable
)*slot
;
7250 /* Delete the location part. */
7252 nextp
= &var
->var_part
[pos
].loc_chain
;
7253 for (node
= *nextp
; node
; node
= next
)
7256 if ((REG_P (node
->loc
) && REG_P (loc
)
7257 && REGNO (node
->loc
) == REGNO (loc
))
7258 || rtx_equal_p (node
->loc
, loc
))
7260 if (emit_notes
&& pos
== 0 && dv_onepart_p (var
->dv
))
7261 remove_value_chains (var
->dv
, node
->loc
);
7262 /* If we have deleted the location which was last emitted
7263 we have to emit new location so add the variable to set
7264 of changed variables. */
7265 if (var
->var_part
[pos
].cur_loc
== node
->loc
)
7268 var
->var_part
[pos
].cur_loc
= NULL
;
7269 var
->cur_loc_changed
= true;
7271 pool_free (loc_chain_pool
, node
);
7276 nextp
= &node
->next
;
7279 if (var
->var_part
[pos
].loc_chain
== NULL
)
7284 var
->cur_loc_changed
= true;
7285 while (pos
< var
->n_var_parts
)
7287 var
->var_part
[pos
] = var
->var_part
[pos
+ 1];
7292 variable_was_changed (var
, set
);
7298 /* Delete the part of variable's location from dataflow set SET. The
7299 variable part is specified by variable's declaration or value DV
7300 and offset OFFSET and the part's location by LOC. */
7303 delete_variable_part (dataflow_set
*set
, rtx loc
, decl_or_value dv
,
7304 HOST_WIDE_INT offset
)
7306 void **slot
= shared_hash_find_slot_noinsert (set
->vars
, dv
);
7310 delete_slot_part (set
, loc
, slot
, offset
);
7313 /* Structure for passing some other parameters to function
7314 vt_expand_loc_callback. */
7315 struct expand_loc_callback_data
7317 /* The variables and values active at this point. */
7320 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
7321 Non-NULL should be returned if vt_expand_loc would return
7322 non-NULL in that case, NULL otherwise. cur_loc_changed should be
7323 computed and cur_loc recomputed when possible (but just once
7324 per emit_notes_for_changes call). */
7327 /* True if expansion of subexpressions had to recompute some
7328 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
7329 whose cur_loc has been already recomputed during current
7330 emit_notes_for_changes call. */
7331 bool cur_loc_changed
;
7333 /* True if cur_loc should be ignored and any possible location
7335 bool ignore_cur_loc
;
7338 /* Callback for cselib_expand_value, that looks for expressions
7339 holding the value in the var-tracking hash tables. Return X for
7340 standard processing, anything else is to be used as-is. */
7343 vt_expand_loc_callback (rtx x
, bitmap regs
, int max_depth
, void *data
)
7345 struct expand_loc_callback_data
*elcd
7346 = (struct expand_loc_callback_data
*) data
;
7347 bool dummy
= elcd
->dummy
;
7348 bool cur_loc_changed
= elcd
->cur_loc_changed
;
7353 rtx result
, subreg
, xret
;
7355 switch (GET_CODE (x
))
7360 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
7362 vt_expand_loc_callback
, data
))
7368 subreg
= cselib_expand_value_rtx_cb (SUBREG_REG (x
), regs
,
7370 vt_expand_loc_callback
, data
);
7375 result
= simplify_gen_subreg (GET_MODE (x
), subreg
,
7376 GET_MODE (SUBREG_REG (x
)),
7379 /* Invalid SUBREGs are ok in debug info. ??? We could try
7380 alternate expansions for the VALUE as well. */
7382 result
= gen_rtx_raw_SUBREG (GET_MODE (x
), subreg
, SUBREG_BYTE (x
));
7387 dv
= dv_from_decl (DEBUG_EXPR_TREE_DECL (x
));
7392 dv
= dv_from_value (x
);
7400 if (VALUE_RECURSED_INTO (x
))
7403 var
= (variable
) htab_find_with_hash (elcd
->vars
, dv
, dv_htab_hash (dv
));
7407 if (dummy
&& dv_changed_p (dv
))
7408 elcd
->cur_loc_changed
= true;
7412 if (var
->n_var_parts
== 0)
7415 elcd
->cur_loc_changed
= true;
7419 gcc_assert (var
->n_var_parts
== 1);
7421 VALUE_RECURSED_INTO (x
) = true;
7424 if (var
->var_part
[0].cur_loc
&& !elcd
->ignore_cur_loc
)
7428 if (cselib_dummy_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
7430 vt_expand_loc_callback
, data
))
7434 result
= cselib_expand_value_rtx_cb (var
->var_part
[0].cur_loc
, regs
,
7436 vt_expand_loc_callback
, data
);
7438 set_dv_changed (dv
, false);
7439 cur_loc
= var
->var_part
[0].cur_loc
;
7443 if (!result
&& (dv_changed_p (dv
) || elcd
->ignore_cur_loc
))
7445 if (!elcd
->ignore_cur_loc
)
7446 set_dv_changed (dv
, false);
7447 for (loc
= var
->var_part
[0].loc_chain
; loc
; loc
= loc
->next
)
7448 if (loc
->loc
== cur_loc
)
7452 elcd
->cur_loc_changed
= cur_loc_changed
;
7453 if (cselib_dummy_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
7454 vt_expand_loc_callback
,
7463 result
= cselib_expand_value_rtx_cb (loc
->loc
, regs
, max_depth
,
7464 vt_expand_loc_callback
, data
);
7468 if (dummy
&& (result
|| var
->var_part
[0].cur_loc
))
7469 var
->cur_loc_changed
= true;
7470 if (!elcd
->ignore_cur_loc
)
7471 var
->var_part
[0].cur_loc
= loc
? loc
->loc
: NULL_RTX
;
7475 if (var
->cur_loc_changed
)
7476 elcd
->cur_loc_changed
= true;
7477 else if (!result
&& var
->var_part
[0].cur_loc
== NULL_RTX
)
7478 elcd
->cur_loc_changed
= cur_loc_changed
;
7481 VALUE_RECURSED_INTO (x
) = false;
7488 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7492 vt_expand_loc (rtx loc
, htab_t vars
, bool ignore_cur_loc
)
7494 struct expand_loc_callback_data data
;
7496 if (!MAY_HAVE_DEBUG_INSNS
)
7501 data
.cur_loc_changed
= false;
7502 data
.ignore_cur_loc
= ignore_cur_loc
;
7503 loc
= cselib_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
7504 vt_expand_loc_callback
, &data
);
7506 if (loc
&& MEM_P (loc
))
7507 loc
= targetm
.delegitimize_address (loc
);
7511 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7512 would succeed or not, without actually allocating new rtxes. */
7515 vt_expand_loc_dummy (rtx loc
, htab_t vars
, bool *pcur_loc_changed
)
7517 struct expand_loc_callback_data data
;
7520 gcc_assert (MAY_HAVE_DEBUG_INSNS
);
7523 data
.cur_loc_changed
= false;
7524 data
.ignore_cur_loc
= false;
7525 ret
= cselib_dummy_expand_value_rtx_cb (loc
, scratch_regs
, EXPR_DEPTH
,
7526 vt_expand_loc_callback
, &data
);
7527 *pcur_loc_changed
= data
.cur_loc_changed
;
7531 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7532 additional parameters: WHERE specifies whether the note shall be emitted
7533 before or after instruction INSN. */
7536 emit_note_insn_var_location (void **varp
, void *data
)
7538 variable var
= (variable
) *varp
;
7539 rtx insn
= ((emit_note_data
*)data
)->insn
;
7540 enum emit_note_where where
= ((emit_note_data
*)data
)->where
;
7541 htab_t vars
= ((emit_note_data
*)data
)->vars
;
7543 int i
, j
, n_var_parts
;
7545 enum var_init_status initialized
= VAR_INIT_STATUS_UNINITIALIZED
;
7546 HOST_WIDE_INT last_limit
;
7547 tree type_size_unit
;
7548 HOST_WIDE_INT offsets
[MAX_VAR_PARTS
];
7549 rtx loc
[MAX_VAR_PARTS
];
7553 if (dv_is_value_p (var
->dv
))
7554 goto value_or_debug_decl
;
7556 decl
= dv_as_decl (var
->dv
);
7558 if (TREE_CODE (decl
) == DEBUG_EXPR_DECL
)
7559 goto value_or_debug_decl
;
7564 if (!MAY_HAVE_DEBUG_INSNS
)
7566 for (i
= 0; i
< var
->n_var_parts
; i
++)
7567 if (var
->var_part
[i
].cur_loc
== NULL
&& var
->var_part
[i
].loc_chain
)
7569 var
->var_part
[i
].cur_loc
= var
->var_part
[i
].loc_chain
->loc
;
7570 var
->cur_loc_changed
= true;
7572 if (var
->n_var_parts
== 0)
7573 var
->cur_loc_changed
= true;
7575 if (!var
->cur_loc_changed
)
7577 for (i
= 0; i
< var
->n_var_parts
; i
++)
7579 enum machine_mode mode
, wider_mode
;
7582 if (last_limit
< var
->var_part
[i
].offset
)
7587 else if (last_limit
> var
->var_part
[i
].offset
)
7589 offsets
[n_var_parts
] = var
->var_part
[i
].offset
;
7590 if (!var
->var_part
[i
].cur_loc
)
7595 loc2
= vt_expand_loc (var
->var_part
[i
].cur_loc
, vars
, false);
7601 loc
[n_var_parts
] = loc2
;
7602 mode
= GET_MODE (var
->var_part
[i
].cur_loc
);
7603 if (mode
== VOIDmode
&& dv_onepart_p (var
->dv
))
7604 mode
= DECL_MODE (decl
);
7605 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7606 if (var
->var_part
[i
].cur_loc
== lc
->loc
)
7608 initialized
= lc
->init
;
7612 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7614 /* Attempt to merge adjacent registers or memory. */
7615 wider_mode
= GET_MODE_WIDER_MODE (mode
);
7616 for (j
= i
+ 1; j
< var
->n_var_parts
; j
++)
7617 if (last_limit
<= var
->var_part
[j
].offset
)
7619 if (j
< var
->n_var_parts
7620 && wider_mode
!= VOIDmode
7621 && var
->var_part
[j
].cur_loc
7622 && mode
== GET_MODE (var
->var_part
[j
].cur_loc
)
7623 && (REG_P (loc
[n_var_parts
]) || MEM_P (loc
[n_var_parts
]))
7624 && last_limit
== var
->var_part
[j
].offset
7625 && (loc2
= vt_expand_loc (var
->var_part
[j
].cur_loc
, vars
, false))
7626 && GET_CODE (loc
[n_var_parts
]) == GET_CODE (loc2
))
7630 if (REG_P (loc
[n_var_parts
])
7631 && hard_regno_nregs
[REGNO (loc
[n_var_parts
])][mode
] * 2
7632 == hard_regno_nregs
[REGNO (loc
[n_var_parts
])][wider_mode
]
7633 && end_hard_regno (mode
, REGNO (loc
[n_var_parts
]))
7636 if (! WORDS_BIG_ENDIAN
&& ! BYTES_BIG_ENDIAN
)
7637 new_loc
= simplify_subreg (wider_mode
, loc
[n_var_parts
],
7639 else if (WORDS_BIG_ENDIAN
&& BYTES_BIG_ENDIAN
)
7640 new_loc
= simplify_subreg (wider_mode
, loc2
, mode
, 0);
7643 if (!REG_P (new_loc
)
7644 || REGNO (new_loc
) != REGNO (loc
[n_var_parts
]))
7647 REG_ATTRS (new_loc
) = REG_ATTRS (loc
[n_var_parts
]);
7650 else if (MEM_P (loc
[n_var_parts
])
7651 && GET_CODE (XEXP (loc2
, 0)) == PLUS
7652 && REG_P (XEXP (XEXP (loc2
, 0), 0))
7653 && CONST_INT_P (XEXP (XEXP (loc2
, 0), 1)))
7655 if ((REG_P (XEXP (loc
[n_var_parts
], 0))
7656 && rtx_equal_p (XEXP (loc
[n_var_parts
], 0),
7657 XEXP (XEXP (loc2
, 0), 0))
7658 && INTVAL (XEXP (XEXP (loc2
, 0), 1))
7659 == GET_MODE_SIZE (mode
))
7660 || (GET_CODE (XEXP (loc
[n_var_parts
], 0)) == PLUS
7661 && CONST_INT_P (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7662 && rtx_equal_p (XEXP (XEXP (loc
[n_var_parts
], 0), 0),
7663 XEXP (XEXP (loc2
, 0), 0))
7664 && INTVAL (XEXP (XEXP (loc
[n_var_parts
], 0), 1))
7665 + GET_MODE_SIZE (mode
)
7666 == INTVAL (XEXP (XEXP (loc2
, 0), 1))))
7667 new_loc
= adjust_address_nv (loc
[n_var_parts
],
7673 loc
[n_var_parts
] = new_loc
;
7675 last_limit
= offsets
[n_var_parts
] + GET_MODE_SIZE (mode
);
7681 type_size_unit
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7682 if ((unsigned HOST_WIDE_INT
) last_limit
< TREE_INT_CST_LOW (type_size_unit
))
7685 if (! flag_var_tracking_uninit
)
7686 initialized
= VAR_INIT_STATUS_INITIALIZED
;
7690 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, NULL_RTX
,
7692 else if (n_var_parts
== 1)
7696 if (offsets
[0] || GET_CODE (loc
[0]) == PARALLEL
)
7697 expr_list
= gen_rtx_EXPR_LIST (VOIDmode
, loc
[0], GEN_INT (offsets
[0]));
7701 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
, expr_list
,
7704 else if (n_var_parts
)
7708 for (i
= 0; i
< n_var_parts
; i
++)
7710 = gen_rtx_EXPR_LIST (VOIDmode
, loc
[i
], GEN_INT (offsets
[i
]));
7712 parallel
= gen_rtx_PARALLEL (VOIDmode
,
7713 gen_rtvec_v (n_var_parts
, loc
));
7714 note_vl
= gen_rtx_VAR_LOCATION (VOIDmode
, decl
,
7715 parallel
, (int) initialized
);
7718 if (where
!= EMIT_NOTE_BEFORE_INSN
)
7720 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
7721 if (where
== EMIT_NOTE_AFTER_CALL_INSN
)
7722 NOTE_DURING_CALL_P (note
) = true;
7726 /* Make sure that the call related notes come first. */
7727 while (NEXT_INSN (insn
)
7729 && NOTE_DURING_CALL_P (insn
))
7730 insn
= NEXT_INSN (insn
);
7731 if (NOTE_P (insn
) && NOTE_DURING_CALL_P (insn
))
7732 note
= emit_note_after (NOTE_INSN_VAR_LOCATION
, insn
);
7734 note
= emit_note_before (NOTE_INSN_VAR_LOCATION
, insn
);
7736 NOTE_VAR_LOCATION (note
) = note_vl
;
7739 set_dv_changed (var
->dv
, false);
7740 var
->cur_loc_changed
= false;
7741 gcc_assert (var
->in_changed_variables
);
7742 var
->in_changed_variables
= false;
7743 htab_clear_slot (changed_variables
, varp
);
7745 /* Continue traversing the hash table. */
7748 value_or_debug_decl
:
7749 if (dv_changed_p (var
->dv
) && var
->n_var_parts
)
7752 bool cur_loc_changed
;
7754 if (var
->var_part
[0].cur_loc
7755 && vt_expand_loc_dummy (var
->var_part
[0].cur_loc
, vars
,
7758 for (lc
= var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7759 if (lc
->loc
!= var
->var_part
[0].cur_loc
7760 && vt_expand_loc_dummy (lc
->loc
, vars
, &cur_loc_changed
))
7762 var
->var_part
[0].cur_loc
= lc
? lc
->loc
: NULL_RTX
;
7767 DEF_VEC_P (variable
);
7768 DEF_VEC_ALLOC_P (variable
, heap
);
7770 /* Stack of variable_def pointers that need processing with
7771 check_changed_vars_2. */
7773 static VEC (variable
, heap
) *changed_variables_stack
;
7775 /* VALUEs with no variables that need set_dv_changed (val, false)
7776 called before check_changed_vars_3. */
7778 static VEC (rtx
, heap
) *changed_values_stack
;
7780 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7783 check_changed_vars_0 (decl_or_value dv
, htab_t htab
)
7786 = (value_chain
) htab_find_with_hash (value_chains
, dv
, dv_htab_hash (dv
));
7790 for (vc
= vc
->next
; vc
; vc
= vc
->next
)
7791 if (!dv_changed_p (vc
->dv
))
7794 = (variable
) htab_find_with_hash (htab
, vc
->dv
,
7795 dv_htab_hash (vc
->dv
));
7798 set_dv_changed (vc
->dv
, true);
7799 VEC_safe_push (variable
, heap
, changed_variables_stack
, vcvar
);
7801 else if (dv_is_value_p (vc
->dv
))
7803 set_dv_changed (vc
->dv
, true);
7804 VEC_safe_push (rtx
, heap
, changed_values_stack
,
7805 dv_as_value (vc
->dv
));
7806 check_changed_vars_0 (vc
->dv
, htab
);
7811 /* Populate changed_variables_stack with variable_def pointers
7812 that need variable_was_changed called on them. */
7815 check_changed_vars_1 (void **slot
, void *data
)
7817 variable var
= (variable
) *slot
;
7818 htab_t htab
= (htab_t
) data
;
7820 if (dv_is_value_p (var
->dv
)
7821 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7822 check_changed_vars_0 (var
->dv
, htab
);
7826 /* Add VAR to changed_variables and also for VALUEs add recursively
7827 all DVs that aren't in changed_variables yet but reference the
7828 VALUE from its loc_chain. */
7831 check_changed_vars_2 (variable var
, htab_t htab
)
7833 variable_was_changed (var
, NULL
);
7834 if (dv_is_value_p (var
->dv
)
7835 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7836 check_changed_vars_0 (var
->dv
, htab
);
7839 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7840 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7841 it needs and are also in changed variables) and track whether
7842 cur_loc (or anything it uses to compute location) had to change
7843 during the current emit_notes_for_changes call. */
7846 check_changed_vars_3 (void **slot
, void *data
)
7848 variable var
= (variable
) *slot
;
7849 htab_t vars
= (htab_t
) data
;
7852 bool cur_loc_changed
;
7854 if (dv_is_value_p (var
->dv
)
7855 || TREE_CODE (dv_as_decl (var
->dv
)) == DEBUG_EXPR_DECL
)
7858 for (i
= 0; i
< var
->n_var_parts
; i
++)
7860 if (var
->var_part
[i
].cur_loc
7861 && vt_expand_loc_dummy (var
->var_part
[i
].cur_loc
, vars
,
7864 if (cur_loc_changed
)
7865 var
->cur_loc_changed
= true;
7868 for (lc
= var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7869 if (lc
->loc
!= var
->var_part
[i
].cur_loc
7870 && vt_expand_loc_dummy (lc
->loc
, vars
, &cur_loc_changed
))
7872 if (lc
|| var
->var_part
[i
].cur_loc
)
7873 var
->cur_loc_changed
= true;
7874 var
->var_part
[i
].cur_loc
= lc
? lc
->loc
: NULL_RTX
;
7876 if (var
->n_var_parts
== 0)
7877 var
->cur_loc_changed
= true;
7881 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7882 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7883 shall be emitted before of after instruction INSN. */
7886 emit_notes_for_changes (rtx insn
, enum emit_note_where where
,
7889 emit_note_data data
;
7890 htab_t htab
= shared_hash_htab (vars
);
7892 if (!htab_elements (changed_variables
))
7895 if (MAY_HAVE_DEBUG_INSNS
)
7897 /* Unfortunately this has to be done in two steps, because
7898 we can't traverse a hashtab into which we are inserting
7899 through variable_was_changed. */
7900 htab_traverse (changed_variables
, check_changed_vars_1
, htab
);
7901 while (VEC_length (variable
, changed_variables_stack
) > 0)
7902 check_changed_vars_2 (VEC_pop (variable
, changed_variables_stack
),
7904 while (VEC_length (rtx
, changed_values_stack
) > 0)
7905 set_dv_changed (dv_from_value (VEC_pop (rtx
, changed_values_stack
)),
7907 htab_traverse (changed_variables
, check_changed_vars_3
, htab
);
7914 htab_traverse (changed_variables
, emit_note_insn_var_location
, &data
);
7917 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7918 same variable in hash table DATA or is not there at all. */
7921 emit_notes_for_differences_1 (void **slot
, void *data
)
7923 htab_t new_vars
= (htab_t
) data
;
7924 variable old_var
, new_var
;
7926 old_var
= (variable
) *slot
;
7927 new_var
= (variable
) htab_find_with_hash (new_vars
, old_var
->dv
,
7928 dv_htab_hash (old_var
->dv
));
7932 /* Variable has disappeared. */
7935 empty_var
= (variable
) pool_alloc (dv_pool (old_var
->dv
));
7936 empty_var
->dv
= old_var
->dv
;
7937 empty_var
->refcount
= 0;
7938 empty_var
->n_var_parts
= 0;
7939 empty_var
->cur_loc_changed
= false;
7940 empty_var
->in_changed_variables
= false;
7941 if (dv_onepart_p (old_var
->dv
))
7945 gcc_assert (old_var
->n_var_parts
== 1);
7946 for (lc
= old_var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
7947 remove_value_chains (old_var
->dv
, lc
->loc
);
7949 variable_was_changed (empty_var
, NULL
);
7950 /* Continue traversing the hash table. */
7953 if (variable_different_p (old_var
, new_var
))
7955 if (dv_onepart_p (old_var
->dv
))
7957 location_chain lc1
, lc2
;
7959 gcc_assert (old_var
->n_var_parts
== 1
7960 && new_var
->n_var_parts
== 1);
7961 lc1
= old_var
->var_part
[0].loc_chain
;
7962 lc2
= new_var
->var_part
[0].loc_chain
;
7965 && ((REG_P (lc1
->loc
) && REG_P (lc2
->loc
))
7966 || rtx_equal_p (lc1
->loc
, lc2
->loc
)))
7971 for (; lc2
; lc2
= lc2
->next
)
7972 add_value_chains (old_var
->dv
, lc2
->loc
);
7973 for (; lc1
; lc1
= lc1
->next
)
7974 remove_value_chains (old_var
->dv
, lc1
->loc
);
7976 variable_was_changed (new_var
, NULL
);
7978 /* Update cur_loc. */
7979 if (old_var
!= new_var
)
7982 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
7984 new_var
->var_part
[i
].cur_loc
= NULL
;
7985 if (old_var
->n_var_parts
!= new_var
->n_var_parts
7986 || old_var
->var_part
[i
].offset
!= new_var
->var_part
[i
].offset
)
7987 new_var
->cur_loc_changed
= true;
7988 else if (old_var
->var_part
[i
].cur_loc
!= NULL
)
7991 rtx cur_loc
= old_var
->var_part
[i
].cur_loc
;
7993 for (lc
= new_var
->var_part
[i
].loc_chain
; lc
; lc
= lc
->next
)
7994 if (lc
->loc
== cur_loc
7995 || rtx_equal_p (cur_loc
, lc
->loc
))
7997 new_var
->var_part
[i
].cur_loc
= lc
->loc
;
8001 new_var
->cur_loc_changed
= true;
8006 /* Continue traversing the hash table. */
8010 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8014 emit_notes_for_differences_2 (void **slot
, void *data
)
8016 htab_t old_vars
= (htab_t
) data
;
8017 variable old_var
, new_var
;
8019 new_var
= (variable
) *slot
;
8020 old_var
= (variable
) htab_find_with_hash (old_vars
, new_var
->dv
,
8021 dv_htab_hash (new_var
->dv
));
8025 /* Variable has appeared. */
8026 if (dv_onepart_p (new_var
->dv
))
8030 gcc_assert (new_var
->n_var_parts
== 1);
8031 for (lc
= new_var
->var_part
[0].loc_chain
; lc
; lc
= lc
->next
)
8032 add_value_chains (new_var
->dv
, lc
->loc
);
8034 for (i
= 0; i
< new_var
->n_var_parts
; i
++)
8035 new_var
->var_part
[i
].cur_loc
= NULL
;
8036 variable_was_changed (new_var
, NULL
);
8039 /* Continue traversing the hash table. */
8043 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8047 emit_notes_for_differences (rtx insn
, dataflow_set
*old_set
,
8048 dataflow_set
*new_set
)
8050 htab_traverse (shared_hash_htab (old_set
->vars
),
8051 emit_notes_for_differences_1
,
8052 shared_hash_htab (new_set
->vars
));
8053 htab_traverse (shared_hash_htab (new_set
->vars
),
8054 emit_notes_for_differences_2
,
8055 shared_hash_htab (old_set
->vars
));
8056 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, new_set
->vars
);
8059 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8062 next_non_note_insn_var_location (rtx insn
)
8066 insn
= NEXT_INSN (insn
);
8069 || NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
)
8076 /* Emit the notes for changes of location parts in the basic block BB. */
8079 emit_notes_in_bb (basic_block bb
, dataflow_set
*set
)
8082 micro_operation
*mo
;
8084 dataflow_set_clear (set
);
8085 dataflow_set_copy (set
, &VTI (bb
)->in
);
8087 FOR_EACH_VEC_ELT (micro_operation
, VTI (bb
)->mos
, i
, mo
)
8089 rtx insn
= mo
->insn
;
8090 rtx next_insn
= next_non_note_insn_var_location (insn
);
8095 dataflow_set_clear_at_call (set
);
8096 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_CALL_INSN
, set
->vars
);
8098 rtx arguments
= mo
->u
.loc
, *p
= &arguments
, note
;
8101 XEXP (XEXP (*p
, 0), 1)
8102 = vt_expand_loc (XEXP (XEXP (*p
, 0), 1),
8103 shared_hash_htab (set
->vars
), true);
8104 /* If expansion is successful, keep it in the list. */
8105 if (XEXP (XEXP (*p
, 0), 1))
8107 /* Otherwise, if the following item is data_value for it,
8109 else if (XEXP (*p
, 1)
8110 && REG_P (XEXP (XEXP (*p
, 0), 0))
8111 && MEM_P (XEXP (XEXP (XEXP (*p
, 1), 0), 0))
8112 && REG_P (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0), 0),
8114 && REGNO (XEXP (XEXP (*p
, 0), 0))
8115 == REGNO (XEXP (XEXP (XEXP (XEXP (*p
, 1), 0),
8117 *p
= XEXP (XEXP (*p
, 1), 1);
8118 /* Just drop this item. */
8122 note
= emit_note_after (NOTE_INSN_CALL_ARG_LOCATION
, insn
);
8123 NOTE_VAR_LOCATION (note
) = arguments
;
8129 rtx loc
= mo
->u
.loc
;
8132 var_reg_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
8134 var_mem_set (set
, loc
, VAR_INIT_STATUS_UNINITIALIZED
, NULL
);
8136 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
8142 rtx loc
= mo
->u
.loc
;
8146 if (GET_CODE (loc
) == CONCAT
)
8148 val
= XEXP (loc
, 0);
8149 vloc
= XEXP (loc
, 1);
8157 var
= PAT_VAR_LOCATION_DECL (vloc
);
8159 clobber_variable_part (set
, NULL_RTX
,
8160 dv_from_decl (var
), 0, NULL_RTX
);
8163 if (VAL_NEEDS_RESOLUTION (loc
))
8164 val_resolve (set
, val
, PAT_VAR_LOCATION_LOC (vloc
), insn
);
8165 set_variable_part (set
, val
, dv_from_decl (var
), 0,
8166 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
8169 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc
)))
8170 set_variable_part (set
, PAT_VAR_LOCATION_LOC (vloc
),
8171 dv_from_decl (var
), 0,
8172 VAR_INIT_STATUS_INITIALIZED
, NULL_RTX
,
8175 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
8181 rtx loc
= mo
->u
.loc
;
8182 rtx val
, vloc
, uloc
;
8184 vloc
= uloc
= XEXP (loc
, 1);
8185 val
= XEXP (loc
, 0);
8187 if (GET_CODE (val
) == CONCAT
)
8189 uloc
= XEXP (val
, 1);
8190 val
= XEXP (val
, 0);
8193 if (VAL_NEEDS_RESOLUTION (loc
))
8194 val_resolve (set
, val
, vloc
, insn
);
8196 val_store (set
, val
, uloc
, insn
, false);
8198 if (VAL_HOLDS_TRACK_EXPR (loc
))
8200 if (GET_CODE (uloc
) == REG
)
8201 var_reg_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
8203 else if (GET_CODE (uloc
) == MEM
)
8204 var_mem_set (set
, uloc
, VAR_INIT_STATUS_UNINITIALIZED
,
8208 emit_notes_for_changes (insn
, EMIT_NOTE_BEFORE_INSN
, set
->vars
);
8214 rtx loc
= mo
->u
.loc
;
8215 rtx val
, vloc
, uloc
, reverse
= NULL_RTX
;
8218 if (VAL_EXPR_HAS_REVERSE (loc
))
8220 reverse
= XEXP (loc
, 1);
8221 vloc
= XEXP (loc
, 0);
8223 uloc
= XEXP (vloc
, 1);
8224 val
= XEXP (vloc
, 0);
8227 if (GET_CODE (val
) == CONCAT
)
8229 vloc
= XEXP (val
, 1);
8230 val
= XEXP (val
, 0);
8233 if (GET_CODE (vloc
) == SET
)
8235 rtx vsrc
= SET_SRC (vloc
);
8237 gcc_assert (val
!= vsrc
);
8238 gcc_assert (vloc
== uloc
|| VAL_NEEDS_RESOLUTION (loc
));
8240 vloc
= SET_DEST (vloc
);
8242 if (VAL_NEEDS_RESOLUTION (loc
))
8243 val_resolve (set
, val
, vsrc
, insn
);
8245 else if (VAL_NEEDS_RESOLUTION (loc
))
8247 gcc_assert (GET_CODE (uloc
) == SET
8248 && GET_CODE (SET_SRC (uloc
)) == REG
);
8249 val_resolve (set
, val
, SET_SRC (uloc
), insn
);
8252 if (VAL_HOLDS_TRACK_EXPR (loc
))
8254 if (VAL_EXPR_IS_CLOBBERED (loc
))
8257 var_reg_delete (set
, uloc
, true);
8258 else if (MEM_P (uloc
))
8259 var_mem_delete (set
, uloc
, true);
8263 bool copied_p
= VAL_EXPR_IS_COPIED (loc
);
8265 enum var_init_status status
= VAR_INIT_STATUS_INITIALIZED
;
8267 if (GET_CODE (uloc
) == SET
)
8269 set_src
= SET_SRC (uloc
);
8270 uloc
= SET_DEST (uloc
);
8275 status
= find_src_status (set
, set_src
);
8277 set_src
= find_src_set_src (set
, set_src
);
8281 var_reg_delete_and_set (set
, uloc
, !copied_p
,
8283 else if (MEM_P (uloc
))
8284 var_mem_delete_and_set (set
, uloc
, !copied_p
,
8288 else if (REG_P (uloc
))
8289 var_regno_delete (set
, REGNO (uloc
));
8291 val_store (set
, val
, vloc
, insn
, true);
8294 val_store (set
, XEXP (reverse
, 0), XEXP (reverse
, 1),
8297 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
8304 rtx loc
= mo
->u
.loc
;
8307 if (GET_CODE (loc
) == SET
)
8309 set_src
= SET_SRC (loc
);
8310 loc
= SET_DEST (loc
);
8314 var_reg_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
8317 var_mem_delete_and_set (set
, loc
, true, VAR_INIT_STATUS_INITIALIZED
,
8320 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
8327 rtx loc
= mo
->u
.loc
;
8328 enum var_init_status src_status
;
8331 if (GET_CODE (loc
) == SET
)
8333 set_src
= SET_SRC (loc
);
8334 loc
= SET_DEST (loc
);
8337 src_status
= find_src_status (set
, set_src
);
8338 set_src
= find_src_set_src (set
, set_src
);
8341 var_reg_delete_and_set (set
, loc
, false, src_status
, set_src
);
8343 var_mem_delete_and_set (set
, loc
, false, src_status
, set_src
);
8345 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
8352 rtx loc
= mo
->u
.loc
;
8355 var_reg_delete (set
, loc
, false);
8357 var_mem_delete (set
, loc
, false);
8359 emit_notes_for_changes (insn
, EMIT_NOTE_AFTER_INSN
, set
->vars
);
8365 rtx loc
= mo
->u
.loc
;
8368 var_reg_delete (set
, loc
, true);
8370 var_mem_delete (set
, loc
, true);
8372 emit_notes_for_changes (next_insn
, EMIT_NOTE_BEFORE_INSN
,
8378 set
->stack_adjust
+= mo
->u
.adjust
;
8384 /* Emit notes for the whole function. */
8387 vt_emit_notes (void)
8392 gcc_assert (!htab_elements (changed_variables
));
8394 /* Free memory occupied by the out hash tables, as they aren't used
8397 dataflow_set_clear (&VTI (bb
)->out
);
8399 /* Enable emitting notes by functions (mainly by set_variable_part and
8400 delete_variable_part). */
8403 if (MAY_HAVE_DEBUG_INSNS
)
8408 FOR_EACH_VEC_ELT (rtx
, preserved_values
, i
, val
)
8409 add_cselib_value_chains (dv_from_value (val
));
8410 changed_variables_stack
= VEC_alloc (variable
, heap
, 40);
8411 changed_values_stack
= VEC_alloc (rtx
, heap
, 40);
8414 dataflow_set_init (&cur
);
8418 /* Emit the notes for changes of variable locations between two
8419 subsequent basic blocks. */
8420 emit_notes_for_differences (BB_HEAD (bb
), &cur
, &VTI (bb
)->in
);
8422 /* Emit the notes for the changes in the basic block itself. */
8423 emit_notes_in_bb (bb
, &cur
);
8425 /* Free memory occupied by the in hash table, we won't need it
8427 dataflow_set_clear (&VTI (bb
)->in
);
8429 #ifdef ENABLE_CHECKING
8430 htab_traverse (shared_hash_htab (cur
.vars
),
8431 emit_notes_for_differences_1
,
8432 shared_hash_htab (empty_shared_hash
));
8433 if (MAY_HAVE_DEBUG_INSNS
)
8438 FOR_EACH_VEC_ELT (rtx
, preserved_values
, i
, val
)
8439 remove_cselib_value_chains (dv_from_value (val
));
8440 gcc_assert (htab_elements (value_chains
) == 0);
8443 dataflow_set_destroy (&cur
);
8445 if (MAY_HAVE_DEBUG_INSNS
)
8447 VEC_free (variable
, heap
, changed_variables_stack
);
8448 VEC_free (rtx
, heap
, changed_values_stack
);
8454 /* If there is a declaration and offset associated with register/memory RTL
8455 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8458 vt_get_decl_and_offset (rtx rtl
, tree
*declp
, HOST_WIDE_INT
*offsetp
)
8462 if (REG_ATTRS (rtl
))
8464 *declp
= REG_EXPR (rtl
);
8465 *offsetp
= REG_OFFSET (rtl
);
8469 else if (MEM_P (rtl
))
8471 if (MEM_ATTRS (rtl
))
8473 *declp
= MEM_EXPR (rtl
);
8474 *offsetp
= INT_MEM_OFFSET (rtl
);
8481 /* Helper function for vt_add_function_parameter. RTL is
8482 the expression and VAL corresponding cselib_val pointer
8483 for which ENTRY_VALUE should be created. */
8486 create_entry_value (rtx rtl
, cselib_val
*val
)
8489 struct elt_loc_list
*el
;
8490 el
= (struct elt_loc_list
*) ggc_alloc_cleared_atomic (sizeof (*el
));
8491 el
->next
= val
->locs
;
8492 el
->loc
= gen_rtx_ENTRY_VALUE (GET_MODE (rtl
));
8493 ENTRY_VALUE_EXP (el
->loc
) = rtl
;
8494 el
->setting_insn
= get_insns ();
8496 val2
= cselib_lookup_from_insn (el
->loc
, GET_MODE (rtl
), true,
8497 VOIDmode
, get_insns ());
8501 && rtx_equal_p (val2
->locs
->loc
, el
->loc
))
8503 struct elt_loc_list
*el2
;
8505 preserve_value (val2
);
8506 el2
= (struct elt_loc_list
*) ggc_alloc_cleared_atomic (sizeof (*el2
));
8507 el2
->next
= val2
->locs
;
8508 el2
->loc
= val
->val_rtx
;
8509 el2
->setting_insn
= get_insns ();
8514 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
8517 vt_add_function_parameter (tree parm
)
8519 rtx decl_rtl
= DECL_RTL_IF_SET (parm
);
8520 rtx incoming
= DECL_INCOMING_RTL (parm
);
8522 enum machine_mode mode
;
8523 HOST_WIDE_INT offset
;
8527 if (TREE_CODE (parm
) != PARM_DECL
)
8530 if (!decl_rtl
|| !incoming
)
8533 if (GET_MODE (decl_rtl
) == BLKmode
|| GET_MODE (incoming
) == BLKmode
)
8536 /* If there is a DRAP register, rewrite the incoming location of parameters
8537 passed on the stack into MEMs based on the argument pointer, as the DRAP
8538 register can be reused for other purposes and we do not track locations
8539 based on generic registers. But the prerequisite is that this argument
8540 pointer be also the virtual CFA pointer, see vt_initialize. */
8541 if (MEM_P (incoming
)
8542 && stack_realign_drap
8543 && arg_pointer_rtx
== cfa_base_rtx
8544 && (XEXP (incoming
, 0) == crtl
->args
.internal_arg_pointer
8545 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
8546 && XEXP (XEXP (incoming
, 0), 0)
8547 == crtl
->args
.internal_arg_pointer
8548 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
8550 HOST_WIDE_INT off
= -FIRST_PARM_OFFSET (current_function_decl
);
8551 if (GET_CODE (XEXP (incoming
, 0)) == PLUS
)
8552 off
+= INTVAL (XEXP (XEXP (incoming
, 0), 1));
8554 = replace_equiv_address_nv (incoming
,
8555 plus_constant (arg_pointer_rtx
, off
));
8558 #ifdef HAVE_window_save
8559 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
8560 If the target machine has an explicit window save instruction, the
8561 actual entry value is the corresponding OUTGOING_REGNO instead. */
8562 if (REG_P (incoming
)
8563 && HARD_REGISTER_P (incoming
)
8564 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
8567 = VEC_safe_push (parm_reg_t
, gc
, windowed_parm_regs
, NULL
);
8568 p
->incoming
= incoming
;
8570 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
8571 OUTGOING_REGNO (REGNO (incoming
)), 0);
8572 p
->outgoing
= incoming
;
8574 else if (MEM_P (incoming
)
8575 && REG_P (XEXP (incoming
, 0))
8576 && HARD_REGISTER_P (XEXP (incoming
, 0)))
8578 rtx reg
= XEXP (incoming
, 0);
8579 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
8582 = VEC_safe_push (parm_reg_t
, gc
, windowed_parm_regs
, NULL
);
8584 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
8586 incoming
= replace_equiv_address_nv (incoming
, reg
);
8591 if (!vt_get_decl_and_offset (incoming
, &decl
, &offset
))
8593 if (REG_P (incoming
) || MEM_P (incoming
))
8595 /* This means argument is passed by invisible reference. */
8598 incoming
= gen_rtx_MEM (GET_MODE (decl_rtl
), incoming
);
8602 if (!vt_get_decl_and_offset (decl_rtl
, &decl
, &offset
))
8604 offset
+= byte_lowpart_offset (GET_MODE (incoming
),
8605 GET_MODE (decl_rtl
));
8614 /* Assume that DECL_RTL was a pseudo that got spilled to
8615 memory. The spill slot sharing code will force the
8616 memory to reference spill_slot_decl (%sfp), so we don't
8617 match above. That's ok, the pseudo must have referenced
8618 the entire parameter, so just reset OFFSET. */
8619 gcc_assert (decl
== get_spill_slot_decl (false));
8623 if (!track_loc_p (incoming
, parm
, offset
, false, &mode
, &offset
))
8626 out
= &VTI (ENTRY_BLOCK_PTR
)->out
;
8628 dv
= dv_from_decl (parm
);
8630 if (target_for_debug_bind (parm
)
8631 /* We can't deal with these right now, because this kind of
8632 variable is single-part. ??? We could handle parallels
8633 that describe multiple locations for the same single
8634 value, but ATM we don't. */
8635 && GET_CODE (incoming
) != PARALLEL
)
8639 /* ??? We shouldn't ever hit this, but it may happen because
8640 arguments passed by invisible reference aren't dealt with
8641 above: incoming-rtl will have Pmode rather than the
8642 expected mode for the type. */
8646 val
= cselib_lookup_from_insn (var_lowpart (mode
, incoming
), mode
, true,
8647 VOIDmode
, get_insns ());
8649 /* ??? Float-typed values in memory are not handled by
8653 preserve_value (val
);
8654 set_variable_part (out
, val
->val_rtx
, dv
, offset
,
8655 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
8656 dv
= dv_from_value (val
->val_rtx
);
8660 if (REG_P (incoming
))
8662 incoming
= var_lowpart (mode
, incoming
);
8663 gcc_assert (REGNO (incoming
) < FIRST_PSEUDO_REGISTER
);
8664 attrs_list_insert (&out
->regs
[REGNO (incoming
)], dv
, offset
,
8666 set_variable_part (out
, incoming
, dv
, offset
,
8667 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
8668 if (dv_is_value_p (dv
))
8670 cselib_val
*val
= CSELIB_VAL_PTR (dv_as_value (dv
));
8671 create_entry_value (incoming
, val
);
8672 if (TREE_CODE (TREE_TYPE (parm
)) == REFERENCE_TYPE
8673 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm
))))
8675 enum machine_mode indmode
8676 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm
)));
8677 rtx mem
= gen_rtx_MEM (indmode
, incoming
);
8678 val
= cselib_lookup_from_insn (mem
, indmode
, true,
8679 VOIDmode
, get_insns ());
8682 preserve_value (val
);
8683 create_entry_value (mem
, val
);
8688 else if (MEM_P (incoming
))
8690 incoming
= var_lowpart (mode
, incoming
);
8691 set_variable_part (out
, incoming
, dv
, offset
,
8692 VAR_INIT_STATUS_INITIALIZED
, NULL
, INSERT
);
8696 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
8699 vt_add_function_parameters (void)
8703 for (parm
= DECL_ARGUMENTS (current_function_decl
);
8704 parm
; parm
= DECL_CHAIN (parm
))
8705 vt_add_function_parameter (parm
);
8707 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl
)))
8709 tree vexpr
= DECL_VALUE_EXPR (DECL_RESULT (current_function_decl
));
8711 if (TREE_CODE (vexpr
) == INDIRECT_REF
)
8712 vexpr
= TREE_OPERAND (vexpr
, 0);
8714 if (TREE_CODE (vexpr
) == PARM_DECL
8715 && DECL_ARTIFICIAL (vexpr
)
8716 && !DECL_IGNORED_P (vexpr
)
8717 && DECL_NAMELESS (vexpr
))
8718 vt_add_function_parameter (vexpr
);
8722 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8725 fp_setter (rtx insn
)
8727 rtx pat
= PATTERN (insn
);
8728 if (RTX_FRAME_RELATED_P (insn
))
8730 rtx expr
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, NULL_RTX
);
8732 pat
= XEXP (expr
, 0);
8734 if (GET_CODE (pat
) == SET
)
8735 return SET_DEST (pat
) == hard_frame_pointer_rtx
;
8736 else if (GET_CODE (pat
) == PARALLEL
)
8739 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
8740 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
8741 && SET_DEST (XVECEXP (pat
, 0, i
)) == hard_frame_pointer_rtx
)
8747 /* Gather all registers used for passing arguments to other functions
8748 called from the current routine. */
8751 note_register_arguments (rtx insn
)
8755 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
8756 if (GET_CODE (XEXP (link
, 0)) == USE
)
8758 x
= XEXP (XEXP (link
, 0), 0);
8759 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
8760 SET_HARD_REG_BIT (argument_reg_set
, REGNO (x
));
8764 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8765 ensure it isn't flushed during cselib_reset_table.
8766 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8767 has been eliminated. */
8770 vt_init_cfa_base (void)
8774 #ifdef FRAME_POINTER_CFA_OFFSET
8775 cfa_base_rtx
= frame_pointer_rtx
;
8776 cfa_base_offset
= -FRAME_POINTER_CFA_OFFSET (current_function_decl
);
8778 cfa_base_rtx
= arg_pointer_rtx
;
8779 cfa_base_offset
= -ARG_POINTER_CFA_OFFSET (current_function_decl
);
8781 if (cfa_base_rtx
== hard_frame_pointer_rtx
8782 || !fixed_regs
[REGNO (cfa_base_rtx
)])
8784 cfa_base_rtx
= NULL_RTX
;
8787 if (!MAY_HAVE_DEBUG_INSNS
)
8790 /* Tell alias analysis that cfa_base_rtx should share
8791 find_base_term value with stack pointer or hard frame pointer. */
8792 if (!frame_pointer_needed
)
8793 vt_equate_reg_base_value (cfa_base_rtx
, stack_pointer_rtx
);
8794 else if (!crtl
->stack_realign_tried
)
8795 vt_equate_reg_base_value (cfa_base_rtx
, hard_frame_pointer_rtx
);
8797 val
= cselib_lookup_from_insn (cfa_base_rtx
, GET_MODE (cfa_base_rtx
), 1,
8798 VOIDmode
, get_insns ());
8799 preserve_value (val
);
8800 cselib_preserve_cfa_base_value (val
, REGNO (cfa_base_rtx
));
8801 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR
)->out
, cfa_base_rtx
,
8802 VAR_INIT_STATUS_INITIALIZED
, dv_from_value (val
->val_rtx
),
8803 0, NULL_RTX
, INSERT
);
8806 /* Allocate and initialize the data structures for variable tracking
8807 and parse the RTL to get the micro operations. */
8810 vt_initialize (void)
8812 basic_block bb
, prologue_bb
= single_succ (ENTRY_BLOCK_PTR
);
8813 HOST_WIDE_INT fp_cfa_offset
= -1;
8815 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def
));
8817 attrs_pool
= create_alloc_pool ("attrs_def pool",
8818 sizeof (struct attrs_def
), 1024);
8819 var_pool
= create_alloc_pool ("variable_def pool",
8820 sizeof (struct variable_def
)
8821 + (MAX_VAR_PARTS
- 1)
8822 * sizeof (((variable
)NULL
)->var_part
[0]), 64);
8823 loc_chain_pool
= create_alloc_pool ("location_chain_def pool",
8824 sizeof (struct location_chain_def
),
8826 shared_hash_pool
= create_alloc_pool ("shared_hash_def pool",
8827 sizeof (struct shared_hash_def
), 256);
8828 empty_shared_hash
= (shared_hash
) pool_alloc (shared_hash_pool
);
8829 empty_shared_hash
->refcount
= 1;
8830 empty_shared_hash
->htab
8831 = htab_create (1, variable_htab_hash
, variable_htab_eq
,
8832 variable_htab_free
);
8833 changed_variables
= htab_create (10, variable_htab_hash
, variable_htab_eq
,
8834 variable_htab_free
);
8835 if (MAY_HAVE_DEBUG_INSNS
)
8837 value_chain_pool
= create_alloc_pool ("value_chain_def pool",
8838 sizeof (struct value_chain_def
),
8840 value_chains
= htab_create (32, value_chain_htab_hash
,
8841 value_chain_htab_eq
, NULL
);
8844 /* Init the IN and OUT sets. */
8847 VTI (bb
)->visited
= false;
8848 VTI (bb
)->flooded
= false;
8849 dataflow_set_init (&VTI (bb
)->in
);
8850 dataflow_set_init (&VTI (bb
)->out
);
8851 VTI (bb
)->permp
= NULL
;
8854 if (MAY_HAVE_DEBUG_INSNS
)
8856 cselib_init (CSELIB_RECORD_MEMORY
| CSELIB_PRESERVE_CONSTANTS
);
8857 scratch_regs
= BITMAP_ALLOC (NULL
);
8858 valvar_pool
= create_alloc_pool ("small variable_def pool",
8859 sizeof (struct variable_def
), 256);
8860 preserved_values
= VEC_alloc (rtx
, heap
, 256);
8864 scratch_regs
= NULL
;
8868 CLEAR_HARD_REG_SET (argument_reg_set
);
8870 /* In order to factor out the adjustments made to the stack pointer or to
8871 the hard frame pointer and thus be able to use DW_OP_fbreg operations
8872 instead of individual location lists, we're going to rewrite MEMs based
8873 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
8874 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
8875 resp. arg_pointer_rtx. We can do this either when there is no frame
8876 pointer in the function and stack adjustments are consistent for all
8877 basic blocks or when there is a frame pointer and no stack realignment.
8878 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
8879 has been eliminated. */
8880 if (!frame_pointer_needed
)
8884 if (!vt_stack_adjustments ())
8887 #ifdef FRAME_POINTER_CFA_OFFSET
8888 reg
= frame_pointer_rtx
;
8890 reg
= arg_pointer_rtx
;
8892 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
8895 if (GET_CODE (elim
) == PLUS
)
8896 elim
= XEXP (elim
, 0);
8897 if (elim
== stack_pointer_rtx
)
8898 vt_init_cfa_base ();
8901 else if (!crtl
->stack_realign_tried
)
8905 #ifdef FRAME_POINTER_CFA_OFFSET
8906 reg
= frame_pointer_rtx
;
8907 fp_cfa_offset
= FRAME_POINTER_CFA_OFFSET (current_function_decl
);
8909 reg
= arg_pointer_rtx
;
8910 fp_cfa_offset
= ARG_POINTER_CFA_OFFSET (current_function_decl
);
8912 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
8915 if (GET_CODE (elim
) == PLUS
)
8917 fp_cfa_offset
-= INTVAL (XEXP (elim
, 1));
8918 elim
= XEXP (elim
, 0);
8920 if (elim
!= hard_frame_pointer_rtx
)
8927 /* If the stack is realigned and a DRAP register is used, we're going to
8928 rewrite MEMs based on it representing incoming locations of parameters
8929 passed on the stack into MEMs based on the argument pointer. Although
8930 we aren't going to rewrite other MEMs, we still need to initialize the
8931 virtual CFA pointer in order to ensure that the argument pointer will
8932 be seen as a constant throughout the function.
8934 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
8935 else if (stack_realign_drap
)
8939 #ifdef FRAME_POINTER_CFA_OFFSET
8940 reg
= frame_pointer_rtx
;
8942 reg
= arg_pointer_rtx
;
8944 elim
= eliminate_regs (reg
, VOIDmode
, NULL_RTX
);
8947 if (GET_CODE (elim
) == PLUS
)
8948 elim
= XEXP (elim
, 0);
8949 if (elim
== hard_frame_pointer_rtx
)
8950 vt_init_cfa_base ();
8954 if (frame_pointer_needed
)
8957 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
8959 note_register_arguments (insn
);
8962 hard_frame_pointer_adjustment
= -1;
8964 vt_add_function_parameters ();
8969 HOST_WIDE_INT pre
, post
= 0;
8970 basic_block first_bb
, last_bb
;
8972 if (MAY_HAVE_DEBUG_INSNS
)
8974 cselib_record_sets_hook
= add_with_sets
;
8975 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8976 fprintf (dump_file
, "first value: %i\n",
8977 cselib_get_next_uid ());
8984 if (bb
->next_bb
== EXIT_BLOCK_PTR
8985 || ! single_pred_p (bb
->next_bb
))
8987 e
= find_edge (bb
, bb
->next_bb
);
8988 if (! e
|| (e
->flags
& EDGE_FALLTHRU
) == 0)
8994 /* Add the micro-operations to the vector. */
8995 FOR_BB_BETWEEN (bb
, first_bb
, last_bb
->next_bb
, next_bb
)
8997 HOST_WIDE_INT offset
= VTI (bb
)->out
.stack_adjust
;
8998 VTI (bb
)->out
.stack_adjust
= VTI (bb
)->in
.stack_adjust
;
8999 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
9000 insn
= NEXT_INSN (insn
))
9004 if (!frame_pointer_needed
)
9006 insn_stack_adjust_offset_pre_post (insn
, &pre
, &post
);
9010 mo
.type
= MO_ADJUST
;
9013 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9014 log_op_type (PATTERN (insn
), bb
, insn
,
9015 MO_ADJUST
, dump_file
);
9016 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
,
9018 VTI (bb
)->out
.stack_adjust
+= pre
;
9022 cselib_hook_called
= false;
9023 adjust_insn (bb
, insn
);
9024 if (MAY_HAVE_DEBUG_INSNS
)
9027 prepare_call_arguments (bb
, insn
);
9028 cselib_process_insn (insn
);
9029 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9031 print_rtl_single (dump_file
, insn
);
9032 dump_cselib_table (dump_file
);
9035 if (!cselib_hook_called
)
9036 add_with_sets (insn
, 0, 0);
9039 if (!frame_pointer_needed
&& post
)
9042 mo
.type
= MO_ADJUST
;
9045 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9046 log_op_type (PATTERN (insn
), bb
, insn
,
9047 MO_ADJUST
, dump_file
);
9048 VEC_safe_push (micro_operation
, heap
, VTI (bb
)->mos
,
9050 VTI (bb
)->out
.stack_adjust
+= post
;
9053 if (bb
== prologue_bb
9054 && fp_cfa_offset
!= -1
9055 && hard_frame_pointer_adjustment
== -1
9056 && RTX_FRAME_RELATED_P (insn
)
9057 && fp_setter (insn
))
9059 vt_init_cfa_base ();
9060 hard_frame_pointer_adjustment
= fp_cfa_offset
;
9064 gcc_assert (offset
== VTI (bb
)->out
.stack_adjust
);
9069 if (MAY_HAVE_DEBUG_INSNS
)
9071 cselib_preserve_only_values ();
9072 cselib_reset_table (cselib_get_next_uid ());
9073 cselib_record_sets_hook
= NULL
;
9077 hard_frame_pointer_adjustment
= -1;
9078 VTI (ENTRY_BLOCK_PTR
)->flooded
= true;
9079 cfa_base_rtx
= NULL_RTX
;
9083 /* Get rid of all debug insns from the insn stream. */
9086 delete_debug_insns (void)
9091 if (!MAY_HAVE_DEBUG_INSNS
)
9096 FOR_BB_INSNS_SAFE (bb
, insn
, next
)
9097 if (DEBUG_INSN_P (insn
))
9102 /* Run a fast, BB-local only version of var tracking, to take care of
9103 information that we don't do global analysis on, such that not all
9104 information is lost. If SKIPPED holds, we're skipping the global
9105 pass entirely, so we should try to use information it would have
9106 handled as well.. */
9109 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED
)
9111 /* ??? Just skip it all for now. */
9112 delete_debug_insns ();
9115 /* Free the data structures needed for variable tracking. */
9124 VEC_free (micro_operation
, heap
, VTI (bb
)->mos
);
9129 dataflow_set_destroy (&VTI (bb
)->in
);
9130 dataflow_set_destroy (&VTI (bb
)->out
);
9131 if (VTI (bb
)->permp
)
9133 dataflow_set_destroy (VTI (bb
)->permp
);
9134 XDELETE (VTI (bb
)->permp
);
9137 free_aux_for_blocks ();
9138 htab_delete (empty_shared_hash
->htab
);
9139 htab_delete (changed_variables
);
9140 free_alloc_pool (attrs_pool
);
9141 free_alloc_pool (var_pool
);
9142 free_alloc_pool (loc_chain_pool
);
9143 free_alloc_pool (shared_hash_pool
);
9145 if (MAY_HAVE_DEBUG_INSNS
)
9147 htab_delete (value_chains
);
9148 free_alloc_pool (value_chain_pool
);
9149 free_alloc_pool (valvar_pool
);
9150 VEC_free (rtx
, heap
, preserved_values
);
9152 BITMAP_FREE (scratch_regs
);
9153 scratch_regs
= NULL
;
9156 VEC_free (parm_reg_t
, gc
, windowed_parm_regs
);
9159 XDELETEVEC (vui_vec
);
9164 /* The entry point to variable tracking pass. */
9166 static inline unsigned int
9167 variable_tracking_main_1 (void)
9171 if (flag_var_tracking_assignments
< 0)
9173 delete_debug_insns ();
9177 if (n_basic_blocks
> 500 && n_edges
/ n_basic_blocks
>= 20)
9179 vt_debug_insns_local (true);
9183 mark_dfs_back_edges ();
9184 if (!vt_initialize ())
9187 vt_debug_insns_local (true);
9191 success
= vt_find_locations ();
9193 if (!success
&& flag_var_tracking_assignments
> 0)
9197 delete_debug_insns ();
9199 /* This is later restored by our caller. */
9200 flag_var_tracking_assignments
= 0;
9202 success
= vt_initialize ();
9203 gcc_assert (success
);
9205 success
= vt_find_locations ();
9211 vt_debug_insns_local (false);
9215 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
9217 dump_dataflow_sets ();
9218 dump_flow_info (dump_file
, dump_flags
);
9221 timevar_push (TV_VAR_TRACKING_EMIT
);
9223 timevar_pop (TV_VAR_TRACKING_EMIT
);
9226 vt_debug_insns_local (false);
9231 variable_tracking_main (void)
9234 int save
= flag_var_tracking_assignments
;
9236 ret
= variable_tracking_main_1 ();
9238 flag_var_tracking_assignments
= save
;
9244 gate_handle_var_tracking (void)
9246 return (flag_var_tracking
&& !targetm
.delay_vartrack
);
9251 struct rtl_opt_pass pass_variable_tracking
=
9255 "vartrack", /* name */
9256 gate_handle_var_tracking
, /* gate */
9257 variable_tracking_main
, /* execute */
9260 0, /* static_pass_number */
9261 TV_VAR_TRACKING
, /* tv_id */
9262 0, /* properties_required */
9263 0, /* properties_provided */
9264 0, /* properties_destroyed */
9265 0, /* todo_flags_start */
9266 TODO_verify_rtl_sharing
/* todo_flags_finish */