/* gcc/var-tracking.c — variable tracking pass (GCC).  */
1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < clobber < set < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list of the variable parts
   stored in that register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
54 register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
60 register in CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "tm_p.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
98 #include "flags.h"
99 #include "output.h"
100 #include "insn-config.h"
101 #include "reload.h"
102 #include "sbitmap.h"
103 #include "alloc-pool.h"
104 #include "fibheap.h"
105 #include "hashtab.h"
106 #include "regs.h"
107 #include "expr.h"
108 #include "timevar.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
111 #include "cselib.h"
112 #include "target.h"
113 #include "params.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
117 #include "recog.h"
118 #include "tm_p.h"
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Kinds of micro operation recorded for each basic block.  */
enum micro_operation_type
{
  MO_USE,		/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,	/* Use of a location which is not associated with
			   a variable, or whose variable is untrackable.  */
  MO_VAL_USE,		/* Use of a location associated with a value.  */
  MO_VAL_LOC,		/* Use of a location which appears in a debug insn.  */
  MO_VAL_SET,		/* Set of a location associated with a value.  */
  MO_SET,		/* Set location.  */
  MO_COPY,		/* Copy the same portion of a variable from one
			   location to another.  */
  MO_CLOBBER,		/* Clobber location.  */
  MO_CALL,		/* Call insn.  */
  MO_ADJUST		/* Adjust the stack pointer.  */
};
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name[] = {
147 "MO_USE",
148 "MO_USE_NO_VAR",
149 "MO_VAL_USE",
150 "MO_VAL_LOC",
151 "MO_VAL_SET",
152 "MO_SET",
153 "MO_COPY",
154 "MO_CLOBBER",
155 "MO_CALL",
156 "MO_ADJUST"
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL take effect during the call itself,
   rather than after the call returns.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
169 /* Structure holding information about micro operation. */
170 typedef struct micro_operation_def
172 /* Type of micro operation. */
173 enum micro_operation_type type;
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
179 and MO_CLOBBER. */
180 rtx insn;
182 union {
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
189 rtx loc;
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
193 } u;
194 } micro_operation;
196 DEF_VEC_O(micro_operation);
197 DEF_VEC_ALLOC_O(micro_operation,heap);
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
203 /* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
207 /* The instruction which the note will be emitted before/after. */
208 rtx insn;
210 /* Where the note will be emitted (before/after insn)? */
211 enum emit_note_where where;
213 /* The variables and values active at this point. */
214 htab_t vars;
215 } emit_note_data;
217 /* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 chain is the best data structure. */
221 typedef struct attrs_def
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
226 /* The rtx of register. */
227 rtx loc;
229 /* The declaration corresponding to LOC. */
230 decl_or_value dv;
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
234 } *attrs;
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must be first unshared before modified. */
238 typedef struct shared_hash_def
240 /* Reference count. */
241 int refcount;
243 /* Actual hash table. */
244 htab_t htab;
245 } *shared_hash;
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
256 /* Variable locations. */
257 shared_hash vars;
259 /* Vars that is being traversed. */
260 shared_hash traversed_vars;
261 } dataflow_set;
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
270 /* The IN and OUT set for dataflow analysis. */
271 dataflow_set in;
272 dataflow_set out;
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
278 dataflow_set *permp;
280 /* Has the block been visited in DFS? */
281 bool visited;
283 /* Has the block been flooded in VTA? */
284 bool flooded;
286 } *variable_tracking_info;
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
291 /* Next element in the chain. */
292 struct location_chain_def *next;
294 /* The location (REG, MEM or VALUE). */
295 rtx loc;
297 /* The "value" stored in this location. */
298 rtx set_src;
300 /* Initialized? */
301 enum var_init_status init;
302 } *location_chain;
304 /* A vector of loc_exp_dep holds the active dependencies of a one-part
305 DV on VALUEs, i.e., the VALUEs expanded so as to form the current
306 location of DV. Each entry is also part of VALUE' s linked-list of
307 backlinks back to DV. */
308 typedef struct loc_exp_dep_s
310 /* The dependent DV. */
311 decl_or_value dv;
312 /* The dependency VALUE or DECL_DEBUG. */
313 rtx value;
314 /* The next entry in VALUE's backlinks list. */
315 struct loc_exp_dep_s *next;
316 /* A pointer to the pointer to this entry (head or prev's next) in
317 the doubly-linked list. */
318 struct loc_exp_dep_s **pprev;
319 } loc_exp_dep;
321 DEF_VEC_O (loc_exp_dep);
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
336 /* This data structure is allocated for one-part variables at the time
337 of emitting notes. */
338 struct onepart_aux
340 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
341 computation used the expansion of this variable, and that ought
342 to be notified should this variable change. If the DV's cur_loc
343 expanded to NULL, all components of the loc list are regarded as
344 active, so that any changes in them give us a chance to get a
345 location. Otherwise, only components of the loc that expanded to
346 non-NULL are regarded as active dependencies. */
347 loc_exp_dep *backlinks;
348 /* This holds the LOC that was expanded into cur_loc. We need only
349 mark a one-part variable as changed if the FROM loc is removed,
350 or if it has no known location and a loc is added, or if it gets
351 a change notification from any of its active dependencies. */
352 rtx from;
353 /* The depth of the cur_loc expression. */
354 expand_depth depth;
355 /* Dependencies actively used when expand FROM into cur_loc. */
356 VEC (loc_exp_dep, none) deps;
359 /* Structure describing one part of variable. */
360 typedef struct variable_part_def
362 /* Chain of locations of the part. */
363 location_chain loc_chain;
365 /* Location which was last emitted to location list. */
366 rtx cur_loc;
368 union variable_aux
370 /* The offset in the variable, if !var->onepart. */
371 HOST_WIDE_INT offset;
373 /* Pointer to auxiliary data, if var->onepart and emit_notes. */
374 struct onepart_aux *onepaux;
375 } aux;
376 } variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
395 /* Structure describing where the variable is located. */
396 typedef struct variable_def
398 /* The declaration of the variable, or an RTL value being handled
399 like a declaration. */
400 decl_or_value dv;
402 /* Reference count. */
403 int refcount;
405 /* Number of variable parts. */
406 char n_var_parts;
408 /* What type of DV this is, according to enum onepart_enum. */
409 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
411 /* True if this variable_def struct is currently in the
412 changed_variables hash table. */
413 bool in_changed_variables;
415 /* The variable parts. */
416 variable_part var_part[1];
417 } *variable;
418 typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)  (VAR_LOC_1PAUX (var)		  \
			       ? VAR_LOC_1PAUX (var)->backlinks	  \
			       : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var)     (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)    (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)  (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->deps	  \
			       : NULL)
462 /* Alloc pool for struct attrs_def. */
463 static alloc_pool attrs_pool;
465 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
466 static alloc_pool var_pool;
468 /* Alloc pool for struct variable_def with a single var_part entry. */
469 static alloc_pool valvar_pool;
471 /* Alloc pool for struct location_chain_def. */
472 static alloc_pool loc_chain_pool;
474 /* Alloc pool for struct shared_hash_def. */
475 static alloc_pool shared_hash_pool;
477 /* Changed variables, notes will be emitted for them. */
478 static htab_t changed_variables;
480 /* Shall notes be emitted? */
481 static bool emit_notes;
483 /* Values whose dynamic location lists have gone empty, but whose
484 cselib location lists are still usable. Use this to hold the
485 current location, the backlinks, etc, during emit_notes. */
486 static htab_t dropped_values;
488 /* Empty shared hashtable. */
489 static shared_hash empty_shared_hash;
491 /* Scratch register bitmap used by cselib_expand_value_rtx. */
492 static bitmap scratch_regs = NULL;
494 #ifdef HAVE_window_save
495 typedef struct GTY(()) parm_reg {
496 rtx outgoing;
497 rtx incoming;
498 } parm_reg_t;
500 DEF_VEC_O(parm_reg_t);
501 DEF_VEC_ALLOC_O(parm_reg_t, gc);
503 /* Vector of windowed parameter registers, if any. */
504 static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
505 #endif
507 /* Variable used to tell whether cselib_process_insn called our hook. */
508 static bool cselib_hook_called;
510 /* Local function prototypes. */
511 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
512 HOST_WIDE_INT *);
513 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
514 HOST_WIDE_INT *);
515 static bool vt_stack_adjustments (void);
516 static hashval_t variable_htab_hash (const void *);
517 static int variable_htab_eq (const void *, const void *);
518 static void variable_htab_free (void *);
520 static void init_attrs_list_set (attrs *);
521 static void attrs_list_clear (attrs *);
522 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
523 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
524 static void attrs_list_copy (attrs *, attrs);
525 static void attrs_list_union (attrs *, attrs);
527 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
528 enum var_init_status);
529 static void vars_copy (htab_t, htab_t);
530 static tree var_debug_decl (tree);
531 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
532 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
533 enum var_init_status, rtx);
534 static void var_reg_delete (dataflow_set *, rtx, bool);
535 static void var_regno_delete (dataflow_set *, int);
536 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
537 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
538 enum var_init_status, rtx);
539 static void var_mem_delete (dataflow_set *, rtx, bool);
541 static void dataflow_set_init (dataflow_set *);
542 static void dataflow_set_clear (dataflow_set *);
543 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
544 static int variable_union_info_cmp_pos (const void *, const void *);
545 static void dataflow_set_union (dataflow_set *, dataflow_set *);
546 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
547 static bool canon_value_cmp (rtx, rtx);
548 static int loc_cmp (rtx, rtx);
549 static bool variable_part_different_p (variable_part *, variable_part *);
550 static bool onepart_variable_different_p (variable, variable);
551 static bool variable_different_p (variable, variable);
552 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
553 static void dataflow_set_destroy (dataflow_set *);
555 static bool contains_symbol_ref (rtx);
556 static bool track_expr_p (tree, bool);
557 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
558 static int add_uses (rtx *, void *);
559 static void add_uses_1 (rtx *, void *);
560 static void add_stores (rtx, const_rtx, void *);
561 static bool compute_bb_dataflow (basic_block);
562 static bool vt_find_locations (void);
564 static void dump_attrs_list (attrs);
565 static int dump_var_slot (void **, void *);
566 static void dump_var (variable);
567 static void dump_vars (htab_t);
568 static void dump_dataflow_set (dataflow_set *);
569 static void dump_dataflow_sets (void);
571 static void set_dv_changed (decl_or_value, bool);
572 static void variable_was_changed (variable, dataflow_set *);
573 static void **set_slot_part (dataflow_set *, rtx, void **,
574 decl_or_value, HOST_WIDE_INT,
575 enum var_init_status, rtx);
576 static void set_variable_part (dataflow_set *, rtx,
577 decl_or_value, HOST_WIDE_INT,
578 enum var_init_status, rtx, enum insert_option);
579 static void **clobber_slot_part (dataflow_set *, rtx,
580 void **, HOST_WIDE_INT, rtx);
581 static void clobber_variable_part (dataflow_set *, rtx,
582 decl_or_value, HOST_WIDE_INT, rtx);
583 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
584 static void delete_variable_part (dataflow_set *, rtx,
585 decl_or_value, HOST_WIDE_INT);
586 static int emit_note_insn_var_location (void **, void *);
587 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
588 static int emit_notes_for_differences_1 (void **, void *);
589 static int emit_notes_for_differences_2 (void **, void *);
590 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
591 static void emit_notes_in_bb (basic_block, dataflow_set *);
592 static void vt_emit_notes (void);
594 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
595 static void vt_add_function_parameters (void);
596 static bool vt_initialize (void);
597 static void vt_finalize (void);
599 /* Given a SET, calculate the amount of stack adjustment it contains
600 PRE- and POST-modifying stack pointer.
601 This function is similar to stack_adjust_offset. */
603 static void
604 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
605 HOST_WIDE_INT *post)
607 rtx src = SET_SRC (pattern);
608 rtx dest = SET_DEST (pattern);
609 enum rtx_code code;
611 if (dest == stack_pointer_rtx)
613 /* (set (reg sp) (plus (reg sp) (const_int))) */
614 code = GET_CODE (src);
615 if (! (code == PLUS || code == MINUS)
616 || XEXP (src, 0) != stack_pointer_rtx
617 || !CONST_INT_P (XEXP (src, 1)))
618 return;
620 if (code == MINUS)
621 *post += INTVAL (XEXP (src, 1));
622 else
623 *post -= INTVAL (XEXP (src, 1));
625 else if (MEM_P (dest))
627 /* (set (mem (pre_dec (reg sp))) (foo)) */
628 src = XEXP (dest, 0);
629 code = GET_CODE (src);
631 switch (code)
633 case PRE_MODIFY:
634 case POST_MODIFY:
635 if (XEXP (src, 0) == stack_pointer_rtx)
637 rtx val = XEXP (XEXP (src, 1), 1);
638 /* We handle only adjustments by constant amount. */
639 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
640 CONST_INT_P (val));
642 if (code == PRE_MODIFY)
643 *pre -= INTVAL (val);
644 else
645 *post -= INTVAL (val);
646 break;
648 return;
650 case PRE_DEC:
651 if (XEXP (src, 0) == stack_pointer_rtx)
653 *pre += GET_MODE_SIZE (GET_MODE (dest));
654 break;
656 return;
658 case POST_DEC:
659 if (XEXP (src, 0) == stack_pointer_rtx)
661 *post += GET_MODE_SIZE (GET_MODE (dest));
662 break;
664 return;
666 case PRE_INC:
667 if (XEXP (src, 0) == stack_pointer_rtx)
669 *pre -= GET_MODE_SIZE (GET_MODE (dest));
670 break;
672 return;
674 case POST_INC:
675 if (XEXP (src, 0) == stack_pointer_rtx)
677 *post -= GET_MODE_SIZE (GET_MODE (dest));
678 break;
680 return;
682 default:
683 return;
688 /* Given an INSN, calculate the amount of stack adjustment it contains
689 PRE- and POST-modifying stack pointer. */
691 static void
692 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
693 HOST_WIDE_INT *post)
695 rtx pattern;
697 *pre = 0;
698 *post = 0;
700 pattern = PATTERN (insn);
701 if (RTX_FRAME_RELATED_P (insn))
703 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
704 if (expr)
705 pattern = XEXP (expr, 0);
708 if (GET_CODE (pattern) == SET)
709 stack_adjust_offset_pre_post (pattern, pre, post);
710 else if (GET_CODE (pattern) == PARALLEL
711 || GET_CODE (pattern) == SEQUENCE)
713 int i;
715 /* There may be stack adjustments inside compound insns. Search
716 for them. */
717 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
718 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
719 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
723 /* Compute stack adjustments for all blocks by traversing DFS tree.
724 Return true when the adjustments on all incoming edges are consistent.
725 Heavily borrowed from pre_and_rev_post_order_compute. */
727 static bool
728 vt_stack_adjustments (void)
730 edge_iterator *stack;
731 int sp;
733 /* Initialize entry block. */
734 VTI (ENTRY_BLOCK_PTR)->visited = true;
735 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
736 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
738 /* Allocate stack for back-tracking up CFG. */
739 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
740 sp = 0;
742 /* Push the first edge on to the stack. */
743 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
745 while (sp)
747 edge_iterator ei;
748 basic_block src;
749 basic_block dest;
751 /* Look at the edge on the top of the stack. */
752 ei = stack[sp - 1];
753 src = ei_edge (ei)->src;
754 dest = ei_edge (ei)->dest;
756 /* Check if the edge destination has been visited yet. */
757 if (!VTI (dest)->visited)
759 rtx insn;
760 HOST_WIDE_INT pre, post, offset;
761 VTI (dest)->visited = true;
762 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
764 if (dest != EXIT_BLOCK_PTR)
765 for (insn = BB_HEAD (dest);
766 insn != NEXT_INSN (BB_END (dest));
767 insn = NEXT_INSN (insn))
768 if (INSN_P (insn))
770 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
771 offset += pre + post;
774 VTI (dest)->out.stack_adjust = offset;
776 if (EDGE_COUNT (dest->succs) > 0)
777 /* Since the DEST node has been visited for the first
778 time, check its successors. */
779 stack[sp++] = ei_start (dest->succs);
781 else
783 /* Check whether the adjustments on the edges are the same. */
784 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
786 free (stack);
787 return false;
790 if (! ei_one_before_end_p (ei))
791 /* Go to the next edge. */
792 ei_next (&stack[sp - 1]);
793 else
794 /* Return to previous level if there are no more edges. */
795 sp--;
799 free (stack);
800 return true;
803 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
804 hard_frame_pointer_rtx is being mapped to it and offset for it. */
805 static rtx cfa_base_rtx;
806 static HOST_WIDE_INT cfa_base_offset;
808 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
809 or hard_frame_pointer_rtx. */
811 static inline rtx
812 compute_cfa_pointer (HOST_WIDE_INT adjustment)
814 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
817 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
818 or -1 if the replacement shouldn't be done. */
819 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
821 /* Data for adjust_mems callback. */
823 struct adjust_mem_data
825 bool store;
826 enum machine_mode mem_mode;
827 HOST_WIDE_INT stack_adjust;
828 rtx side_effects;
831 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
832 transformation of wider mode arithmetics to narrower mode,
833 -1 if it is suitable and subexpressions shouldn't be
834 traversed and 0 if it is suitable and subexpressions should
835 be traversed. Called through for_each_rtx. */
837 static int
838 use_narrower_mode_test (rtx *loc, void *data)
840 rtx subreg = (rtx) data;
842 if (CONSTANT_P (*loc))
843 return -1;
844 switch (GET_CODE (*loc))
846 case REG:
847 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
848 return 1;
849 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
850 *loc, subreg_lowpart_offset (GET_MODE (subreg),
851 GET_MODE (*loc))))
852 return 1;
853 return -1;
854 case PLUS:
855 case MINUS:
856 case MULT:
857 return 0;
858 case ASHIFT:
859 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
860 return 1;
861 else
862 return -1;
863 default:
864 return 1;
868 /* Transform X into narrower mode MODE from wider mode WMODE. */
870 static rtx
871 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
873 rtx op0, op1;
874 if (CONSTANT_P (x))
875 return lowpart_subreg (mode, x, wmode);
876 switch (GET_CODE (x))
878 case REG:
879 return lowpart_subreg (mode, x, wmode);
880 case PLUS:
881 case MINUS:
882 case MULT:
883 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
884 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
885 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
886 case ASHIFT:
887 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
888 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
889 default:
890 gcc_unreachable ();
894 /* Helper function for adjusting used MEMs. */
896 static rtx
897 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
899 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
900 rtx mem, addr = loc, tem;
901 enum machine_mode mem_mode_save;
902 bool store_save;
903 switch (GET_CODE (loc))
905 case REG:
906 /* Don't do any sp or fp replacements outside of MEM addresses
907 on the LHS. */
908 if (amd->mem_mode == VOIDmode && amd->store)
909 return loc;
910 if (loc == stack_pointer_rtx
911 && !frame_pointer_needed
912 && cfa_base_rtx)
913 return compute_cfa_pointer (amd->stack_adjust);
914 else if (loc == hard_frame_pointer_rtx
915 && frame_pointer_needed
916 && hard_frame_pointer_adjustment != -1
917 && cfa_base_rtx)
918 return compute_cfa_pointer (hard_frame_pointer_adjustment);
919 gcc_checking_assert (loc != virtual_incoming_args_rtx);
920 return loc;
921 case MEM:
922 mem = loc;
923 if (!amd->store)
925 mem = targetm.delegitimize_address (mem);
926 if (mem != loc && !MEM_P (mem))
927 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
930 addr = XEXP (mem, 0);
931 mem_mode_save = amd->mem_mode;
932 amd->mem_mode = GET_MODE (mem);
933 store_save = amd->store;
934 amd->store = false;
935 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
936 amd->store = store_save;
937 amd->mem_mode = mem_mode_save;
938 if (mem == loc)
939 addr = targetm.delegitimize_address (addr);
940 if (addr != XEXP (mem, 0))
941 mem = replace_equiv_address_nv (mem, addr);
942 if (!amd->store)
943 mem = avoid_constant_pool_reference (mem);
944 return mem;
945 case PRE_INC:
946 case PRE_DEC:
947 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
948 GEN_INT (GET_CODE (loc) == PRE_INC
949 ? GET_MODE_SIZE (amd->mem_mode)
950 : -GET_MODE_SIZE (amd->mem_mode)));
951 case POST_INC:
952 case POST_DEC:
953 if (addr == loc)
954 addr = XEXP (loc, 0);
955 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
956 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
957 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
958 GEN_INT ((GET_CODE (loc) == PRE_INC
959 || GET_CODE (loc) == POST_INC)
960 ? GET_MODE_SIZE (amd->mem_mode)
961 : -GET_MODE_SIZE (amd->mem_mode)));
962 amd->side_effects = alloc_EXPR_LIST (0,
963 gen_rtx_SET (VOIDmode,
964 XEXP (loc, 0),
965 tem),
966 amd->side_effects);
967 return addr;
968 case PRE_MODIFY:
969 addr = XEXP (loc, 1);
970 case POST_MODIFY:
971 if (addr == loc)
972 addr = XEXP (loc, 0);
973 gcc_assert (amd->mem_mode != VOIDmode);
974 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
975 amd->side_effects = alloc_EXPR_LIST (0,
976 gen_rtx_SET (VOIDmode,
977 XEXP (loc, 0),
978 XEXP (loc, 1)),
979 amd->side_effects);
980 return addr;
981 case SUBREG:
982 /* First try without delegitimization of whole MEMs and
983 avoid_constant_pool_reference, which is more likely to succeed. */
984 store_save = amd->store;
985 amd->store = true;
986 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
987 data);
988 amd->store = store_save;
989 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
990 if (mem == SUBREG_REG (loc))
992 tem = loc;
993 goto finish_subreg;
995 tem = simplify_gen_subreg (GET_MODE (loc), mem,
996 GET_MODE (SUBREG_REG (loc)),
997 SUBREG_BYTE (loc));
998 if (tem)
999 goto finish_subreg;
1000 tem = simplify_gen_subreg (GET_MODE (loc), addr,
1001 GET_MODE (SUBREG_REG (loc)),
1002 SUBREG_BYTE (loc));
1003 if (tem == NULL_RTX)
1004 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
1005 finish_subreg:
1006 if (MAY_HAVE_DEBUG_INSNS
1007 && GET_CODE (tem) == SUBREG
1008 && (GET_CODE (SUBREG_REG (tem)) == PLUS
1009 || GET_CODE (SUBREG_REG (tem)) == MINUS
1010 || GET_CODE (SUBREG_REG (tem)) == MULT
1011 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
1012 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
1013 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
1014 && GET_MODE_SIZE (GET_MODE (tem))
1015 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
1016 && subreg_lowpart_p (tem)
1017 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
1018 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
1019 GET_MODE (SUBREG_REG (tem)));
1020 return tem;
1021 case ASM_OPERANDS:
1022 /* Don't do any replacements in second and following
1023 ASM_OPERANDS of inline-asm with multiple sets.
1024 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
1025 and ASM_OPERANDS_LABEL_VEC need to be equal between
1026 all the ASM_OPERANDs in the insn and adjust_insn will
1027 fix this up. */
1028 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
1029 return loc;
1030 break;
1031 default:
1032 break;
1034 return NULL_RTX;
1037 /* Helper function for replacement of uses.  */
/* note_uses callback: rewrite the use *X through adjust_mems (DATA is a
   struct adjust_mem_data *).  Any change is queued with an in-group
   validate_change so all edits to the insn are applied atomically later.  */
1039 static void
1040 adjust_mem_uses (rtx *x, void *data)
1042 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
1043 if (new_x != *x)
1044 validate_change (NULL_RTX, x, new_x, true);
1047 /* Helper function for replacement of stores.  */
/* note_stores callback: when the store destination LOC is a MEM, rewrite
   the SET_DEST of EXPR through adjust_mems and queue the replacement
   (in-group validate_change).  Non-MEM destinations need no adjustment
   here.  DATA is a struct adjust_mem_data *.  */
1049 static void
1050 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
1052 if (MEM_P (loc))
1054 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
1055 adjust_mems, data);
1056 if (new_dest != SET_DEST (expr))
/* EXPR arrives const from note_stores; cast it away so validate_change
   can record the pending replacement in place.  */
1058 rtx xexpr = CONST_CAST_RTX (expr);
1059 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
1064 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
1065 replace them with their value in the insn and add the side-effects
1066 as other sets to the insn. */
1068 static void
1069 adjust_insn (basic_block bb, rtx insn)
1071 struct adjust_mem_data amd;
1072 rtx set;
1074 #ifdef HAVE_window_save
1075 /* If the target machine has an explicit window save instruction, the
1076 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
1077 if (RTX_FRAME_RELATED_P (insn)
1078 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
1080 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
/* Replace the window-save pattern with a PARALLEL pairing, for each
   windowed parameter register, a SET incoming <- outgoing and a CLOBBER
   of the raw outgoing register.  */
1081 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
1082 parm_reg_t *p;
1084 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
1086 XVECEXP (rtl, 0, i * 2)
1087 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
1088 /* Do not clobber the attached DECL, but only the REG. */
1089 XVECEXP (rtl, 0, i * 2 + 1)
1090 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1091 gen_raw_REG (GET_MODE (p->outgoing),
1092 REGNO (p->outgoing)));
1095 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1096 return;
1098 #endif
/* Phase 1: walk stores then uses, rewriting auto-inc/dec addresses and
   stack-pointer-relative addresses via adjust_mems.  The stack offset is
   taken relative to this block's outgoing adjustment.  */
1100 amd.mem_mode = VOIDmode;
1101 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1102 amd.side_effects = NULL_RTX;
1104 amd.store = true;
1105 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1107 amd.store = false;
1108 if (GET_CODE (PATTERN (insn)) == PARALLEL
1109 && asm_noperands (PATTERN (insn)) > 0
1110 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1112 rtx body, set0;
1113 int i;
1115 /* inline-asm with multiple sets is tiny bit more complicated,
1116 because the 3 vectors in ASM_OPERANDS need to be shared between
1117 all ASM_OPERANDS in the instruction. adjust_mems will
1118 not touch ASM_OPERANDS other than the first one, asm_noperands
1119 test above needs to be called before that (otherwise it would fail)
1120 and afterwards this code fixes it up. */
1121 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1122 body = PATTERN (insn);
1123 set0 = XVECEXP (body, 0, 0);
1124 gcc_checking_assert (GET_CODE (set0) == SET
1125 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1126 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1127 for (i = 1; i < XVECLEN (body, 0); i++)
1128 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1129 break;
1130 else
1132 set = XVECEXP (body, 0, i);
1133 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1134 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1135 == i);
1136 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1137 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1138 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1139 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1140 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1141 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
/* Re-share the three operand vectors from the first ASM_OPERANDS so
   all ASM_OPERANDS in the insn stay pointer-identical; copy the rtx
   shallowly before overwriting its vector fields.  */
1143 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1144 ASM_OPERANDS_INPUT_VEC (newsrc)
1145 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1146 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1147 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1148 ASM_OPERANDS_LABEL_VEC (newsrc)
1149 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1150 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1154 else
1155 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1157 /* For read-only MEMs containing some constant, prefer those
1158 constants. */
1159 set = single_set (insn);
1160 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1162 rtx note = find_reg_equal_equiv_note (insn);
1164 if (note && CONSTANT_P (XEXP (note, 0)))
1165 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
/* Phase 2: auto-inc/dec removal recorded explicit SETs of the modified
   registers in amd.side_effects; append them to the pattern, wrapping
   everything in a (possibly enlarged) PARALLEL.  */
1168 if (amd.side_effects)
1170 rtx *pat, new_pat, s;
1171 int i, oldn, newn;
1173 pat = &PATTERN (insn);
1174 if (GET_CODE (*pat) == COND_EXEC)
1175 pat = &COND_EXEC_CODE (*pat);
1176 if (GET_CODE (*pat) == PARALLEL)
1177 oldn = XVECLEN (*pat, 0);
1178 else
1179 oldn = 1;
/* Count the pending side-effect list.  */
1180 for (s = amd.side_effects, newn = 0; s; newn++)
1181 s = XEXP (s, 1);
1182 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1183 if (GET_CODE (*pat) == PARALLEL)
1184 for (i = 0; i < oldn; i++)
1185 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1186 else
1187 XVECEXP (new_pat, 0, 0) = *pat;
1188 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1189 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1190 free_EXPR_LIST_list (&amd.side_effects);
1191 validate_change (NULL_RTX, pat, new_pat, true);
1195 /* Return true if a decl_or_value DV is a DECL or NULL. */
/* decl_or_value is a punned pointer; both trees and rtxes start with a
   code field, so reading TREE_CODE of the punned pointer and comparing
   against the rtx code VALUE distinguishes the two (presumably the
   enumerators are arranged not to collide — inherited invariant).  */
1196 static inline bool
1197 dv_is_decl_p (decl_or_value dv)
1199 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1202 /* Return true if a decl_or_value is a VALUE rtl.  */
/* Complement of dv_is_decl_p; NULL is classified as a decl, not a value.  */
1203 static inline bool
1204 dv_is_value_p (decl_or_value dv)
1206 return dv && !dv_is_decl_p (dv);
1209 /* Return the decl in the decl_or_value.  Checked cast: asserts DV
1210 really holds a decl (or NULL).  */
1211 static inline tree
1212 dv_as_decl (decl_or_value dv)
1213 gcc_checking_assert (dv_is_decl_p (dv));
1214 return (tree) dv;
1217 /* Return the value in the decl_or_value.  Checked cast: asserts DV
1218 really holds a VALUE rtx.  */
1219 static inline rtx
1220 dv_as_value (decl_or_value dv)
1221 gcc_checking_assert (dv_is_value_p (dv));
1222 return (rtx)dv;
1225 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
/* Only valid for VALUE dvs and DEBUG_EXPR_DECL dvs; other decls have no
   canonical rtx and trip the assert below.  */
1226 static inline rtx
1227 dv_as_rtx (decl_or_value dv)
1229 tree decl;
1231 if (dv_is_value_p (dv))
1232 return dv_as_value (dv);
1234 decl = dv_as_decl (dv);
1236 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
1237 return DECL_RTL_KNOWN_SET (decl);
1240 /* Return the opaque pointer in the decl_or_value.  Used for identity
1241 comparisons without caring whether DV is a decl or a value.  */
1242 static inline void *
1243 dv_as_opaque (decl_or_value dv)
1244 return dv;
1247 /* Return nonzero if a decl_or_value must not have more than one
1248 variable part. The returned value discriminates among various
1249 kinds of one-part DVs according to enum onepart_enum. */
1250 static inline onepart_enum_t
1251 dv_onepart_p (decl_or_value dv)
1253 tree decl;
/* Without debug insns nothing is tracked as one-part.  */
1255 if (!MAY_HAVE_DEBUG_INSNS)
1256 return NOT_ONEPART;
1258 if (dv_is_value_p (dv))
1259 return ONEPART_VALUE;
1261 decl = dv_as_decl (dv);
1263 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1264 return ONEPART_DEXPR;
/* Decls that debug-bind can target are tracked as a single part too.  */
1266 if (target_for_debug_bind (decl) != NULL_TREE)
1267 return ONEPART_VDECL;
1269 return NOT_ONEPART;
1272 /* Return the variable pool to be used for a dv of type ONEPART.
1273 One-part variables live in valvar_pool, multi-part ones in var_pool
1274 (any nonzero onepart_enum_t selects valvar_pool).  */
1275 static inline alloc_pool
1276 onepart_pool (onepart_enum_t onepart)
1277 return onepart ? valvar_pool : var_pool;
1279 /* Build a decl_or_value out of a decl.  Checked: asserts the pointer
1280 classifies back as a decl.  */
1281 static inline decl_or_value
1282 dv_from_decl (tree decl)
1283 decl_or_value dv;
1284 dv = decl;
1285 gcc_checking_assert (dv_is_decl_p (dv));
1286 return dv;
1289 /* Build a decl_or_value out of a value.  Checked: asserts the pointer
1290 classifies back as a VALUE.  */
1291 static inline decl_or_value
1292 dv_from_value (rtx value)
1293 decl_or_value dv;
1294 dv = value;
1295 gcc_checking_assert (dv_is_value_p (dv));
1296 return dv;
1299 /* Return a value or the decl of a debug_expr as a decl_or_value. */
/* Inverse of dv_as_rtx; accepts only DEBUG_EXPR and VALUE rtxes.  */
1300 static inline decl_or_value
1301 dv_from_rtx (rtx x)
1303 decl_or_value dv;
1305 switch (GET_CODE (x))
1307 case DEBUG_EXPR:
1308 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
/* The decl's known RTL must round-trip back to X.  */
1309 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
1310 break;
1312 case VALUE:
1313 dv = dv_from_value (x);
1314 break;
1316 default:
1317 gcc_unreachable ();
1320 return dv;
1323 extern void debug_dv (decl_or_value dv);
1325 DEBUG_FUNCTION void
/* Dump DV to stderr for use from the debugger: as an rtx if it is a
   VALUE, otherwise as a generic tree statement.  */
1326 debug_dv (decl_or_value dv)
1328 if (dv_is_value_p (dv))
1329 debug_rtx (dv_as_value (dv));
1330 else
1331 debug_generic_stmt (dv_as_decl (dv));
1334 typedef unsigned int dvuid;
1336 /* Return the uid of DV.  For a VALUE this is the cselib uid, for a
1337 decl the DECL_UID; both are stable across a function.  */
1338 static inline dvuid
1339 dv_uid (decl_or_value dv)
1341 if (dv_is_value_p (dv))
1342 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1343 else
1344 return DECL_UID (dv_as_decl (dv));
1347 /* Compute the hash from the uid.  The uid is used directly as the
1348 hash value.  */
1349 static inline hashval_t
1350 dv_uid2hash (dvuid uid)
1352 return uid;
1355 /* The hash function for a mask table in a shared_htab chain.
1356 Hashes DV by its uid.  */
1357 static inline hashval_t
1358 dv_htab_hash (decl_or_value dv)
1360 return dv_uid2hash (dv_uid (dv))
1363 /* The hash function for variable_htab, computes the hash value
1364 from the declaration of variable X. */
/* htab callback: X is a struct variable_def *.  */
1366 static hashval_t
1367 variable_htab_hash (const void *x)
1369 const_variable const v = (const_variable) x;
1371 return dv_htab_hash (v->dv);
1374 /* Compare the declaration of variable X with declaration Y. */
/* htab callback: X is a stored variable, Y is the lookup key (a bare
   decl_or_value).  Equality is pointer identity on the dv.  */
1376 static int
1377 variable_htab_eq (const void *x, const void *y)
1379 const_variable const v = (const_variable) x;
1380 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1382 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1385 static void loc_exp_dep_clear (variable var);
1387 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
/* Reference-counted: only releases the location chains, one-part
   auxiliary data and the variable itself when the last reference
   drops.  */
1389 static void
1390 variable_htab_free (void *elem)
1392 int i;
1393 variable var = (variable) elem;
1394 location_chain node, next;
1396 gcc_checking_assert (var->refcount > 0);
1398 var->refcount--;
1399 if (var->refcount > 0)
1400 return;
/* Last reference: free every part's location chain.  */
1402 for (i = 0; i < var->n_var_parts; i++)
1404 for (node = var->var_part[i].loc_chain; node; node = next)
1406 next = node->next;
1407 pool_free (loc_chain_pool, node);
1409 var->var_part[i].loc_chain = NULL;
1411 if (var->onepart && VAR_LOC_1PAUX (var))
1413 loc_exp_dep_clear (var);
1414 if (VAR_LOC_DEP_LST (var))
1415 VAR_LOC_DEP_LST (var)->pprev = NULL;
1416 XDELETE (VAR_LOC_1PAUX (var));
1417 /* These may be reused across functions, so reset
1418 e.g. NO_LOC_P. */
1419 if (var->onepart == ONEPART_DEXPR)
1420 set_dv_changed (var->dv, true);
1422 pool_free (onepart_pool (var->onepart), var);
1425 /* Initialize the set (array) SET of attrs to empty lists.
1426 SET must have FIRST_PSEUDO_REGISTER entries (one per hard reg).  */
1427 static void
1428 init_attrs_list_set (attrs *set)
1430 int i;
1432 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1433 set[i] = NULL;
1436 /* Make the list *LISTP empty.  Frees every node back to attrs_pool.  */
1438 static void
1439 attrs_list_clear (attrs *listp)
1441 attrs list, next;
1443 for (list = *listp; list; list = next)
1445 next = list->next;
1446 pool_free (attrs_pool, list);
1448 *listp = NULL;
1451 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
/* Actually returns the matching node (or NULL) — linear search on
   (dv, offset) pairs.  */
1453 static attrs
1454 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1456 for (; list; list = list->next)
1457 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1458 return list;
1459 return NULL;
1462 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.
1463 Prepends a new attrs_pool node; no duplicate check is done here —
1464 callers use attrs_list_member first when uniqueness matters.  */
1465 static void
1466 attrs_list_insert (attrs *listp, decl_or_value dv,
1466 HOST_WIDE_INT offset, rtx loc)
1468 attrs list;
1470 list = (attrs) pool_alloc (attrs_pool);
1471 list->loc = loc;
1472 list->dv = dv;
1473 list->offset = offset;
1474 list->next = *listp;
1475 *listp = list;
1478 /* Copy all nodes from SRC and create a list *DSTP of the copies.
1479 Any previous contents of *DSTP are freed first.  The copy is
1480 prepended node by node, so *DSTP ends up in reverse order of SRC.  */
1481 static void
1482 attrs_list_copy (attrs *dstp, attrs src)
1483 attrs n;
1485 attrs_list_clear (dstp);
1486 for (; src; src = src->next)
1488 n = (attrs) pool_alloc (attrs_pool);
1489 n->loc = src->loc;
1490 n->dv = src->dv;
1491 n->offset = src->offset;
1492 n->next = *dstp;
1493 *dstp = n;
1497 /* Add all nodes from SRC which are not in *DSTP to *DSTP.
1498 Quadratic in list length (member check per node); attrs lists are
1499 expected to stay short.  */
1500 static void
1501 attrs_list_union (attrs *dstp, attrs src)
1502 for (; src; src = src->next)
1504 if (!attrs_list_member (*dstp, src->dv, src->offset))
1505 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1509 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1510 *DSTP. */
/* *DSTP must start empty; SRC is copied unconditionally (it cannot
   self-duplicate), SRC2 with a duplicate check against *DSTP.  */
1512 static void
1513 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1515 gcc_assert (!*dstp);
1516 for (; src; src = src->next)
1518 if (!dv_onepart_p (src->dv))
1519 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1521 for (src = src2; src; src = src->next)
1523 if (!dv_onepart_p (src->dv)
1524 && !attrs_list_member (*dstp, src->dv, src->offset))
1525 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1529 /* Shared hashtable support. */
1531 /* Return true if VARS is shared (referenced by more than one
1532 dataflow set).  */
1533 static inline bool
1534 shared_hash_shared (shared_hash vars)
1536 return vars->refcount > 1;
1539 /* Return the hash table for VARS.  Simple accessor.  */
1541 static inline htab_t
1542 shared_hash_htab (shared_hash vars)
1544 return vars->htab;
1547 /* Return true if VAR is shared, or maybe because VARS is shared. */
/* A variable must be unshared before mutation if either its own
   refcount exceeds its expected owners or the containing table is
   shared.  */
1549 static inline bool
1550 shared_var_p (variable var, shared_hash vars)
1552 /* Don't count an entry in the changed_variables table as a duplicate. */
1553 return ((var->refcount > 1 + (int) var->in_changed_variables)
1554 || shared_hash_shared (vars));
1557 /* Copy variables into a new hash table.
1558 Copy-on-write step: VARS must currently be shared; a fresh table with
1559 refcount 1 is returned and VARS' refcount is dropped by one.  */
1560 static shared_hash
1561 shared_hash_unshare (shared_hash vars)
1562 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1563 gcc_assert (vars->refcount > 1);
1564 new_vars->refcount = 1;
1565 new_vars->htab
1566 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1567 variable_htab_eq, variable_htab_free);
1568 vars_copy (new_vars->htab, vars->htab);
1569 vars->refcount--;
1570 return new_vars;
1573 /* Increment reference counter on VARS and return it.
1574 Cheap "copy" — actual duplication is deferred to unshare.  */
1575 static inline shared_hash
1576 shared_hash_copy (shared_hash vars)
1578 vars->refcount++;
1579 return vars;
1582 /* Decrement reference counter and destroy hash table if not shared
1583 anymore.  htab_delete runs variable_htab_free on every element.  */
1585 static void
1586 shared_hash_destroy (shared_hash vars)
1588 gcc_checking_assert (vars->refcount > 0);
1589 if (--vars->refcount == 0)
1591 htab_delete (vars->htab);
1592 pool_free (shared_hash_pool, vars);
1596 /* Unshare *PVARS if shared and return slot for DV. If INS is
1597 INSERT, insert it if not already present.
1598 DVHASH is DV's precomputed hash.  */
1599 static inline void **
1600 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1601 hashval_t dvhash, enum insert_option ins)
1603 if (shared_hash_shared (*pvars))
1604 *pvars = shared_hash_unshare (*pvars);
1605 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
/* Convenience wrapper around shared_hash_find_slot_unshare_1 that
   computes DV's hash itself.  */
1608 static inline void **
1609 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1610 enum insert_option ins)
1612 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1615 /* Return slot for DV, if it is already present in the hash table.
1616 If it is not present, insert it only if VARS is not shared, otherwise
1617 return NULL.  (Inserting into a shared table would mutate other
1618 owners' view, hence NO_INSERT in that case.)  */
1619 static inline void **
1620 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1622 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1623 shared_hash_shared (vars)
1624 ? NO_INSERT : INSERT);
/* Hash-computing wrapper around shared_hash_find_slot_1.  */
1627 static inline void **
1628 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1630 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1633 /* Return slot for DV only if it is already present in the hash table.
1634 DVHASH is DV's precomputed hash; never inserts.  */
1635 static inline void **
1636 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1637 hashval_t dvhash)
1639 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1640 NO_INSERT);
/* Hash-computing wrapper around shared_hash_find_slot_noinsert_1.  */
1643 static inline void **
1644 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1646 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1649 /* Return variable for DV or NULL if not already present in the hash
1650 table.  DVHASH is DV's precomputed hash.  */
1652 static inline variable
1653 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1655 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
/* Hash-computing wrapper around shared_hash_find_1.  */
1658 static inline variable
1659 shared_hash_find (shared_hash vars, decl_or_value dv)
1661 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1664 /* Return true if TVAL is better than CVAL as a canonical value. We
1665 choose lowest-numbered VALUEs, using the RTX address as a
1666 tie-breaker. The idea is to arrange them into a star topology,
1667 such that all of them are at most one step away from the canonical
1668 value, and the canonical value has backlinks to all of them, in
1669 addition to all the actual locations. We don't enforce this
1670 topology throughout the entire dataflow analysis, though.
   A NULL CVAL always loses, so any value beats "no canonical yet".
1671 */
1673 static inline bool
1674 canon_value_cmp (rtx tval, rtx cval)
1676 return !cval
1677 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
/* Set by variable-merging code when the destination cannot share the
   source's variable structure; cleared here on every unshare.  */
1680 static bool dst_can_be_shared;
1682 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
/* Copy-on-write for a single variable: allocate a fresh variable,
   duplicate its location chains (raising init status to at least
   INITIALIZED when uninit tracking is off), drop one reference from
   VAR, store the copy in SLOT (re-resolving the slot if SET's table
   itself had to be unshared), and if VAR sat in changed_variables,
   transfer that membership to the copy.  Returns the (possibly new)
   slot.  */
1684 static void **
1685 unshare_variable (dataflow_set *set, void **slot, variable var,
1686 enum var_init_status initialized)
1688 variable new_var;
1689 int i;
1691 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1692 new_var->dv = var->dv;
1693 new_var->refcount = 1;
1694 var->refcount--;
1695 new_var->n_var_parts = var->n_var_parts;
1696 new_var->onepart = var->onepart;
1697 new_var->in_changed_variables = false;
1699 if (! flag_var_tracking_uninit)
1700 initialized = VAR_INIT_STATUS_INITIALIZED;
1702 for (i = 0; i < var->n_var_parts; i++)
1704 location_chain node;
1705 location_chain *nextp;
1707 if (i == 0 && var->onepart)
1709 /* One-part auxiliary data is only used while emitting
1710 notes, so propagate it to the new variable in the active
1711 dataflow set. If we're not emitting notes, this will be
1712 a no-op. */
1713 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1714 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1715 VAR_LOC_1PAUX (var) = NULL;
1717 else
1718 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
/* Deep-copy this part's location chain, preserving order.  */
1719 nextp = &new_var->var_part[i].loc_chain;
1720 for (node = var->var_part[i].loc_chain; node; node = node->next)
1722 location_chain new_lc;
1724 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1725 new_lc->next = NULL;
/* Keep the stronger of the node's and the requested init status.  */
1726 if (node->init > initialized)
1727 new_lc->init = node->init;
1728 else
1729 new_lc->init = initialized;
/* MEM set_src rtxes are not copied — presumably because they may be
   adjusted/invalidated independently; verify against set_variable_part
   users.  */
1730 if (node->set_src && !(MEM_P (node->set_src)))
1731 new_lc->set_src = node->set_src;
1732 else
1733 new_lc->set_src = NULL;
1734 new_lc->loc = node->loc;
1736 *nextp = new_lc;
1737 nextp = &new_lc->next;
1740 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1743 dst_can_be_shared = false;
/* SLOT may be stale if the whole table gets unshared, or if we are
   iterating over a different (traversed) table; re-find it.  */
1744 if (shared_hash_shared (set->vars))
1745 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1746 else if (set->traversed_vars && set->vars != set->traversed_vars)
1747 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1748 *slot = new_var;
1749 if (var->in_changed_variables)
1751 void **cslot
1752 = htab_find_slot_with_hash (changed_variables, var->dv,
1753 dv_htab_hash (var->dv), NO_INSERT);
1754 gcc_assert (*cslot == (void *) var);
1755 var->in_changed_variables = false;
1756 variable_htab_free (var);
1757 *cslot = new_var;
1758 new_var->in_changed_variables = true;
1760 return slot;
1763 /* Copy all variables from hash table SRC to hash table DST.
1764 Shallow copy: each variable's refcount is bumped and the same
1765 pointer is stored in DST (copy-on-write via unshare_variable).  */
1765 static void
1766 vars_copy (htab_t dst, htab_t src)
1768 htab_iterator hi;
1769 variable var;
1771 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1773 void **dstp;
1774 var->refcount++;
1775 dstp = htab_find_slot_with_hash (dst, var->dv,
1776 dv_htab_hash (var->dv),
1777 INSERT);
1778 *dstp = var;
1782 /* Map a decl to its main debug decl.
1783 If DECL carries a DEBUG_EXPR that is itself a decl, track that
1784 instead; otherwise return DECL unchanged.  */
1784 static inline tree
1785 var_debug_decl (tree decl)
1787 if (decl && DECL_P (decl)
1788 && DECL_DEBUG_EXPR_IS_FROM (decl))
1790 tree debugdecl = DECL_DEBUG_EXPR (decl);
1791 if (debugdecl && DECL_P (debugdecl))
1792 decl = debugdecl;
1795 return decl;
1798 /* Set the register LOC to contain DV, OFFSET.
1799 Records the (dv, offset) attribute on LOC's register (unless already
   present) and then records LOC as a location of DV's part.  IOPT is
   forwarded to set_variable_part.  */
1800 static void
1801 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1802 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1803 enum insert_option iopt)
1805 attrs node;
1806 bool decl_p = dv_is_decl_p (dv);
/* Decls are canonicalized to their debug decl before tracking.  */
1808 if (decl_p)
1809 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1811 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1812 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1813 && node->offset == offset)
1814 break;
1815 if (!node)
1816 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1817 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1820 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).
1821 Thin wrapper deriving the tracked decl/offset from the REG itself.  */
1822 static void
1823 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1824 rtx set_src)
1826 tree decl = REG_EXPR (loc);
1827 HOST_WIDE_INT offset = REG_OFFSET (loc);
1829 var_reg_decl_set (set, loc, initialized,
1830 dv_from_decl (decl), offset, set_src, INSERT);
/* Return the recorded initialization status of location LOC for DV in
   SET, or VAR_INIT_STATUS_UNKNOWN if LOC is not a known location.
   When uninitialized tracking is disabled everything is reported
   initialized.  */
1833 static enum var_init_status
1834 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1836 variable var;
1837 int i;
1838 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1840 if (! flag_var_tracking_uninit)
1841 return VAR_INIT_STATUS_INITIALIZED;
1843 var = shared_hash_find (set->vars, dv);
1844 if (var)
/* Scan each part's location chain until LOC is found.  */
1846 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1848 location_chain nextp;
1849 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1850 if (rtx_equal_p (nextp->loc, loc))
1852 ret_val = nextp->init;
1853 break;
1858 return ret_val;
1861 /* Delete current content of register LOC in dataflow set SET and set
1862 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1863 MODIFY is true, any other live copies of the same variable part are
1864 also deleted from the dataflow set, otherwise the variable part is
1865 assumed to be copied from another location holding the same
1866 part. */
1868 static void
1869 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1870 enum var_init_status initialized, rtx set_src)
1872 tree decl = REG_EXPR (loc);
1873 HOST_WIDE_INT offset = REG_OFFSET (loc);
1874 attrs node, next;
1875 attrs *nextp;
1877 decl = var_debug_decl (decl);
1879 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1880 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Drop every attribute on this register that does not match
   (decl, offset); matching nodes are kept but retargeted to LOC.  */
1882 nextp = &set->regs[REGNO (loc)];
1883 for (node = *nextp; node; node = next)
1885 next = node->next;
1886 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1888 delete_variable_part (set, node->loc, node->dv, node->offset);
1889 pool_free (attrs_pool, node);
1890 *nextp = next;
1892 else
1894 node->loc = loc;
1895 nextp = &node->next;
1898 if (modify)
1899 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1900 var_reg_set (set, loc, initialized, set_src);
1903 /* Delete the association of register LOC in dataflow set SET with any
1904 variables that aren't onepart. If CLOBBER is true, also delete any
1905 other live copies of the same variable part, and delete the
1906 association with onepart dvs too. */
1908 static void
1909 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1911 attrs *nextp = &set->regs[REGNO (loc)];
1912 attrs node, next;
1914 if (clobber)
1916 tree decl = REG_EXPR (loc);
1917 HOST_WIDE_INT offset = REG_OFFSET (loc);
1919 decl = var_debug_decl (decl);
1921 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Remove the register's attribute nodes; one-part dvs survive a
   non-clobbering delete.  */
1924 for (node = *nextp; node; node = next)
1926 next = node->next;
1927 if (clobber || !dv_onepart_p (node->dv))
1929 delete_variable_part (set, node->loc, node->dv, node->offset);
1930 pool_free (attrs_pool, node);
1931 *nextp = next;
1933 else
1934 nextp = &node->next;
1938 /* Delete content of register with number REGNO in dataflow set SET.
1939 Unconditionally removes every variable part associated with the
   register and empties its attrs list.  */
1940 static void
1941 var_regno_delete (dataflow_set *set, int regno)
1943 attrs *reg = &set->regs[regno];
1944 attrs node, next;
1946 for (node = *reg; node; node = next)
1948 next = node->next;
1949 delete_variable_part (set, node->loc, node->dv, node->offset);
1950 pool_free (attrs_pool, node);
1952 *reg = NULL;
1955 /* Set the location of DV, OFFSET as the MEM LOC.
1956 MEM counterpart of var_reg_decl_set; no per-register attrs list is
   involved for memory.  */
1957 static void
1958 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1959 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1960 enum insert_option iopt)
1962 if (dv_is_decl_p (dv))
1963 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1965 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1968 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1969 SET to LOC.
1970 Adjust the address first if it is stack pointer based. */
1972 static void
1973 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1974 rtx set_src)
1976 tree decl = MEM_EXPR (loc);
1977 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1979 var_mem_decl_set (set, loc, initialized,
1980 dv_from_decl (decl), offset, set_src, INSERT);
1983 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1984 dataflow set SET to LOC. If MODIFY is true, any other live copies
1985 of the same variable part are also deleted from the dataflow set,
1986 otherwise the variable part is assumed to be copied from another
1987 location holding the same part.
1988 Adjust the address first if it is stack pointer based. */
1990 static void
1991 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1992 enum var_init_status initialized, rtx set_src)
1994 tree decl = MEM_EXPR (loc);
1995 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1997 decl = var_debug_decl (decl);
/* Preserve a known init status rather than downgrading to UNKNOWN.  */
1999 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2000 initialized = get_init_value (set, loc, dv_from_decl (decl));
2002 if (modify)
2003 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src)
2004 var_mem_set (set, loc, initialized, set_src);
2007 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2008 true, also delete any other live copies of the same variable part.
2009 Adjust the address first if it is stack pointer based. */
2011 static void
2012 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2014 tree decl = MEM_EXPR (loc);
2015 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2017 decl = var_debug_decl (decl);
2018 if (clobber)
2019 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2020 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2023 /* Return true if LOC should not be expanded for location expressions,
2024 or used in them.  These rtx codes have no meaningful runtime
2025 location that debug info could describe.  */
2026 static inline bool
2027 unsuitable_loc (rtx loc)
2029 switch (GET_CODE (loc))
2031 case PC:
2032 case SCRATCH:
2033 case CC0:
2034 case ASM_INPUT:
2035 case ASM_OPERANDS:
2036 return true;
2038 default:
2039 return false;
2043 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2044 bound to it. */
2046 static inline void
2047 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2049 if (REG_P (loc))
2051 if (modified)
2052 var_regno_delete (set, REGNO (loc));
2053 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2054 dv_from_value (val), 0, NULL_RTX, INSERT);
2056 else if (MEM_P (loc))
2058 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
/* Follow one level of VALUE indirection to the canonical value's
   location list.  */
2060 if (l && GET_CODE (l->loc) == VALUE)
2061 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2063 /* If this MEM is a global constant, we don't need it in the
2064 dynamic tables. ??? We should test this before emitting the
2065 micro-op in the first place. */
2066 while (l)
2067 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2068 break;
2069 else
2070 l = l->next;
/* Only record the MEM if cselib does not already know it statically.  */
2072 if (!l)
2073 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2074 dv_from_value (val), 0, NULL_RTX, INSERT);
2076 else
2078 /* Other kinds of equivalences are necessarily static, at least
2079 so long as we do not perform substitutions while merging
2080 expressions. */
2081 gcc_unreachable ();
2082 set_variable_part (set, loc, dv_from_value (val), 0,
2083 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2087 /* Bind a value to a location it was just stored in. If MODIFIED
2088 holds, assume the location was modified, detaching it from any
2089 values bound to it. */
2091 static void
2092 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2094 cselib_val *v = CSELIB_VAL_PTR (val);
/* Only preserved cselib values survive long enough to be tracked.  */
2096 gcc_assert (cselib_preserved_value_p (v));
2098 if (dump_file)
2100 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2101 print_inline_rtx (dump_file, loc, 0);
2102 fprintf (dump_file, " evaluates to ");
2103 print_inline_rtx (dump_file, val, 0);
2104 if (v->locs)
2106 struct elt_loc_list *l;
2107 for (l = v->locs; l; l = l->next)
2109 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2110 print_inline_rtx (dump_file, l->loc, 0);
2113 fprintf (dump_file, "\n");
2116 gcc_checking_assert (!unsuitable_loc (loc));
2118 val_bind (set, val, loc, modified);
2121 /* Reset this node, detaching all its equivalences. Return the slot
2122 in the variable hash table that holds dv, if there is one. */
/* Before DV's value is overwritten, rewire the equivalence star: pick
   the best remaining VALUE location as the new canonical value, point
   all other equivalent values and locations at it, then remove DV's
   own part.  */
2124 static void
2125 val_reset (dataflow_set *set, decl_or_value dv)
2127 variable var = shared_hash_find (set->vars, dv) ;
2128 location_chain node;
2129 rtx cval;
2131 if (!var || !var->n_var_parts)
2132 return;
/* Values are one-part by construction.  */
2134 gcc_assert (var->n_var_parts == 1);
/* Pass 1: choose the new canonical value among DV's VALUE locations.  */
2136 cval = NULL;
2137 for (node = var->var_part[0].loc_chain; node; node = node->next)
2138 if (GET_CODE (node->loc) == VALUE
2139 && canon_value_cmp (node->loc, cval))
2140 cval = node->loc;
/* Pass 2: repoint every other equivalent value at CVAL and drop its
   back-link to DV.  */
2142 for (node = var->var_part[0].loc_chain; node; node = node->next)
2143 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2145 /* Redirect the equivalence link to the new canonical
2146 value, or simply remove it if it would point at
2147 itself. */
2148 if (cval)
2149 set_variable_part (set, cval, dv_from_value (node->loc),
2150 0, node->init, node->set_src, NO_INSERT);
2151 delete_variable_part (set, dv_as_value (dv),
2152 dv_from_value (node->loc), 0);
2155 if (cval)
2157 decl_or_value cdv = dv_from_value (cval);
2159 /* Keep the remaining values connected, accumulating links
2160 in the canonical value. */
2161 for (node = var->var_part[0].loc_chain; node; node = node->next)
2163 if (node->loc == cval)
2164 continue;
2165 else if (GET_CODE (node->loc) == REG)
2166 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2167 node->set_src, NO_INSERT);
2168 else if (GET_CODE (node->loc) == MEM)
2169 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2170 node->set_src, NO_INSERT);
2171 else
2172 set_variable_part (set, node->loc, cdv, 0,
2173 node->init, node->set_src, NO_INSERT);
2177 /* We remove this last, to make sure that the canonical value is not
2178 removed to the point of requiring reinsertion. */
2179 if (cval)
2180 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2182 clobber_variable_part (set, NULL, dv, 0, NULL);
2185 /* Find the values in a given location and map the val to another
2186 value, if it is unique, or add the location as one holding the
2187 value. */
2189 static void
2190 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2192 decl_or_value dv = dv_from_value (val);
2194 if (dump_file && (dump_flags & TDF_DETAILS))
2196 if (insn)
2197 fprintf (dump_file, "%i: ", INSN_UID (insn));
2198 else
2199 fprintf (dump_file, "head: ");
2200 print_inline_rtx (dump_file, val, 0);
2201 fputs (" is at ", dump_file);
2202 print_inline_rtx (dump_file, loc, 0);
2203 fputc ('\n', dump_file);
2206 val_reset (set, dv);
2208 gcc_checking_assert (!unsuitable_loc (loc));
2210 if (REG_P (loc))
2212 attrs node, found = NULL;
2214 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2215 if (dv_is_value_p (node->dv)
2216 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2218 found = node;
2220 /* Map incoming equivalences. ??? Wouldn't it be nice if
2221 we just started sharing the location lists? Maybe a
2222 circular list ending at the value itself or some
2223 such. */
2224 set_variable_part (set, dv_as_value (node->dv),
2225 dv_from_value (val), node->offset,
2226 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2227 set_variable_part (set, val, node->dv, node->offset,
2228 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2231 /* If we didn't find any equivalence, we need to remember that
2232 this value is held in the named register. */
2233 if (found)
2234 return;
2236 /* ??? Attempt to find and merge equivalent MEMs or other
2237 expressions too. */
2239 val_bind (set, val, loc, false);
2242 /* Initialize dataflow set SET to be empty.
2243 VARS_SIZE is the initial size of hash table VARS. */
2245 static void
2246 dataflow_set_init (dataflow_set *set)
2248 init_attrs_list_set (set->regs);
2249 set->vars = shared_hash_copy (empty_shared_hash);
2250 set->stack_adjust = 0;
2251 set->traversed_vars = NULL;
2254 /* Delete the contents of dataflow set SET. */
2256 static void
2257 dataflow_set_clear (dataflow_set *set)
2259 int i;
2261 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2262 attrs_list_clear (&set->regs[i]);
2264 shared_hash_destroy (set->vars);
2265 set->vars = shared_hash_copy (empty_shared_hash);
2268 /* Copy the contents of dataflow set SRC to DST. */
2270 static void
2271 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2273 int i;
2275 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2276 attrs_list_copy (&dst->regs[i], src->regs[i]);
2278 shared_hash_destroy (dst->vars);
2279 dst->vars = shared_hash_copy (src->vars);
2280 dst->stack_adjust = src->stack_adjust;
2283 /* Information for merging lists of locations for a given offset of variable.
2285 struct variable_union_info
2287 /* Node of the location chain. */
2288 location_chain lc;
2290 /* The sum of positions in the input chains. */
2291 int pos;
2293 /* The position in the chain of DST dataflow set. */
2294 int pos_dst;
2297 /* Buffer for location list sorting and its allocated size. */
2298 static struct variable_union_info *vui_vec;
2299 static int vui_allocated;
2301 /* Compare function for qsort, order the structures by POS element. */
2303 static int
2304 variable_union_info_cmp_pos (const void *n1, const void *n2)
2306 const struct variable_union_info *const i1 =
2307 (const struct variable_union_info *) n1;
2308 const struct variable_union_info *const i2 =
2309 ( const struct variable_union_info *) n2;
2311 if (i1->pos != i2->pos)
2312 return i1->pos - i2->pos;
2314 return (i1->pos_dst - i2->pos_dst);
2317 /* Compute union of location parts of variable *SLOT and the same variable
2318 from hash table DATA. Compute "sorted" union of the location chains
2319 for common offsets, i.e. the locations of a variable part are sorted by
2320 a priority where the priority is the sum of the positions in the 2 chains
2321 (if a location is only in one list the position in the second list is
2322 defined to be larger than the length of the chains).
2323 When we are updating the location parts the newest location is in the
2324 beginning of the chain, so when we do the described "sorted" union
2325 we keep the newest locations in the beginning. */
2327 static int
2328 variable_union (variable src, dataflow_set *set)
2330 variable dst;
2331 void **dstp;
2332 int i, j, k;
2334 dstp = shared_hash_find_slot (set->vars, src->dv);
2335 if (!dstp || !*dstp)
2337 src->refcount++;
2339 dst_can_be_shared = false;
2340 if (!dstp)
2341 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2343 *dstp = src;
2345 /* Continue traversing the hash table. */
2346 return 1;
2348 else
2349 dst = (variable) *dstp;
2351 gcc_assert (src->n_var_parts);
2352 gcc_checking_assert (src->onepart == dst->onepart);
2354 /* We can combine one-part variables very efficiently, because their
2355 entries are in canonical order. */
2356 if (src->onepart)
2358 location_chain *nodep, dnode, snode;
2360 gcc_assert (src->n_var_parts == 1
2361 && dst->n_var_parts == 1);
2363 snode = src->var_part[0].loc_chain;
2364 gcc_assert (snode);
2366 restart_onepart_unshared:
2367 nodep = &dst->var_part[0].loc_chain;
2368 dnode = *nodep;
2369 gcc_assert (dnode);
2371 while (snode)
2373 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2375 if (r > 0)
2377 location_chain nnode;
2379 if (shared_var_p (dst, set->vars))
2381 dstp = unshare_variable (set, dstp, dst,
2382 VAR_INIT_STATUS_INITIALIZED);
2383 dst = (variable)*dstp;
2384 goto restart_onepart_unshared;
2387 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2388 nnode->loc = snode->loc;
2389 nnode->init = snode->init;
2390 if (!snode->set_src || MEM_P (snode->set_src))
2391 nnode->set_src = NULL;
2392 else
2393 nnode->set_src = snode->set_src;
2394 nnode->next = dnode;
2395 dnode = nnode;
2397 else if (r == 0)
2398 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2400 if (r >= 0)
2401 snode = snode->next;
2403 nodep = &dnode->next;
2404 dnode = *nodep;
2407 return 1;
2410 gcc_checking_assert (!src->onepart);
2412 /* Count the number of location parts, result is K. */
2413 for (i = 0, j = 0, k = 0;
2414 i < src->n_var_parts && j < dst->n_var_parts; k++)
2416 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2418 i++;
2419 j++;
2421 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2422 i++;
2423 else
2424 j++;
2426 k += src->n_var_parts - i;
2427 k += dst->n_var_parts - j;
2429 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2430 thus there are at most MAX_VAR_PARTS different offsets. */
2431 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2433 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2435 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2436 dst = (variable)*dstp;
2439 i = src->n_var_parts - 1;
2440 j = dst->n_var_parts - 1;
2441 dst->n_var_parts = k;
2443 for (k--; k >= 0; k--)
2445 location_chain node, node2;
2447 if (i >= 0 && j >= 0
2448 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2450 /* Compute the "sorted" union of the chains, i.e. the locations which
2451 are in both chains go first, they are sorted by the sum of
2452 positions in the chains. */
2453 int dst_l, src_l;
2454 int ii, jj, n;
2455 struct variable_union_info *vui;
2457 /* If DST is shared compare the location chains.
2458 If they are different we will modify the chain in DST with
2459 high probability so make a copy of DST. */
2460 if (shared_var_p (dst, set->vars))
2462 for (node = src->var_part[i].loc_chain,
2463 node2 = dst->var_part[j].loc_chain; node && node2;
2464 node = node->next, node2 = node2->next)
2466 if (!((REG_P (node2->loc)
2467 && REG_P (node->loc)
2468 && REGNO (node2->loc) == REGNO (node->loc))
2469 || rtx_equal_p (node2->loc, node->loc)))
2471 if (node2->init < node->init)
2472 node2->init = node->init;
2473 break;
2476 if (node || node2)
2478 dstp = unshare_variable (set, dstp, dst,
2479 VAR_INIT_STATUS_UNKNOWN);
2480 dst = (variable)*dstp;
2484 src_l = 0;
2485 for (node = src->var_part[i].loc_chain; node; node = node->next)
2486 src_l++;
2487 dst_l = 0;
2488 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2489 dst_l++;
2491 if (dst_l == 1)
2493 /* The most common case, much simpler, no qsort is needed. */
2494 location_chain dstnode = dst->var_part[j].loc_chain;
2495 dst->var_part[k].loc_chain = dstnode;
2496 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
2497 node2 = dstnode;
2498 for (node = src->var_part[i].loc_chain; node; node = node->next)
2499 if (!((REG_P (dstnode->loc)
2500 && REG_P (node->loc)
2501 && REGNO (dstnode->loc) == REGNO (node->loc))
2502 || rtx_equal_p (dstnode->loc, node->loc)))
2504 location_chain new_node;
2506 /* Copy the location from SRC. */
2507 new_node = (location_chain) pool_alloc (loc_chain_pool);
2508 new_node->loc = node->loc;
2509 new_node->init = node->init;
2510 if (!node->set_src || MEM_P (node->set_src))
2511 new_node->set_src = NULL;
2512 else
2513 new_node->set_src = node->set_src;
2514 node2->next = new_node;
2515 node2 = new_node;
2517 node2->next = NULL;
2519 else
2521 if (src_l + dst_l > vui_allocated)
2523 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2524 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2525 vui_allocated);
2527 vui = vui_vec;
2529 /* Fill in the locations from DST. */
2530 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2531 node = node->next, jj++)
2533 vui[jj].lc = node;
2534 vui[jj].pos_dst = jj;
2536 /* Pos plus value larger than a sum of 2 valid positions. */
2537 vui[jj].pos = jj + src_l + dst_l;
2540 /* Fill in the locations from SRC. */
2541 n = dst_l;
2542 for (node = src->var_part[i].loc_chain, ii = 0; node;
2543 node = node->next, ii++)
2545 /* Find location from NODE. */
2546 for (jj = 0; jj < dst_l; jj++)
2548 if ((REG_P (vui[jj].lc->loc)
2549 && REG_P (node->loc)
2550 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2551 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2553 vui[jj].pos = jj + ii;
2554 break;
2557 if (jj >= dst_l) /* The location has not been found. */
2559 location_chain new_node;
2561 /* Copy the location from SRC. */
2562 new_node = (location_chain) pool_alloc (loc_chain_pool);
2563 new_node->loc = node->loc;
2564 new_node->init = node->init;
2565 if (!node->set_src || MEM_P (node->set_src))
2566 new_node->set_src = NULL;
2567 else
2568 new_node->set_src = node->set_src;
2569 vui[n].lc = new_node;
2570 vui[n].pos_dst = src_l + dst_l;
2571 vui[n].pos = ii + src_l + dst_l;
2572 n++;
2576 if (dst_l == 2)
2578 /* Special case still very common case. For dst_l == 2
2579 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2580 vui[i].pos == i + src_l + dst_l. */
2581 if (vui[0].pos > vui[1].pos)
2583 /* Order should be 1, 0, 2... */
2584 dst->var_part[k].loc_chain = vui[1].lc;
2585 vui[1].lc->next = vui[0].lc;
2586 if (n >= 3)
2588 vui[0].lc->next = vui[2].lc;
2589 vui[n - 1].lc->next = NULL;
2591 else
2592 vui[0].lc->next = NULL;
2593 ii = 3;
2595 else
2597 dst->var_part[k].loc_chain = vui[0].lc;
2598 if (n >= 3 && vui[2].pos < vui[1].pos)
2600 /* Order should be 0, 2, 1, 3... */
2601 vui[0].lc->next = vui[2].lc;
2602 vui[2].lc->next = vui[1].lc;
2603 if (n >= 4)
2605 vui[1].lc->next = vui[3].lc;
2606 vui[n - 1].lc->next = NULL;
2608 else
2609 vui[1].lc->next = NULL;
2610 ii = 4;
2612 else
2614 /* Order should be 0, 1, 2... */
2615 ii = 1;
2616 vui[n - 1].lc->next = NULL;
2619 for (; ii < n; ii++)
2620 vui[ii - 1].lc->next = vui[ii].lc;
2622 else
2624 qsort (vui, n, sizeof (struct variable_union_info),
2625 variable_union_info_cmp_pos);
2627 /* Reconnect the nodes in sorted order. */
2628 for (ii = 1; ii < n; ii++)
2629 vui[ii - 1].lc->next = vui[ii].lc;
2630 vui[n - 1].lc->next = NULL;
2631 dst->var_part[k].loc_chain = vui[0].lc;
2634 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2636 i--;
2637 j--;
2639 else if ((i >= 0 && j >= 0
2640 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2641 || i < 0)
2643 dst->var_part[k] = dst->var_part[j];
2644 j--;
2646 else if ((i >= 0 && j >= 0
2647 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
2648 || j < 0)
2650 location_chain *nextp;
2652 /* Copy the chain from SRC. */
2653 nextp = &dst->var_part[k].loc_chain;
2654 for (node = src->var_part[i].loc_chain; node; node = node->next)
2656 location_chain new_lc;
2658 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2659 new_lc->next = NULL;
2660 new_lc->init = node->init;
2661 if (!node->set_src || MEM_P (node->set_src))
2662 new_lc->set_src = NULL;
2663 else
2664 new_lc->set_src = node->set_src;
2665 new_lc->loc = node->loc;
2667 *nextp = new_lc;
2668 nextp = &new_lc->next;
2671 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
2672 i--;
2674 dst->var_part[k].cur_loc = NULL;
2677 if (flag_var_tracking_uninit)
2678 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2680 location_chain node, node2;
2681 for (node = src->var_part[i].loc_chain; node; node = node->next)
2682 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2683 if (rtx_equal_p (node->loc, node2->loc))
2685 if (node->init > node2->init)
2686 node2->init = node->init;
2690 /* Continue traversing the hash table. */
2691 return 1;
2694 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2696 static void
2697 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2699 int i;
2701 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2702 attrs_list_union (&dst->regs[i], src->regs[i]);
2704 if (dst->vars == empty_shared_hash)
2706 shared_hash_destroy (dst->vars);
2707 dst->vars = shared_hash_copy (src->vars);
2709 else
2711 htab_iterator hi;
2712 variable var;
2714 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2715 variable_union (var, dst);
/* Whether the value is currently being expanded.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
2734 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2735 user DECLs, this means they're in changed_variables. Values and
2736 debug exprs may be left with this flag set if no user variable
2737 requires them to be evaluated. */
2739 static inline void
2740 set_dv_changed (decl_or_value dv, bool newv)
2742 switch (dv_onepart_p (dv))
2744 case ONEPART_VALUE:
2745 if (newv)
2746 NO_LOC_P (dv_as_value (dv)) = false;
2747 VALUE_CHANGED (dv_as_value (dv)) = newv;
2748 break;
2750 case ONEPART_DEXPR:
2751 if (newv)
2752 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
2753 /* Fall through... */
2755 default:
2756 DECL_CHANGED (dv_as_decl (dv)) = newv;
2757 break;
2761 /* Return true if DV needs to have its cur_loc recomputed. */
2763 static inline bool
2764 dv_changed_p (decl_or_value dv)
2766 return (dv_is_value_p (dv)
2767 ? VALUE_CHANGED (dv_as_value (dv))
2768 : DECL_CHANGED (dv_as_decl (dv)));
2771 /* Return a location list node whose loc is rtx_equal to LOC, in the
2772 location list of a one-part variable or value VAR, or in that of
2773 any values recursively mentioned in the location lists. VARS must
2774 be in star-canonical form. */
2776 static location_chain
2777 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2779 location_chain node;
2780 enum rtx_code loc_code;
2782 if (!var)
2783 return NULL;
2785 gcc_checking_assert (var->onepart);
2787 if (!var->n_var_parts)
2788 return NULL;
2790 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2792 loc_code = GET_CODE (loc);
2793 for (node = var->var_part[0].loc_chain; node; node = node->next)
2795 decl_or_value dv;
2796 variable rvar;
2798 if (GET_CODE (node->loc) != loc_code)
2800 if (GET_CODE (node->loc) != VALUE)
2801 continue;
2803 else if (loc == node->loc)
2804 return node;
2805 else if (loc_code != VALUE)
2807 if (rtx_equal_p (loc, node->loc))
2808 return node;
2809 continue;
2812 /* Since we're in star-canonical form, we don't need to visit
2813 non-canonical nodes: one-part variables and non-canonical
2814 values would only point back to the canonical node. */
2815 if (dv_is_value_p (var->dv)
2816 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2818 /* Skip all subsequent VALUEs. */
2819 while (node->next && GET_CODE (node->next->loc) == VALUE)
2821 node = node->next;
2822 gcc_checking_assert (!canon_value_cmp (node->loc,
2823 dv_as_value (var->dv)));
2824 if (loc == node->loc)
2825 return node;
2827 continue;
2830 gcc_checking_assert (node == var->var_part[0].loc_chain);
2831 gcc_checking_assert (!node->next);
2833 dv = dv_from_value (node->loc);
2834 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2835 return find_loc_in_1pdv (loc, rvar, vars);
2838 /* ??? Gotta look in cselib_val locations too. */
2840 return NULL;
2843 /* Hash table iteration argument passed to variable_merge. */
2844 struct dfset_merge
2846 /* The set in which the merge is to be inserted. */
2847 dataflow_set *dst;
2848 /* The set that we're iterating in. */
2849 dataflow_set *cur;
2850 /* The set that may contain the other dv we are to merge with. */
2851 dataflow_set *src;
2852 /* Number of onepart dvs in src. */
2853 int src_onepart_cnt;
2856 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2857 loc_cmp order, and it is maintained as such. */
2859 static void
2860 insert_into_intersection (location_chain *nodep, rtx loc,
2861 enum var_init_status status)
2863 location_chain node;
2864 int r;
2866 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2867 if ((r = loc_cmp (node->loc, loc)) == 0)
2869 node->init = MIN (node->init, status);
2870 return;
2872 else if (r > 0)
2873 break;
2875 node = (location_chain) pool_alloc (loc_chain_pool);
2877 node->loc = loc;
2878 node->set_src = NULL;
2879 node->init = status;
2880 node->next = *nodep;
2881 *nodep = node;
2884 /* Insert in DEST the intersection of the locations present in both
2885 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2886 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2887 DSM->dst. */
2889 static void
2890 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2891 location_chain s1node, variable s2var)
2893 dataflow_set *s1set = dsm->cur;
2894 dataflow_set *s2set = dsm->src;
2895 location_chain found;
2897 if (s2var)
2899 location_chain s2node;
2901 gcc_checking_assert (s2var->onepart);
2903 if (s2var->n_var_parts)
2905 s2node = s2var->var_part[0].loc_chain;
2907 for (; s1node && s2node;
2908 s1node = s1node->next, s2node = s2node->next)
2909 if (s1node->loc != s2node->loc)
2910 break;
2911 else if (s1node->loc == val)
2912 continue;
2913 else
2914 insert_into_intersection (dest, s1node->loc,
2915 MIN (s1node->init, s2node->init));
2919 for (; s1node; s1node = s1node->next)
2921 if (s1node->loc == val)
2922 continue;
2924 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2925 shared_hash_htab (s2set->vars))))
2927 insert_into_intersection (dest, s1node->loc,
2928 MIN (s1node->init, found->init));
2929 continue;
2932 if (GET_CODE (s1node->loc) == VALUE
2933 && !VALUE_RECURSED_INTO (s1node->loc))
2935 decl_or_value dv = dv_from_value (s1node->loc);
2936 variable svar = shared_hash_find (s1set->vars, dv);
2937 if (svar)
2939 if (svar->n_var_parts == 1)
2941 VALUE_RECURSED_INTO (s1node->loc) = true;
2942 intersect_loc_chains (val, dest, dsm,
2943 svar->var_part[0].loc_chain,
2944 s2var);
2945 VALUE_RECURSED_INTO (s1node->loc) = false;
2950 /* ??? gotta look in cselib_val locations too. */
2952 /* ??? if the location is equivalent to any location in src,
2953 searched recursively
2955 add to dst the values needed to represent the equivalence
2957 telling whether locations S is equivalent to another dv's
2958 location list:
2960 for each location D in the list
2962 if S and D satisfy rtx_equal_p, then it is present
2964 else if D is a value, recurse without cycles
2966 else if S and D have the same CODE and MODE
2968 for each operand oS and the corresponding oD
2970 if oS and oD are not equivalent, then S an D are not equivalent
2972 else if they are RTX vectors
2974 if any vector oS element is not equivalent to its respective oD,
2975 then S and D are not equivalent
2983 /* Return -1 if X should be before Y in a location list for a 1-part
2984 variable, 1 if Y should be before X, and 0 if they're equivalent
2985 and should not appear in the list. */
2987 static int
2988 loc_cmp (rtx x, rtx y)
2990 int i, j, r;
2991 RTX_CODE code = GET_CODE (x);
2992 const char *fmt;
2994 if (x == y)
2995 return 0;
2997 if (REG_P (x))
2999 if (!REG_P (y))
3000 return -1;
3001 gcc_assert (GET_MODE (x) == GET_MODE (y));
3002 if (REGNO (x) == REGNO (y))
3003 return 0;
3004 else if (REGNO (x) < REGNO (y))
3005 return -1;
3006 else
3007 return 1;
3010 if (REG_P (y))
3011 return 1;
3013 if (MEM_P (x))
3015 if (!MEM_P (y))
3016 return -1;
3017 gcc_assert (GET_MODE (x) == GET_MODE (y));
3018 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3021 if (MEM_P (y))
3022 return 1;
3024 if (GET_CODE (x) == VALUE)
3026 if (GET_CODE (y) != VALUE)
3027 return -1;
3028 /* Don't assert the modes are the same, that is true only
3029 when not recursing. (subreg:QI (value:SI 1:1) 0)
3030 and (subreg:QI (value:DI 2:2) 0) can be compared,
3031 even when the modes are different. */
3032 if (canon_value_cmp (x, y))
3033 return -1;
3034 else
3035 return 1;
3038 if (GET_CODE (y) == VALUE)
3039 return 1;
3041 /* Entry value is the least preferable kind of expression. */
3042 if (GET_CODE (x) == ENTRY_VALUE)
3044 if (GET_CODE (y) != ENTRY_VALUE)
3045 return 1;
3046 gcc_assert (GET_MODE (x) == GET_MODE (y));
3047 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3050 if (GET_CODE (y) == ENTRY_VALUE)
3051 return -1;
3053 if (GET_CODE (x) == GET_CODE (y))
3054 /* Compare operands below. */;
3055 else if (GET_CODE (x) < GET_CODE (y))
3056 return -1;
3057 else
3058 return 1;
3060 gcc_assert (GET_MODE (x) == GET_MODE (y));
3062 if (GET_CODE (x) == DEBUG_EXPR)
3064 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3065 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3066 return -1;
3067 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3068 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3069 return 1;
3072 fmt = GET_RTX_FORMAT (code);
3073 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3074 switch (fmt[i])
3076 case 'w':
3077 if (XWINT (x, i) == XWINT (y, i))
3078 break;
3079 else if (XWINT (x, i) < XWINT (y, i))
3080 return -1;
3081 else
3082 return 1;
3084 case 'n':
3085 case 'i':
3086 if (XINT (x, i) == XINT (y, i))
3087 break;
3088 else if (XINT (x, i) < XINT (y, i))
3089 return -1;
3090 else
3091 return 1;
3093 case 'V':
3094 case 'E':
3095 /* Compare the vector length first. */
3096 if (XVECLEN (x, i) == XVECLEN (y, i))
3097 /* Compare the vectors elements. */;
3098 else if (XVECLEN (x, i) < XVECLEN (y, i))
3099 return -1;
3100 else
3101 return 1;
3103 for (j = 0; j < XVECLEN (x, i); j++)
3104 if ((r = loc_cmp (XVECEXP (x, i, j),
3105 XVECEXP (y, i, j))))
3106 return r;
3107 break;
3109 case 'e':
3110 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3111 return r;
3112 break;
3114 case 'S':
3115 case 's':
3116 if (XSTR (x, i) == XSTR (y, i))
3117 break;
3118 if (!XSTR (x, i))
3119 return -1;
3120 if (!XSTR (y, i))
3121 return 1;
3122 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3123 break;
3124 else if (r < 0)
3125 return -1;
3126 else
3127 return 1;
3129 case 'u':
3130 /* These are just backpointers, so they don't matter. */
3131 break;
3133 case '0':
3134 case 't':
3135 break;
3137 /* It is believed that rtx's at this level will never
3138 contain anything but integers and other rtx's,
3139 except for within LABEL_REFs and SYMBOL_REFs. */
3140 default:
3141 gcc_unreachable ();
3144 return 0;
#if ENABLE_CHECKING
/* Check the order of entries in one-part variables.  Hash-table
   traversal callback; always returns 1 to keep traversing.  */

static int
canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
{
  variable var = (variable) *slot;
  location_chain node, next;

#ifdef ENABLE_RTL_CHECKING
  int i;
  /* NOTE(review): the loop body indexes var_part[0] rather than
     var_part[i]; the intent appears to be checking every part —
     confirm before changing, as strengthening the assert could fire
     on existing inputs.  */
  for (i = 0; i < var->n_var_parts; i++)
    gcc_assert (var->var_part[0].cur_loc == NULL);
  gcc_assert (!var->in_changed_variables);
#endif

  if (!var->onepart)
    return 1;

  gcc_assert (var->n_var_parts == 1);
  node = var->var_part[0].loc_chain;
  gcc_assert (node);

  while ((next = node->next))
    {
      gcc_assert (loc_cmp (node->loc, next->loc) < 0);
      node = next;
    }

  return 1;
}
#endif
3180 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3181 more likely to be chosen as canonical for an equivalence set.
3182 Ensure less likely values can reach more likely neighbors, making
3183 the connections bidirectional. */
3185 static int
3186 canonicalize_values_mark (void **slot, void *data)
3188 dataflow_set *set = (dataflow_set *)data;
3189 variable var = (variable) *slot;
3190 decl_or_value dv = var->dv;
3191 rtx val;
3192 location_chain node;
3194 if (!dv_is_value_p (dv))
3195 return 1;
3197 gcc_checking_assert (var->n_var_parts == 1);
3199 val = dv_as_value (dv);
3201 for (node = var->var_part[0].loc_chain; node; node = node->next)
3202 if (GET_CODE (node->loc) == VALUE)
3204 if (canon_value_cmp (node->loc, val))
3205 VALUE_RECURSED_INTO (val) = true;
3206 else
3208 decl_or_value odv = dv_from_value (node->loc);
3209 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3211 set_slot_part (set, val, oslot, odv, 0,
3212 node->init, NULL_RTX);
3214 VALUE_RECURSED_INTO (node->loc) = true;
3218 return 1;
3221 /* Remove redundant entries from equivalence lists in onepart
3222 variables, canonicalizing equivalence sets into star shapes. */
3224 static int
3225 canonicalize_values_star (void **slot, void *data)
3227 dataflow_set *set = (dataflow_set *)data;
3228 variable var = (variable) *slot;
3229 decl_or_value dv = var->dv;
3230 location_chain node;
3231 decl_or_value cdv;
3232 rtx val, cval;
3233 void **cslot;
3234 bool has_value;
3235 bool has_marks;
3237 if (!var->onepart)
3238 return 1;
3240 gcc_checking_assert (var->n_var_parts == 1);
3242 if (dv_is_value_p (dv))
3244 cval = dv_as_value (dv);
3245 if (!VALUE_RECURSED_INTO (cval))
3246 return 1;
3247 VALUE_RECURSED_INTO (cval) = false;
3249 else
3250 cval = NULL_RTX;
3252 restart:
3253 val = cval;
3254 has_value = false;
3255 has_marks = false;
3257 gcc_assert (var->n_var_parts == 1);
3259 for (node = var->var_part[0].loc_chain; node; node = node->next)
3260 if (GET_CODE (node->loc) == VALUE)
3262 has_value = true;
3263 if (VALUE_RECURSED_INTO (node->loc))
3264 has_marks = true;
3265 if (canon_value_cmp (node->loc, cval))
3266 cval = node->loc;
3269 if (!has_value)
3270 return 1;
3272 if (cval == val)
3274 if (!has_marks || dv_is_decl_p (dv))
3275 return 1;
3277 /* Keep it marked so that we revisit it, either after visiting a
3278 child node, or after visiting a new parent that might be
3279 found out. */
3280 VALUE_RECURSED_INTO (val) = true;
3282 for (node = var->var_part[0].loc_chain; node; node = node->next)
3283 if (GET_CODE (node->loc) == VALUE
3284 && VALUE_RECURSED_INTO (node->loc))
3286 cval = node->loc;
3287 restart_with_cval:
3288 VALUE_RECURSED_INTO (cval) = false;
3289 dv = dv_from_value (cval);
3290 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3291 if (!slot)
3293 gcc_assert (dv_is_decl_p (var->dv));
3294 /* The canonical value was reset and dropped.
3295 Remove it. */
3296 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3297 return 1;
3299 var = (variable)*slot;
3300 gcc_assert (dv_is_value_p (var->dv));
3301 if (var->n_var_parts == 0)
3302 return 1;
3303 gcc_assert (var->n_var_parts == 1);
3304 goto restart;
3307 VALUE_RECURSED_INTO (val) = false;
3309 return 1;
3312 /* Push values to the canonical one. */
3313 cdv = dv_from_value (cval);
3314 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3316 for (node = var->var_part[0].loc_chain; node; node = node->next)
3317 if (node->loc != cval)
3319 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3320 node->init, NULL_RTX);
3321 if (GET_CODE (node->loc) == VALUE)
3323 decl_or_value ndv = dv_from_value (node->loc);
3325 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3326 NO_INSERT);
3328 if (canon_value_cmp (node->loc, val))
3330 /* If it could have been a local minimum, it's not any more,
3331 since it's now neighbor to cval, so it may have to push
3332 to it. Conversely, if it wouldn't have prevailed over
3333 val, then whatever mark it has is fine: if it was to
3334 push, it will now push to a more canonical node, but if
3335 it wasn't, then it has already pushed any values it might
3336 have to. */
3337 VALUE_RECURSED_INTO (node->loc) = true;
3338 /* Make sure we visit node->loc by ensuring we cval is
3339 visited too. */
3340 VALUE_RECURSED_INTO (cval) = true;
3342 else if (!VALUE_RECURSED_INTO (node->loc))
3343 /* If we have no need to "recurse" into this node, it's
3344 already "canonicalized", so drop the link to the old
3345 parent. */
3346 clobber_variable_part (set, cval, ndv, 0, NULL);
3348 else if (GET_CODE (node->loc) == REG)
3350 attrs list = set->regs[REGNO (node->loc)], *listp;
3352 /* Change an existing attribute referring to dv so that it
3353 refers to cdv, removing any duplicate this might
3354 introduce, and checking that no previous duplicates
3355 existed, all in a single pass. */
3357 while (list)
3359 if (list->offset == 0
3360 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3361 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3362 break;
3364 list = list->next;
3367 gcc_assert (list);
3368 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3370 list->dv = cdv;
3371 for (listp = &list->next; (list = *listp); listp = &list->next)
3373 if (list->offset)
3374 continue;
3376 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3378 *listp = list->next;
3379 pool_free (attrs_pool, list);
3380 list = *listp;
3381 break;
3384 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3387 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3389 for (listp = &list->next; (list = *listp); listp = &list->next)
3391 if (list->offset)
3392 continue;
3394 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3396 *listp = list->next;
3397 pool_free (attrs_pool, list);
3398 list = *listp;
3399 break;
3402 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3405 else
3406 gcc_unreachable ();
3408 #if ENABLE_CHECKING
3409 while (list)
3411 if (list->offset == 0
3412 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3413 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3414 gcc_unreachable ();
3416 list = list->next;
3418 #endif
3422 if (val)
3423 set_slot_part (set, val, cslot, cdv, 0,
3424 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3426 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3428 /* Variable may have been unshared. */
3429 var = (variable)*slot;
3430 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3431 && var->var_part[0].loc_chain->next == NULL);
3433 if (VALUE_RECURSED_INTO (cval))
3434 goto restart_with_cval;
3436 return 1;
3439 /* Bind one-part variables to the canonical value in an equivalence
3440 set. Not doing this causes dataflow convergence failure in rare
3441 circumstances, see PR42873. Unfortunately we can't do this
3442 efficiently as part of canonicalize_values_star, since we may not
3443 have determined or even seen the canonical value of a set when we
3444 get to a variable that references another member of the set. */
3446 static int
3447 canonicalize_vars_star (void **slot, void *data)
3449 dataflow_set *set = (dataflow_set *)data;
3450 variable var = (variable) *slot;
3451 decl_or_value dv = var->dv;
3452 location_chain node;
3453 rtx cval;
3454 decl_or_value cdv;
3455 void **cslot;
3456 variable cvar;
3457 location_chain cnode;
3459 if (!var->onepart || var->onepart == ONEPART_VALUE)
3460 return 1;
3462 gcc_assert (var->n_var_parts == 1);
3464 node = var->var_part[0].loc_chain;
3466 if (GET_CODE (node->loc) != VALUE)
3467 return 1;
3469 gcc_assert (!node->next);
3470 cval = node->loc;
3472 /* Push values to the canonical one. */
3473 cdv = dv_from_value (cval);
3474 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3475 if (!cslot)
3476 return 1;
3477 cvar = (variable)*cslot;
3478 gcc_assert (cvar->n_var_parts == 1);
3480 cnode = cvar->var_part[0].loc_chain;
3482 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3483 that are not “more canonical” than it. */
3484 if (GET_CODE (cnode->loc) != VALUE
3485 || !canon_value_cmp (cnode->loc, cval))
3486 return 1;
3488 /* CVAL was found to be non-canonical. Change the variable to point
3489 to the canonical VALUE. */
3490 gcc_assert (!cnode->next);
3491 cval = cnode->loc;
3493 slot = set_slot_part (set, cval, slot, dv, 0,
3494 node->init, node->set_src);
3495 clobber_slot_part (set, cval, slot, 0, node->set_src);
3497 return 1;
3500 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3501 corresponding entry in DSM->src. Multi-part variables are combined
3502 with variable_union, whereas onepart dvs are combined with
3503 intersection. */
3505 static int
3506 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3508 dataflow_set *dst = dsm->dst;
3509 void **dstslot;
3510 variable s2var, dvar = NULL;
3511 decl_or_value dv = s1var->dv;
3512 onepart_enum_t onepart = s1var->onepart;
3513 rtx val;
3514 hashval_t dvhash;
3515 location_chain node, *nodep;
3517 /* If the incoming onepart variable has an empty location list, then
3518 the intersection will be just as empty. For other variables,
3519 it's always union. */
3520 gcc_checking_assert (s1var->n_var_parts
3521 && s1var->var_part[0].loc_chain);
3523 if (!onepart)
3524 return variable_union (s1var, dst);
3526 gcc_checking_assert (s1var->n_var_parts == 1);
3528 dvhash = dv_htab_hash (dv);
3529 if (dv_is_value_p (dv))
3530 val = dv_as_value (dv);
3531 else
3532 val = NULL;
3534 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3535 if (!s2var)
/* A onepart dv absent from src intersects to nothing; dst shrinks,
   so it can no longer share src's tables.  */
3537 dst_can_be_shared = false;
3538 return 1;
3541 dsm->src_onepart_cnt--;
3542 gcc_assert (s2var->var_part[0].loc_chain
3543 && s2var->onepart == onepart
3544 && s2var->n_var_parts == 1);
3546 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3547 if (dstslot)
3549 dvar = (variable)*dstslot;
3550 gcc_assert (dvar->refcount == 1
3551 && dvar->onepart == onepart
3552 && dvar->n_var_parts == 1);
3553 nodep = &dvar->var_part[0].loc_chain;
3555 else
3557 nodep = &node;
3558 node = NULL;
/* If s1var's and s2var's location lists are identical, share s2var
   in dst instead of building a fresh intersection chain.  */
3561 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3563 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3564 dvhash, INSERT);
3565 *dstslot = dvar = s2var;
3566 dvar->refcount++;
3568 else
3570 dst_can_be_shared = false;
3572 intersect_loc_chains (val, nodep, dsm,
3573 s1var->var_part[0].loc_chain, s2var);
3575 if (!dstslot)
3577 if (node)
/* The intersection is non-empty: materialize a new variable
   holding the intersected chain and insert it into dst.  */
3579 dvar = (variable) pool_alloc (onepart_pool (onepart));
3580 dvar->dv = dv;
3581 dvar->refcount = 1;
3582 dvar->n_var_parts = 1;
3583 dvar->onepart = onepart;
3584 dvar->in_changed_variables = false;
3585 dvar->var_part[0].loc_chain = node;
3586 dvar->var_part[0].cur_loc = NULL;
3587 if (onepart)
3588 VAR_LOC_1PAUX (dvar) = NULL;
3589 else
3590 VAR_PART_OFFSET (dvar, 0) = 0;
3592 dstslot
3593 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3594 INSERT);
3595 gcc_assert (!*dstslot);
3596 *dstslot = dvar;
3598 else
3599 return 1;
/* Register REG locations of the intersected chain in dst->regs,
   redirecting to an already-canonical VALUE where one exists.  */
3603 nodep = &dvar->var_part[0].loc_chain;
3604 while ((node = *nodep))
3606 location_chain *nextp = &node->next;
3608 if (GET_CODE (node->loc) == REG)
3610 attrs list;
3612 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3613 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3614 && dv_is_value_p (list->dv))
3615 break;
3617 if (!list)
3618 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3619 dv, 0, node->loc);
3620 /* If this value became canonical for another value that had
3621 this register, we want to leave it alone. */
3622 else if (dv_as_value (list->dv) != val)
3624 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3625 dstslot, dv, 0,
3626 node->init, NULL_RTX);
3627 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3629 /* Since nextp points into the removed node, we can't
3630 use it. The pointer to the next node moved to nodep.
3631 However, if the variable we're walking is unshared
3632 during our walk, we'll keep walking the location list
3633 of the previously-shared variable, in which case the
3634 node won't have been removed, and we'll want to skip
3635 it. That's why we test *nodep here. */
3636 if (*nodep != node)
3637 nextp = nodep;
3640 else
3641 /* Canonicalization puts registers first, so we don't have to
3642 walk it all. */
3643 break;
3644 nodep = nextp;
3647 if (dvar != (variable)*dstslot)
3648 dvar = (variable)*dstslot;
3649 nodep = &dvar->var_part[0].loc_chain;
3651 if (val)
3653 /* Mark all referenced nodes for canonicalization, and make sure
3654 we have mutual equivalence links. */
3655 VALUE_RECURSED_INTO (val) = true;
3656 for (node = *nodep; node; node = node->next)
3657 if (GET_CODE (node->loc) == VALUE)
3659 VALUE_RECURSED_INTO (node->loc) = true;
3660 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3661 node->init, NULL, INSERT);
3664 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3665 gcc_assert (*dstslot == dvar);
3666 canonicalize_values_star (dstslot, dst);
3667 gcc_checking_assert (dstslot
3668 == shared_hash_find_slot_noinsert_1 (dst->vars,
3669 dv, dvhash));
3670 dvar = (variable)*dstslot;
3672 else
3674 bool has_value = false, has_other = false;
3676 /* If we have one value and anything else, we're going to
3677 canonicalize this, so make sure all values have an entry in
3678 the table and are marked for canonicalization. */
3679 for (node = *nodep; node; node = node->next)
3681 if (GET_CODE (node->loc) == VALUE)
3683 /* If this was marked during register canonicalization,
3684 we know we have to canonicalize values. */
3685 if (has_value)
3686 has_other = true;
3687 has_value = true;
3688 if (has_other)
3689 break;
3691 else
3693 has_other = true;
3694 if (has_value)
3695 break;
3699 if (has_value && has_other)
3701 for (node = *nodep; node; node = node->next)
3703 if (GET_CODE (node->loc) == VALUE)
3705 decl_or_value dv = dv_from_value (node->loc);
3706 void **slot = NULL;
3708 if (shared_hash_shared (dst->vars))
3709 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3710 if (!slot)
3711 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3712 INSERT);
3713 if (!*slot)
/* The VALUE has no table entry yet; create an empty one so
   canonicalization can record equivalences into it.  */
3715 variable var = (variable) pool_alloc (onepart_pool
3716 (ONEPART_VALUE));
3717 var->dv = dv;
3718 var->refcount = 1;
3719 var->n_var_parts = 1;
3720 var->onepart = ONEPART_VALUE;
3721 var->in_changed_variables = false;
3722 var->var_part[0].loc_chain = NULL;
3723 var->var_part[0].cur_loc = NULL;
3724 VAR_LOC_1PAUX (var) = NULL;
3725 *slot = var;
3728 VALUE_RECURSED_INTO (node->loc) = true;
3732 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3733 gcc_assert (*dstslot == dvar);
3734 canonicalize_values_star (dstslot, dst);
3735 gcc_checking_assert (dstslot
3736 == shared_hash_find_slot_noinsert_1 (dst->vars,
3737 dv, dvhash));
3738 dvar = (variable)*dstslot;
/* Finally, share the result with s2var or s1var when the
   intersection turned out equal to one of them.  */
3742 if (!onepart_variable_different_p (dvar, s2var))
3744 variable_htab_free (dvar);
3745 *dstslot = dvar = s2var;
3746 dvar->refcount++;
3748 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3750 variable_htab_free (dvar);
3751 *dstslot = dvar = s1var;
3752 dvar->refcount++;
3753 dst_can_be_shared = false;
3755 else
3756 dst_can_be_shared = false;
3758 return 1;
3761 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3762 multi-part variable. Unions of multi-part variables and
3763 intersections of one-part ones will be handled in
3764 variable_merge_over_cur(). */
3766 static int
3767 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3769 dataflow_set *dst = dsm->dst;
3770 decl_or_value dv = s2var->dv;
3772 if (!s2var->onepart)
3774 void **dstp = shared_hash_find_slot (dst->vars, dv);
3775 *dstp = s2var;
3776 s2var->refcount++;
3777 return 1;
3780 dsm->src_onepart_cnt++;
3781 return 1;
3784 /* Combine dataflow set information from SRC2 into DST, using PDST
3785 to carry over information across passes. */
3787 static void
3788 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3790 dataflow_set cur = *dst;
3791 dataflow_set *src1 = &cur;
3792 struct dfset_merge dsm;
3793 int i;
3794 size_t src1_elems, src2_elems;
3795 htab_iterator hi;
3796 variable var;
3798 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3799 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3800 dataflow_set_init (dst);
3801 dst->stack_adjust = cur.stack_adjust;
3802 shared_hash_destroy (dst->vars);
3803 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3804 dst->vars->refcount = 1;
3805 dst->vars->htab
3806 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3807 variable_htab_eq, variable_htab_free);
3809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3810 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3812 dsm.dst = dst;
3813 dsm.src = src2;
3814 dsm.cur = src1;
3815 dsm.src_onepart_cnt = 0;
3817 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3818 variable_merge_over_src (var, &dsm);
3819 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3820 variable_merge_over_cur (var, &dsm);
3822 if (dsm.src_onepart_cnt)
3823 dst_can_be_shared = false;
3825 dataflow_set_destroy (src1);
3828 /* Mark register equivalences. */
3830 static void
3831 dataflow_set_equiv_regs (dataflow_set *set)
3833 int i;
3834 attrs list, *listp;
3836 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3838 rtx canon[NUM_MACHINE_MODES];
3840 /* If the list is empty or one entry, no need to canonicalize
3841 anything. */
3842 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3843 continue;
3845 memset (canon, 0, sizeof (canon));
/* Pass 1: for each machine mode, pick the most canonical VALUE
   currently bound to register I.  */
3847 for (list = set->regs[i]; list; list = list->next)
3848 if (list->offset == 0 && dv_is_value_p (list->dv))
3850 rtx val = dv_as_value (list->dv);
3851 rtx *cvalp = &canon[(int)GET_MODE (val)];
3852 rtx cval = *cvalp;
3854 if (canon_value_cmp (val, cval))
3855 *cvalp = val;
/* Pass 2: record mutual equivalences between each one-part dv on
   register I and the canonical VALUE chosen for its mode, marking
   affected VALUEs with VALUE_RECURSED_INTO.  */
3858 for (list = set->regs[i]; list; list = list->next)
3859 if (list->offset == 0 && dv_onepart_p (list->dv))
3861 rtx cval = canon[(int)GET_MODE (list->loc)];
3863 if (!cval)
3864 continue;
3866 if (dv_is_value_p (list->dv))
3868 rtx val = dv_as_value (list->dv);
3870 if (val == cval)
3871 continue;
3873 VALUE_RECURSED_INTO (val) = true;
3874 set_variable_part (set, val, dv_from_value (cval), 0,
3875 VAR_INIT_STATUS_INITIALIZED,
3876 NULL, NO_INSERT);
3879 VALUE_RECURSED_INTO (cval) = true;
3880 set_variable_part (set, cval, list->dv, 0,
3881 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Pass 3: canonicalize the marked VALUEs; if the attrs list was
   rewritten under us (*listp != list), restart its scan.  */
3884 for (listp = &set->regs[i]; (list = *listp);
3885 listp = list ? &list->next : listp)
3886 if (list->offset == 0 && dv_onepart_p (list->dv))
3888 rtx cval = canon[(int)GET_MODE (list->loc)];
3889 void **slot;
3891 if (!cval)
3892 continue;
3894 if (dv_is_value_p (list->dv))
3896 rtx val = dv_as_value (list->dv);
3897 if (!VALUE_RECURSED_INTO (val))
3898 continue;
3901 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3902 canonicalize_values_star (slot, set);
3903 if (*listp != list)
3904 list = NULL;
3909 /* Remove any redundant values in the location list of VAR, which must
3910 be unshared and 1-part. */
3912 static void
3913 remove_duplicate_values (variable var)
3915 location_chain node, *nodep;
3917 gcc_assert (var->onepart);
3918 gcc_assert (var->n_var_parts == 1);
3919 gcc_assert (var->refcount == 1);
3921 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3923 if (GET_CODE (node->loc) == VALUE)
3925 if (VALUE_RECURSED_INTO (node->loc))
3927 /* Remove duplicate value node. */
3928 *nodep = node->next;
3929 pool_free (loc_chain_pool, node);
3930 continue;
3932 else
3933 VALUE_RECURSED_INTO (node->loc) = true;
3935 nodep = &node->next;
3938 for (node = var->var_part[0].loc_chain; node; node = node->next)
3939 if (GET_CODE (node->loc) == VALUE)
3941 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3942 VALUE_RECURSED_INTO (node->loc) = false;
3947 /* Hash table iteration argument passed to variable_post_merge. */
/* NOTE(review): passed (as void *info) to both
   variable_post_merge_new_vals and variable_post_merge_perm_vals by
   dataflow_post_merge_adjust.  */
3948 struct dfset_post_merge
3950 /* The new input set for the current block. */
3951 dataflow_set *set;
3952 /* Pointer to the permanent input set for the current block, or
3953 NULL. */
3954 dataflow_set **permp;
3957 /* Create values for incoming expressions associated with one-part
3958 variables that don't have value numbers for them. */
3960 static int
3961 variable_post_merge_new_vals (void **slot, void *info)
3963 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3964 dataflow_set *set = dfpm->set;
3965 variable var = (variable)*slot;
3966 location_chain node;
3968 if (!var->onepart || !var->n_var_parts)
3969 return 1;
3971 gcc_assert (var->n_var_parts == 1);
3973 if (dv_is_decl_p (var->dv))
3975 bool check_dupes = false;
3977 restart:
3978 for (node = var->var_part[0].loc_chain; node; node = node->next)
3980 if (GET_CODE (node->loc) == VALUE)
3981 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3982 else if (GET_CODE (node->loc) == REG)
3984 attrs att, *attp, *curp = NULL;
/* We are about to rewrite the location list; unshare VAR first
   and rescan from the top.  */
3986 if (var->refcount != 1)
3988 slot = unshare_variable (set, slot, var,
3989 VAR_INIT_STATUS_INITIALIZED);
3990 var = (variable)*slot;
3991 goto restart;
/* Scan the register's attrs for an existing VALUE with matching
   mode, remembering the attr naming VAR itself in CURP.  */
3994 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3995 attp = &att->next)
3996 if (att->offset == 0
3997 && GET_MODE (att->loc) == GET_MODE (node->loc))
3999 if (dv_is_value_p (att->dv))
4001 rtx cval = dv_as_value (att->dv);
4002 node->loc = cval;
4003 check_dupes = true;
4004 break;
4006 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4007 curp = attp;
4010 if (!curp)
4012 curp = attp;
4013 while (*curp)
4014 if ((*curp)->offset == 0
4015 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4016 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4017 break;
4018 else
4019 curp = &(*curp)->next;
4020 gcc_assert (*curp);
/* No VALUE is bound to this register yet: fetch one from the
   permanent set, creating the permanent set and/or a fresh cselib
   VALUE on demand.  */
4023 if (!att)
4025 decl_or_value cdv;
4026 rtx cval;
4028 if (!*dfpm->permp)
4030 *dfpm->permp = XNEW (dataflow_set);
4031 dataflow_set_init (*dfpm->permp);
4034 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4035 att; att = att->next)
4036 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4038 gcc_assert (att->offset == 0
4039 && dv_is_value_p (att->dv));
4040 val_reset (set, att->dv);
4041 break;
4044 if (att)
4046 cdv = att->dv;
4047 cval = dv_as_value (cdv);
4049 else
4051 /* Create a unique value to hold this register,
4052 that ought to be found and reused in
4053 subsequent rounds. */
4054 cselib_val *v;
4055 gcc_assert (!cselib_lookup (node->loc,
4056 GET_MODE (node->loc), 0,
4057 VOIDmode));
4058 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4059 VOIDmode);
4060 cselib_preserve_value (v);
4061 cselib_invalidate_rtx (node->loc);
4062 cval = v->val_rtx;
4063 cdv = dv_from_value (cval);
4064 if (dump_file)
4065 fprintf (dump_file,
4066 "Created new value %u:%u for reg %i\n",
4067 v->uid, v->hash, REGNO (node->loc));
4070 var_reg_decl_set (*dfpm->permp, node->loc,
4071 VAR_INIT_STATUS_INITIALIZED,
4072 cdv, 0, NULL, INSERT);
4074 node->loc = cval;
4075 check_dupes = true;
4078 /* Remove attribute referring to the decl, which now
4079 uses the value for the register, already existing or
4080 to be added when we bring perm in. */
4081 att = *curp;
4082 *curp = att->next;
4083 pool_free (attrs_pool, att);
4087 if (check_dupes)
4088 remove_duplicate_values (var);
4091 return 1;
4094 /* Reset values in the permanent set that are not associated with the
4095 chosen expression. */
4097 static int
4098 variable_post_merge_perm_vals (void **pslot, void *info)
4100 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4101 dataflow_set *set = dfpm->set;
4102 variable pvar = (variable)*pslot, var;
4103 location_chain pnode;
4104 decl_or_value dv;
4105 attrs att;
/* Permanent-set entries are always a VALUE dv holding exactly one
   REG location (created by variable_post_merge_new_vals).  */
4107 gcc_assert (dv_is_value_p (pvar->dv)
4108 && pvar->n_var_parts == 1);
4109 pnode = pvar->var_part[0].loc_chain;
4110 gcc_assert (pnode
4111 && !pnode->next
4112 && REG_P (pnode->loc));
4114 dv = pvar->dv;
4116 var = shared_hash_find (set->vars, dv);
4117 if (var)
4119 /* Although variable_post_merge_new_vals may have made decls
4120 non-star-canonical, values that pre-existed in canonical form
4121 remain canonical, and newly-created values reference a single
4122 REG, so they are canonical as well. Since VAR has the
4123 location list for a VALUE, using find_loc_in_1pdv for it is
4124 fine, since VALUEs don't map back to DECLs. */
4125 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4126 return 1;
4127 val_reset (set, dv);
/* Look for a VALUE already occupying the register in SET.  */
4130 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4131 if (att->offset == 0
4132 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4133 && dv_is_value_p (att->dv))
4134 break;
4136 /* If there is a value associated with this register already, create
4137 an equivalence. */
4138 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4140 rtx cval = dv_as_value (att->dv);
4141 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4142 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4143 NULL, INSERT);
4145 else if (!att)
4147 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4148 dv, 0, pnode->loc);
4149 variable_union (pvar, set);
4152 return 1;
4155 /* Just checking stuff and registering register attributes for
4156 now. */
4158 static void
4159 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4161 struct dfset_post_merge dfpm;
4163 dfpm.set = set;
4164 dfpm.permp = permp;
4166 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4167 &dfpm);
4168 if (*permp)
4169 htab_traverse (shared_hash_htab ((*permp)->vars),
4170 variable_post_merge_perm_vals, &dfpm);
4171 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4172 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4175 /* Return a node whose loc is a MEM that refers to EXPR in the
4176 location list of a one-part variable or value VAR, or in that of
4177 any values recursively mentioned in the location lists. */
4179 static location_chain
4180 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4182 location_chain node;
4183 decl_or_value dv;
4184 variable var;
4185 location_chain where = NULL;
4187 if (!val)
4188 return NULL;
4190 gcc_assert (GET_CODE (val) == VALUE
4191 && !VALUE_RECURSED_INTO (val));
4193 dv = dv_from_value (val);
4194 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4196 if (!var)
4197 return NULL;
4199 gcc_assert (var->onepart);
4201 if (!var->n_var_parts)
4202 return NULL;
4204 VALUE_RECURSED_INTO (val) = true;
4206 for (node = var->var_part[0].loc_chain; node; node = node->next)
4207 if (MEM_P (node->loc)
4208 && MEM_EXPR (node->loc) == expr
4209 && INT_MEM_OFFSET (node->loc) == 0)
4211 where = node;
4212 break;
4214 else if (GET_CODE (node->loc) == VALUE
4215 && !VALUE_RECURSED_INTO (node->loc)
4216 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4217 break;
4219 VALUE_RECURSED_INTO (val) = false;
4221 return where;
4224 /* Return TRUE if the value of MEM may vary across a call. */
4226 static bool
4227 mem_dies_at_call (rtx mem)
4229 tree expr = MEM_EXPR (mem);
4230 tree decl;
4232 if (!expr)
4233 return true;
4235 decl = get_base_address (expr);
4237 if (!decl)
4238 return true;
4240 if (!DECL_P (decl))
4241 return true;
4243 return (may_be_aliased (decl)
4244 || (!TREE_READONLY (decl) && is_global_var (decl)));
4247 /* Remove all MEMs from the location list of a hash table entry for a
4248 one-part variable, except those whose MEM attributes map back to
4249 the variable itself, directly or within a VALUE. */
4251 static int
4252 dataflow_set_preserve_mem_locs (void **slot, void *data)
4254 dataflow_set *set = (dataflow_set *) data;
4255 variable var = (variable) *slot;
4257 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4259 tree decl = dv_as_decl (var->dv);
4260 location_chain loc, *locp;
4261 bool changed = false;
4263 if (!var->n_var_parts)
4264 return 1;
4266 gcc_assert (var->n_var_parts == 1);
/* For a shared variable, first check whether any change would be
   needed at all, to avoid unsharing unnecessarily.  */
4268 if (shared_var_p (var, set->vars))
4270 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4272 /* We want to remove dying MEMs that doesn't refer to DECL. */
4273 if (GET_CODE (loc->loc) == MEM
4274 && (MEM_EXPR (loc->loc) != decl
4275 || INT_MEM_OFFSET (loc->loc) != 0)
4276 && !mem_dies_at_call (loc->loc))
4277 break;
4278 /* We want to move here MEMs that do refer to DECL. */
4279 else if (GET_CODE (loc->loc) == VALUE
4280 && find_mem_expr_in_1pdv (decl, loc->loc,
4281 shared_hash_htab (set->vars)))
4282 break;
4285 if (!loc)
4286 return 1;
4288 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4289 var = (variable)*slot;
4290 gcc_assert (var->n_var_parts == 1);
/* Walk the (now unshared) list: pull DECL's MEMs up out of VALUEs,
   keep surviving locations, and delete dying foreign MEMs.  */
4293 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4294 loc; loc = *locp)
4296 rtx old_loc = loc->loc;
4297 if (GET_CODE (old_loc) == VALUE)
4299 location_chain mem_node
4300 = find_mem_expr_in_1pdv (decl, loc->loc,
4301 shared_hash_htab (set->vars));
4303 /* ??? This picks up only one out of multiple MEMs that
4304 refer to the same variable. Do we ever need to be
4305 concerned about dealing with more than one, or, given
4306 that they should all map to the same variable
4307 location, their addresses will have been merged and
4308 they will be regarded as equivalent? */
4309 if (mem_node)
4311 loc->loc = mem_node->loc;
4312 loc->set_src = mem_node->set_src;
4313 loc->init = MIN (loc->init, mem_node->init);
4317 if (GET_CODE (loc->loc) != MEM
4318 || (MEM_EXPR (loc->loc) == decl
4319 && INT_MEM_OFFSET (loc->loc) == 0)
4320 || !mem_dies_at_call (loc->loc))
4322 if (old_loc != loc->loc && emit_notes)
4324 if (old_loc == var->var_part[0].cur_loc)
4326 changed = true;
4327 var->var_part[0].cur_loc = NULL;
4330 locp = &loc->next;
4331 continue;
4334 if (emit_notes)
4336 if (old_loc == var->var_part[0].cur_loc)
4338 changed = true;
4339 var->var_part[0].cur_loc = NULL;
4342 *locp = loc->next;
4343 pool_free (loc_chain_pool, loc);
/* If the whole chain went away, drop the variable part too.  */
4346 if (!var->var_part[0].loc_chain)
4348 var->n_var_parts--;
4349 changed = true;
4351 if (changed)
4352 variable_was_changed (var, set);
4355 return 1;
4358 /* Remove all MEMs from the location list of a hash table entry for a
4359 value. */
4361 static int
4362 dataflow_set_remove_mem_locs (void **slot, void *data)
4364 dataflow_set *set = (dataflow_set *) data;
4365 variable var = (variable) *slot;
4367 if (var->onepart == ONEPART_VALUE)
4369 location_chain loc, *locp;
4370 bool changed = false;
4371 rtx cur_loc;
4373 gcc_assert (var->n_var_parts == 1);
/* For a shared variable, only unshare if some dying MEM actually
   needs removing.  */
4375 if (shared_var_p (var, set->vars))
4377 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4378 if (GET_CODE (loc->loc) == MEM
4379 && mem_dies_at_call (loc->loc))
4380 break;
4382 if (!loc)
4383 return 1;
4385 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4386 var = (variable)*slot;
4387 gcc_assert (var->n_var_parts == 1);
4390 if (VAR_LOC_1PAUX (var))
4391 cur_loc = VAR_LOC_FROM (var);
4392 else
4393 cur_loc = var->var_part[0].cur_loc;
4395 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4396 loc; loc = *locp)
4398 if (GET_CODE (loc->loc) != MEM
4399 || !mem_dies_at_call (loc->loc))
4401 locp = &loc->next;
4402 continue;
4405 *locp = loc->next;
4406 /* If we have deleted the location which was last emitted
4407 we have to emit new location so add the variable to set
4408 of changed variables. */
4409 if (cur_loc == loc->loc)
4411 changed = true;
4412 var->var_part[0].cur_loc = NULL;
4413 if (VAR_LOC_1PAUX (var))
4414 VAR_LOC_FROM (var) = NULL;
4416 pool_free (loc_chain_pool, loc);
/* If the whole chain went away, drop the variable part too.  */
4419 if (!var->var_part[0].loc_chain)
4421 var->n_var_parts--;
4422 changed = true;
4424 if (changed)
4425 variable_was_changed (var, set);
4428 return 1;
4431 /* Remove all variable-location information about call-clobbered
4432 registers, as well as associations between MEMs and VALUEs. */
4434 static void
4435 dataflow_set_clear_at_call (dataflow_set *set)
4437 int r;
4439 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4440 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4441 var_regno_delete (set, r);
4443 if (MAY_HAVE_DEBUG_INSNS)
4445 set->traversed_vars = set->vars;
4446 htab_traverse (shared_hash_htab (set->vars),
4447 dataflow_set_preserve_mem_locs, set);
4448 set->traversed_vars = set->vars;
4449 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4450 set);
4451 set->traversed_vars = NULL;
4455 static bool
4456 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4458 location_chain lc1, lc2;
4460 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4462 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4464 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4466 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4467 break;
4469 if (rtx_equal_p (lc1->loc, lc2->loc))
4470 break;
4472 if (!lc2)
4473 return true;
4475 return false;
4478 /* Return true if one-part variables VAR1 and VAR2 are different.
4479 They must be in canonical order. */
4481 static bool
4482 onepart_variable_different_p (variable var1, variable var2)
4484 location_chain lc1, lc2;
4486 if (var1 == var2)
4487 return false;
4489 gcc_assert (var1->n_var_parts == 1
4490 && var2->n_var_parts == 1);
4492 lc1 = var1->var_part[0].loc_chain;
4493 lc2 = var2->var_part[0].loc_chain;
4495 gcc_assert (lc1 && lc2);
4497 while (lc1 && lc2)
4499 if (loc_cmp (lc1->loc, lc2->loc))
4500 return true;
4501 lc1 = lc1->next;
4502 lc2 = lc2->next;
4505 return lc1 != lc2;
4508 /* Return true if variables VAR1 and VAR2 are different. */
4510 static bool
4511 variable_different_p (variable var1, variable var2)
4513 int i;
4515 if (var1 == var2)
4516 return false;
4518 if (var1->onepart != var2->onepart)
4519 return true;
4521 if (var1->n_var_parts != var2->n_var_parts)
4522 return true;
4524 if (var1->onepart && var1->n_var_parts)
4526 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4527 && var1->n_var_parts == 1);
4528 /* One-part values have locations in a canonical order. */
4529 return onepart_variable_different_p (var1, var2);
4532 for (i = 0; i < var1->n_var_parts; i++)
4534 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4535 return true;
4536 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4537 return true;
4538 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4539 return true;
4541 return false;
4544 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4546 static bool
4547 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4549 htab_iterator hi;
4550 variable var1;
/* Shared tables are trivially equal; different element counts are
   trivially unequal.  */
4552 if (old_set->vars == new_set->vars)
4553 return false;
4555 if (htab_elements (shared_hash_htab (old_set->vars))
4556 != htab_elements (shared_hash_htab (new_set->vars)))
4557 return true;
4559 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4561 htab_t htab = shared_hash_htab (new_set->vars);
4562 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4563 dv_htab_hash (var1->dv));
4564 if (!var2)
4566 if (dump_file && (dump_flags & TDF_DETAILS))
4568 fprintf (dump_file, "dataflow difference found: removal of:\n");
4569 dump_var (var1);
4571 return true;
4574 if (variable_different_p (var1, var2))
4576 if (dump_file && (dump_flags & TDF_DETAILS))
4578 fprintf (dump_file, "dataflow difference found: "
4579 "old and new follow:\n");
4580 dump_var (var1);
4581 dump_var (var2);
4583 return true;
4587 /* No need to traverse the second hashtab, if both have the same number
4588 of elements and the second one had all entries found in the first one,
4589 then it can't have any extra entries. */
4590 return false;
4593 /* Free the contents of dataflow set SET. */
4595 static void
4596 dataflow_set_destroy (dataflow_set *set)
4598 int i;
4600 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4601 attrs_list_clear (&set->regs[i]);
4603 shared_hash_destroy (set->vars);
4604 set->vars = NULL;
4607 /* Return true if RTL X contains a SYMBOL_REF. */
4609 static bool
4610 contains_symbol_ref (rtx x)
4612 const char *fmt;
4613 RTX_CODE code;
4614 int i;
4616 if (!x)
4617 return false;
4619 code = GET_CODE (x);
4620 if (code == SYMBOL_REF)
4621 return true;
4623 fmt = GET_RTX_FORMAT (code);
4624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4626 if (fmt[i] == 'e')
4628 if (contains_symbol_ref (XEXP (x, i)))
4629 return true;
4631 else if (fmt[i] == 'E')
4633 int j;
4634 for (j = 0; j < XVECLEN (x, i); j++)
4635 if (contains_symbol_ref (XVECEXP (x, i, j)))
4636 return true;
4640 return false;
4643 /* Shall EXPR be tracked? */
4645 static bool
4646 track_expr_p (tree expr, bool need_rtl)
4648 rtx decl_rtl;
4649 tree realdecl;
/* Debug-expr decls are tracked whenever they have RTL.  */
4651 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4652 return DECL_RTL_SET_P (expr);
4654 /* If EXPR is not a parameter or a variable do not track it. */
4655 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4656 return 0;
4658 /* It also must have a name... */
4659 if (!DECL_NAME (expr) && need_rtl)
4660 return 0;
4662 /* ... and a RTL assigned to it. */
4663 decl_rtl = DECL_RTL_IF_SET (expr);
4664 if (!decl_rtl && need_rtl)
4665 return 0;
4667 /* If this expression is really a debug alias of some other declaration, we
4668 don't need to track this expression if the ultimate declaration is
4669 ignored. */
4670 realdecl = expr;
4671 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4673 realdecl = DECL_DEBUG_EXPR (realdecl);
4674 if (realdecl == NULL_TREE)
4675 realdecl = expr;
4676 else if (!DECL_P (realdecl))
4678 if (handled_component_p (realdecl))
4680 HOST_WIDE_INT bitsize, bitpos, maxsize;
4681 tree innerdecl
4682 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4683 &maxsize);
/* NOTE(review): the 256 below caps the tracked bit range of a
   component reference — presumably tied to MAX_VAR_PARTS sizing;
   confirm before changing.  */
4684 if (!DECL_P (innerdecl)
4685 || DECL_IGNORED_P (innerdecl)
4686 || TREE_STATIC (innerdecl)
4687 || bitsize <= 0
4688 || bitpos + bitsize > 256
4689 || bitsize != maxsize)
4690 return 0;
4691 else
4692 realdecl = expr;
4694 else
4695 return 0;
4699 /* Do not track EXPR if REALDECL it should be ignored for debugging
4700 purposes. */
4701 if (DECL_IGNORED_P (realdecl))
4702 return 0;
4704 /* Do not track global variables until we are able to emit correct location
4705 list for them. */
4706 if (TREE_STATIC (realdecl))
4707 return 0;
4709 /* When the EXPR is a DECL for alias of some variable (see example)
4710 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4711 DECL_RTL contains SYMBOL_REF.
4713 Example:
4714 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4715 char **_dl_argv;
4717 if (decl_rtl && MEM_P (decl_rtl)
4718 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4719 return 0;
4721 /* If RTX is a memory it should not be very large (because it would be
4722 an array or struct). */
4723 if (decl_rtl && MEM_P (decl_rtl))
4725 /* Do not track structures and arrays. */
4726 if (GET_MODE (decl_rtl) == BLKmode
4727 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4728 return 0;
4729 if (MEM_SIZE_KNOWN_P (decl_rtl)
4730 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4731 return 0;
/* Clear the change marks so later passes start from a clean state.  */
4734 DECL_CHANGED (expr) = 0;
4735 DECL_CHANGED (realdecl) = 0;
4736 return 1;
4739 /* Determine whether a given LOC refers to the same variable part as
4740 EXPR+OFFSET. */
4742 static bool
4743 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4745 tree expr2;
4746 HOST_WIDE_INT offset2;
4748 if (! DECL_P (expr))
4749 return false;
4751 if (REG_P (loc))
4753 expr2 = REG_EXPR (loc);
4754 offset2 = REG_OFFSET (loc);
4756 else if (MEM_P (loc))
4758 expr2 = MEM_EXPR (loc);
4759 offset2 = INT_MEM_OFFSET (loc);
4761 else
4762 return false;
4764 if (! expr2 || ! DECL_P (expr2))
4765 return false;
4767 expr = var_debug_decl (expr);
4768 expr2 = var_debug_decl (expr2);
4770 return (expr == expr2 && offset == offset2);
4773 /* LOC is a REG or MEM that we would like to track if possible.
4774 If EXPR is null, we don't know what expression LOC refers to,
4775 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4776 LOC is an lvalue register.
4778 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4779 is something we can track. When returning true, store the mode of
4780 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4781 from EXPR in *OFFSET_OUT (if nonnull). */
4783 static bool
4784 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4785 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4787 enum machine_mode mode;
4789 if (expr == NULL || !track_expr_p (expr, true))
4790 return false;
4792 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4793 whole subreg, but only the old inner part is really relevant. */
4794 mode = GET_MODE (loc);
4795 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4797 enum machine_mode pseudo_mode;
4799 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4800 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4802 offset += byte_lowpart_offset (pseudo_mode, mode);
4803 mode = pseudo_mode;
4807 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4808 Do the same if we are storing to a register and EXPR occupies
4809 the whole of register LOC; in that case, the whole of EXPR is
4810 being changed. We exclude complex modes from the second case
4811 because the real and imaginary parts are represented as separate
4812 pseudo registers, even if the whole complex value fits into one
4813 hard register. */
4814 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4815 || (store_reg_p
4816 && !COMPLEX_MODE_P (DECL_MODE (expr))
4817 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4818 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4820 mode = DECL_MODE (expr);
4821 offset = 0;
4824 if (offset < 0 || offset >= MAX_VAR_PARTS)
4825 return false;
4827 if (mode_out)
4828 *mode_out = mode;
4829 if (offset_out)
4830 *offset_out = offset;
4831 return true;
4834 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4835 want to track. When returning nonnull, make sure that the attributes
4836 on the returned value are updated. */
4838 static rtx
4839 var_lowpart (enum machine_mode mode, rtx loc)
4841 unsigned int offset, reg_offset, regno;
4843 if (!REG_P (loc) && !MEM_P (loc))
4844 return NULL;
4846 if (GET_MODE (loc) == mode)
4847 return loc;
4849 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4851 if (MEM_P (loc))
4852 return adjust_address_nv (loc, mode, offset);
4854 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4855 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4856 reg_offset, mode);
4857 return gen_rtx_REG_offset (loc, mode, regno, offset);
/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
4878 /* Find a VALUE corresponding to X. */
4880 static inline cselib_val *
4881 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4883 int i;
4885 if (cui->sets)
4887 /* This is called after uses are set up and before stores are
4888 processed by cselib, so it's safe to look up srcs, but not
4889 dsts. So we look up expressions that appear in srcs or in
4890 dest expressions, but we search the sets array for dests of
4891 stores. */
4892 if (cui->store_p)
4894 /* Some targets represent memset and memcpy patterns
4895 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4896 (set (mem:BLK ...) (const_int ...)) or
4897 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4898 in that case, otherwise we end up with mode mismatches. */
4899 if (mode == BLKmode && MEM_P (x))
4900 return NULL;
4901 for (i = 0; i < cui->n_sets; i++)
4902 if (cui->sets[i].dest == x)
4903 return cui->sets[i].src_elt;
4905 else
4906 return cselib_lookup (x, mode, 0, VOIDmode);
4909 return NULL;
4912 /* Replace all registers and addresses in an expression with VALUE
4913 expressions that map back to them, unless the expression is a
4914 register. If no mapping is or can be performed, returns NULL. */
4916 static rtx
4917 replace_expr_with_values (rtx loc)
4919 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
4920 return NULL;
4921 else if (MEM_P (loc))
4923 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4924 get_address_mode (loc), 0,
4925 GET_MODE (loc));
4926 if (addr)
4927 return replace_equiv_address_nv (loc, addr->val_rtx);
4928 else
4929 return NULL;
4931 else
4932 return cselib_subst_to_values (loc, VOIDmode);
4935 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
4936 for_each_rtx to tell whether there are any DEBUG_EXPRs within
4937 RTX. */
4939 static int
4940 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
4942 rtx loc = *x;
4944 return GET_CODE (loc) == DEBUG_EXPR;
/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  When MODEP is
   nonnull, the mode of a REG or MEM LOC is stored through it.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
{
  tree expr;

  /* Value tracking (cselib) section: only active when CUI carries the
     insn's sets.  If none of the VALUE-based cases match, control falls
     through to the plain REG/MEM classification below.  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      /* A store qualifies as MO_VAL_SET if the destination is a
		 register, or a MEM with both a known value and a known
		 address value; otherwise fall through.  */
	      if (REG_P (loc)
		  || (find_use_val (loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);

	      /* Only unpreserved values need a MO_VAL_USE record.  */
	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      /* Only hard registers are expected at this point.  */
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      if (loc == cfa_base_rtx)
	return MO_CLOBBER;
      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    false, modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
			    false, modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_INSNS
		   || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}
5043 /* Log to OUT information about micro-operation MOPT involving X in
5044 INSN of BB. */
5046 static inline void
5047 log_op_type (rtx x, basic_block bb, rtx insn,
5048 enum micro_operation_type mopt, FILE *out)
5050 fprintf (out, "bb %i op %i insn %i %s ",
5051 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
5052 INSN_UID (insn), micro_operation_type_name[mopt]);
5053 print_inline_rtx (out, x, 2);
5054 fputc ('\n', out);
/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)

/* All preserved VALUEs.  */
static VEC (rtx, heap) *preserved_values;
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  cselib_preserve_value (val);
  /* Record the value so vt_emit_notes can iterate over all preserved
     values later.  */
  VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
}
5087 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5088 any rtxes not suitable for CONST use not replaced by VALUEs
5089 are discovered. */
5091 static int
5092 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5094 if (*x == NULL_RTX)
5095 return 0;
5097 switch (GET_CODE (*x))
5099 case REG:
5100 case DEBUG_EXPR:
5101 case PC:
5102 case SCRATCH:
5103 case CC0:
5104 case ASM_INPUT:
5105 case ASM_OPERANDS:
5106 return 1;
5107 case MEM:
5108 return !MEM_READONLY_P (*x);
5109 default:
5110 return 0;
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  INSN is instruction which the LOC is part of.
   Meant to be passed to for_each_rtx via add_uses_1; DATA is the
   struct count_use_info for the insn.  Always returns 0 so the walk
   continues.  */

static int
add_uses (rtx *ploc, void *data)
{
  rtx loc = *ploc;
  enum machine_mode mode = VOIDmode;
  struct count_use_info *cui = (struct count_use_info *)data;
  enum micro_operation_type type = use_type (loc, cui, &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* Preserve the VALUE of a complex (non-REG, non-MEM) address
	     so it survives until note emission.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      enum machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST
		  || for_each_rtx (&vloc, non_suitable_const, NULL)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
	    {
	      enum machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (vloc);

	      if (nloc)
		{
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      /* Wrap the location in a CONCAT with its VALUE.  */
	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (vloc, 0, &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* No usable value: mark the location as unknown.  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  enum machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* As above, keep a complex address's VALUE alive.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      enum machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  type2 = use_type (loc, 0, &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
      VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
    }

  return 0;
}
/* Helper function for finding all uses of REG/MEM in X in insn INSN.
   CUI is the struct count_use_info to pass through to add_uses.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  for_each_rtx (x, add_uses, cui);
}
/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)
/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.  Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  */
#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
/* Attempt to reverse the EXPR operation in the debug info and record
   it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
   no longer live we can express its value as VAL - 6.  VAL is the
   VALUE of EXPR's destination; INSN is the insn containing EXPR.  */

static void
reverse_op (rtx val, const_rtx expr, rtx insn)
{
  rtx src, arg, ret;
  cselib_val *v;
  struct elt_loc_list *l;
  enum rtx_code code;

  if (GET_CODE (expr) != SET)
    return;

  /* Only a simple register destination in the same mode as VAL can be
     reversed.  */
  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
    return;

  /* Only these operations have a computable inverse.  */
  src = SET_SRC (expr);
  switch (GET_CODE (src))
    {
    case PLUS:
    case MINUS:
    case XOR:
    case NOT:
    case NEG:
      if (!REG_P (XEXP (src, 0)))
	return;
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
	return;
      break;
    default:
      return;
    }

  if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
    return;

  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
  if (!v || !cselib_preserved_value_p (v))
    return;

  /* Use canonical V to avoid creating multiple redundant expressions
     for different VALUES equivalent to V.  */
  v = canonical_cselib_val (v);

  /* Adding a reverse op isn't useful if V already has an always valid
     location.  Ignore ENTRY_VALUE, while it is always constant, we should
     prefer non-ENTRY_VALUE locations whenever possible.  */
  for (l = v->locs; l; l = l->next)
    if (CONSTANT_P (l->loc)
	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
      return;

  /* Build the inverse expression RET in terms of VAL.  */
  switch (GET_CODE (src))
    {
    case NOT:
    case NEG:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      /* NOT and NEG are their own inverses.  */
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* The inverse of an extension is truncation to the narrow mode.  */
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
      break;
    case XOR:
      code = XOR;
      goto binary;
    case PLUS:
      code = MINUS;
      goto binary;
    case MINUS:
      code = PLUS;
      goto binary;
    binary:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      arg = XEXP (src, 1);
      /* The second operand must reduce to a constant, possibly after
	 cselib expansion.  */
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	{
	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
	  if (arg == NULL_RTX)
	    return;
	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	    return;
	}
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
      if (ret == val)
	/* Ensure ret isn't VALUE itself (which can happen e.g. for
	   (plus (reg1) (reg2)) when reg2 is known to be 0), as that
	   breaks a lot of routines during var-tracking.  */
	ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
      break;
    default:
      gcc_unreachable ();
    }

  cselib_add_permanent_equiv (v, ret, insn);
}
/* Add stores (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
   CUIP->insn is instruction which the LOC is part of.  Meant to be
   passed to note_stores.  */

static void
add_stores (rtx loc, const_rtx expr, void *cuip)
{
  enum machine_mode mode = VOIDmode, mode2;
  struct count_use_info *cui = (struct count_use_info *)cuip;
  basic_block bb = cui->bb;
  micro_operation mo;
  rtx oloc = loc, nloc, src = NULL;
  enum micro_operation_type type = use_type (loc, cui, &mode);
  bool track_p = false;
  cselib_val *v;
  bool resolve, preserve;

  if (type == MO_CLOBBER)
    return;

  mode2 = mode;

  if (REG_P (loc))
    {
      gcc_assert (loc != cfa_base_rtx);
      /* A clobber, or a register we do not track, becomes MO_CLOBBER;
	 a SET with a suitable source is still recorded so the value
	 can be reconstructed.  */
      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
	  || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
	  || GET_CODE (expr) == CLOBBER)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = loc;
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && !unsuitable_loc (SET_SRC (expr))
	      && find_use_val (loc, mode, cui))
	    {
	      gcc_checking_assert (type == MO_VAL_SET);
	      mo.u.loc = gen_r_tx_SET (VOIDmode, loc, SET_SRC (expr));
	    }
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
	      /* A store from the same variable part is a copy, not a
		 new set.  */
	      if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else if (MEM_P (loc)
	   && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
	       || cui->sets))
    {
      /* Preserve the VALUE of a complex (non-REG, non-MEM) address so
	 it survives until note emission.  */
      if (MEM_P (loc) && type == MO_VAL_SET
	  && !REG_P (XEXP (loc, 0))
	  && !MEM_P (XEXP (loc, 0)))
	{
	  rtx mloc = loc;
	  enum machine_mode address_mode = get_address_mode (mloc);
	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
					   address_mode, 0,
					   GET_MODE (mloc));

	  if (val && !cselib_preserved_value_p (val))
	    preserve_value (val);
	}

      if (GET_CODE (expr) == CLOBBER || !track_p)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
	      if (same_variable_part_p (SET_SRC (xexpr),
					MEM_EXPR (loc),
					INT_MEM_OFFSET (loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else
    return;

  if (type != MO_VAL_SET)
    goto log_and_return;

  v = find_use_val (oloc, mode, cui);

  if (!v)
    goto log_and_return;

  resolve = preserve = !cselib_preserved_value_p (v);

  nloc = replace_expr_with_values (oloc);
  if (nloc)
    oloc = nloc;

  if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
    {
      /* For a conditional store, also record a use of the previous
	 value of the destination, since the store may not happen.  */
      cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);

      gcc_assert (oval != v);
      gcc_assert (REG_P (oloc) || MEM_P (oloc));

      if (oval && !cselib_preserved_value_p (oval))
	{
	  micro_operation moa;

	  preserve_value (oval);

	  moa.type = MO_VAL_USE;
	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
	  moa.insn = cui->insn;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    log_op_type (moa.u.loc, cui->bb, cui->insn,
			 moa.type, dump_file);
	  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
	}

      resolve = false;
    }
  else if (resolve && GET_CODE (mo.u.loc) == SET)
    {
      if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
	nloc = replace_expr_with_values (SET_SRC (expr));
      else
	nloc = NULL_RTX;

      /* Avoid the mode mismatch between oexpr and expr.  */
      if (!nloc && mode != mode2)
	{
	  nloc = SET_SRC (expr);
	  gcc_assert (oloc == SET_DEST (expr));
	}

      if (nloc && nloc != SET_SRC (mo.u.loc))
	oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
      else
	{
	  if (oloc == SET_DEST (mo.u.loc))
	    /* No point in duplicating.  */
	    oloc = mo.u.loc;
	  if (!REG_P (SET_SRC (mo.u.loc)))
	    resolve = false;
	}
    }
  else if (!resolve)
    {
      if (GET_CODE (mo.u.loc) == SET
	  && oloc == SET_DEST (mo.u.loc))
	/* No point in duplicating.  */
	oloc = mo.u.loc;
    }
  else
    resolve = false;

  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);

  if (mo.u.loc != oloc)
    loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);

  /* The loc of a MO_VAL_SET may have various forms:

     (concat val dst): dst now holds val

     (concat val (set dst src)): dst now holds val, copied from src

     (concat (concat val dstv) dst): dst now holds val; dstv is dst
     after replacing mems and non-top-level regs with values.

     (concat (concat val dstv) (set dst src)): dst now holds val,
     copied from src.  dstv is a value-based representation of dst, if
     it differs from dst.  If resolution is needed, src is a REG, and
     its mode is the same as that of val.

     (concat (concat val (set dstv srcv)) (set dst src)): src
     copied to dst, holding val.  dstv and srcv are value-based
     representations of dst and src, respectively.

  */

  if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
    reverse_op (v->val_rtx, expr, cui->insn);

  mo.u.loc = loc;

  if (track_p)
    VAL_HOLDS_TRACK_EXPR (loc) = 1;
  if (preserve)
    {
      VAL_NEEDS_RESOLUTION (loc) = resolve;
      preserve_value (v);
    }
  if (mo.type == MO_CLOBBER)
    VAL_EXPR_IS_CLOBBERED (loc) = 1;
  if (mo.type == MO_COPY)
    VAL_EXPR_IS_COPIED (loc) = 1;

  mo.type = MO_VAL_SET;

 log_and_return:
  if (dump_file && (dump_flags & TDF_DETAILS))
    log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
/* Arguments to the call being scanned, built up by
   prepare_call_arguments as an EXPR_LIST.  */
static rtx call_arguments;
5644 /* Compute call_arguments. */
5646 static void
5647 prepare_call_arguments (basic_block bb, rtx insn)
5649 rtx link, x;
5650 rtx prev, cur, next;
5651 rtx call = PATTERN (insn);
5652 rtx this_arg = NULL_RTX;
5653 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5654 tree obj_type_ref = NULL_TREE;
5655 CUMULATIVE_ARGS args_so_far_v;
5656 cumulative_args_t args_so_far;
5658 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5659 args_so_far = pack_cumulative_args (&args_so_far_v);
5660 if (GET_CODE (call) == PARALLEL)
5661 call = XVECEXP (call, 0, 0);
5662 if (GET_CODE (call) == SET)
5663 call = SET_SRC (call);
5664 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5666 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5668 rtx symbol = XEXP (XEXP (call, 0), 0);
5669 if (SYMBOL_REF_DECL (symbol))
5670 fndecl = SYMBOL_REF_DECL (symbol);
5672 if (fndecl == NULL_TREE)
5673 fndecl = MEM_EXPR (XEXP (call, 0));
5674 if (fndecl
5675 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5676 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5677 fndecl = NULL_TREE;
5678 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5679 type = TREE_TYPE (fndecl);
5680 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5682 if (TREE_CODE (fndecl) == INDIRECT_REF
5683 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5684 obj_type_ref = TREE_OPERAND (fndecl, 0);
5685 fndecl = NULL_TREE;
5687 if (type)
5689 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5690 t = TREE_CHAIN (t))
5691 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5692 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5693 break;
5694 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5695 type = NULL;
5696 else
5698 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5699 link = CALL_INSN_FUNCTION_USAGE (insn);
5700 #ifndef PCC_STATIC_STRUCT_RETURN
5701 if (aggregate_value_p (TREE_TYPE (type), type)
5702 && targetm.calls.struct_value_rtx (type, 0) == 0)
5704 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5705 enum machine_mode mode = TYPE_MODE (struct_addr);
5706 rtx reg;
5707 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5708 nargs + 1);
5709 reg = targetm.calls.function_arg (args_so_far, mode,
5710 struct_addr, true);
5711 targetm.calls.function_arg_advance (args_so_far, mode,
5712 struct_addr, true);
5713 if (reg == NULL_RTX)
5715 for (; link; link = XEXP (link, 1))
5716 if (GET_CODE (XEXP (link, 0)) == USE
5717 && MEM_P (XEXP (XEXP (link, 0), 0)))
5719 link = XEXP (link, 1);
5720 break;
5724 else
5725 #endif
5726 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5727 nargs);
5728 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5730 enum machine_mode mode;
5731 t = TYPE_ARG_TYPES (type);
5732 mode = TYPE_MODE (TREE_VALUE (t));
5733 this_arg = targetm.calls.function_arg (args_so_far, mode,
5734 TREE_VALUE (t), true);
5735 if (this_arg && !REG_P (this_arg))
5736 this_arg = NULL_RTX;
5737 else if (this_arg == NULL_RTX)
5739 for (; link; link = XEXP (link, 1))
5740 if (GET_CODE (XEXP (link, 0)) == USE
5741 && MEM_P (XEXP (XEXP (link, 0), 0)))
5743 this_arg = XEXP (XEXP (link, 0), 0);
5744 break;
5751 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5753 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5754 if (GET_CODE (XEXP (link, 0)) == USE)
5756 rtx item = NULL_RTX;
5757 x = XEXP (XEXP (link, 0), 0);
5758 if (GET_MODE (link) == VOIDmode
5759 || GET_MODE (link) == BLKmode
5760 || (GET_MODE (link) != GET_MODE (x)
5761 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
5762 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
5763 /* Can't do anything for these, if the original type mode
5764 isn't known or can't be converted. */;
5765 else if (REG_P (x))
5767 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5768 if (val && cselib_preserved_value_p (val))
5769 item = val->val_rtx;
5770 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5772 enum machine_mode mode = GET_MODE (x);
5774 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5775 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5777 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5779 if (reg == NULL_RTX || !REG_P (reg))
5780 continue;
5781 val = cselib_lookup (reg, mode, 0, VOIDmode);
5782 if (val && cselib_preserved_value_p (val))
5784 item = val->val_rtx;
5785 break;
5790 else if (MEM_P (x))
5792 rtx mem = x;
5793 cselib_val *val;
5795 if (!frame_pointer_needed)
5797 struct adjust_mem_data amd;
5798 amd.mem_mode = VOIDmode;
5799 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5800 amd.side_effects = NULL_RTX;
5801 amd.store = true;
5802 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5803 &amd);
5804 gcc_assert (amd.side_effects == NULL_RTX);
5806 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5807 if (val && cselib_preserved_value_p (val))
5808 item = val->val_rtx;
5809 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
5811 /* For non-integer stack argument see also if they weren't
5812 initialized by integers. */
5813 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
5814 if (imode != GET_MODE (mem) && imode != BLKmode)
5816 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
5817 imode, 0, VOIDmode);
5818 if (val && cselib_preserved_value_p (val))
5819 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
5820 imode);
5824 if (item)
5826 rtx x2 = x;
5827 if (GET_MODE (item) != GET_MODE (link))
5828 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
5829 if (GET_MODE (x2) != GET_MODE (link))
5830 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
5831 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
5832 call_arguments
5833 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5835 if (t && t != void_list_node)
5837 tree argtype = TREE_VALUE (t);
5838 enum machine_mode mode = TYPE_MODE (argtype);
5839 rtx reg;
5840 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5842 argtype = build_pointer_type (argtype);
5843 mode = TYPE_MODE (argtype);
5845 reg = targetm.calls.function_arg (args_so_far, mode,
5846 argtype, true);
5847 if (TREE_CODE (argtype) == REFERENCE_TYPE
5848 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5849 && reg
5850 && REG_P (reg)
5851 && GET_MODE (reg) == mode
5852 && GET_MODE_CLASS (mode) == MODE_INT
5853 && REG_P (x)
5854 && REGNO (x) == REGNO (reg)
5855 && GET_MODE (x) == mode
5856 && item)
5858 enum machine_mode indmode
5859 = TYPE_MODE (TREE_TYPE (argtype));
5860 rtx mem = gen_rtx_MEM (indmode, x);
5861 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5862 if (val && cselib_preserved_value_p (val))
5864 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5865 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5866 call_arguments);
5868 else
5870 struct elt_loc_list *l;
5871 tree initial;
5873 /* Try harder, when passing address of a constant
5874 pool integer it can be easily read back. */
5875 item = XEXP (item, 1);
5876 if (GET_CODE (item) == SUBREG)
5877 item = SUBREG_REG (item);
5878 gcc_assert (GET_CODE (item) == VALUE);
5879 val = CSELIB_VAL_PTR (item);
5880 for (l = val->locs; l; l = l->next)
5881 if (GET_CODE (l->loc) == SYMBOL_REF
5882 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5883 && SYMBOL_REF_DECL (l->loc)
5884 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5886 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
5887 if (host_integerp (initial, 0))
5889 item = GEN_INT (tree_low_cst (initial, 0));
5890 item = gen_rtx_CONCAT (indmode, mem, item);
5891 call_arguments
5892 = gen_rtx_EXPR_LIST (VOIDmode, item,
5893 call_arguments);
5895 break;
5899 targetm.calls.function_arg_advance (args_so_far, mode,
5900 argtype, true);
5901 t = TREE_CHAIN (t);
5905 /* Add debug arguments. */
5906 if (fndecl
5907 && TREE_CODE (fndecl) == FUNCTION_DECL
5908 && DECL_HAS_DEBUG_ARGS_P (fndecl))
5910 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
5911 if (debug_args)
5913 unsigned int ix;
5914 tree param;
5915 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
5917 rtx item;
5918 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
5919 enum machine_mode mode = DECL_MODE (dtemp);
5920 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
5921 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
5922 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5923 call_arguments);
5928 /* Reverse call_arguments chain. */
5929 prev = NULL_RTX;
5930 for (cur = call_arguments; cur; cur = next)
5932 next = XEXP (cur, 1);
5933 XEXP (cur, 1) = prev;
5934 prev = cur;
5936 call_arguments = prev;
5938 x = PATTERN (insn);
5939 if (GET_CODE (x) == PARALLEL)
5940 x = XVECEXP (x, 0, 0);
5941 if (GET_CODE (x) == SET)
5942 x = SET_SRC (x);
5943 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
5945 x = XEXP (XEXP (x, 0), 0);
5946 if (GET_CODE (x) == SYMBOL_REF)
5947 /* Don't record anything. */;
5948 else if (CONSTANT_P (x))
5950 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
5951 pc_rtx, x);
5952 call_arguments
5953 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5955 else
5957 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5958 if (val && cselib_preserved_value_p (val))
5960 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
5961 call_arguments
5962 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5966 if (this_arg)
5968 enum machine_mode mode
5969 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
5970 rtx clobbered = gen_rtx_MEM (mode, this_arg);
5971 HOST_WIDE_INT token
5972 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
5973 if (token)
5974 clobbered = plus_constant (mode, clobbered,
5975 token * GET_MODE_SIZE (mode));
5976 clobbered = gen_rtx_MEM (mode, clobbered);
5977 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
5978 call_arguments
5979 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5983 /* Callback for cselib_record_sets_hook, that records as micro
5984 operations uses and stores in an insn after cselib_record_sets has
5985 analyzed the sets in an insn, but before it modifies the stored
5986 values in the internal tables, unless cselib_record_sets doesn't
5987 call it directly (perhaps because we're not doing cselib in the
5988 first place, in which case sets and n_sets will be 0). */
/* NOTE(review): this extract is a line-numbered blob dump; lines that
   contained only braces were lost in extraction, so statement grouping
   below must be read against the upstream source.  */
5990 static void
5991 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5993 basic_block bb = BLOCK_FOR_INSN (insn);
5994 int n1, n2;
5995 struct count_use_info cui;
5996 micro_operation *mos;
/* Mark that this hook actually ran for the insn; read elsewhere in the
   pass -- TODO confirm the consumer against the full file.  */
5998 cselib_hook_called = true;
6000 cui.insn = insn;
6001 cui.bb = bb;
6002 cui.sets = sets;
6003 cui.n_sets = n_sets;
/* Pass 1: collect uses (store_p == false); the micro ops appended by
   note_uses land in mos[n1..n2] and are reordered below.  */
6005 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6006 cui.store_p = false;
6007 note_uses (&PATTERN (insn), add_uses_1, &cui);
6008 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6009 mos = VEC_address (micro_operation, VTI (bb)->mos);
6011 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6012 MO_VAL_LOC last. */
/* Each while-loop below is one partition step (as in quicksort):
   advance n1 past entries already in place, retreat n2 likewise, then
   swap the out-of-place pair.  */
6013 while (n1 < n2)
6015 while (n1 < n2 && mos[n1].type == MO_USE)
6016 n1++;
6017 while (n1 < n2 && mos[n2].type != MO_USE)
6018 n2--;
6019 if (n1 < n2)
6021 micro_operation sw;
6023 sw = mos[n1];
6024 mos[n1] = mos[n2];
6025 mos[n2] = sw;
6029 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6030 while (n1 < n2)
6032 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6033 n1++;
6034 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6035 n2--;
6036 if (n1 < n2)
6038 micro_operation sw;
6040 sw = mos[n1];
6041 mos[n1] = mos[n2];
6042 mos[n2] = sw;
/* For call insns emit a MO_CALL micro op carrying the EXPR_LIST of
   arguments assembled by prepare_call_arguments; ownership transfers
   to the micro op, so the global is cleared here.  */
6046 if (CALL_P (insn))
6048 micro_operation mo;
6050 mo.type = MO_CALL;
6051 mo.insn = insn;
6052 mo.u.loc = call_arguments;
6053 call_arguments = NULL_RTX;
6055 if (dump_file && (dump_flags & TDF_DETAILS))
6056 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6057 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
/* Pass 2: collect stores (store_p == true) and order them.  */
6060 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6061 /* This will record NEXT_INSN (insn), such that we can
6062 insert notes before it without worrying about any
6063 notes that MO_USEs might emit after the insn. */
6064 cui.store_p = true;
6065 note_stores (PATTERN (insn), add_stores, &cui);
6066 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6067 mos = VEC_address (micro_operation, VTI (bb)->mos);
6069 /* Order the MO_VAL_USEs first (note_stores does nothing
6070 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6071 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6072 while (n1 < n2)
6074 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6075 n1++;
6076 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6077 n2--;
6078 if (n1 < n2)
6080 micro_operation sw;
6082 sw = mos[n1];
6083 mos[n1] = mos[n2];
6084 mos[n2] = sw;
6088 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6089 while (n1 < n2)
6091 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6092 n1++;
6093 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6094 n2--;
6095 if (n1 < n2)
6097 micro_operation sw;
6099 sw = mos[n1];
6100 mos[n1] = mos[n2];
6101 mos[n2] = sw;
/* Return the initialization status of SRC (a REG or MEM) in dataflow
   set IN.  Without -fvar-tracking-uninit everything is reported as
   initialized; otherwise the status is looked up via get_init_value
   for the debug decl backing SRC.  */
6106 static enum var_init_status
6107 find_src_status (dataflow_set *in, rtx src)
6109 tree decl = NULL_TREE;
6110 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6112 if (! flag_var_tracking_uninit)
6113 status = VAR_INIT_STATUS_INITIALIZED;
6115 if (src && REG_P (src))
6116 decl = var_debug_decl (REG_EXPR (src));
6117 else if (src && MEM_P (src))
6118 decl = var_debug_decl (MEM_EXPR (src));
/* Only consult the dataflow set when SRC maps to a tracked decl;
   otherwise the default chosen above is returned unchanged.  */
6120 if (src && decl)
6121 status = get_init_value (in, src, dv_from_decl (decl));
6123 return status;
6126 /* SRC is the source of an assignment. Use SET to try to find what
6127 was ultimately assigned to SRC. Return that value if known,
6128 otherwise return SRC itself. */
/* NOTE(review): the header comment says "otherwise return SRC itself",
   but set_src is initialized to NULL_RTX and only overwritten when a
   matching location is found, so NULL_RTX is returned in the not-found
   case.  This matches upstream; the comment, not the code, is stale.  */
6130 static rtx
6131 find_src_set_src (dataflow_set *set, rtx src)
6133 tree decl = NULL_TREE; /* The variable being copied around. */
6134 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6135 variable var;
6136 location_chain nextp;
6137 int i;
6138 bool found;
6140 if (src && REG_P (src))
6141 decl = var_debug_decl (REG_EXPR (src));
6142 else if (src && MEM_P (src))
6143 decl = var_debug_decl (MEM_EXPR (src));
6145 if (src && decl)
6147 decl_or_value dv = dv_from_decl (decl);
6149 var = shared_hash_find (set->vars, dv);
6150 if (var)
/* Scan every location chain of every variable part for a location
   rtx-equal to SRC; its recorded set_src is the answer.  */
6152 found = false;
6153 for (i = 0; i < var->n_var_parts && !found; i++)
6154 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6155 nextp = nextp->next)
6156 if (rtx_equal_p (nextp->loc, src))
6158 set_src = nextp->set_src;
6159 found = true;
6165 return set_src;
6168 /* Compute the changes of variable locations in the basic block BB. */
/* OUT is recomputed as a copy of IN with the block's micro operations
   replayed in order; the previous OUT is saved so the return value can
   report whether anything changed (drives the fixed-point iteration in
   vt_find_locations).  NOTE(review): brace-only lines are missing from
   this dump -- case bodies must be read against the upstream source.  */
6170 static bool
6171 compute_bb_dataflow (basic_block bb)
6173 unsigned int i;
6174 micro_operation *mo;
6175 bool changed;
6176 dataflow_set old_out;
6177 dataflow_set *in = &VTI (bb)->in;
6178 dataflow_set *out = &VTI (bb)->out;
6180 dataflow_set_init (&old_out);
6181 dataflow_set_copy (&old_out, out);
6182 dataflow_set_copy (out, in);
6184 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6186 rtx insn = mo->insn;
6188 switch (mo->type)
6190 case MO_CALL:
6191 dataflow_set_clear_at_call (out);
6192 break;
6194 case MO_USE:
6196 rtx loc = mo->u.loc;
6198 if (REG_P (loc))
6199 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6200 else if (MEM_P (loc))
6201 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6203 break;
/* MO_VAL_LOC: a debug-insn location note.  loc is either a CONCAT of
   (VALUE, VAR_LOCATION) or a bare VAR_LOCATION.  */
6205 case MO_VAL_LOC:
6207 rtx loc = mo->u.loc;
6208 rtx val, vloc;
6209 tree var;
6211 if (GET_CODE (loc) == CONCAT)
6213 val = XEXP (loc, 0);
6214 vloc = XEXP (loc, 1);
6216 else
6218 val = NULL_RTX;
6219 vloc = loc;
6222 var = PAT_VAR_LOCATION_DECL (vloc);
6224 clobber_variable_part (out, NULL_RTX,
6225 dv_from_decl (var), 0, NULL_RTX);
6226 if (val)
6228 if (VAL_NEEDS_RESOLUTION (loc))
6229 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6230 set_variable_part (out, val, dv_from_decl (var), 0,
6231 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6232 INSERT);
6234 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6235 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6236 dv_from_decl (var), 0,
6237 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6238 INSERT);
6240 break;
6242 case MO_VAL_USE:
6244 rtx loc = mo->u.loc;
6245 rtx val, vloc, uloc;
6247 vloc = uloc = XEXP (loc, 1);
6248 val = XEXP (loc, 0);
6250 if (GET_CODE (val) == CONCAT)
6252 uloc = XEXP (val, 1);
6253 val = XEXP (val, 0);
6256 if (VAL_NEEDS_RESOLUTION (loc))
6257 val_resolve (out, val, vloc, insn);
6258 else
6259 val_store (out, val, uloc, insn, false);
6261 if (VAL_HOLDS_TRACK_EXPR (loc))
6263 if (GET_CODE (uloc) == REG)
6264 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6265 NULL);
6266 else if (GET_CODE (uloc) == MEM)
6267 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6268 NULL);
6271 break;
6273 case MO_VAL_SET:
6275 rtx loc = mo->u.loc;
6276 rtx val, vloc, uloc;
6278 vloc = loc;
6279 uloc = XEXP (vloc, 1);
6280 val = XEXP (vloc, 0);
6281 vloc = uloc;
6283 if (GET_CODE (val) == CONCAT)
6285 vloc = XEXP (val, 1);
6286 val = XEXP (val, 0);
6289 if (GET_CODE (vloc) == SET)
6291 rtx vsrc = SET_SRC (vloc);
6293 gcc_assert (val != vsrc);
6294 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6296 vloc = SET_DEST (vloc);
6298 if (VAL_NEEDS_RESOLUTION (loc))
6299 val_resolve (out, val, vsrc, insn);
6301 else if (VAL_NEEDS_RESOLUTION (loc))
6303 gcc_assert (GET_CODE (uloc) == SET
6304 && GET_CODE (SET_SRC (uloc)) == REG);
6305 val_resolve (out, val, SET_SRC (uloc), insn);
6308 if (VAL_HOLDS_TRACK_EXPR (loc))
6310 if (VAL_EXPR_IS_CLOBBERED (loc))
6312 if (REG_P (uloc))
6313 var_reg_delete (out, uloc, true);
6314 else if (MEM_P (uloc))
6315 var_mem_delete (out, uloc, true);
6317 else
6319 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6320 rtx set_src = NULL;
6321 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6323 if (GET_CODE (uloc) == SET)
6325 set_src = SET_SRC (uloc);
6326 uloc = SET_DEST (uloc);
/* For a copy, propagate the source's init status: look in IN first
   and fall back to OUT if unknown.  */
6329 if (copied_p)
6331 if (flag_var_tracking_uninit)
6333 status = find_src_status (in, set_src);
6335 if (status == VAR_INIT_STATUS_UNKNOWN)
6336 status = find_src_status (out, set_src);
6339 set_src = find_src_set_src (in, set_src);
6342 if (REG_P (uloc))
6343 var_reg_delete_and_set (out, uloc, !copied_p,
6344 status, set_src);
6345 else if (MEM_P (uloc))
6346 var_mem_delete_and_set (out, uloc, !copied_p,
6347 status, set_src);
6350 else if (REG_P (uloc))
6351 var_regno_delete (out, REGNO (uloc));
6353 val_store (out, val, vloc, insn, true);
6355 break;
6357 case MO_SET:
6359 rtx loc = mo->u.loc;
6360 rtx set_src = NULL;
6362 if (GET_CODE (loc) == SET)
6364 set_src = SET_SRC (loc);
6365 loc = SET_DEST (loc);
6368 if (REG_P (loc))
6369 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6370 set_src);
6371 else if (MEM_P (loc))
6372 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6373 set_src);
6375 break;
6377 case MO_COPY:
6379 rtx loc = mo->u.loc;
6380 enum var_init_status src_status;
6381 rtx set_src = NULL;
6383 if (GET_CODE (loc) == SET)
6385 set_src = SET_SRC (loc);
6386 loc = SET_DEST (loc);
6389 if (! flag_var_tracking_uninit)
6390 src_status = VAR_INIT_STATUS_INITIALIZED;
6391 else
6393 src_status = find_src_status (in, set_src);
6395 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6396 src_status = find_src_status (out, set_src);
6399 set_src = find_src_set_src (in, set_src);
6401 if (REG_P (loc))
6402 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6403 else if (MEM_P (loc))
6404 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6406 break;
6408 case MO_USE_NO_VAR:
6410 rtx loc = mo->u.loc;
6412 if (REG_P (loc))
6413 var_reg_delete (out, loc, false);
6414 else if (MEM_P (loc))
6415 var_mem_delete (out, loc, false);
6417 break;
6419 case MO_CLOBBER:
6421 rtx loc = mo->u.loc;
6423 if (REG_P (loc))
6424 var_reg_delete (out, loc, true);
6425 else if (MEM_P (loc))
6426 var_mem_delete (out, loc, true);
6428 break;
6430 case MO_ADJUST:
6431 out->stack_adjust += mo->u.adjust;
6432 break;
/* With debug insns, canonicalize VALUE equivalences in OUT before
   comparing against the previous OUT.  */
6436 if (MAY_HAVE_DEBUG_INSNS)
6438 dataflow_set_equiv_regs (out);
6439 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6440 out);
6441 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6442 out);
6443 #if ENABLE_CHECKING
6444 htab_traverse (shared_hash_htab (out->vars),
6445 canonicalize_loc_order_check, out);
6446 #endif
6448 changed = dataflow_set_different (&old_out, out);
6449 dataflow_set_destroy (&old_out);
6450 return changed;
6453 /* Find the locations of variables in the whole function. */
/* Iterative dataflow solver: two fibonacci heaps implement a two-level
   worklist ('worklist' = current round, 'pending' = next round), both
   keyed by reverse-completion order of a DFS so propagation follows the
   CFG.  Returns false if the size limit PARAM_MAX_VARTRACK_SIZE is
   exceeded.  */
6455 static bool
6456 vt_find_locations (void)
6458 fibheap_t worklist, pending, fibheap_swap;
6459 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6460 basic_block bb;
6461 edge e;
6462 int *bb_order;
6463 int *rc_order;
6464 int i;
6465 int htabsz = 0;
6466 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6467 bool success = true;
6469 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6470 /* Compute reverse completion order of depth first search of the CFG
6471 so that the data-flow runs faster. */
6472 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6473 bb_order = XNEWVEC (int, last_basic_block);
6474 pre_and_rev_post_order_compute (NULL, rc_order, false);
6475 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6476 bb_order[rc_order[i]] = i;
6477 free (rc_order);
6479 worklist = fibheap_new ();
6480 pending = fibheap_new ();
6481 visited = sbitmap_alloc (last_basic_block);
6482 in_worklist = sbitmap_alloc (last_basic_block);
6483 in_pending = sbitmap_alloc (last_basic_block);
6484 sbitmap_zero (in_worklist);
/* Seed the first round with every basic block.  */
6486 FOR_EACH_BB (bb)
6487 fibheap_insert (pending, bb_order[bb->index], bb);
6488 sbitmap_ones (in_pending);
6490 while (success && !fibheap_empty (pending))
/* Promote the pending round to the current round and start over with
   a fresh 'visited' set.  */
6492 fibheap_swap = pending;
6493 pending = worklist;
6494 worklist = fibheap_swap;
6495 sbitmap_swap = in_pending;
6496 in_pending = in_worklist;
6497 in_worklist = sbitmap_swap;
6499 sbitmap_zero (visited);
6501 while (!fibheap_empty (worklist))
6503 bb = (basic_block) fibheap_extract_min (worklist);
6504 RESET_BIT (in_worklist, bb->index);
/* NOTE(review): the assert makes the following if-test trivially
   true in checking builds; this redundancy exists upstream too.  */
6505 gcc_assert (!TEST_BIT (visited, bb->index));
6506 if (!TEST_BIT (visited, bb->index))
6508 bool changed;
6509 edge_iterator ei;
6510 int oldinsz, oldoutsz;
6512 SET_BIT (visited, bb->index);
/* Keep a running total of the IN/OUT hash-table sizes so the pass
   can bail out when it grows past htabmax.  */
6514 if (VTI (bb)->in.vars)
6516 htabsz
6517 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6518 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6519 oldinsz
6520 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6521 oldoutsz
6522 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6524 else
6525 oldinsz = oldoutsz = 0;
6527 if (MAY_HAVE_DEBUG_INSNS)
6529 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6530 bool first = true, adjust = false;
6532 /* Calculate the IN set as the intersection of
6533 predecessor OUT sets. */
6535 dataflow_set_clear (in);
6536 dst_can_be_shared = true;
6538 FOR_EACH_EDGE (e, ei, bb->preds)
6539 if (!VTI (e->src)->flooded)
6540 gcc_assert (bb_order[bb->index]
6541 <= bb_order[e->src->index])
6542 else if (first)
6544 dataflow_set_copy (in, &VTI (e->src)->out);
6545 first_out = &VTI (e->src)->out;
6546 first = false;
6548 else
6550 dataflow_set_merge (in, &VTI (e->src)->out);
6551 adjust = true;
6554 if (adjust)
6556 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6557 #if ENABLE_CHECKING
6558 /* Merge and merge_adjust should keep entries in
6559 canonical order. */
6560 htab_traverse (shared_hash_htab (in->vars),
6561 canonicalize_loc_order_check,
6562 in);
6563 #endif
/* If the merge left IN identical to the first predecessor's OUT,
   share its hash table instead of keeping a copy.  */
6564 if (dst_can_be_shared)
6566 shared_hash_destroy (in->vars);
6567 in->vars = shared_hash_copy (first_out->vars);
6571 VTI (bb)->flooded = true;
6573 else
6575 /* Calculate the IN set as union of predecessor OUT sets. */
6576 dataflow_set_clear (&VTI (bb)->in);
6577 FOR_EACH_EDGE (e, ei, bb->preds)
6578 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6581 changed = compute_bb_dataflow (bb);
6582 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6583 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6585 if (htabmax && htabsz > htabmax)
6587 if (MAY_HAVE_DEBUG_INSNS)
6588 inform (DECL_SOURCE_LOCATION (cfun->decl),
6589 "variable tracking size limit exceeded with "
6590 "-fvar-tracking-assignments, retrying without");
6591 else
6592 inform (DECL_SOURCE_LOCATION (cfun->decl),
6593 "variable tracking size limit exceeded");
6594 success = false;
6595 break;
/* OUT changed: successors already visited this round go to the next
   round (pending); unvisited ones join the current round.  */
6598 if (changed)
6600 FOR_EACH_EDGE (e, ei, bb->succs)
6602 if (e->dest == EXIT_BLOCK_PTR)
6603 continue;
6605 if (TEST_BIT (visited, e->dest->index))
6607 if (!TEST_BIT (in_pending, e->dest->index))
6609 /* Send E->DEST to next round. */
6610 SET_BIT (in_pending, e->dest->index);
6611 fibheap_insert (pending,
6612 bb_order[e->dest->index],
6613 e->dest);
6616 else if (!TEST_BIT (in_worklist, e->dest->index))
6618 /* Add E->DEST to current round. */
6619 SET_BIT (in_worklist, e->dest->index);
6620 fibheap_insert (worklist, bb_order[e->dest->index],
6621 e->dest);
6626 if (dump_file)
6627 fprintf (dump_file,
6628 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6629 bb->index,
6630 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6631 oldinsz,
6632 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6633 oldoutsz,
6634 (int)worklist->nodes, (int)pending->nodes, htabsz);
6636 if (dump_file && (dump_flags & TDF_DETAILS))
6638 fprintf (dump_file, "BB %i IN:\n", bb->index);
6639 dump_dataflow_set (&VTI (bb)->in);
6640 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6641 dump_dataflow_set (&VTI (bb)->out);
6647 if (success && MAY_HAVE_DEBUG_INSNS)
6648 FOR_EACH_BB (bb)
6649 gcc_assert (VTI (bb)->flooded);
6651 free (bb_order);
6652 fibheap_delete (worklist);
6653 fibheap_delete (pending);
6654 sbitmap_free (visited);
6655 sbitmap_free (in_worklist);
6656 sbitmap_free (in_pending);
6658 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6659 return success;
6662 /* Print the content of the LIST to dump file. */
/* Each entry is printed as its decl or VALUE followed by "+offset".  */
6664 static void
6665 dump_attrs_list (attrs list)
6667 for (; list; list = list->next)
6669 if (dv_is_decl_p (list->dv))
6670 print_mem_expr (dump_file, dv_as_decl (list->dv))
6671 else
6672 print_rtl_single (dump_file, dv_as_value (list->dv));
6673 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6675 fprintf (dump_file, "\n");
6678 /* Print the information about variable *SLOT to dump file. */
/* htab_traverse callback; DATA is unused.  */
6680 static int
6681 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6683 variable var = (variable) *slot;
6685 dump_var (var);
6687 /* Continue traversing the hash table. */
6688 return 1;
6691 /* Print the information about variable VAR to dump file. */
/* Prints the decl name (or rtl for a VALUE), then each variable part's
   offset and location chain; uninitialized locations get "[uninit]".  */
6693 static void
6694 dump_var (variable var)
6696 int i;
6697 location_chain node;
6699 if (dv_is_decl_p (var->dv))
6701 const_tree decl = dv_as_decl (var->dv);
6703 if (DECL_NAME (decl))
6705 fprintf (dump_file, " name: %s",
6706 IDENTIFIER_POINTER (DECL_NAME (decl)));
6707 if (dump_flags & TDF_UID)
6708 fprintf (dump_file, "D.%u", DECL_UID (decl));
6710 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6711 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6712 else
6713 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6714 fprintf (dump_file, "\n");
6716 else
6718 fputc (' ', dump_file);
6719 print_rtl_single (dump_file, dv_as_value (var->dv));
/* Onepart variables always report offset 0.  */
6722 for (i = 0; i < var->n_var_parts; i++)
6724 fprintf (dump_file, " offset %ld\n",
6725 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
6726 for (node = var->var_part[i].loc_chain; node; node = node->next)
6728 fprintf (dump_file, " ");
6729 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6730 fprintf (dump_file, "[uninit]");
6731 print_rtl_single (dump_file, node->loc);
6736 /* Print the information about variables from hash table VARS to dump file. */
/* Prints nothing at all for an empty table (not even the header).  */
6738 static void
6739 dump_vars (htab_t vars)
6741 if (htab_elements (vars) > 0)
6743 fprintf (dump_file, "Variables:\n");
6744 htab_traverse (vars, dump_var_slot, NULL);
6748 /* Print the dataflow set SET to dump file. */
/* Order: stack adjustment, per-hard-register attrs lists, then the
   variable hash table.  */
6750 static void
6751 dump_dataflow_set (dataflow_set *set)
6753 int i;
6755 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6756 set->stack_adjust);
6757 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6759 if (set->regs[i])
6761 fprintf (dump_file, "Reg %d:", i);
6762 dump_attrs_list (set->regs[i]);
6765 dump_vars (shared_hash_htab (set->vars));
6766 fprintf (dump_file, "\n");
6769 /* Print the IN and OUT sets for each basic block to dump file. */
6771 static void
6772 dump_dataflow_sets (void)
6774 basic_block bb;
6776 FOR_EACH_BB (bb)
6778 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6779 fprintf (dump_file, "IN:\n");
6780 dump_dataflow_set (&VTI (bb)->in);
6781 fprintf (dump_file, "OUT:\n");
6782 dump_dataflow_set (&VTI (bb)->out);
6786 /* Return the variable for DV in dropped_values, inserting one if
6787 requested with INSERT. */
/* With NO_INSERT, returns NULL when DV has no entry.  A freshly
   inserted entry is an empty onepart variable (n_var_parts == 0,
   refcount 1) and DV is marked changed.  */
6789 static inline variable
6790 variable_from_dropped (decl_or_value dv, enum insert_option insert)
6792 void **slot;
6793 variable empty_var;
6794 onepart_enum_t onepart;
6796 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
6797 insert);
6799 if (!slot)
6800 return NULL;
6802 if (*slot)
6803 return (variable) *slot;
6805 gcc_checking_assert (insert == INSERT);
6807 onepart = dv_onepart_p (dv);
/* Only VALUEs and debug exprs live in dropped_values.  */
6809 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
6811 empty_var = (variable) pool_alloc (onepart_pool (onepart));
6812 empty_var->dv = dv;
6813 empty_var->refcount = 1;
6814 empty_var->n_var_parts = 0;
6815 empty_var->onepart = onepart;
6816 empty_var->in_changed_variables = false;
6817 empty_var->var_part[0].loc_chain = NULL;
6818 empty_var->var_part[0].cur_loc = NULL;
6819 VAR_LOC_1PAUX (empty_var) = NULL;
6820 set_dv_changed (dv, true);
6822 *slot = empty_var;
6824 return empty_var;
6827 /* Recover the one-part aux from dropped_values. */
/* If VAR already has aux, return it; otherwise steal the aux from the
   matching dropped_values entry (ownership moves to VAR).  Returns
   NULL for ONEPART_VDECL or when nothing is recorded.  */
6829 static struct onepart_aux *
6830 recover_dropped_1paux (variable var)
6832 variable dvar;
6834 gcc_checking_assert (var->onepart);
6836 if (VAR_LOC_1PAUX (var))
6837 return VAR_LOC_1PAUX (var);
6839 if (var->onepart == ONEPART_VDECL)
6840 return NULL;
6842 dvar = variable_from_dropped (var->dv, NO_INSERT);
6844 if (!dvar)
6845 return NULL;
6847 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
6848 VAR_LOC_1PAUX (dvar) = NULL;
6850 return VAR_LOC_1PAUX (var);
6853 /* Add variable VAR to the hash table of changed variables and
6854 if it has no locations delete it from SET's hash table. */
/* NOTE(review): brace-only lines are missing from this dump; the
   nesting of the dropped_values/empty_var logic must be read against
   the upstream source.  */
6856 static void
6857 variable_was_changed (variable var, dataflow_set *set)
6859 hashval_t hash = dv_htab_hash (var->dv);
6861 if (emit_notes)
6863 void **slot;
6865 /* Remember this decl or VALUE has been added to changed_variables. */
6866 set_dv_changed (var->dv, true);
6868 slot = htab_find_slot_with_hash (changed_variables,
6869 var->dv,
6870 hash, INSERT);
6872 if (*slot)
6874 variable old_var = (variable) *slot;
6875 gcc_assert (old_var->in_changed_variables);
6876 old_var->in_changed_variables = false;
6877 if (var != old_var && var->onepart)
6879 /* Restore the auxiliary info from an empty variable
6880 previously created for changed_variables, so it is
6881 not lost. */
6882 gcc_checking_assert (!VAR_LOC_1PAUX (var));
6883 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
6884 VAR_LOC_1PAUX (old_var) = NULL;
6886 variable_htab_free (*slot);
/* VAR lost all locations: record an empty placeholder in
   changed_variables (and, for VALUEs/debug exprs, in dropped_values)
   and then drop VAR from SET's table.  */
6889 if (set && var->n_var_parts == 0)
6891 onepart_enum_t onepart = var->onepart;
6892 variable empty_var = NULL;
6893 void **dslot = NULL;
6895 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
6897 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
6898 dv_htab_hash (var->dv),
6899 INSERT);
6900 empty_var = (variable) *dslot;
6902 if (empty_var)
6904 gcc_checking_assert (!empty_var->in_changed_variables);
6905 if (!VAR_LOC_1PAUX (var))
6907 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
6908 VAR_LOC_1PAUX (empty_var) = NULL;
6910 else
6911 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
6915 if (!empty_var)
6917 empty_var = (variable) pool_alloc (onepart_pool (onepart));
6918 empty_var->dv = var->dv;
6919 empty_var->refcount = 1;
6920 empty_var->n_var_parts = 0;
6921 empty_var->onepart = onepart;
6922 if (dslot)
6924 empty_var->refcount++;
6925 *dslot = empty_var;
6928 else
6929 empty_var->refcount++;
6930 empty_var->in_changed_variables = true;
6931 *slot = empty_var;
6932 if (onepart)
6934 empty_var->var_part[0].loc_chain = NULL;
6935 empty_var->var_part[0].cur_loc = NULL;
6936 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
6937 VAR_LOC_1PAUX (var) = NULL;
6939 goto drop_var;
6941 else
6943 if (var->onepart && !VAR_LOC_1PAUX (var))
6944 recover_dropped_1paux (var);
6945 var->refcount++;
6946 var->in_changed_variables = true;
6947 *slot = var;
6950 else
6952 gcc_assert (set);
6953 if (var->n_var_parts == 0)
6955 void **slot;
6957 drop_var:
6958 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6959 if (slot)
/* Unshare before clearing so sharers of the table are unaffected.  */
6961 if (shared_hash_shared (set->vars))
6962 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6963 NO_INSERT);
6964 htab_clear_slot (shared_hash_htab (set->vars), slot);
6970 /* Look for the index in VAR->var_part corresponding to OFFSET.
6971 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6972 referenced int will be set to the index that the part has or should
6973 have, if it should be inserted. */
6975 static inline int
6976 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6977 int *insertion_point)
6979 int pos, low, high;
/* Onepart variables have a single part at offset 0, so the answer is
   either part n_var_parts - 1 (i.e. 0 or -1 when empty) or "not
   found" for a nonzero offset.  */
6981 if (var->onepart)
6983 if (offset != 0)
6984 return -1;
6986 if (insertion_point)
6987 *insertion_point = 0;
6989 return var->n_var_parts - 1;
6992 /* Find the location part. */
/* Binary search over parts sorted by VAR_PART_OFFSET.  */
6993 low = 0;
6994 high = var->n_var_parts;
6995 while (low != high)
6997 pos = (low + high) / 2;
6998 if (VAR_PART_OFFSET (var, pos) < offset)
6999 low = pos + 1;
7000 else
7001 high = pos;
7003 pos = low;
7005 if (insertion_point)
7006 *insertion_point = pos;
7008 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7009 return pos;
7011 return -1;
/* Set the location part of the variable in *SLOT (declaration or value
   DV, part offset OFFSET) to LOC with initialization status INITIALIZED
   and source SET_SRC, creating the variable or the part if necessary
   and unsharing shared variables before modification.  Returns the
   (possibly moved) slot.  NOTE(review): brace-only lines are missing
   from this dump; read nesting against the upstream source.  */
7014 static void **
7015 set_slot_part (dataflow_set *set, rtx loc, void **slot,
7016 decl_or_value dv, HOST_WIDE_INT offset,
7017 enum var_init_status initialized, rtx set_src)
7019 int pos;
7020 location_chain node, next;
7021 location_chain *nextp;
7022 variable var;
7023 onepart_enum_t onepart;
7025 var = (variable) *slot;
7027 if (var)
7028 onepart = var->onepart;
7029 else
7030 onepart = dv_onepart_p (dv);
7032 gcc_checking_assert (offset == 0 || !onepart);
7033 gcc_checking_assert (loc != dv_as_opaque (dv));
7035 if (! flag_var_tracking_uninit)
7036 initialized = VAR_INIT_STATUS_INITIALIZED;
7038 if (!var)
7040 /* Create new variable information. */
7041 var = (variable) pool_alloc (onepart_pool (onepart));
7042 var->dv = dv;
7043 var->refcount = 1;
7044 var->n_var_parts = 1;
7045 var->onepart = onepart;
7046 var->in_changed_variables = false;
7047 if (var->onepart)
7048 VAR_LOC_1PAUX (var) = NULL;
7049 else
7050 VAR_PART_OFFSET (var, 0) = offset;
7051 var->var_part[0].loc_chain = NULL;
7052 var->var_part[0].cur_loc = NULL;
7053 *slot = var;
7054 pos = 0;
7055 nextp = &var->var_part[0].loc_chain;
7057 else if (onepart)
/* Onepart: the loc chain is kept in canonical order (VALUEs by
   canon_value_cmp, then REGs by REGNO, then MEMs by address, then the
   rest by loc_cmp).  After the scan, r == 0 means LOC is already
   present (nothing to do) and c counts the nodes that precede the
   insertion point.  */
7059 int r = -1, c = 0;
7061 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7063 pos = 0;
7065 if (GET_CODE (loc) == VALUE)
7067 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7068 nextp = &node->next)
7069 if (GET_CODE (node->loc) == VALUE)
7071 if (node->loc == loc)
7073 r = 0;
7074 break;
7076 if (canon_value_cmp (node->loc, loc))
7077 c++;
7078 else
7080 r = 1;
7081 break;
7084 else if (REG_P (node->loc) || MEM_P (node->loc))
7085 c++;
7086 else
7088 r = 1;
7089 break;
7092 else if (REG_P (loc))
7094 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7095 nextp = &node->next)
7096 if (REG_P (node->loc))
7098 if (REGNO (node->loc) < REGNO (loc))
7099 c++;
7100 else
7102 if (REGNO (node->loc) == REGNO (loc))
7103 r = 0;
7104 else
7105 r = 1;
7106 break;
7109 else
7111 r = 1;
7112 break;
7115 else if (MEM_P (loc))
7117 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7118 nextp = &node->next)
7119 if (REG_P (node->loc))
7120 c++;
7121 else if (MEM_P (node->loc))
7123 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7124 break;
7125 else
7126 c++;
7128 else
7130 r = 1;
7131 break;
7134 else
7135 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7136 nextp = &node->next)
7137 if ((r = loc_cmp (node->loc, loc)) >= 0)
7138 break;
7139 else
7140 c++;
7142 if (r == 0)
7143 return slot;
/* Re-walk c nodes after unsharing, since the chain was copied.  */
7145 if (shared_var_p (var, set->vars))
7147 slot = unshare_variable (set, slot, var, initialized);
7148 var = (variable)*slot;
7149 for (nextp = &var->var_part[0].loc_chain; c;
7150 nextp = &(*nextp)->next)
7151 c--;
7152 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7155 else
7157 int inspos = 0;
7159 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7161 pos = find_variable_location_part (var, offset, &inspos);
7163 if (pos >= 0)
7165 node = var->var_part[pos].loc_chain;
7167 if (node
7168 && ((REG_P (node->loc) && REG_P (loc)
7169 && REGNO (node->loc) == REGNO (loc))
7170 || rtx_equal_p (node->loc, loc)))
7172 /* LOC is in the beginning of the chain so we have nothing
7173 to do. */
7174 if (node->init < initialized)
7175 node->init = initialized;
7176 if (set_src != NULL)
7177 node->set_src = set_src;
7179 return slot;
7181 else
7183 /* We have to make a copy of a shared variable. */
7184 if (shared_var_p (var, set->vars))
7186 slot = unshare_variable (set, slot, var, initialized);
7187 var = (variable)*slot;
7191 else
7193 /* We have not found the location part, new one will be created. */
7195 /* We have to make a copy of the shared variable. */
7196 if (shared_var_p (var, set->vars))
7198 slot = unshare_variable (set, slot, var, initialized);
7199 var = (variable)*slot;
7202 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7203 thus there are at most MAX_VAR_PARTS different offsets. */
7204 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7205 && (!var->n_var_parts || !onepart));
7207 /* We have to move the elements of array starting at index
7208 inspos to the next position. */
7209 for (pos = var->n_var_parts; pos > inspos; pos--)
7210 var->var_part[pos] = var->var_part[pos - 1];
7212 var->n_var_parts++;
7213 gcc_checking_assert (!onepart);
7214 VAR_PART_OFFSET (var, pos) = offset;
7215 var->var_part[pos].loc_chain = NULL;
7216 var->var_part[pos].cur_loc = NULL;
7219 /* Delete the location from the list. */
/* Remove any old occurrence of LOC, merging its init status and
   set_src into the values to be recorded on the new head node.  */
7220 nextp = &var->var_part[pos].loc_chain;
7221 for (node = var->var_part[pos].loc_chain; node; node = next)
7223 next = node->next;
7224 if ((REG_P (node->loc) && REG_P (loc)
7225 && REGNO (node->loc) == REGNO (loc))
7226 || rtx_equal_p (node->loc, loc))
7228 /* Save these values, to assign to the new node, before
7229 deleting this one. */
7230 if (node->init > initialized)
7231 initialized = node->init;
7232 if (node->set_src != NULL && set_src == NULL)
7233 set_src = node->set_src;
7234 if (var->var_part[pos].cur_loc == node->loc)
7235 var->var_part[pos].cur_loc = NULL;
7236 pool_free (loc_chain_pool, node);
7237 *nextp = next;
7238 break;
7240 else
7241 nextp = &node->next;
7244 nextp = &var->var_part[pos].loc_chain;
7247 /* Add the location to the beginning. */
7248 node = (location_chain) pool_alloc (loc_chain_pool);
7249 node->loc = loc;
7250 node->init = initialized;
7251 node->set_src = set_src;
7252 node->next = *nextp;
7253 *nextp = node;
7255 /* If no location was emitted do so. */
7256 if (var->var_part[pos].cur_loc == NULL)
7257 variable_was_changed (var, set);
7259 return slot;
7262 /* Set the part of variable's location in the dataflow set SET. The
7263 variable part is specified by variable's declaration in DV and
7264 offset OFFSET and the part's location by LOC. IOPT should be
7265 NO_INSERT if the variable is known to be in SET already and the
7266 variable hash table must not be resized, and INSERT otherwise. */
7268 static void
7269 set_variable_part (dataflow_set *set, rtx loc,
7270 decl_or_value dv, HOST_WIDE_INT offset,
7271 enum var_init_status initialized, rtx set_src,
7272 enum insert_option iopt)
7274 void **slot;
7276 if (iopt == NO_INSERT)
7277 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7278 else
7280 slot = shared_hash_find_slot (set->vars, dv);
7281 if (!slot)
7282 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7284 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7287 /* Remove all recorded register locations for the given variable part
7288    from dataflow set SET, except for those that are identical to loc.
7289    The variable part is specified by variable's declaration or value
7290    DV and offset OFFSET.  */
   /* Returns SLOT, which may have changed if removing a location part
      caused the (shared) variable to be unshared.  */
7292 static void **
7293 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7294 HOST_WIDE_INT offset, rtx set_src)
7296 variable var = (variable) *slot;
7297 int pos = find_variable_location_part (var, offset, NULL);
7299 if (pos >= 0)
7301 location_chain node, next;
7303 /* Remove the register locations from the dataflow set. */
7304 next = var->var_part[pos].loc_chain;
7305 for (node = next; node; node = next)
7307 next = node->next;
7308 if (node->loc != loc
7309 && (!flag_var_tracking_uninit
7310 || !set_src
7311 || MEM_P (set_src)
7312 || !rtx_equal_p (set_src, node->set_src)))
      /* With -fvar-tracking-uninit, a location whose set_src matches a
	 non-MEM SET_SRC is also preserved, so the initialization status
	 stays trackable.  */
7314 if (REG_P (node->loc))
7316 attrs anode, anext;
7317 attrs *anextp;
7319 /* Remove the variable part from the register's
7320 list, but preserve any other variable parts
7321 that might be regarded as live in that same
7322 register. */
7323 anextp = &set->regs[REGNO (node->loc)];
7324 for (anode = *anextp; anode; anode = anext)
7326 anext = anode->next;
7327 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7328 && anode->offset == offset)
7330 pool_free (attrs_pool, anode);
7331 *anextp = anext;
7333 else
7334 anextp = &anode->next;
      /* Drop the location itself; SLOT may move as a result.  */
7338 slot = delete_slot_part (set, node->loc, slot, offset);
7343 return slot;
7346 /* Remove all recorded register locations for the given variable part
7347 from dataflow set SET, except for those that are identical to loc.
7348 The variable part is specified by variable's declaration or value
7349 DV and offset OFFSET. */
7351 static void
7352 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7353 HOST_WIDE_INT offset, rtx set_src)
7355 void **slot;
7357 if (!dv_as_opaque (dv)
7358 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7359 return;
7361 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7362 if (!slot)
7363 return;
7365 clobber_slot_part (set, loc, slot, offset, set_src);
7368 /* Delete the part of variable's location from dataflow set SET. The
7369 variable part is specified by its SET->vars slot SLOT and offset
7370 OFFSET and the part's location by LOC. */
   /* Returns SLOT, which may have changed if the variable had to be
      unshared before modification.  */
7372 static void **
7373 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7374 HOST_WIDE_INT offset)
7376 variable var = (variable) *slot;
7377 int pos = find_variable_location_part (var, offset, NULL);
7379 if (pos >= 0)
7381 location_chain node, next;
7382 location_chain *nextp;
7383 bool changed;
7384 rtx cur_loc;
7386 if (shared_var_p (var, set->vars))
7388 /* If the variable contains the location part we have to
7389 make a copy of the variable. */
7390 for (node = var->var_part[pos].loc_chain; node;
7391 node = node->next)
7393 if ((REG_P (node->loc) && REG_P (loc)
7394 && REGNO (node->loc) == REGNO (loc))
7395 || rtx_equal_p (node->loc, loc))
7397 slot = unshare_variable (set, slot, var,
7398 VAR_INIT_STATUS_UNKNOWN);
7399 var = (variable)*slot;
7400 break;
   /* For one-part variables the last-emitted location is kept in the
      auxiliary structure, not in cur_loc.  */
7405 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7406 cur_loc = VAR_LOC_FROM (var);
7407 else
7408 cur_loc = var->var_part[pos].cur_loc;
7410 /* Delete the location part. */
7411 changed = false;
7412 nextp = &var->var_part[pos].loc_chain;
7413 for (node = *nextp; node; node = next)
7415 next = node->next;
7416 if ((REG_P (node->loc) && REG_P (loc)
7417 && REGNO (node->loc) == REGNO (loc))
7418 || rtx_equal_p (node->loc, loc))
7420 /* If we have deleted the location which was last emitted
7421 we have to emit new location so add the variable to set
7422 of changed variables. */
7423 if (cur_loc == node->loc)
7425 changed = true;
7426 var->var_part[pos].cur_loc = NULL;
7427 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7428 VAR_LOC_FROM (var) = NULL;
7430 pool_free (loc_chain_pool, node);
7431 *nextp = next;
7432 break;
7434 else
7435 nextp = &node->next;
   /* If the chain became empty, drop the whole part and compact the
      var_part array.  */
7438 if (var->var_part[pos].loc_chain == NULL)
7440 changed = true;
7441 var->n_var_parts--;
7442 while (pos < var->n_var_parts)
7444 var->var_part[pos] = var->var_part[pos + 1];
7445 pos++;
7448 if (changed)
7449 variable_was_changed (var, set);
7452 return slot;
7455 /* Delete the part of variable's location from dataflow set SET. The
7456 variable part is specified by variable's declaration or value DV
7457 and offset OFFSET and the part's location by LOC. */
7459 static void
7460 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7461 HOST_WIDE_INT offset)
7463 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7464 if (!slot)
7465 return;
7467 delete_slot_part (set, loc, slot, offset);
7470 DEF_VEC_P (variable);
7471 DEF_VEC_ALLOC_P (variable, heap);
7473 DEF_VEC_ALLOC_P_STACK (rtx);
7474 #define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
7476 /* Structure for passing some other parameters to function
7477 vt_expand_loc_callback. */
   /* Instances are set up by INIT_ELCD and torn down by FINI_ELCD.  */
7478 struct expand_loc_callback_data
7480 /* The variables and values active at this point. */
7481 htab_t vars;
7483 /* Stack of values and debug_exprs under expansion, and their
7484 children. */
7485 VEC (rtx, stack) *expanding;
7487 /* Stack of values and debug_exprs whose expansion hit recursion
7488 cycles. They will have VALUE_RECURSED_INTO marked when added to
7489 this list. This flag will be cleared if any of its dependencies
7490 resolves to a valid location. So, if the flag remains set at the
7491 end of the search, we know no valid location for this one can
7492 possibly exist. */
7493 VEC (rtx, stack) *pending;
7495 /* The maximum depth among the sub-expressions under expansion.
7496 Zero indicates no expansion so far. */
7497 expand_depth depth;
7500 /* Allocate the one-part auxiliary data structure for VAR, with enough
7501 room for COUNT dependencies. */
7503 static void
7504 loc_exp_dep_alloc (variable var, int count)
7506 size_t allocsize;
7508 gcc_checking_assert (var->onepart);
7510 /* We can be called with COUNT == 0 to allocate the data structure
7511 without any dependencies, e.g. for the backlinks only. However,
7512 if we are specifying a COUNT, then the dependency list must have
7513 been emptied before. It would be possible to adjust pointers or
7514 force it empty here, but this is better done at an earlier point
7515 in the algorithm, so we instead leave an assertion to catch
7516 errors. */
7517 gcc_checking_assert (!count
7518 || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
   /* Fast path: the existing allocation already has enough room.  */
7520 if (VAR_LOC_1PAUX (var)
7521 && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
7522 return;
   /* Room for the onepart_aux header plus an embedded vector of
      COUNT dependencies.  */
7524 allocsize = offsetof (struct onepart_aux, deps)
7525 + VEC_embedded_size (loc_exp_dep, count);
7527 if (VAR_LOC_1PAUX (var))
7529 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7530 VAR_LOC_1PAUX (var), allocsize);
7531 /* If the reallocation moves the onepaux structure, the
7532 back-pointer to BACKLINKS in the first list member will still
7533 point to its old location. Adjust it. */
7534 if (VAR_LOC_DEP_LST (var))
7535 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7537 else
   /* First allocation: initialize the whole auxiliary structure.  */
7539 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7540 *VAR_LOC_DEP_LSTP (var) = NULL;
7541 VAR_LOC_FROM (var) = NULL;
7542 VAR_LOC_DEPTH (var).complexity = 0;
7543 VAR_LOC_DEPTH (var).entryvals = 0;
7545 VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
7548 /* Remove all entries from the vector of active dependencies of VAR,
7549 removing them from the back-links lists too. */
7551 static void
7552 loc_exp_dep_clear (variable var)
7554 while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
7556 loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7557 if (led->next)
7558 led->next->pprev = led->pprev;
7559 if (led->pprev)
7560 *led->pprev = led->next;
7561 VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7565 /* Insert an active dependency from VAR on X to the vector of
7566 dependencies, and add the corresponding back-link to X's list of
7567 back-links in VARS. */
7569 static void
7570 loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7572 decl_or_value dv;
7573 variable xvar;
7574 loc_exp_dep *led;
7576 dv = dv_from_rtx (x);
7578 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7579 an additional look up? */
7580 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7582 if (!xvar)
   /* X is not in VARS: record the back-link on its dropped-values
      entry instead.  */
7584 xvar = variable_from_dropped (dv, NO_INSERT);
7585 gcc_checking_assert (xvar);
7588 /* No point in adding the same backlink more than once. This may
7589 arise if say the same value appears in two complex expressions in
7590 the same loc_list, or even more than once in a single
7591 expression. */
7592 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7593 return;
   /* Push an empty slot, then fill it in place via VEC_last; the
      vector was pre-sized by loc_exp_dep_alloc, so no reallocation
      can invalidate LED here.  */
7595 VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
7596 led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7597 led->dv = var->dv;
7598 led->value = x;
   /* Ensure XVAR has the auxiliary structure, then splice LED in at
      the head of XVAR's back-links list.  */
7600 loc_exp_dep_alloc (xvar, 0);
7601 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7602 led->next = *led->pprev;
7603 if (led->next)
7604 led->next->pprev = &led->next;
7605 *led->pprev = led;
7608 /* Create active dependencies of VAR on COUNT values starting at
7609 VALUE, and corresponding back-links to the entries in VARS. Return
7610 true if we found any pending-recursion results. */
7612 static bool
7613 loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7615 bool pending_recursion = false;
7617 gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7619 /* Set up all dependencies from last_child (as set up at the end of
7620 the loop above) to the end. */
7621 loc_exp_dep_alloc (var, count);
7623 while (count--)
7625 rtx x = *value++;
7627 if (!pending_recursion)
7628 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7630 loc_exp_insert_dep (var, x, vars);
7633 return pending_recursion;
7636 /* Notify the back-links of IVAR that are pending recursion that we
7637 have found a non-NIL value for it, so they are cleared for another
7638 attempt to compute a current location. */
7640 static void
7641 notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7643 loc_exp_dep *led, *next;
7645 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7647 decl_or_value dv = led->dv;
7648 variable var;
7650 next = led->next;
7652 if (dv_is_value_p (dv))
7654 rtx value = dv_as_value (dv);
7656 /* If we have already resolved it, leave it alone. */
7657 if (!VALUE_RECURSED_INTO (value))
7658 continue;
7660 /* Check that VALUE_RECURSED_INTO, true from the test above,
7661 implies NO_LOC_P. */
7662 gcc_checking_assert (NO_LOC_P (value));
7664 /* We won't notify variables that are being expanded,
7665 because their dependency list is cleared before
7666 recursing. */
7667 NO_LOC_P (value) = false;
7668 VALUE_RECURSED_INTO (value) = false;
7670 gcc_checking_assert (dv_changed_p (dv));
7672 else if (!dv_changed_p (dv))
7673 continue;
7675 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7677 if (!var)
7678 var = variable_from_dropped (dv, NO_INSERT);
   /* Propagate the notification transitively to VAR's own
      dependents.  */
7680 if (var)
7681 notify_dependents_of_resolved_value (var, vars);
   /* Unlink LED from IVAR's back-links list; it has been consumed.  */
7683 if (next)
7684 next->pprev = led->pprev;
7685 if (led->pprev)
7686 *led->pprev = next;
7687 led->next = NULL;
7688 led->pprev = NULL;
7692 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7693 int max_depth, void *data);
7695 /* Return the combined depth, when one sub-expression evaluated to
7696 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7698 static inline expand_depth
7699 update_depth (expand_depth saved_depth, expand_depth best_depth)
7701 /* If we didn't find anything, stick with what we had. */
7702 if (!best_depth.complexity)
7703 return saved_depth;
7705 /* If we found hadn't found anything, use the depth of the current
7706 expression. Do NOT add one extra level, we want to compute the
7707 maximum depth among sub-expressions. We'll increment it later,
7708 if appropriate. */
7709 if (!saved_depth.complexity)
7710 return best_depth;
7712 /* Combine the entryval count so that regardless of which one we
7713 return, the entryval count is accurate. */
7714 best_depth.entryvals = saved_depth.entryvals
7715 = best_depth.entryvals + saved_depth.entryvals;
7717 if (saved_depth.complexity < best_depth.complexity)
7718 return best_depth;
7719 else
7720 return saved_depth;
7723 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
7724 DATA for cselib expand callback. If PENDRECP is given, indicate in
7725 it whether any sub-expression couldn't be fully evaluated because
7726 it is pending recursion resolution. */
7728 static inline rtx
7729 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
7731 struct expand_loc_callback_data *elcd
7732 = (struct expand_loc_callback_data *) data;
7733 location_chain loc, next;
7734 rtx result = NULL;
7735 int first_child, result_first_child, last_child;
7736 bool pending_recursion;
7737 rtx loc_from = NULL;
7738 struct elt_loc_list *cloc = NULL;
7739 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
7740 int wanted_entryvals, found_entryvals = 0;
7742 /* Clear all backlinks pointing at this, so that we're not notified
7743 while we're active. */
7744 loc_exp_dep_clear (var);
7746 retry:
7747 if (var->onepart == ONEPART_VALUE)
7749 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
7751 gcc_checking_assert (cselib_preserved_value_p (val));
   /* For VALUEs, also walk the cselib location list after VAR's own
      location chain.  */
7753 cloc = val->locs;
7756 first_child = result_first_child = last_child
7757 = VEC_length (rtx, elcd->expanding);
   /* On retry this becomes the minimum entryval count seen in the
      first pass, allowing one such expansion to be accepted.  */
7759 wanted_entryvals = found_entryvals;
7761 /* Attempt to expand each available location in turn. */
7762 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
7763 loc || cloc; loc = next)
7765 result_first_child = last_child;
7767 if (!loc)
7769 loc_from = cloc->loc;
7770 next = loc;
7771 cloc = cloc->next;
7772 if (unsuitable_loc (loc_from))
7773 continue;
7775 else
7777 loc_from = loc->loc;
7778 next = loc->next;
7781 gcc_checking_assert (!unsuitable_loc (loc_from));
7783 elcd->depth.complexity = elcd->depth.entryvals = 0;
7784 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
7785 vt_expand_loc_callback, data);
7786 last_child = VEC_length (rtx, elcd->expanding);
7788 if (result)
7790 depth = elcd->depth;
7792 gcc_checking_assert (depth.complexity
7793 || result_first_child == last_child);
7795 if (last_child - result_first_child != 1)
7797 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
7798 depth.entryvals++;
7799 depth.complexity++;
7802 if (depth.complexity <= EXPR_USE_DEPTH)
7804 if (depth.entryvals <= wanted_entryvals)
7805 break;
7806 else if (!found_entryvals || depth.entryvals < found_entryvals)
7807 found_entryvals = depth.entryvals;
   /* Expansion rejected (too deep or too many ENTRY_VALUEs).  */
7810 result = NULL;
7813 /* Set it up in case we leave the loop. */
7814 depth.complexity = depth.entryvals = 0;
7815 loc_from = NULL;
7816 result_first_child = first_child;
7819 if (!loc_from && wanted_entryvals < found_entryvals)
7821 /* We found entries with ENTRY_VALUEs and skipped them. Since
7822 we could not find any expansions without ENTRY_VALUEs, but we
7823 found at least one with them, go back and get an entry with
7824 the minimum number ENTRY_VALUE count that we found. We could
7825 avoid looping, but since each sub-loc is already resolved,
7826 the re-expansion should be trivial. ??? Should we record all
7827 attempted locs as dependencies, so that we retry the
7828 expansion should any of them change, in the hope it can give
7829 us a new entry without an ENTRY_VALUE? */
7830 VEC_truncate (rtx, elcd->expanding, first_child);
7831 goto retry;
7834 /* Register all encountered dependencies as active. */
7835 pending_recursion = loc_exp_dep_set
7836 (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
7837 last_child - result_first_child, elcd->vars);
7839 VEC_truncate (rtx, elcd->expanding, first_child);
7841 /* Record where the expansion came from. */
7842 gcc_checking_assert (!result || !pending_recursion);
7843 VAR_LOC_FROM (var) = loc_from;
7844 VAR_LOC_DEPTH (var) = depth;
7846 gcc_checking_assert (!depth.complexity == !result);
7848 elcd->depth = update_depth (saved_depth, depth);
7850 /* Indicate whether any of the dependencies are pending recursion
7851 resolution. */
7852 if (pendrecp)
7853 *pendrecp = pending_recursion;
7855 if (!pendrecp || !pending_recursion)
7856 var->var_part[0].cur_loc = result;
7858 return result;
7861 /* Callback for cselib_expand_value, that looks for expressions
7862 holding the value in the var-tracking hash tables. Return X for
7863 standard processing, anything else is to be used as-is. */
7865 static rtx
7866 vt_expand_loc_callback (rtx x, bitmap regs,
7867 int max_depth ATTRIBUTE_UNUSED,
7868 void *data)
7870 struct expand_loc_callback_data *elcd
7871 = (struct expand_loc_callback_data *) data;
7872 decl_or_value dv;
7873 variable var;
7874 rtx result, subreg;
7875 bool pending_recursion = false;
7876 bool from_empty = false;
7878 switch (GET_CODE (x))
7880 case SUBREG:
   /* Expand the inner expression first, then rebuild the SUBREG
      around the expansion.  */
7881 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
7882 EXPR_DEPTH,
7883 vt_expand_loc_callback, data);
7885 if (!subreg)
7886 return NULL;
7888 result = simplify_gen_subreg (GET_MODE (x), subreg,
7889 GET_MODE (SUBREG_REG (x)),
7890 SUBREG_BYTE (x));
7892 /* Invalid SUBREGs are ok in debug info. ??? We could try
7893 alternate expansions for the VALUE as well. */
7894 if (!result)
7895 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
7897 return result;
7899 case DEBUG_EXPR:
7900 case VALUE:
7901 dv = dv_from_rtx (x);
7902 break;
7904 default:
7905 return x;
7908 VEC_safe_push (rtx, stack, elcd->expanding, x);
7910 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
7911 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
7913 if (NO_LOC_P (x))
7915 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
7916 return NULL;
7919 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
7921 if (!var)
7923 from_empty = true;
7924 var = variable_from_dropped (dv, INSERT);
7927 gcc_checking_assert (var);
   /* Unchanged variables have a valid cached cur_loc; reuse it.  */
7929 if (!dv_changed_p (dv))
7931 gcc_checking_assert (!NO_LOC_P (x));
7932 gcc_checking_assert (var->var_part[0].cur_loc);
7933 gcc_checking_assert (VAR_LOC_1PAUX (var));
7934 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
7936 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
7938 return var->var_part[0].cur_loc;
   /* Mark X as under expansion to detect recursion cycles.  */
7941 VALUE_RECURSED_INTO (x) = true;
7942 /* This is tentative, but it makes some tests simpler. */
7943 NO_LOC_P (x) = true;
7945 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
7947 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
7949 if (pending_recursion)
7951 gcc_checking_assert (!result);
7952 VEC_safe_push (rtx, stack, elcd->pending, x);
7954 else
7956 NO_LOC_P (x) = !result;
7957 VALUE_RECURSED_INTO (x) = false;
7958 set_dv_changed (dv, false);
7960 if (result)
7961 notify_dependents_of_resolved_value (var, elcd->vars);
7964 return result;
7967 /* While expanding variables, we may encounter recursion cycles
7968 because of mutual (possibly indirect) dependencies between two
7969 particular variables (or values), say A and B. If we're trying to
7970 expand A when we get to B, which in turn attempts to expand A, if
7971 we can't find any other expansion for B, we'll add B to this
7972 pending-recursion stack, and tentatively return NULL for its
7973 location. This tentative value will be used for any other
7974 occurrences of B, unless A gets some other location, in which case
7975 it will notify B that it is worth another try at computing a
7976 location for it, and it will use the location computed for A then.
7977 At the end of the expansion, the tentative NULL locations become
7978 final for all members of PENDING that didn't get a notification.
7979 This function performs this finalization of NULL locations. */
7981 static void
7982 resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
7984 while (!VEC_empty (rtx, pending))
7986 rtx x = VEC_pop (rtx, pending);
7987 decl_or_value dv;
7989 if (!VALUE_RECURSED_INTO (x))
7990 continue;
7992 gcc_checking_assert (NO_LOC_P (x));
7993 VALUE_RECURSED_INTO (x) = false;
7994 dv = dv_from_rtx (x);
7995 gcc_checking_assert (dv_changed_p (dv));
7996 set_dv_changed (dv, false);
8000 /* Initialize expand_loc_callback_data D with variable hash table V.
8001 It must be a macro because of alloca (VEC stack). */
8002 #define INIT_ELCD(d, v) \
8003 do \
8005 (d).vars = (v); \
8006 (d).expanding = VEC_alloc (rtx, stack, 4); \
8007 (d).pending = VEC_alloc (rtx, stack, 4); \
8008 (d).depth.complexity = (d).depth.entryvals = 0; \
8010 while (0)
8011 /* Finalize expand_loc_callback_data D, resolved to location L. */
/* Frees both stacks and, when L is a MEM, delegitimizes its address.  */
8012 #define FINI_ELCD(d, l) \
8013 do \
8015 resolve_expansions_pending_recursion ((d).pending); \
8016 VEC_free (rtx, stack, (d).pending); \
8017 VEC_free (rtx, stack, (d).expanding); \
8019 if ((l) && MEM_P (l)) \
8020 (l) = targetm.delegitimize_address (l); \
8022 while (0)
8024 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8025 equivalences in VARS, updating their CUR_LOCs in the process. */
8027 static rtx
8028 vt_expand_loc (rtx loc, htab_t vars)
8030 struct expand_loc_callback_data data;
8031 rtx result;
8033 if (!MAY_HAVE_DEBUG_INSNS)
8034 return loc;
8036 INIT_ELCD (data, vars);
8038 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8039 vt_expand_loc_callback, &data);
8041 FINI_ELCD (data, result);
8043 return result;
8046 /* Expand the one-part VARiable to a location, using the equivalences
8047 in VARS, updating their CUR_LOCs in the process. */
8049 static rtx
8050 vt_expand_1pvar (variable var, htab_t vars)
8052 struct expand_loc_callback_data data;
8053 rtx loc;
8055 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8057 if (!dv_changed_p (var->dv))
8058 return var->var_part[0].cur_loc;
8060 INIT_ELCD (data, vars);
8062 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8064 gcc_checking_assert (VEC_empty (rtx, data.expanding));
8066 FINI_ELCD (data, loc);
8068 return loc;
8071 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8072 additional parameters: WHERE specifies whether the note shall be emitted
8073 before or after instruction INSN. */
   /* htab_traverse callback over changed_variables; always returns 1 to
      continue the traversal, and removes *VARP from the table.  */
8075 static int
8076 emit_note_insn_var_location (void **varp, void *data)
8078 variable var = (variable) *varp;
8079 rtx insn = ((emit_note_data *)data)->insn;
8080 enum emit_note_where where = ((emit_note_data *)data)->where;
8081 htab_t vars = ((emit_note_data *)data)->vars;
8082 rtx note, note_vl;
8083 int i, j, n_var_parts;
8084 bool complete;
8085 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8086 HOST_WIDE_INT last_limit;
8087 tree type_size_unit;
8088 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8089 rtx loc[MAX_VAR_PARTS];
8090 tree decl;
8091 location_chain lc;
8093 gcc_checking_assert (var->onepart == NOT_ONEPART
8094 || var->onepart == ONEPART_VDECL);
8096 decl = dv_as_decl (var->dv);
8098 complete = true;
8099 last_limit = 0;
8100 n_var_parts = 0;
   /* For multi-part variables, refresh any stale cur_loc from the head
      of the part's location chain before building the note.  */
8101 if (!var->onepart)
8102 for (i = 0; i < var->n_var_parts; i++)
8103 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8104 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8105 for (i = 0; i < var->n_var_parts; i++)
8107 enum machine_mode mode, wider_mode;
8108 rtx loc2;
8109 HOST_WIDE_INT offset;
8111 if (i == 0 && var->onepart)
8113 gcc_checking_assert (var->n_var_parts == 1);
8114 offset = 0;
8115 initialized = VAR_INIT_STATUS_INITIALIZED;
8116 loc2 = vt_expand_1pvar (var, vars);
8118 else
   /* A gap between parts means we cannot describe the whole
      variable; a negative gap means an overlapping stale part.  */
8120 if (last_limit < VAR_PART_OFFSET (var, i))
8122 complete = false;
8123 break;
8125 else if (last_limit > VAR_PART_OFFSET (var, i))
8126 continue;
8127 offset = VAR_PART_OFFSET (var, i);
8128 if (!var->var_part[i].cur_loc)
8130 complete = false;
8131 continue;
8133 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8134 if (var->var_part[i].cur_loc == lc->loc)
8136 initialized = lc->init;
8137 break;
8139 gcc_assert (lc);
8140 loc2 = var->var_part[i].cur_loc;
8143 offsets[n_var_parts] = offset;
8144 if (!loc2)
8146 complete = false;
8147 continue;
8149 loc[n_var_parts] = loc2;
8150 mode = GET_MODE (var->var_part[i].cur_loc);
8151 if (mode == VOIDmode && var->onepart)
8152 mode = DECL_MODE (decl);
8153 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8155 /* Attempt to merge adjacent registers or memory. */
8156 wider_mode = GET_MODE_WIDER_MODE (mode);
8157 for (j = i + 1; j < var->n_var_parts; j++)
8158 if (last_limit <= VAR_PART_OFFSET (var, j))
8159 break;
8160 if (j < var->n_var_parts
8161 && wider_mode != VOIDmode
8162 && var->var_part[j].cur_loc
8163 && mode == GET_MODE (var->var_part[j].cur_loc)
8164 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8165 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8166 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8167 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8169 rtx new_loc = NULL;
   /* Two consecutive hard registers that together span the wider
      mode can be described as one wider register.  */
8171 if (REG_P (loc[n_var_parts])
8172 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8173 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8174 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8175 == REGNO (loc2))
8177 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8178 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8179 mode, 0);
8180 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8181 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8182 if (new_loc)
8184 if (!REG_P (new_loc)
8185 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8186 new_loc = NULL;
8187 else
8188 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
   /* Likewise, two adjacent memory slots can be described as one
      wider memory reference.  */
8191 else if (MEM_P (loc[n_var_parts])
8192 && GET_CODE (XEXP (loc2, 0)) == PLUS
8193 && REG_P (XEXP (XEXP (loc2, 0), 0))
8194 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8196 if ((REG_P (XEXP (loc[n_var_parts], 0))
8197 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8198 XEXP (XEXP (loc2, 0), 0))
8199 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8200 == GET_MODE_SIZE (mode))
8201 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8202 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8203 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8204 XEXP (XEXP (loc2, 0), 0))
8205 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8206 + GET_MODE_SIZE (mode)
8207 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8208 new_loc = adjust_address_nv (loc[n_var_parts],
8209 wider_mode, 0);
8212 if (new_loc)
8214 loc[n_var_parts] = new_loc;
8215 mode = wider_mode;
8216 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8217 i = j;
8220 ++n_var_parts;
8222 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8223 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8224 complete = false;
8226 if (! flag_var_tracking_uninit)
8227 initialized = VAR_INIT_STATUS_INITIALIZED;
8229 note_vl = NULL_RTX;
8230 if (!complete)
8231 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8232 (int) initialized);
8233 else if (n_var_parts == 1)
8235 rtx expr_list;
8237 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8238 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8239 else
8240 expr_list = loc[0];
8242 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8243 (int) initialized);
8245 else if (n_var_parts)
8247 rtx parallel;
8249 for (i = 0; i < n_var_parts; i++)
8250 loc[i]
8251 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8253 parallel = gen_rtx_PARALLEL (VOIDmode,
8254 gen_rtvec_v (n_var_parts, loc));
8255 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8256 parallel, (int) initialized);
8259 if (where != EMIT_NOTE_BEFORE_INSN)
8261 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8262 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8263 NOTE_DURING_CALL_P (note) = true;
8265 else
8267 /* Make sure that the call related notes come first. */
8268 while (NEXT_INSN (insn)
8269 && NOTE_P (insn)
8270 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8271 && NOTE_DURING_CALL_P (insn))
8272 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8273 insn = NEXT_INSN (insn);
8274 if (NOTE_P (insn)
8275 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8276 && NOTE_DURING_CALL_P (insn))
8277 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8278 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8279 else
8280 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8282 NOTE_VAR_LOCATION (note) = note_vl;
   /* The variable has been processed: drop it from changed_variables.  */
8284 set_dv_changed (var->dv, false);
8285 gcc_assert (var->in_changed_variables);
8286 var->in_changed_variables = false;
8287 htab_clear_slot (changed_variables, varp);
8289 /* Continue traversing the hash table. */
8290 return 1;
8293 /* While traversing changed_variables, push onto DATA (a stack of RTX
8294 values) entries that aren't user variables. */
8296 static int
8297 values_to_stack (void **slot, void *data)
8299 VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
8300 variable var = (variable) *slot;
8302 if (var->onepart == ONEPART_VALUE)
8303 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
8304 else if (var->onepart == ONEPART_DEXPR)
8305 VEC_safe_push (rtx, stack, *changed_values_stack,
8306 DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8308 return 1;
8311 /* Remove from changed_variables the entry whose DV corresponds to
8312 value or debug_expr VAL. */
8313 static void
8314 remove_value_from_changed_variables (rtx val)
8316 decl_or_value dv = dv_from_rtx (val);
8317 void **slot;
8318 variable var;
8320 slot = htab_find_slot_with_hash (changed_variables,
8321 dv, dv_htab_hash (dv), NO_INSERT);
8322 var = (variable) *slot;
8323 var->in_changed_variables = false;
8324 htab_clear_slot (changed_variables, slot);
8327 /* If VAL (a value or debug_expr) has backlinks to variables actively
8328 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8329 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8330 have dependencies of their own to notify. */
8332 static void
8333 notify_dependents_of_changed_value (rtx val, htab_t htab,
8334 VEC (rtx, stack) **changed_values_stack)
8336 void **slot;
8337 variable var;
8338 loc_exp_dep *led;
8339 decl_or_value dv = dv_from_rtx (val);
   /* VAL must be found in changed_variables, HTAB, or dropped_values,
      tried in that order.  */
8341 slot = htab_find_slot_with_hash (changed_variables,
8342 dv, dv_htab_hash (dv), NO_INSERT);
8343 if (!slot)
8344 slot = htab_find_slot_with_hash (htab,
8345 dv, dv_htab_hash (dv), NO_INSERT);
8346 if (!slot)
8347 slot = htab_find_slot_with_hash (dropped_values,
8348 dv, dv_htab_hash (dv), NO_INSERT);
8349 var = (variable) *slot;
8351 while ((led = VAR_LOC_DEP_LST (var)))
8353 decl_or_value ldv = led->dv;
8354 void **islot;
8355 variable ivar;
8357 /* Deactivate and remove the backlink, as it was "used up". It
8358 makes no sense to attempt to notify the same entity again:
8359 either it will be recomputed and re-register an active
8360 dependency, or it will still have the changed mark. */
8361 if (led->next)
8362 led->next->pprev = led->pprev;
8363 if (led->pprev)
8364 *led->pprev = led->next;
8365 led->next = NULL;
8366 led->pprev = NULL;
   /* Already marked changed: nothing more to propagate for it.  */
8368 if (dv_changed_p (ldv))
8369 continue;
8371 switch (dv_onepart_p (ldv))
8373 case ONEPART_VALUE:
8374 case ONEPART_DEXPR:
8375 set_dv_changed (ldv, true);
8376 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
8377 break;
8379 default:
   /* A user variable: queue it directly on changed_variables.  */
8380 islot = htab_find_slot_with_hash (htab, ldv, dv_htab_hash (ldv),
8381 NO_INSERT);
8382 ivar = (variable) *islot;
8383 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8384 variable_was_changed (ivar, NULL);
8385 break;
8390 /* Take out of changed_variables any entries that don't refer to use
8391 variables. Back-propagate change notifications from values and
8392 debug_exprs to their active dependencies in HTAB or in
8393 CHANGED_VARIABLES. */
8395 static void
8396 process_changed_values (htab_t htab)
8398 int i, n;
8399 rtx val;
8400 VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
8402 /* Move values from changed_variables to changed_values_stack. */
8403 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
8405 /* Back-propagate change notifications in values while popping
8406 them from the stack. */
8407 for (n = i = VEC_length (rtx, changed_values_stack);
8408 i > 0; i = VEC_length (rtx, changed_values_stack))
8410 val = VEC_pop (rtx, changed_values_stack);
8411 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8413 /* This condition will hold when visiting each of the entries
8414 originally in changed_variables. We can't remove them
8415 earlier because this could drop the backlinks before we got a
8416 chance to use them. */
8417 if (i == n)
8419 remove_value_from_changed_variables (val);
8420 n--;
8424 VEC_free (rtx, stack, changed_values_stack);
8427 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8428 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8429 the notes shall be emitted before of after instruction INSN. */
8431 static void
8432 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8433 shared_hash vars)
8435 emit_note_data data;
8436 htab_t htab = shared_hash_htab (vars);
8438 if (!htab_elements (changed_variables))
8439 return;
8441 if (MAY_HAVE_DEBUG_INSNS)
8442 process_changed_values (htab);
8444 data.insn = insn;
8445 data.where = where;
8446 data.vars = htab;
8448 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8451 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8452 same variable in hash table DATA or is not there at all. */
8454 static int
8455 emit_notes_for_differences_1 (void **slot, void *data)
8457 htab_t new_vars = (htab_t) data;
8458 variable old_var, new_var;
8460 old_var = (variable) *slot;
8461 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8462 dv_htab_hash (old_var->dv));
8464 if (!new_var)
8466 /* Variable has disappeared. */
8467 variable empty_var = NULL;
8469 if (old_var->onepart == ONEPART_VALUE
8470 || old_var->onepart == ONEPART_DEXPR)
8472 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8473 if (empty_var)
8475 gcc_checking_assert (!empty_var->in_changed_variables);
8476 if (!VAR_LOC_1PAUX (old_var))
8478 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8479 VAR_LOC_1PAUX (empty_var) = NULL;
8481 else
8482 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8486 if (!empty_var)
8488 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8489 empty_var->dv = old_var->dv;
8490 empty_var->refcount = 0;
8491 empty_var->n_var_parts = 0;
8492 empty_var->onepart = old_var->onepart;
8493 empty_var->in_changed_variables = false;
8496 if (empty_var->onepart)
8498 /* Propagate the auxiliary data to (ultimately)
8499 changed_variables. */
8500 empty_var->var_part[0].loc_chain = NULL;
8501 empty_var->var_part[0].cur_loc = NULL;
8502 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8503 VAR_LOC_1PAUX (old_var) = NULL;
8505 variable_was_changed (empty_var, NULL);
8506 /* Continue traversing the hash table. */
8507 return 1;
8509 /* Update cur_loc and one-part auxiliary data, before new_var goes
8510 through variable_was_changed. */
8511 if (old_var != new_var && new_var->onepart)
8513 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8514 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8515 VAR_LOC_1PAUX (old_var) = NULL;
8516 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8518 if (variable_different_p (old_var, new_var))
8519 variable_was_changed (new_var, NULL);
8521 /* Continue traversing the hash table. */
8522 return 1;
8525 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8526 table DATA. */
8528 static int
8529 emit_notes_for_differences_2 (void **slot, void *data)
8531 htab_t old_vars = (htab_t) data;
8532 variable old_var, new_var;
8534 new_var = (variable) *slot;
8535 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8536 dv_htab_hash (new_var->dv));
8537 if (!old_var)
8539 int i;
8540 for (i = 0; i < new_var->n_var_parts; i++)
8541 new_var->var_part[i].cur_loc = NULL;
8542 variable_was_changed (new_var, NULL);
8545 /* Continue traversing the hash table. */
8546 return 1;
8549 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8550 NEW_SET. */
8552 static void
8553 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8554 dataflow_set *new_set)
8556 htab_traverse (shared_hash_htab (old_set->vars),
8557 emit_notes_for_differences_1,
8558 shared_hash_htab (new_set->vars));
8559 htab_traverse (shared_hash_htab (new_set->vars),
8560 emit_notes_for_differences_2,
8561 shared_hash_htab (old_set->vars));
8562 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8565 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8567 static rtx
8568 next_non_note_insn_var_location (rtx insn)
8570 while (insn)
8572 insn = NEXT_INSN (insn);
8573 if (insn == 0
8574 || !NOTE_P (insn)
8575 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8576 break;
8579 return insn;
8582 /* Emit the notes for changes of location parts in the basic block BB. */
8584 static void
8585 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8587 unsigned int i;
8588 micro_operation *mo;
8590 dataflow_set_clear (set);
8591 dataflow_set_copy (set, &VTI (bb)->in);
8593 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8595 rtx insn = mo->insn;
8596 rtx next_insn = next_non_note_insn_var_location (insn);
8598 switch (mo->type)
8600 case MO_CALL:
8601 dataflow_set_clear_at_call (set);
8602 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8604 rtx arguments = mo->u.loc, *p = &arguments, note;
8605 while (*p)
8607 XEXP (XEXP (*p, 0), 1)
8608 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8609 shared_hash_htab (set->vars));
8610 /* If expansion is successful, keep it in the list. */
8611 if (XEXP (XEXP (*p, 0), 1))
8612 p = &XEXP (*p, 1);
8613 /* Otherwise, if the following item is data_value for it,
8614 drop it too too. */
8615 else if (XEXP (*p, 1)
8616 && REG_P (XEXP (XEXP (*p, 0), 0))
8617 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8618 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8620 && REGNO (XEXP (XEXP (*p, 0), 0))
8621 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8622 0), 0)))
8623 *p = XEXP (XEXP (*p, 1), 1);
8624 /* Just drop this item. */
8625 else
8626 *p = XEXP (*p, 1);
8628 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8629 NOTE_VAR_LOCATION (note) = arguments;
8631 break;
8633 case MO_USE:
8635 rtx loc = mo->u.loc;
8637 if (REG_P (loc))
8638 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8639 else
8640 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8642 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8644 break;
8646 case MO_VAL_LOC:
8648 rtx loc = mo->u.loc;
8649 rtx val, vloc;
8650 tree var;
8652 if (GET_CODE (loc) == CONCAT)
8654 val = XEXP (loc, 0);
8655 vloc = XEXP (loc, 1);
8657 else
8659 val = NULL_RTX;
8660 vloc = loc;
8663 var = PAT_VAR_LOCATION_DECL (vloc);
8665 clobber_variable_part (set, NULL_RTX,
8666 dv_from_decl (var), 0, NULL_RTX);
8667 if (val)
8669 if (VAL_NEEDS_RESOLUTION (loc))
8670 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8671 set_variable_part (set, val, dv_from_decl (var), 0,
8672 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8673 INSERT);
8675 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8676 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8677 dv_from_decl (var), 0,
8678 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8679 INSERT);
8681 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8683 break;
8685 case MO_VAL_USE:
8687 rtx loc = mo->u.loc;
8688 rtx val, vloc, uloc;
8690 vloc = uloc = XEXP (loc, 1);
8691 val = XEXP (loc, 0);
8693 if (GET_CODE (val) == CONCAT)
8695 uloc = XEXP (val, 1);
8696 val = XEXP (val, 0);
8699 if (VAL_NEEDS_RESOLUTION (loc))
8700 val_resolve (set, val, vloc, insn);
8701 else
8702 val_store (set, val, uloc, insn, false);
8704 if (VAL_HOLDS_TRACK_EXPR (loc))
8706 if (GET_CODE (uloc) == REG)
8707 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8708 NULL);
8709 else if (GET_CODE (uloc) == MEM)
8710 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8711 NULL);
8714 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8716 break;
8718 case MO_VAL_SET:
8720 rtx loc = mo->u.loc;
8721 rtx val, vloc, uloc;
8723 vloc = loc;
8724 uloc = XEXP (vloc, 1);
8725 val = XEXP (vloc, 0);
8726 vloc = uloc;
8728 if (GET_CODE (val) == CONCAT)
8730 vloc = XEXP (val, 1);
8731 val = XEXP (val, 0);
8734 if (GET_CODE (vloc) == SET)
8736 rtx vsrc = SET_SRC (vloc);
8738 gcc_assert (val != vsrc);
8739 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8741 vloc = SET_DEST (vloc);
8743 if (VAL_NEEDS_RESOLUTION (loc))
8744 val_resolve (set, val, vsrc, insn);
8746 else if (VAL_NEEDS_RESOLUTION (loc))
8748 gcc_assert (GET_CODE (uloc) == SET
8749 && GET_CODE (SET_SRC (uloc)) == REG);
8750 val_resolve (set, val, SET_SRC (uloc), insn);
8753 if (VAL_HOLDS_TRACK_EXPR (loc))
8755 if (VAL_EXPR_IS_CLOBBERED (loc))
8757 if (REG_P (uloc))
8758 var_reg_delete (set, uloc, true);
8759 else if (MEM_P (uloc))
8760 var_mem_delete (set, uloc, true);
8762 else
8764 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8765 rtx set_src = NULL;
8766 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8768 if (GET_CODE (uloc) == SET)
8770 set_src = SET_SRC (uloc);
8771 uloc = SET_DEST (uloc);
8774 if (copied_p)
8776 status = find_src_status (set, set_src);
8778 set_src = find_src_set_src (set, set_src);
8781 if (REG_P (uloc))
8782 var_reg_delete_and_set (set, uloc, !copied_p,
8783 status, set_src);
8784 else if (MEM_P (uloc))
8785 var_mem_delete_and_set (set, uloc, !copied_p,
8786 status, set_src);
8789 else if (REG_P (uloc))
8790 var_regno_delete (set, REGNO (uloc));
8792 val_store (set, val, vloc, insn, true);
8794 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8795 set->vars);
8797 break;
8799 case MO_SET:
8801 rtx loc = mo->u.loc;
8802 rtx set_src = NULL;
8804 if (GET_CODE (loc) == SET)
8806 set_src = SET_SRC (loc);
8807 loc = SET_DEST (loc);
8810 if (REG_P (loc))
8811 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8812 set_src);
8813 else
8814 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8815 set_src);
8817 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8818 set->vars);
8820 break;
8822 case MO_COPY:
8824 rtx loc = mo->u.loc;
8825 enum var_init_status src_status;
8826 rtx set_src = NULL;
8828 if (GET_CODE (loc) == SET)
8830 set_src = SET_SRC (loc);
8831 loc = SET_DEST (loc);
8834 src_status = find_src_status (set, set_src);
8835 set_src = find_src_set_src (set, set_src);
8837 if (REG_P (loc))
8838 var_reg_delete_and_set (set, loc, false, src_status, set_src);
8839 else
8840 var_mem_delete_and_set (set, loc, false, src_status, set_src);
8842 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8843 set->vars);
8845 break;
8847 case MO_USE_NO_VAR:
8849 rtx loc = mo->u.loc;
8851 if (REG_P (loc))
8852 var_reg_delete (set, loc, false);
8853 else
8854 var_mem_delete (set, loc, false);
8856 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8858 break;
8860 case MO_CLOBBER:
8862 rtx loc = mo->u.loc;
8864 if (REG_P (loc))
8865 var_reg_delete (set, loc, true);
8866 else
8867 var_mem_delete (set, loc, true);
8869 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8870 set->vars);
8872 break;
8874 case MO_ADJUST:
8875 set->stack_adjust += mo->u.adjust;
8876 break;
8881 /* Emit notes for the whole function. */
8883 static void
8884 vt_emit_notes (void)
8886 basic_block bb;
8887 dataflow_set cur;
8889 gcc_assert (!htab_elements (changed_variables));
8891 /* Free memory occupied by the out hash tables, as they aren't used
8892 anymore. */
8893 FOR_EACH_BB (bb)
8894 dataflow_set_clear (&VTI (bb)->out);
8896 /* Enable emitting notes by functions (mainly by set_variable_part and
8897 delete_variable_part). */
8898 emit_notes = true;
8900 if (MAY_HAVE_DEBUG_INSNS)
8901 dropped_values = htab_create (cselib_get_next_uid () * 2,
8902 variable_htab_hash, variable_htab_eq,
8903 variable_htab_free);
8905 dataflow_set_init (&cur);
8907 FOR_EACH_BB (bb)
8909 /* Emit the notes for changes of variable locations between two
8910 subsequent basic blocks. */
8911 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
8913 /* Emit the notes for the changes in the basic block itself. */
8914 emit_notes_in_bb (bb, &cur);
8916 /* Free memory occupied by the in hash table, we won't need it
8917 again. */
8918 dataflow_set_clear (&VTI (bb)->in);
8920 #ifdef ENABLE_CHECKING
8921 htab_traverse (shared_hash_htab (cur.vars),
8922 emit_notes_for_differences_1,
8923 shared_hash_htab (empty_shared_hash));
8924 #endif
8925 dataflow_set_destroy (&cur);
8927 if (MAY_HAVE_DEBUG_INSNS)
8928 htab_delete (dropped_values);
8930 emit_notes = false;
8933 /* If there is a declaration and offset associated with register/memory RTL
8934 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8936 static bool
8937 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8939 if (REG_P (rtl))
8941 if (REG_ATTRS (rtl))
8943 *declp = REG_EXPR (rtl);
8944 *offsetp = REG_OFFSET (rtl);
8945 return true;
8948 else if (MEM_P (rtl))
8950 if (MEM_ATTRS (rtl))
8952 *declp = MEM_EXPR (rtl);
8953 *offsetp = INT_MEM_OFFSET (rtl);
8954 return true;
8957 return false;
8960 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
8961 of VAL. */
8963 static void
8964 record_entry_value (cselib_val *val, rtx rtl)
8966 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
8968 ENTRY_VALUE_EXP (ev) = rtl;
8970 cselib_add_permanent_equiv (val, ev, get_insns ());
8973 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
8975 static void
8976 vt_add_function_parameter (tree parm)
8978 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8979 rtx incoming = DECL_INCOMING_RTL (parm);
8980 tree decl;
8981 enum machine_mode mode;
8982 HOST_WIDE_INT offset;
8983 dataflow_set *out;
8984 decl_or_value dv;
8986 if (TREE_CODE (parm) != PARM_DECL)
8987 return;
8989 if (!decl_rtl || !incoming)
8990 return;
8992 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8993 return;
8995 /* If there is a DRAP register, rewrite the incoming location of parameters
8996 passed on the stack into MEMs based on the argument pointer, as the DRAP
8997 register can be reused for other purposes and we do not track locations
8998 based on generic registers. But the prerequisite is that this argument
8999 pointer be also the virtual CFA pointer, see vt_initialize. */
9000 if (MEM_P (incoming)
9001 && stack_realign_drap
9002 && arg_pointer_rtx == cfa_base_rtx
9003 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9004 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9005 && XEXP (XEXP (incoming, 0), 0)
9006 == crtl->args.internal_arg_pointer
9007 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9009 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9010 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9011 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9012 incoming
9013 = replace_equiv_address_nv (incoming,
9014 plus_constant (Pmode,
9015 arg_pointer_rtx, off));
9018 #ifdef HAVE_window_save
9019 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9020 If the target machine has an explicit window save instruction, the
9021 actual entry value is the corresponding OUTGOING_REGNO instead. */
9022 if (REG_P (incoming)
9023 && HARD_REGISTER_P (incoming)
9024 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9026 parm_reg_t *p
9027 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9028 p->incoming = incoming;
9029 incoming
9030 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9031 OUTGOING_REGNO (REGNO (incoming)), 0);
9032 p->outgoing = incoming;
9034 else if (MEM_P (incoming)
9035 && REG_P (XEXP (incoming, 0))
9036 && HARD_REGISTER_P (XEXP (incoming, 0)))
9038 rtx reg = XEXP (incoming, 0);
9039 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9041 parm_reg_t *p
9042 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9043 p->incoming = reg;
9044 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9045 p->outgoing = reg;
9046 incoming = replace_equiv_address_nv (incoming, reg);
9049 #endif
9051 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9053 if (REG_P (incoming) || MEM_P (incoming))
9055 /* This means argument is passed by invisible reference. */
9056 offset = 0;
9057 decl = parm;
9058 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
9060 else
9062 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9063 return;
9064 offset += byte_lowpart_offset (GET_MODE (incoming),
9065 GET_MODE (decl_rtl));
9069 if (!decl)
9070 return;
9072 if (parm != decl)
9074 /* Assume that DECL_RTL was a pseudo that got spilled to
9075 memory. The spill slot sharing code will force the
9076 memory to reference spill_slot_decl (%sfp), so we don't
9077 match above. That's ok, the pseudo must have referenced
9078 the entire parameter, so just reset OFFSET. */
9079 gcc_assert (decl == get_spill_slot_decl (false));
9080 offset = 0;
9083 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9084 return;
9086 out = &VTI (ENTRY_BLOCK_PTR)->out;
9088 dv = dv_from_decl (parm);
9090 if (target_for_debug_bind (parm)
9091 /* We can't deal with these right now, because this kind of
9092 variable is single-part. ??? We could handle parallels
9093 that describe multiple locations for the same single
9094 value, but ATM we don't. */
9095 && GET_CODE (incoming) != PARALLEL)
9097 cselib_val *val;
9099 /* ??? We shouldn't ever hit this, but it may happen because
9100 arguments passed by invisible reference aren't dealt with
9101 above: incoming-rtl will have Pmode rather than the
9102 expected mode for the type. */
9103 if (offset)
9104 return;
9106 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
9107 VOIDmode, get_insns ());
9109 /* ??? Float-typed values in memory are not handled by
9110 cselib. */
9111 if (val)
9113 preserve_value (val);
9114 set_variable_part (out, val->val_rtx, dv, offset,
9115 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9116 dv = dv_from_value (val->val_rtx);
9120 if (REG_P (incoming))
9122 incoming = var_lowpart (mode, incoming);
9123 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9124 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9125 incoming);
9126 set_variable_part (out, incoming, dv, offset,
9127 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9128 if (dv_is_value_p (dv))
9130 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9131 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9132 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9134 enum machine_mode indmode
9135 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9136 rtx mem = gen_rtx_MEM (indmode, incoming);
9137 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9138 VOIDmode,
9139 get_insns ());
9140 if (val)
9142 preserve_value (val);
9143 record_entry_value (val, mem);
9144 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9145 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9150 else if (MEM_P (incoming))
9152 incoming = var_lowpart (mode, incoming);
9153 set_variable_part (out, incoming, dv, offset,
9154 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9158 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9160 static void
9161 vt_add_function_parameters (void)
9163 tree parm;
9165 for (parm = DECL_ARGUMENTS (current_function_decl);
9166 parm; parm = DECL_CHAIN (parm))
9167 vt_add_function_parameter (parm);
9169 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9171 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9173 if (TREE_CODE (vexpr) == INDIRECT_REF)
9174 vexpr = TREE_OPERAND (vexpr, 0);
9176 if (TREE_CODE (vexpr) == PARM_DECL
9177 && DECL_ARTIFICIAL (vexpr)
9178 && !DECL_IGNORED_P (vexpr)
9179 && DECL_NAMELESS (vexpr))
9180 vt_add_function_parameter (vexpr);
9184 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
9186 static bool
9187 fp_setter (rtx insn)
9189 rtx pat = PATTERN (insn);
9190 if (RTX_FRAME_RELATED_P (insn))
9192 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9193 if (expr)
9194 pat = XEXP (expr, 0);
9196 if (GET_CODE (pat) == SET)
9197 return SET_DEST (pat) == hard_frame_pointer_rtx;
9198 else if (GET_CODE (pat) == PARALLEL)
9200 int i;
9201 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9202 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9203 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9204 return true;
9206 return false;
9209 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9210 ensure it isn't flushed during cselib_reset_table.
9211 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9212 has been eliminated. */
9214 static void
9215 vt_init_cfa_base (void)
9217 cselib_val *val;
9219 #ifdef FRAME_POINTER_CFA_OFFSET
9220 cfa_base_rtx = frame_pointer_rtx;
9221 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9222 #else
9223 cfa_base_rtx = arg_pointer_rtx;
9224 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9225 #endif
9226 if (cfa_base_rtx == hard_frame_pointer_rtx
9227 || !fixed_regs[REGNO (cfa_base_rtx)])
9229 cfa_base_rtx = NULL_RTX;
9230 return;
9232 if (!MAY_HAVE_DEBUG_INSNS)
9233 return;
9235 /* Tell alias analysis that cfa_base_rtx should share
9236 find_base_term value with stack pointer or hard frame pointer. */
9237 if (!frame_pointer_needed)
9238 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9239 else if (!crtl->stack_realign_tried)
9240 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9242 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9243 VOIDmode, get_insns ());
9244 preserve_value (val);
9245 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9246 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
9247 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
9248 0, NULL_RTX, INSERT);
9251 /* Allocate and initialize the data structures for variable tracking
9252 and parse the RTL to get the micro operations. */
9254 static bool
9255 vt_initialize (void)
9257 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
9258 HOST_WIDE_INT fp_cfa_offset = -1;
9260 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9262 attrs_pool = create_alloc_pool ("attrs_def pool",
9263 sizeof (struct attrs_def), 1024);
9264 var_pool = create_alloc_pool ("variable_def pool",
9265 sizeof (struct variable_def)
9266 + (MAX_VAR_PARTS - 1)
9267 * sizeof (((variable)NULL)->var_part[0]), 64);
9268 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9269 sizeof (struct location_chain_def),
9270 1024);
9271 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9272 sizeof (struct shared_hash_def), 256);
9273 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9274 empty_shared_hash->refcount = 1;
9275 empty_shared_hash->htab
9276 = htab_create (1, variable_htab_hash, variable_htab_eq,
9277 variable_htab_free);
9278 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9279 variable_htab_free);
9281 /* Init the IN and OUT sets. */
9282 FOR_ALL_BB (bb)
9284 VTI (bb)->visited = false;
9285 VTI (bb)->flooded = false;
9286 dataflow_set_init (&VTI (bb)->in);
9287 dataflow_set_init (&VTI (bb)->out);
9288 VTI (bb)->permp = NULL;
9291 if (MAY_HAVE_DEBUG_INSNS)
9293 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9294 scratch_regs = BITMAP_ALLOC (NULL);
9295 valvar_pool = create_alloc_pool ("small variable_def pool",
9296 sizeof (struct variable_def), 256);
9297 preserved_values = VEC_alloc (rtx, heap, 256);
9299 else
9301 scratch_regs = NULL;
9302 valvar_pool = NULL;
9305 /* In order to factor out the adjustments made to the stack pointer or to
9306 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9307 instead of individual location lists, we're going to rewrite MEMs based
9308 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9309 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9310 resp. arg_pointer_rtx. We can do this either when there is no frame
9311 pointer in the function and stack adjustments are consistent for all
9312 basic blocks or when there is a frame pointer and no stack realignment.
9313 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9314 has been eliminated. */
9315 if (!frame_pointer_needed)
9317 rtx reg, elim;
9319 if (!vt_stack_adjustments ())
9320 return false;
9322 #ifdef FRAME_POINTER_CFA_OFFSET
9323 reg = frame_pointer_rtx;
9324 #else
9325 reg = arg_pointer_rtx;
9326 #endif
9327 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9328 if (elim != reg)
9330 if (GET_CODE (elim) == PLUS)
9331 elim = XEXP (elim, 0);
9332 if (elim == stack_pointer_rtx)
9333 vt_init_cfa_base ();
9336 else if (!crtl->stack_realign_tried)
9338 rtx reg, elim;
9340 #ifdef FRAME_POINTER_CFA_OFFSET
9341 reg = frame_pointer_rtx;
9342 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9343 #else
9344 reg = arg_pointer_rtx;
9345 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9346 #endif
9347 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9348 if (elim != reg)
9350 if (GET_CODE (elim) == PLUS)
9352 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9353 elim = XEXP (elim, 0);
9355 if (elim != hard_frame_pointer_rtx)
9356 fp_cfa_offset = -1;
9358 else
9359 fp_cfa_offset = -1;
9362 /* If the stack is realigned and a DRAP register is used, we're going to
9363 rewrite MEMs based on it representing incoming locations of parameters
9364 passed on the stack into MEMs based on the argument pointer. Although
9365 we aren't going to rewrite other MEMs, we still need to initialize the
9366 virtual CFA pointer in order to ensure that the argument pointer will
9367 be seen as a constant throughout the function.
9369 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9370 else if (stack_realign_drap)
9372 rtx reg, elim;
9374 #ifdef FRAME_POINTER_CFA_OFFSET
9375 reg = frame_pointer_rtx;
9376 #else
9377 reg = arg_pointer_rtx;
9378 #endif
9379 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9380 if (elim != reg)
9382 if (GET_CODE (elim) == PLUS)
9383 elim = XEXP (elim, 0);
9384 if (elim == hard_frame_pointer_rtx)
9385 vt_init_cfa_base ();
9389 hard_frame_pointer_adjustment = -1;
9391 vt_add_function_parameters ();
9393 FOR_EACH_BB (bb)
9395 rtx insn;
9396 HOST_WIDE_INT pre, post = 0;
9397 basic_block first_bb, last_bb;
9399 if (MAY_HAVE_DEBUG_INSNS)
9401 cselib_record_sets_hook = add_with_sets;
9402 if (dump_file && (dump_flags & TDF_DETAILS))
9403 fprintf (dump_file, "first value: %i\n",
9404 cselib_get_next_uid ());
9407 first_bb = bb;
9408 for (;;)
9410 edge e;
9411 if (bb->next_bb == EXIT_BLOCK_PTR
9412 || ! single_pred_p (bb->next_bb))
9413 break;
9414 e = find_edge (bb, bb->next_bb);
9415 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9416 break;
9417 bb = bb->next_bb;
9419 last_bb = bb;
9421 /* Add the micro-operations to the vector. */
9422 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9424 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9425 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9426 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9427 insn = NEXT_INSN (insn))
9429 if (INSN_P (insn))
9431 if (!frame_pointer_needed)
9433 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9434 if (pre)
9436 micro_operation mo;
9437 mo.type = MO_ADJUST;
9438 mo.u.adjust = pre;
9439 mo.insn = insn;
9440 if (dump_file && (dump_flags & TDF_DETAILS))
9441 log_op_type (PATTERN (insn), bb, insn,
9442 MO_ADJUST, dump_file);
9443 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9444 &mo);
9445 VTI (bb)->out.stack_adjust += pre;
9449 cselib_hook_called = false;
9450 adjust_insn (bb, insn);
9451 if (MAY_HAVE_DEBUG_INSNS)
9453 if (CALL_P (insn))
9454 prepare_call_arguments (bb, insn);
9455 cselib_process_insn (insn);
9456 if (dump_file && (dump_flags & TDF_DETAILS))
9458 print_rtl_single (dump_file, insn);
9459 dump_cselib_table (dump_file);
9462 if (!cselib_hook_called)
9463 add_with_sets (insn, 0, 0);
9464 cancel_changes (0);
9466 if (!frame_pointer_needed && post)
9468 micro_operation mo;
9469 mo.type = MO_ADJUST;
9470 mo.u.adjust = post;
9471 mo.insn = insn;
9472 if (dump_file && (dump_flags & TDF_DETAILS))
9473 log_op_type (PATTERN (insn), bb, insn,
9474 MO_ADJUST, dump_file);
9475 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9476 &mo);
9477 VTI (bb)->out.stack_adjust += post;
9480 if (bb == prologue_bb
9481 && fp_cfa_offset != -1
9482 && hard_frame_pointer_adjustment == -1
9483 && RTX_FRAME_RELATED_P (insn)
9484 && fp_setter (insn))
9486 vt_init_cfa_base ();
9487 hard_frame_pointer_adjustment = fp_cfa_offset;
9491 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9494 bb = last_bb;
9496 if (MAY_HAVE_DEBUG_INSNS)
9498 cselib_preserve_only_values ();
9499 cselib_reset_table (cselib_get_next_uid ());
9500 cselib_record_sets_hook = NULL;
9504 hard_frame_pointer_adjustment = -1;
9505 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9506 cfa_base_rtx = NULL_RTX;
9507 return true;
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
9516 /* Get rid of all debug insns from the insn stream. */
9518 static void
9519 delete_debug_insns (void)
9521 basic_block bb;
9522 rtx insn, next;
9524 if (!MAY_HAVE_DEBUG_INSNS)
9525 return;
9527 FOR_EACH_BB (bb)
9529 FOR_BB_INSNS_SAFE (bb, insn, next)
9530 if (DEBUG_INSN_P (insn))
9532 tree decl = INSN_VAR_LOCATION_DECL (insn);
9533 if (TREE_CODE (decl) == LABEL_DECL
9534 && DECL_NAME (decl)
9535 && !DECL_RTL_SET_P (decl))
9537 PUT_CODE (insn, NOTE);
9538 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9539 NOTE_DELETED_LABEL_NAME (insn)
9540 = IDENTIFIER_POINTER (DECL_NAME (decl));
9541 SET_DECL_RTL (decl, insn);
9542 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9544 else
9545 delete_insn (insn);
9550 /* Run a fast, BB-local only version of var tracking, to take care of
9551 information that we don't do global analysis on, such that not all
9552 information is lost. If SKIPPED holds, we're skipping the global
9553 pass entirely, so we should try to use information it would have
9554 handled as well.. */
9556 static void
9557 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9559 /* ??? Just skip it all for now. */
9560 delete_debug_insns ();
/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  /* Release the per-block micro-operation vectors gathered by
     vt_initialize.  */
  FOR_EACH_BB (bb)
    {
      VEC_free (micro_operation, heap, VTI (bb)->mos);
    }

  /* Destroy the dataflow sets for every block, including the entry and
     exit blocks (hence FOR_ALL_BB rather than FOR_EACH_BB).  */
  FOR_ALL_BB (bb)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  /* Drop the aux fields (the VTI data) themselves.  */
  free_aux_for_blocks ();
  htab_delete (empty_shared_hash->htab);
  htab_delete (changed_variables);
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  /* The cselib machinery and related pools are only set up when debug
     insns may exist; tear them down under the same condition.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      free_alloc_pool (valvar_pool);
      VEC_free (rtx, heap, preserved_values);
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  VEC_free (parm_reg_t, gc, windowed_parm_regs);
#endif

  /* Free the scratch vector used when emitting notes; reset the
     bookkeeping so a later run reallocates it cleanly.  */
  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
/* The entry point to variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  /* A negative value means var-tracking assignments were requested to
     be dropped without running the pass: just delete the debug insns.  */
  if (flag_var_tracking_assignments < 0)
    {
      delete_debug_insns ();
      return 0;
    }

  /* Bail out on very large, densely connected CFGs where the global
     dataflow analysis would be too expensive; fall back to the
     BB-local variant.  */
  if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  /* If the location analysis failed (e.g. it did not converge) while
     tracking assignments, retry once from scratch with assignment
     tracking disabled.  */
  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_flow_info (dump_file, dump_flags);
    }

  /* Emit the var-location notes computed by the analysis.  */
  timevar_push (TV_VAR_TRACKING_EMIT);
  vt_emit_notes ();
  timevar_pop (TV_VAR_TRACKING_EMIT);

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}
9678 unsigned int
9679 variable_tracking_main (void)
9681 unsigned int ret;
9682 int save = flag_var_tracking_assignments;
9684 ret = variable_tracking_main_1 ();
9686 flag_var_tracking_assignments = save;
9688 return ret;
9691 static bool
9692 gate_handle_var_tracking (void)
9694 return (flag_var_tracking && !targetm.delay_vartrack);
9699 struct rtl_opt_pass pass_variable_tracking =
9702 RTL_PASS,
9703 "vartrack", /* name */
9704 gate_handle_var_tracking, /* gate */
9705 variable_tracking_main, /* execute */
9706 NULL, /* sub */
9707 NULL, /* next */
9708 0, /* static_pass_number */
9709 TV_VAR_TRACKING, /* tv_id */
9710 0, /* properties_required */
9711 0, /* properties_provided */
9712 0, /* properties_destroyed */
9713 0, /* todo_flags_start */
9714 TODO_verify_rtl_sharing /* todo_flags_finish */