/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
   < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable,
   and, for each physical register, a linked list of the variable parts
   stored in that register, i.e. a list of triplets (reg, decl, offset)
   where decl is REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked
   list is used for effectively deleting the appropriate variable parts
   when we set or clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so a list is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each
   variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */
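/* For illustration, a minimal guarded-out sketch of the confluence
   step described above, written against this file's own helpers
   (dataflow_set_clear, dataflow_set_union and the VTI accessor
   defined below): the IN set of a block is recomputed as the union
   of its predecessors' OUT sets before the block's micro operations
   are replayed to produce OUT.  The real driver is vt_find_locations
   together with compute_bb_dataflow.  */
#if 0
static void
example_confluence (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* IN (BB) := union over all predecessors P of OUT (P).  */
  dataflow_set_clear (&VTI (bb)->in);
  FOR_EACH_EDGE (e, ei, bb->preds)
    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
}
#endif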
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "fibheap.h"
#include "hashtab.h"
#include "regs.h"
#include "expr.h"
#include "timevar.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "recog.h"
#include "alias.h"
/* var-tracking.c assumes that the tree code with the same value as VALUE
   rtx code has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
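/* A guarded-out sketch of the same compile-time assertion idiom with
   a hypothetical condition: when the condition is false the array
   size becomes -1, which every C compiler rejects, so the build
   fails instead of silently miscompiling.  */
#if 0
extern char example_assert[sizeof (HOST_WIDE_INT) >= sizeof (int) ? 1 : -1];
#endif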
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about a micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
DEF_VEC_O(micro_operation);
DEF_VEC_ALLOC_O(micro_operation,heap);

/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Structure for passing some other parameters to the function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx insn;

  /* Where the note will be emitted (before/after insn).  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  htab_t vars;
} emit_note_data;

/* Description of location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements), so a chain
   is the best data structure here.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  htab_t htab;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  VEC(micro_operation, heap) *mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;

/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;

DEF_VEC_O (loc_exp_dep);

/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  VEC (loc_exp_dep, none) deps;
};
/* Structure describing one part of a variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
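/* Since INT_MEM_OFFSET evaluates its argument twice, it must not be
   handed an expression with side effects.  A guarded-out sketch of
   the hazard, using a hypothetical side-effecting next_mem:  */
#if 0
{
  rtx m;
  HOST_WIDE_INT offset;

  /* Wrong: next_mem (x) would be evaluated twice.  */
  offset = INT_MEM_OFFSET (next_mem (x));

  /* Right: evaluate once into a temporary, then apply the macro.  */
  m = next_mem (x);
  offset = INT_MEM_OFFSET (m);
}
#endif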
#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__		\
(*({  variable const __v = (var);			\
      gcc_checking_assert (!__v->onepart);		\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__		\
(*({  variable const __v = (var);			\
      gcc_checking_assert (__v->onepart);		\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
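/* The checked variants above use GNU statement expressions so the
   accessors stay usable as lvalues while asserting the variable's
   kind on every access.  A guarded-out, self-contained sketch of the
   same idiom for a hypothetical two-field struct:  */
#if 0
struct example_pair { int slot[2]; };

#define EXAMPLE_SLOT(p, i) __extension__		\
(*({  struct example_pair *const p_ = (p);		\
      gcc_checking_assert ((unsigned) (i) < 2);		\
      &p_->slot[(i)]; }))

/* EXAMPLE_SLOT (p, 1) = 42; still works as an assignment target,
   because the statement expression yields a pointer that is then
   dereferenced.  */
#endif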
/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)	(VAR_LOC_1PAUX (var)		   \
				 ? VAR_LOC_1PAUX (var)->backlinks  \
				 : NULL)
#define VAR_LOC_DEP_LSTP(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_FROM(var)	(VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)	(VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->deps	   \
				 : NULL)

/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static htab_t changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static htab_t dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;
#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

DEF_VEC_O(parm_reg_t);
DEF_VEC_ALLOC_O(parm_reg_t, gc);

/* Vector of windowed parameter registers, if any.  */
static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static void **unshare_variable (dataflow_set *set, void **slot, variable var,
				enum var_init_status);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static int dump_var_slot (void **, void *);
static void dump_var (variable);
static void dump_vars (htab_t);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static void **set_slot_part (dataflow_set *, rtx, void **,
			     decl_or_value, HOST_WIDE_INT,
			     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static void **clobber_slot_part (dataflow_set *, rtx,
				 void **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static int emit_note_insn_var_location (void **, void *);
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
static int emit_notes_for_differences_1 (void **, void *);
static int emit_notes_for_differences_2 (void **, void *);
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amounts of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
    }
  else if (MEM_P (dest))
    {
      /* (set (mem (pre_dec (reg sp))) (foo))  */
      src = XEXP (dest, 0);
      code = GET_CODE (src);

      switch (code)
	{
	case PRE_MODIFY:
	case POST_MODIFY:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      rtx val = XEXP (XEXP (src, 1), 1);
	      /* We handle only adjustments by constant amount.  */
	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
			  && CONST_INT_P (val));

	      if (code == PRE_MODIFY)
		*pre -= INTVAL (val);
	      else
		*post -= INTVAL (val);
	      break;
	    }
	  return;

	case PRE_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case PRE_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	default:
	  return;
	}
    }
}
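/* Worked example of the sign conventions above (a positive *PRE or
   *POST means the stack grew):

     (set (reg sp) (plus (reg sp) (const_int -16)))
       => *post += 16    (PLUS branch: *post -= -16)

     (set (mem:SI (pre_dec (reg sp))) (reg x))
       => *pre += 4      (the size of the SImode MEM)

     (set (mem (pre_modify (reg sp)
			   (plus (reg sp) (const_int -8)))) ...)
       => *pre += 8      (PRE_MODIFY branch: *pre -= -8)  */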
/* Given an INSN, calculate the amounts of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR)->visited = true;
  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR)
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* Check whether the adjustments on the edges are the same.  */
	  if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx or frame_pointer_rtx, respectively, if
   stack_pointer_rtx or hard_frame_pointer_rtx is being mapped to it,
   and the offset to use for the mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  enum machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx side_effects;
};

/* Helper for adjust_mems.  Return 1 if *LOC is unsuitable for
   transformation of wider mode arithmetic to narrower mode,
   -1 if it is suitable and subexpressions shouldn't be
   traversed and 0 if it is suitable and subexpressions should
   be traversed.  Called through for_each_rtx.  */

static int
use_narrower_mode_test (rtx *loc, void *data)
{
  rtx subreg = (rtx) data;

  if (CONSTANT_P (*loc))
    return -1;
  switch (GET_CODE (*loc))
    {
    case REG:
      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	return 1;
      if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
			    *loc, subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (*loc))))
	return 1;
      return -1;
    case PLUS:
    case MINUS:
    case MULT:
      return 0;
    case ASHIFT:
      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
	return 1;
      else
	return -1;
    default:
      return 1;
    }
}
/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
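/* For example, narrowing (plus:DI (reg:DI r) (const_int 1)) from
   DImode to SImode yields
   (plus:SI (subreg:SI (reg:DI r) 0) (const_int 1)) on a little-endian
   target: each operand is rewritten as its low part and the operation
   is regenerated (and possibly simplified) in the narrower mode.  */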
/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  enum machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   GEN_INT (GET_CODE (loc) == PRE_INC
				    ? GET_MODE_SIZE (amd->mem_mode)
				    : -GET_MODE_SIZE (amd->mem_mode)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  GEN_INT ((GET_CODE (loc) == PRE_INC
				    || GET_CODE (loc) == POST_INC)
				   ? GET_MODE_SIZE (amd->mem_mode)
				   : -GET_MODE_SIZE (amd->mem_mode)));
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							XEXP (loc, 1)),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	  && GET_MODE_SIZE (GET_MODE (tem))
	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = VEC_length (parm_reg_t, windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL_RTX;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will not
	 touch ASM_OPERANDS other than the first one, so the
	 asm_noperands test above needs to be done before the
	 adjustment (otherwise it would fail), and afterwards this
	 code fixes the remaining ASM_OPERANDS up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
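/* A guarded-out, self-contained sketch of the tagged-pointer scheme
   behind decl_or_value (the types here are hypothetical, not GCC's):
   both pointees start with a code field, so a single load classifies
   the opaque pointer, exactly as dv_is_decl_p relies on TREE_CODE
   and the check_value_val assertion near the top of the file.  */
#if 0
enum example_kind { EXAMPLE_DECL, EXAMPLE_VALUE };
struct example_decl  { enum example_kind kind; const char *name; };
struct example_value { enum example_kind kind; unsigned int uid; };

static unsigned int
example_uid (void *dv)
{
  /* One comparison discriminates the two pointee types, because the
     kind is the first member of both structs.  */
  if (*(enum example_kind *) dv == EXAMPLE_VALUE)
    return ((struct example_value *) dv)->uid;
  return 0;
}
#endif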
extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a mask table in a shared_htab chain.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

static hashval_t
variable_htab_hash (const void *x)
{
  const_variable const v = (const_variable) x;

  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

static int
variable_htab_eq (const void *x, const void *y)
{
  const_variable const v = (const_variable) x;
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}
/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}

/* Return the element of LIST whose DV and OFFSET match, or NULL.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline htab_t
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, which may be because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab
    = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
		   variable_htab_eq, variable_htab_free);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      htab_delete (vars->htab);
      pool_free (shared_hash_pool, vars);
    }
}
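/* A guarded-out sketch of the copy-on-write protocol implemented by
   the functions above: taking a snapshot is O(1) (a refcount bump),
   and the underlying htab is only duplicated by the first write that
   goes through a still-shared handle (see
   shared_hash_find_slot_unshare below).  */
#if 0
static void
example_cow (shared_hash *setp, decl_or_value dv)
{
  shared_hash snapshot;
  void **slot;

  snapshot = shared_hash_copy (*setp);	/* refcount++, no copying.  */

  /* Writing through *SETP while SNAPSHOT still references the same
     table first clones it via shared_hash_unshare.  */
  slot = shared_hash_find_slot_unshare (setp, dv, INSERT);
  (void) slot;

  shared_hash_destroy (snapshot);	/* refcount--; freed at zero.  */
}
#endif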
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline void **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
}

static inline void **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline void **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
				   shared_hash_shared (vars)
				   ? NO_INSERT : INSERT);
}

static inline void **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline void **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
				   NO_INSERT);
}

static inline void **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
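/* Example of the star topology: if VALUEs with uids 1, 2 and 3 are
   found equivalent, the lowest-numbered one (uid 1) is canonical, so
   the entries for uids 2 and 3 each record uid 1 as a location, and
   uid 1 keeps backlinks to both; any member of the class can reach
   any other through the center in at most two steps.  */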
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static void **
unshare_variable (dataflow_set *set, void **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  if (var->in_changed_variables)
    {
      void **cslot
	= htab_find_slot_with_hash (changed_variables, var->dv,
				    dv_htab_hash (var->dv), NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
/* Copy all variables from hash table SRC to hash table DST.  */

static void
vars_copy (htab_t dst, htab_t src)
{
  htab_iterator hi;
  variable var;

  FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
    {
      void **dstp;
      var->refcount++;
      dstp = htab_find_slot_with_hash (dst, var->dv,
				       dv_htab_hash (var->dv),
				       INSERT);
      *dstp = var;
    }
}

/* Map a decl to its main debug decl.  */

static inline tree
var_debug_decl (tree decl)
{
  if (decl && DECL_P (decl)
      && DECL_DEBUG_EXPR_IS_FROM (decl))
    {
      tree debugdecl = DECL_DEBUG_EXPR (decl);
      if (debugdecl && DECL_P (debugdecl))
	decl = debugdecl;
    }

  return decl;
}
/* Set the register LOC to contain DV, OFFSET.  */

static void
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
		  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
		  enum insert_option iopt)
{
  attrs node;
  bool decl_p = dv_is_decl_p (dv);

  if (decl_p)
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));

  for (node = set->regs[REGNO (loc)]; node; node = node->next)
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
	&& node->offset == offset)
      break;
  if (!node)
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}

/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */

static void
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
	     rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);

  var_reg_decl_set (set, loc, initialized,
		    dv_from_decl (decl), offset, set_src, INSERT);
}

static enum var_init_status
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
{
  variable var;
  int i;
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;

  if (! flag_var_tracking_uninit)
    return VAR_INIT_STATUS_INITIALIZED;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
	{
	  location_chain nextp;
	  for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
	    if (rtx_equal_p (nextp->loc, loc))
	      {
		ret_val = nextp->init;
		break;
	      }
	}
    }

  return ret_val;
}
/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);
  attrs node, next;
  attrs *nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */

static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs *nextp = &set->regs[REGNO (loc)];
  attrs node, next;

  if (clobber)
    {
      tree decl = REG_EXPR (loc);
      HOST_WIDE_INT offset = REG_OFFSET (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}

/* Delete content of register with number REGNO in dataflow set SET.  */

static void
var_regno_delete (dataflow_set *set, int regno)
{
  attrs *reg = &set->regs[regno];
  attrs node, next;

  for (node = *reg; node; node = next)
    {
      next = node->next;
      delete_variable_part (set, node->loc, node->dv, node->offset);
      pool_free (attrs_pool, node);
    }
  *reg = NULL;
}
1958 /* Strip constant offsets and alignments off of LOC. Return the base
1959 expression. */
1961 static rtx
1962 vt_get_canonicalize_base (rtx loc)
1964 while ((GET_CODE (loc) == PLUS
1965 || GET_CODE (loc) == AND)
1966 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1967 && (GET_CODE (loc) != AND
1968 || INTVAL (XEXP (loc, 1)) < 0))
1969 loc = XEXP (loc, 0);
1971 return loc;
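/* E.g. (illustrative), given
     (and:P (plus:P (value:P V) (const_int 8)) (const_int -16))
   the loop above strips both the constant offset and the negative
   alignment mask, returning the base (value:P V).  */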
1974 /* Canonicalize LOC using equivalences from SET in addition to those
1975 in the cselib static table. */
1977 static rtx
1978 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
1980 HOST_WIDE_INT ofst = 0;
1981 enum machine_mode mode = GET_MODE (oloc);
1982 rtx loc = canon_rtx (get_addr (oloc));
1984 /* Try to substitute a base VALUE for equivalent expressions as much
1985 as possible. The goal here is to expand stack-related addresses
1986 to one of the stack base registers, so that we can compare
1987 addresses for overlaps. */
1988 while (GET_CODE (vt_get_canonicalize_base (loc)) == VALUE)
1990 rtx x;
1991 decl_or_value dv;
1992 variable var;
1993 location_chain l;
1995 while (GET_CODE (loc) == PLUS)
1997 ofst += INTVAL (XEXP (loc, 1));
1998 loc = XEXP (loc, 0);
1999 continue;
2002 /* Alignment operations can't normally be combined, so just
2003 canonicalize the base and we're done. We'll normally have
2004 only one stack alignment anyway. */
2005 if (GET_CODE (loc) == AND)
2007 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2008 if (x != XEXP (loc, 0))
2009 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2010 loc = canon_rtx (get_addr (loc));
2011 break;
2014 x = canon_rtx (get_addr (loc));
2016 /* We've made progress! Start over. */
2017 if (x != loc || GET_CODE (x) != VALUE)
2019 loc = x;
2020 continue;
2023 dv = dv_from_rtx (x);
2024 var = (variable) htab_find_with_hash (shared_hash_htab (set->vars),
2025 dv, dv_htab_hash (dv));
2026 if (!var)
2027 break;
2029 /* Look for an improved equivalent expression. */
2030 for (l = var->var_part[0].loc_chain; l; l = l->next)
2032 rtx base = vt_get_canonicalize_base (l->loc);
2033 if (GET_CODE (base) == REG
2034 || (GET_CODE (base) == VALUE
2035 && canon_value_cmp (base, loc)))
2037 loc = l->loc;
2038 break;
2042 /* No luck with the dataflow set, so we're done. */
2043 if (!l)
2044 break;
2047 /* Add OFST back in. */
2048 if (ofst)
2050 /* Don't build new RTL if we can help it. */
2051 if (GET_CODE (oloc) == PLUS
2052 && XEXP (oloc, 0) == loc
2053 && INTVAL (XEXP (oloc, 1)) == ofst)
2054 return oloc;
2056 loc = plus_constant (mode, loc, ofst);
2059 return loc;
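/* A sketch of the effect (illustrative): starting from
   (plus (value V1) (const_int 4)), with V1 equivalent to
   (plus (value V2) (const_int 8)) and V2 equivalent to (reg sp),
   the loop accumulates the constant offsets and substitutes bases
   until the address becomes (plus (reg sp) (const_int 12)).  */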
2062 /* Return true iff ADDR has a stack register as the base address. */
2064 static inline bool
2065 vt_stack_offset_p (rtx addr)
2067 rtx base = vt_get_canonicalize_base (addr);
2069 if (GET_CODE (base) != REG)
2070 return false;
2072 return REGNO_PTR_FRAME_P (REGNO (base));
2075 /* Return true iff there's a true dependence between MLOC and LOC.
2076 MADDR must be a canonicalized version of MLOC's address. */
2078 static inline bool
2079 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2081 if (GET_CODE (loc) != MEM)
2082 return false;
2084 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, NULL))
2085 return false;
2087 if (!MEM_EXPR (loc) && vt_stack_offset_p (maddr))
2089 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2090 return canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr);
2093 return true;
2096 /* Hold parameters for the hashtab traversal function
2097 drop_overlapping_mem_locs, see below. */
2099 struct overlapping_mems
2101 dataflow_set *set;
2102 rtx loc, addr;
2105 /* Remove all MEMs that overlap with COMS->LOC from the location list
2106 of a hash table entry for a value. COMS->ADDR must be a
2107 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2108 canonicalized itself. */
2110 static int
2111 drop_overlapping_mem_locs (void **slot, void *data)
2113 struct overlapping_mems *coms = (struct overlapping_mems *)data;
2114 dataflow_set *set = coms->set;
2115 rtx mloc = coms->loc, addr = coms->addr;
2116 variable var = (variable) *slot;
2118 if (var->onepart == ONEPART_VALUE)
2120 location_chain loc, *locp;
2121 bool changed = false;
2122 rtx cur_loc;
2124 gcc_assert (var->n_var_parts == 1);
2126 if (shared_var_p (var, set->vars))
2128 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2129 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2130 break;
2132 if (!loc)
2133 return 1;
2135 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2136 var = (variable)*slot;
2137 gcc_assert (var->n_var_parts == 1);
2140 if (VAR_LOC_1PAUX (var))
2141 cur_loc = VAR_LOC_FROM (var);
2142 else
2143 cur_loc = var->var_part[0].cur_loc;
2145 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2146 loc; loc = *locp)
2148 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2150 locp = &loc->next;
2151 continue;
2154 *locp = loc->next;
2155 /* If we have deleted the location that was last emitted,
2156 we have to emit a new location, so add the variable to the
2157 set of changed variables. */
2158 if (cur_loc == loc->loc)
2160 changed = true;
2161 var->var_part[0].cur_loc = NULL;
2162 if (VAR_LOC_1PAUX (var))
2163 VAR_LOC_FROM (var) = NULL;
2165 pool_free (loc_chain_pool, loc);
2168 if (!var->var_part[0].loc_chain)
2170 var->n_var_parts--;
2171 changed = true;
2173 if (changed)
2174 variable_was_changed (var, set);
2177 return 1;
2180 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2182 static void
2183 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2185 struct overlapping_mems coms;
2187 coms.set = set;
2188 coms.loc = canon_rtx (loc);
2189 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2191 set->traversed_vars = set->vars;
2192 htab_traverse (shared_hash_htab (set->vars),
2193 drop_overlapping_mem_locs, &coms);
2194 set->traversed_vars = NULL;
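/* For example (illustrative), after a store to
   (mem:SI (plus (reg sp) (const_int -16))), a VALUE whose location
   list still contains the overlapping
   (mem:DI (plus (reg sp) (const_int -20))) would be stale, so the
   traversal above drops that MEM from the VALUE's list.  */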
2197 /* Set the location of DV, OFFSET as the MEM LOC. */
2199 static void
2200 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2201 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2202 enum insert_option iopt)
2204 if (dv_is_decl_p (dv))
2205 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2207 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2210 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2211 SET to LOC.
2212 Adjust the address first if it is stack pointer based. */
2214 static void
2215 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2216 rtx set_src)
2218 tree decl = MEM_EXPR (loc);
2219 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2221 var_mem_decl_set (set, loc, initialized,
2222 dv_from_decl (decl), offset, set_src, INSERT);
2225 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2226 dataflow set SET to LOC. If MODIFY is true, any other live copies
2227 of the same variable part are also deleted from the dataflow set,
2228 otherwise the variable part is assumed to be copied from another
2229 location holding the same part.
2230 Adjust the address first if it is stack pointer based. */
2232 static void
2233 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2234 enum var_init_status initialized, rtx set_src)
2236 tree decl = MEM_EXPR (loc);
2237 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2239 clobber_overlapping_mems (set, loc);
2240 decl = var_debug_decl (decl);
2242 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2243 initialized = get_init_value (set, loc, dv_from_decl (decl));
2245 if (modify)
2246 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2247 var_mem_set (set, loc, initialized, set_src);
2250 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2251 true, also delete any other live copies of the same variable part.
2252 Adjust the address first if it is stack pointer based. */
2254 static void
2255 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2257 tree decl = MEM_EXPR (loc);
2258 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2260 clobber_overlapping_mems (set, loc);
2261 decl = var_debug_decl (decl);
2262 if (clobber)
2263 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2264 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2267 /* Return true if LOC should not be expanded for location expressions,
2268 or used in them. */
2270 static inline bool
2271 unsuitable_loc (rtx loc)
2273 switch (GET_CODE (loc))
2275 case PC:
2276 case SCRATCH:
2277 case CC0:
2278 case ASM_INPUT:
2279 case ASM_OPERANDS:
2280 return true;
2282 default:
2283 return false;
2287 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2288 bound to it. */
2290 static inline void
2291 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2293 if (REG_P (loc))
2295 if (modified)
2296 var_regno_delete (set, REGNO (loc));
2297 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2298 dv_from_value (val), 0, NULL_RTX, INSERT);
2300 else if (MEM_P (loc))
2302 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2304 if (modified)
2305 clobber_overlapping_mems (set, loc);
2307 if (l && GET_CODE (l->loc) == VALUE)
2308 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2310 /* If this MEM is a global constant, we don't need it in the
2311 dynamic tables. ??? We should test this before emitting the
2312 micro-op in the first place. */
2313 while (l)
2314 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2315 break;
2316 else
2317 l = l->next;
2319 if (!l)
2320 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2321 dv_from_value (val), 0, NULL_RTX, INSERT);
2323 else
2325 /* Other kinds of equivalences are necessarily static, at least
2326 so long as we do not perform substitutions while merging
2327 expressions. */
2328 gcc_unreachable ();
2329 set_variable_part (set, loc, dv_from_value (val), 0,
2330 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2334 /* Bind a value to a location it was just stored in. If MODIFIED
2335 holds, assume the location was modified, detaching it from any
2336 values bound to it. */
2338 static void
2339 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2341 cselib_val *v = CSELIB_VAL_PTR (val);
2343 gcc_assert (cselib_preserved_value_p (v));
2345 if (dump_file)
2347 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2348 print_inline_rtx (dump_file, loc, 0);
2349 fprintf (dump_file, " evaluates to ");
2350 print_inline_rtx (dump_file, val, 0);
2351 if (v->locs)
2353 struct elt_loc_list *l;
2354 for (l = v->locs; l; l = l->next)
2356 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2357 print_inline_rtx (dump_file, l->loc, 0);
2360 fprintf (dump_file, "\n");
2363 gcc_checking_assert (!unsuitable_loc (loc));
2365 val_bind (set, val, loc, modified);
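/* With a dump file the block above produces output like
   (illustrative insn UIDs and value numbers):

     42: (reg:SI 0 ax) evaluates to (value:SI 7:7)
     40: (mem:SI (reg:SI 6 bp))

   i.e. the insn, the location stored to, the value it evaluates to,
   and the value's other known locations with their setting insns.  */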
2368 /* Reset this node, detaching all its equivalences. */
2371 static void
2372 val_reset (dataflow_set *set, decl_or_value dv)
2374 variable var = shared_hash_find (set->vars, dv) ;
2375 location_chain node;
2376 rtx cval;
2378 if (!var || !var->n_var_parts)
2379 return;
2381 gcc_assert (var->n_var_parts == 1);
2383 cval = NULL;
2384 for (node = var->var_part[0].loc_chain; node; node = node->next)
2385 if (GET_CODE (node->loc) == VALUE
2386 && canon_value_cmp (node->loc, cval))
2387 cval = node->loc;
2389 for (node = var->var_part[0].loc_chain; node; node = node->next)
2390 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2392 /* Redirect the equivalence link to the new canonical
2393 value, or simply remove it if it would point at
2394 itself. */
2395 if (cval)
2396 set_variable_part (set, cval, dv_from_value (node->loc),
2397 0, node->init, node->set_src, NO_INSERT);
2398 delete_variable_part (set, dv_as_value (dv),
2399 dv_from_value (node->loc), 0);
2402 if (cval)
2404 decl_or_value cdv = dv_from_value (cval);
2406 /* Keep the remaining values connected, accumulating links
2407 in the canonical value. */
2408 for (node = var->var_part[0].loc_chain; node; node = node->next)
2410 if (node->loc == cval)
2411 continue;
2412 else if (GET_CODE (node->loc) == REG)
2413 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2414 node->set_src, NO_INSERT);
2415 else if (GET_CODE (node->loc) == MEM)
2416 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2417 node->set_src, NO_INSERT);
2418 else
2419 set_variable_part (set, node->loc, cdv, 0,
2420 node->init, node->set_src, NO_INSERT);
2424 /* We remove this last, to make sure that the canonical value is not
2425 removed to the point of requiring reinsertion. */
2426 if (cval)
2427 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2429 clobber_variable_part (set, NULL, dv, 0, NULL);
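/* A sketch of the effect (illustrative): resetting V3 whose chain is
   (V1, V2, (reg ax)), with V1 the most canonical, redirects V2's
   equivalence link to V1, rebinds (reg ax) to V1, and finally drops
   V3 from V1's list, preserving the equivalences without V3.  */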
2432 /* Find the values in a given location and map the val to another
2433 value, if it is unique, or add the location as one holding the
2434 value. */
2436 static void
2437 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2439 decl_or_value dv = dv_from_value (val);
2441 if (dump_file && (dump_flags & TDF_DETAILS))
2443 if (insn)
2444 fprintf (dump_file, "%i: ", INSN_UID (insn));
2445 else
2446 fprintf (dump_file, "head: ");
2447 print_inline_rtx (dump_file, val, 0);
2448 fputs (" is at ", dump_file);
2449 print_inline_rtx (dump_file, loc, 0);
2450 fputc ('\n', dump_file);
2453 val_reset (set, dv);
2455 gcc_checking_assert (!unsuitable_loc (loc));
2457 if (REG_P (loc))
2459 attrs node, found = NULL;
2461 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2462 if (dv_is_value_p (node->dv)
2463 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2465 found = node;
2467 /* Map incoming equivalences. ??? Wouldn't it be nice if
2468 we just started sharing the location lists? Maybe a
2469 circular list ending at the value itself or some
2470 such. */
2471 set_variable_part (set, dv_as_value (node->dv),
2472 dv_from_value (val), node->offset,
2473 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2474 set_variable_part (set, val, node->dv, node->offset,
2475 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2478 /* If we didn't find any equivalence, we need to remember that
2479 this value is held in the named register. */
2480 if (found)
2481 return;
2483 /* ??? Attempt to find and merge equivalent MEMs or other
2484 expressions too. */
2486 val_bind (set, val, loc, false);
2489 /* Initialize dataflow set SET to be empty. */
2492 static void
2493 dataflow_set_init (dataflow_set *set)
2495 init_attrs_list_set (set->regs);
2496 set->vars = shared_hash_copy (empty_shared_hash);
2497 set->stack_adjust = 0;
2498 set->traversed_vars = NULL;
2501 /* Delete the contents of dataflow set SET. */
2503 static void
2504 dataflow_set_clear (dataflow_set *set)
2506 int i;
2508 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2509 attrs_list_clear (&set->regs[i]);
2511 shared_hash_destroy (set->vars);
2512 set->vars = shared_hash_copy (empty_shared_hash);
2515 /* Copy the contents of dataflow set SRC to DST. */
2517 static void
2518 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2520 int i;
2522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2523 attrs_list_copy (&dst->regs[i], src->regs[i]);
2525 shared_hash_destroy (dst->vars);
2526 dst->vars = shared_hash_copy (src->vars);
2527 dst->stack_adjust = src->stack_adjust;
2530 /* Information for merging lists of locations for a given offset of variable. */
2532 struct variable_union_info
2534 /* Node of the location chain. */
2535 location_chain lc;
2537 /* The sum of positions in the input chains. */
2538 int pos;
2540 /* The position in the chain of DST dataflow set. */
2541 int pos_dst;
2544 /* Buffer for location list sorting and its allocated size. */
2545 static struct variable_union_info *vui_vec;
2546 static int vui_allocated;
2548 /* Compare function for qsort, order the structures by POS element. */
2550 static int
2551 variable_union_info_cmp_pos (const void *n1, const void *n2)
2553 const struct variable_union_info *const i1 =
2554 (const struct variable_union_info *) n1;
2555 const struct variable_union_info *const i2 =
2556 ( const struct variable_union_info *) n2;
2558 if (i1->pos != i2->pos)
2559 return i1->pos - i2->pos;
2561 return (i1->pos_dst - i2->pos_dst);
2564 /* Compute union of location parts of variable *SLOT and the same variable
2565 from hash table DATA. Compute "sorted" union of the location chains
2566 for common offsets, i.e. the locations of a variable part are sorted by
2567 a priority where the priority is the sum of the positions in the 2 chains
2568 (if a location is only in one list, its position in the other list is
2569 defined to be larger than the length of the chains).
2570 When we are updating the location parts the newest location is in the
2571 beginning of the chain, so when we do the described "sorted" union
2572 we keep the newest locations in the beginning. */
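/* For example (illustrative), for src chain (r1, m1) and dst chain
   (m1, r2): m1 is in both chains, at positions 1 and 0, so its
   priority is 1, while r1 and r2 occur in only one chain and get
   priorities beyond the chain lengths (4 and 5 here).  The union
   comes out as (m1, r1, r2), with the common location first.  */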
2574 static int
2575 variable_union (variable src, dataflow_set *set)
2577 variable dst;
2578 void **dstp;
2579 int i, j, k;
2581 dstp = shared_hash_find_slot (set->vars, src->dv);
2582 if (!dstp || !*dstp)
2584 src->refcount++;
2586 dst_can_be_shared = false;
2587 if (!dstp)
2588 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2590 *dstp = src;
2592 /* Continue traversing the hash table. */
2593 return 1;
2595 else
2596 dst = (variable) *dstp;
2598 gcc_assert (src->n_var_parts);
2599 gcc_checking_assert (src->onepart == dst->onepart);
2601 /* We can combine one-part variables very efficiently, because their
2602 entries are in canonical order. */
2603 if (src->onepart)
2605 location_chain *nodep, dnode, snode;
2607 gcc_assert (src->n_var_parts == 1
2608 && dst->n_var_parts == 1);
2610 snode = src->var_part[0].loc_chain;
2611 gcc_assert (snode);
2613 restart_onepart_unshared:
2614 nodep = &dst->var_part[0].loc_chain;
2615 dnode = *nodep;
2616 gcc_assert (dnode);
2618 while (snode)
2620 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2622 if (r > 0)
2624 location_chain nnode;
2626 if (shared_var_p (dst, set->vars))
2628 dstp = unshare_variable (set, dstp, dst,
2629 VAR_INIT_STATUS_INITIALIZED);
2630 dst = (variable)*dstp;
2631 goto restart_onepart_unshared;
2634 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2635 nnode->loc = snode->loc;
2636 nnode->init = snode->init;
2637 if (!snode->set_src || MEM_P (snode->set_src))
2638 nnode->set_src = NULL;
2639 else
2640 nnode->set_src = snode->set_src;
2641 nnode->next = dnode;
2642 dnode = nnode;
2644 else if (r == 0)
2645 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2647 if (r >= 0)
2648 snode = snode->next;
2650 nodep = &dnode->next;
2651 dnode = *nodep;
2654 return 1;
2657 gcc_checking_assert (!src->onepart);
2659 /* Count the number of location parts, result is K. */
2660 for (i = 0, j = 0, k = 0;
2661 i < src->n_var_parts && j < dst->n_var_parts; k++)
2663 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2665 i++;
2666 j++;
2668 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2669 i++;
2670 else
2671 j++;
2673 k += src->n_var_parts - i;
2674 k += dst->n_var_parts - j;
2676 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2677 thus there are at most MAX_VAR_PARTS different offsets. */
2678 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2680 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2682 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2683 dst = (variable)*dstp;
2686 i = src->n_var_parts - 1;
2687 j = dst->n_var_parts - 1;
2688 dst->n_var_parts = k;
2690 for (k--; k >= 0; k--)
2692 location_chain node, node2;
2694 if (i >= 0 && j >= 0
2695 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2697 /* Compute the "sorted" union of the chains, i.e. the locations which
2698 are in both chains go first, they are sorted by the sum of
2699 positions in the chains. */
2700 int dst_l, src_l;
2701 int ii, jj, n;
2702 struct variable_union_info *vui;
2704 /* If DST is shared compare the location chains.
2705 If they are different we will modify the chain in DST with
2706 high probability so make a copy of DST. */
2707 if (shared_var_p (dst, set->vars))
2709 for (node = src->var_part[i].loc_chain,
2710 node2 = dst->var_part[j].loc_chain; node && node2;
2711 node = node->next, node2 = node2->next)
2713 if (!((REG_P (node2->loc)
2714 && REG_P (node->loc)
2715 && REGNO (node2->loc) == REGNO (node->loc))
2716 || rtx_equal_p (node2->loc, node->loc)))
2718 if (node2->init < node->init)
2719 node2->init = node->init;
2720 break;
2723 if (node || node2)
2725 dstp = unshare_variable (set, dstp, dst,
2726 VAR_INIT_STATUS_UNKNOWN);
2727 dst = (variable)*dstp;
2731 src_l = 0;
2732 for (node = src->var_part[i].loc_chain; node; node = node->next)
2733 src_l++;
2734 dst_l = 0;
2735 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2736 dst_l++;
2738 if (dst_l == 1)
2740 /* The most common case, much simpler, no qsort is needed. */
2741 location_chain dstnode = dst->var_part[j].loc_chain;
2742 dst->var_part[k].loc_chain = dstnode;
2743 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
2744 node2 = dstnode;
2745 for (node = src->var_part[i].loc_chain; node; node = node->next)
2746 if (!((REG_P (dstnode->loc)
2747 && REG_P (node->loc)
2748 && REGNO (dstnode->loc) == REGNO (node->loc))
2749 || rtx_equal_p (dstnode->loc, node->loc)))
2751 location_chain new_node;
2753 /* Copy the location from SRC. */
2754 new_node = (location_chain) pool_alloc (loc_chain_pool);
2755 new_node->loc = node->loc;
2756 new_node->init = node->init;
2757 if (!node->set_src || MEM_P (node->set_src))
2758 new_node->set_src = NULL;
2759 else
2760 new_node->set_src = node->set_src;
2761 node2->next = new_node;
2762 node2 = new_node;
2764 node2->next = NULL;
2766 else
2768 if (src_l + dst_l > vui_allocated)
2770 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2771 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2772 vui_allocated);
2774 vui = vui_vec;
2776 /* Fill in the locations from DST. */
2777 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2778 node = node->next, jj++)
2780 vui[jj].lc = node;
2781 vui[jj].pos_dst = jj;
2783 /* Pos is jj plus a value larger than any sum of 2 valid positions. */
2784 vui[jj].pos = jj + src_l + dst_l;
2787 /* Fill in the locations from SRC. */
2788 n = dst_l;
2789 for (node = src->var_part[i].loc_chain, ii = 0; node;
2790 node = node->next, ii++)
2792 /* Find location from NODE. */
2793 for (jj = 0; jj < dst_l; jj++)
2795 if ((REG_P (vui[jj].lc->loc)
2796 && REG_P (node->loc)
2797 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2798 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2800 vui[jj].pos = jj + ii;
2801 break;
2804 if (jj >= dst_l) /* The location has not been found. */
2806 location_chain new_node;
2808 /* Copy the location from SRC. */
2809 new_node = (location_chain) pool_alloc (loc_chain_pool);
2810 new_node->loc = node->loc;
2811 new_node->init = node->init;
2812 if (!node->set_src || MEM_P (node->set_src))
2813 new_node->set_src = NULL;
2814 else
2815 new_node->set_src = node->set_src;
2816 vui[n].lc = new_node;
2817 vui[n].pos_dst = src_l + dst_l;
2818 vui[n].pos = ii + src_l + dst_l;
2819 n++;
2823 if (dst_l == 2)
2825 /* Special case of a still very common case. For dst_l == 2,
2826 all entries dst_l ... n-1 are sorted, with
2827 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
2828 if (vui[0].pos > vui[1].pos)
2830 /* Order should be 1, 0, 2... */
2831 dst->var_part[k].loc_chain = vui[1].lc;
2832 vui[1].lc->next = vui[0].lc;
2833 if (n >= 3)
2835 vui[0].lc->next = vui[2].lc;
2836 vui[n - 1].lc->next = NULL;
2838 else
2839 vui[0].lc->next = NULL;
2840 ii = 3;
2842 else
2844 dst->var_part[k].loc_chain = vui[0].lc;
2845 if (n >= 3 && vui[2].pos < vui[1].pos)
2847 /* Order should be 0, 2, 1, 3... */
2848 vui[0].lc->next = vui[2].lc;
2849 vui[2].lc->next = vui[1].lc;
2850 if (n >= 4)
2852 vui[1].lc->next = vui[3].lc;
2853 vui[n - 1].lc->next = NULL;
2855 else
2856 vui[1].lc->next = NULL;
2857 ii = 4;
2859 else
2861 /* Order should be 0, 1, 2... */
2862 ii = 1;
2863 vui[n - 1].lc->next = NULL;
2866 for (; ii < n; ii++)
2867 vui[ii - 1].lc->next = vui[ii].lc;
2869 else
2871 qsort (vui, n, sizeof (struct variable_union_info),
2872 variable_union_info_cmp_pos);
2874 /* Reconnect the nodes in sorted order. */
2875 for (ii = 1; ii < n; ii++)
2876 vui[ii - 1].lc->next = vui[ii].lc;
2877 vui[n - 1].lc->next = NULL;
2878 dst->var_part[k].loc_chain = vui[0].lc;
2881 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2883 i--;
2884 j--;
2886 else if ((i >= 0 && j >= 0
2887 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2888 || i < 0)
2890 dst->var_part[k] = dst->var_part[j];
2891 j--;
2893 else if ((i >= 0 && j >= 0
2894 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
2895 || j < 0)
2897 location_chain *nextp;
2899 /* Copy the chain from SRC. */
2900 nextp = &dst->var_part[k].loc_chain;
2901 for (node = src->var_part[i].loc_chain; node; node = node->next)
2903 location_chain new_lc;
2905 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2906 new_lc->next = NULL;
2907 new_lc->init = node->init;
2908 if (!node->set_src || MEM_P (node->set_src))
2909 new_lc->set_src = NULL;
2910 else
2911 new_lc->set_src = node->set_src;
2912 new_lc->loc = node->loc;
2914 *nextp = new_lc;
2915 nextp = &new_lc->next;
2918 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
2919 i--;
2921 dst->var_part[k].cur_loc = NULL;
2924 if (flag_var_tracking_uninit)
2925 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2927 location_chain node, node2;
2928 for (node = src->var_part[i].loc_chain; node; node = node->next)
2929 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2930 if (rtx_equal_p (node->loc, node2->loc))
2932 if (node->init > node2->init)
2933 node2->init = node->init;
2937 /* Continue traversing the hash table. */
2938 return 1;
2941 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2943 static void
2944 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2946 int i;
2948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2949 attrs_list_union (&dst->regs[i], src->regs[i]);
2951 if (dst->vars == empty_shared_hash)
2953 shared_hash_destroy (dst->vars);
2954 dst->vars = shared_hash_copy (src->vars);
2956 else
2958 htab_iterator hi;
2959 variable var;
2961 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2962 variable_union (var, dst);
2966 /* Whether the value is currently being expanded. */
2967 #define VALUE_RECURSED_INTO(x) \
2968 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2970 /* Whether no expansion was found, saving useless lookups.
2971 It must only be set when VALUE_CHANGED is clear. */
2972 #define NO_LOC_P(x) \
2973 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
2975 /* Whether cur_loc in the value needs to be (re)computed. */
2976 #define VALUE_CHANGED(x) \
2977 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2978 /* Whether cur_loc in the decl needs to be (re)computed. */
2979 #define DECL_CHANGED(x) TREE_VISITED (x)
2981 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2982 user DECLs, this means they're in changed_variables. Values and
2983 debug exprs may be left with this flag set if no user variable
2984 requires them to be evaluated. */
2986 static inline void
2987 set_dv_changed (decl_or_value dv, bool newv)
2989 switch (dv_onepart_p (dv))
2991 case ONEPART_VALUE:
2992 if (newv)
2993 NO_LOC_P (dv_as_value (dv)) = false;
2994 VALUE_CHANGED (dv_as_value (dv)) = newv;
2995 break;
2997 case ONEPART_DEXPR:
2998 if (newv)
2999 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3000 /* Fall through... */
3002 default:
3003 DECL_CHANGED (dv_as_decl (dv)) = newv;
3004 break;
3008 /* Return true if DV needs to have its cur_loc recomputed. */
3010 static inline bool
3011 dv_changed_p (decl_or_value dv)
3013 return (dv_is_value_p (dv)
3014 ? VALUE_CHANGED (dv_as_value (dv))
3015 : DECL_CHANGED (dv_as_decl (dv)));
3018 /* Return a location list node whose loc is rtx_equal to LOC, in the
3019 location list of a one-part variable or value VAR, or in that of
3020 any values recursively mentioned in the location lists. VARS must
3021 be in star-canonical form. */
3023 static location_chain
3024 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
3026 location_chain node;
3027 enum rtx_code loc_code;
3029 if (!var)
3030 return NULL;
3032 gcc_checking_assert (var->onepart);
3034 if (!var->n_var_parts)
3035 return NULL;
3037 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3039 loc_code = GET_CODE (loc);
3040 for (node = var->var_part[0].loc_chain; node; node = node->next)
3042 decl_or_value dv;
3043 variable rvar;
3045 if (GET_CODE (node->loc) != loc_code)
3047 if (GET_CODE (node->loc) != VALUE)
3048 continue;
3050 else if (loc == node->loc)
3051 return node;
3052 else if (loc_code != VALUE)
3054 if (rtx_equal_p (loc, node->loc))
3055 return node;
3056 continue;
3059 /* Since we're in star-canonical form, we don't need to visit
3060 non-canonical nodes: one-part variables and non-canonical
3061 values would only point back to the canonical node. */
3062 if (dv_is_value_p (var->dv)
3063 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3065 /* Skip all subsequent VALUEs. */
3066 while (node->next && GET_CODE (node->next->loc) == VALUE)
3068 node = node->next;
3069 gcc_checking_assert (!canon_value_cmp (node->loc,
3070 dv_as_value (var->dv)));
3071 if (loc == node->loc)
3072 return node;
3074 continue;
3077 gcc_checking_assert (node == var->var_part[0].loc_chain);
3078 gcc_checking_assert (!node->next);
3080 dv = dv_from_value (node->loc);
3081 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3082 return find_loc_in_1pdv (loc, rvar, vars);
3085 /* ??? Gotta look in cselib_val locations too. */
3087 return NULL;
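/* E.g. (illustrative), looking for (reg ax) in the chain of a value
   V2 whose only remaining node is the more canonical value V1: as
   VARS is star-canonical, the function tail-recurses into V1's
   chain, where (reg ax) is found if it is anywhere in the
   equivalence set.  */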
3090 /* Hash table iteration argument passed to variable_merge. */
3091 struct dfset_merge
3093 /* The set in which the merge is to be inserted. */
3094 dataflow_set *dst;
3095 /* The set that we're iterating in. */
3096 dataflow_set *cur;
3097 /* The set that may contain the other dv we are to merge with. */
3098 dataflow_set *src;
3099 /* Number of onepart dvs in src. */
3100 int src_onepart_cnt;
3103 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3104 loc_cmp order, and it is maintained as such. */
3106 static void
3107 insert_into_intersection (location_chain *nodep, rtx loc,
3108 enum var_init_status status)
3110 location_chain node;
3111 int r;
3113 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3114 if ((r = loc_cmp (node->loc, loc)) == 0)
3116 node->init = MIN (node->init, status);
3117 return;
3119 else if (r > 0)
3120 break;
3122 node = (location_chain) pool_alloc (loc_chain_pool);
3124 node->loc = loc;
3125 node->set_src = NULL;
3126 node->init = status;
3127 node->next = *nodep;
3128 *nodep = node;
3131 /* Insert in DEST the intersection of the locations present in both
3132 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3133 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3134 DSM->dst. */
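/* For instance (illustrative), intersecting the cur chain
   ((reg ax), V7, (mem m)) with an s2var chain ((reg ax), (mem m)):
   the parallel walk keeps (reg ax), the recursive lookup keeps
   (mem m), and V7 survives only if it or one of its own locations
   is found on the src side as well.  */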
3136 static void
3137 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3138 location_chain s1node, variable s2var)
3140 dataflow_set *s1set = dsm->cur;
3141 dataflow_set *s2set = dsm->src;
3142 location_chain found;
3144 if (s2var)
3146 location_chain s2node;
3148 gcc_checking_assert (s2var->onepart);
3150 if (s2var->n_var_parts)
3152 s2node = s2var->var_part[0].loc_chain;
3154 for (; s1node && s2node;
3155 s1node = s1node->next, s2node = s2node->next)
3156 if (s1node->loc != s2node->loc)
3157 break;
3158 else if (s1node->loc == val)
3159 continue;
3160 else
3161 insert_into_intersection (dest, s1node->loc,
3162 MIN (s1node->init, s2node->init));
3166 for (; s1node; s1node = s1node->next)
3168 if (s1node->loc == val)
3169 continue;
3171 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3172 shared_hash_htab (s2set->vars))))
3174 insert_into_intersection (dest, s1node->loc,
3175 MIN (s1node->init, found->init));
3176 continue;
3179 if (GET_CODE (s1node->loc) == VALUE
3180 && !VALUE_RECURSED_INTO (s1node->loc))
3182 decl_or_value dv = dv_from_value (s1node->loc);
3183 variable svar = shared_hash_find (s1set->vars, dv);
3184 if (svar)
3186 if (svar->n_var_parts == 1)
3188 VALUE_RECURSED_INTO (s1node->loc) = true;
3189 intersect_loc_chains (val, dest, dsm,
3190 svar->var_part[0].loc_chain,
3191 s2var);
3192 VALUE_RECURSED_INTO (s1node->loc) = false;
3197 /* ??? gotta look in cselib_val locations too. */
3199 /* ??? if the location is equivalent to any location in src,
3200 searched recursively
3202 add to dst the values needed to represent the equivalence
3204 telling whether location S is equivalent to another dv's
3205 location list:
3207 for each location D in the list
3209 if S and D satisfy rtx_equal_p, then it is present
3211 else if D is a value, recurse without cycles
3213 else if S and D have the same CODE and MODE
3215 for each operand oS and the corresponding oD
3217 if oS and oD are not equivalent, then S and D are not equivalent
3219 else if they are RTX vectors
3221 if any vector oS element is not equivalent to its respective oD,
3222 then S and D are not equivalent */
3230 /* Return -1 if X should be before Y in a location list for a 1-part
3231 variable, 1 if Y should be before X, and 0 if they're equivalent
3232 and should not appear in the list. */
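/* The resulting total order groups locations by kind (illustrative):

     (reg:SI 0 ax) < (reg:SI 1 dx) < (mem:SI (reg:SI 6 bp))
       < (value:SI 7:7) < other codes < (entry_value:SI (reg:SI 5 di))

   with registers ordered by REGNO, values by canon_value_cmp, and
   entry values last.  */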
3234 static int
3235 loc_cmp (rtx x, rtx y)
3237 int i, j, r;
3238 RTX_CODE code = GET_CODE (x);
3239 const char *fmt;
3241 if (x == y)
3242 return 0;
3244 if (REG_P (x))
3246 if (!REG_P (y))
3247 return -1;
3248 gcc_assert (GET_MODE (x) == GET_MODE (y));
3249 if (REGNO (x) == REGNO (y))
3250 return 0;
3251 else if (REGNO (x) < REGNO (y))
3252 return -1;
3253 else
3254 return 1;
3257 if (REG_P (y))
3258 return 1;
3260 if (MEM_P (x))
3262 if (!MEM_P (y))
3263 return -1;
3264 gcc_assert (GET_MODE (x) == GET_MODE (y));
3265 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3268 if (MEM_P (y))
3269 return 1;
3271 if (GET_CODE (x) == VALUE)
3273 if (GET_CODE (y) != VALUE)
3274 return -1;
3275 /* Don't assert the modes are the same; that is true only
3276 when not recursing. (subreg:QI (value:SI 1:1) 0)
3277 and (subreg:QI (value:DI 2:2) 0) can be compared,
3278 even when the modes are different. */
3279 if (canon_value_cmp (x, y))
3280 return -1;
3281 else
3282 return 1;
3285 if (GET_CODE (y) == VALUE)
3286 return 1;
3288 /* Entry value is the least preferable kind of expression. */
3289 if (GET_CODE (x) == ENTRY_VALUE)
3291 if (GET_CODE (y) != ENTRY_VALUE)
3292 return 1;
3293 gcc_assert (GET_MODE (x) == GET_MODE (y));
3294 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3297 if (GET_CODE (y) == ENTRY_VALUE)
3298 return -1;
3300 if (GET_CODE (x) == GET_CODE (y))
3301 /* Compare operands below. */;
3302 else if (GET_CODE (x) < GET_CODE (y))
3303 return -1;
3304 else
3305 return 1;
3307 gcc_assert (GET_MODE (x) == GET_MODE (y));
3309 if (GET_CODE (x) == DEBUG_EXPR)
3311 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3312 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3313 return -1;
3314 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3315 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3316 return 1;
3319 fmt = GET_RTX_FORMAT (code);
3320 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3321 switch (fmt[i])
3323 case 'w':
3324 if (XWINT (x, i) == XWINT (y, i))
3325 break;
3326 else if (XWINT (x, i) < XWINT (y, i))
3327 return -1;
3328 else
3329 return 1;
3331 case 'n':
3332 case 'i':
3333 if (XINT (x, i) == XINT (y, i))
3334 break;
3335 else if (XINT (x, i) < XINT (y, i))
3336 return -1;
3337 else
3338 return 1;
3340 case 'V':
3341 case 'E':
3342 /* Compare the vector length first. */
3343 if (XVECLEN (x, i) == XVECLEN (y, i))
3344 /* Compare the vectors elements. */;
3345 else if (XVECLEN (x, i) < XVECLEN (y, i))
3346 return -1;
3347 else
3348 return 1;
3350 for (j = 0; j < XVECLEN (x, i); j++)
3351 if ((r = loc_cmp (XVECEXP (x, i, j),
3352 XVECEXP (y, i, j))))
3353 return r;
3354 break;
3356 case 'e':
3357 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3358 return r;
3359 break;
3361 case 'S':
3362 case 's':
3363 if (XSTR (x, i) == XSTR (y, i))
3364 break;
3365 if (!XSTR (x, i))
3366 return -1;
3367 if (!XSTR (y, i))
3368 return 1;
3369 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3370 break;
3371 else if (r < 0)
3372 return -1;
3373 else
3374 return 1;
3376 case 'u':
3377 /* These are just backpointers, so they don't matter. */
3378 break;
3380 case '0':
3381 case 't':
3382 break;
3384 /* It is believed that rtx's at this level will never
3385 contain anything but integers and other rtx's,
3386 except for within LABEL_REFs and SYMBOL_REFs. */
3387 default:
3388 gcc_unreachable ();
3391 return 0;
3394 #if ENABLE_CHECKING
3395 /* Check the order of entries in one-part variables. */
3397 static int
3398 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3400 variable var = (variable) *slot;
3401 location_chain node, next;
3403 #ifdef ENABLE_RTL_CHECKING
3404 int i;
3405 for (i = 0; i < var->n_var_parts; i++)
3406 gcc_assert (var->var_part[i].cur_loc == NULL);
3407 gcc_assert (!var->in_changed_variables);
3408 #endif
3410 if (!var->onepart)
3411 return 1;
3413 gcc_assert (var->n_var_parts == 1);
3414 node = var->var_part[0].loc_chain;
3415 gcc_assert (node);
3417 while ((next = node->next))
3419 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3420 node = next;
3423 return 1;
3425 #endif
3427 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3428 more likely to be chosen as canonical for an equivalence set.
3429 Ensure less likely values can reach more likely neighbors, making
3430 the connections bidirectional. */
3432 static int
3433 canonicalize_values_mark (void **slot, void *data)
3435 dataflow_set *set = (dataflow_set *)data;
3436 variable var = (variable) *slot;
3437 decl_or_value dv = var->dv;
3438 rtx val;
3439 location_chain node;
3441 if (!dv_is_value_p (dv))
3442 return 1;
3444 gcc_checking_assert (var->n_var_parts == 1);
3446 val = dv_as_value (dv);
3448 for (node = var->var_part[0].loc_chain; node; node = node->next)
3449 if (GET_CODE (node->loc) == VALUE)
3451 if (canon_value_cmp (node->loc, val))
3452 VALUE_RECURSED_INTO (val) = true;
3453 else
3455 decl_or_value odv = dv_from_value (node->loc);
3456 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3458 set_slot_part (set, val, oslot, odv, 0,
3459 node->init, NULL_RTX);
3461 VALUE_RECURSED_INTO (node->loc) = true;
3465 return 1;
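/* Together with canonicalize_values_star below this yields the star
   shape (illustrative): from chained equivalences V1 ~ V2 and
   V2 ~ V3 with V1 the most canonical, the marking pass makes the
   links bidirectional and the star pass then rewrites V2 and V3 to
   point at V1 directly.  */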
3468 /* Remove redundant entries from equivalence lists in onepart
3469 variables, canonicalizing equivalence sets into star shapes. */
3471 static int
3472 canonicalize_values_star (void **slot, void *data)
3474 dataflow_set *set = (dataflow_set *)data;
3475 variable var = (variable) *slot;
3476 decl_or_value dv = var->dv;
3477 location_chain node;
3478 decl_or_value cdv;
3479 rtx val, cval;
3480 void **cslot;
3481 bool has_value;
3482 bool has_marks;
3484 if (!var->onepart)
3485 return 1;
3487 gcc_checking_assert (var->n_var_parts == 1);
3489 if (dv_is_value_p (dv))
3491 cval = dv_as_value (dv);
3492 if (!VALUE_RECURSED_INTO (cval))
3493 return 1;
3494 VALUE_RECURSED_INTO (cval) = false;
3496 else
3497 cval = NULL_RTX;
3499 restart:
3500 val = cval;
3501 has_value = false;
3502 has_marks = false;
3504 gcc_assert (var->n_var_parts == 1);
3506 for (node = var->var_part[0].loc_chain; node; node = node->next)
3507 if (GET_CODE (node->loc) == VALUE)
3509 has_value = true;
3510 if (VALUE_RECURSED_INTO (node->loc))
3511 has_marks = true;
3512 if (canon_value_cmp (node->loc, cval))
3513 cval = node->loc;
3516 if (!has_value)
3517 return 1;
3519 if (cval == val)
3521 if (!has_marks || dv_is_decl_p (dv))
3522 return 1;
3524 /* Keep it marked so that we revisit it, either after visiting a
3525 child node, or after visiting a new parent that might be
3526 found out. */
3527 VALUE_RECURSED_INTO (val) = true;
3529 for (node = var->var_part[0].loc_chain; node; node = node->next)
3530 if (GET_CODE (node->loc) == VALUE
3531 && VALUE_RECURSED_INTO (node->loc))
3533 cval = node->loc;
3534 restart_with_cval:
3535 VALUE_RECURSED_INTO (cval) = false;
3536 dv = dv_from_value (cval);
3537 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3538 if (!slot)
3540 gcc_assert (dv_is_decl_p (var->dv));
3541 /* The canonical value was reset and dropped.
3542 Remove it. */
3543 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3544 return 1;
3546 var = (variable)*slot;
3547 gcc_assert (dv_is_value_p (var->dv));
3548 if (var->n_var_parts == 0)
3549 return 1;
3550 gcc_assert (var->n_var_parts == 1);
3551 goto restart;
3554 VALUE_RECURSED_INTO (val) = false;
3556 return 1;
3559 /* Push values to the canonical one. */
3560 cdv = dv_from_value (cval);
3561 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3563 for (node = var->var_part[0].loc_chain; node; node = node->next)
3564 if (node->loc != cval)
3566 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3567 node->init, NULL_RTX);
3568 if (GET_CODE (node->loc) == VALUE)
3570 decl_or_value ndv = dv_from_value (node->loc);
3572 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3573 NO_INSERT);
3575 if (canon_value_cmp (node->loc, val))
3577 /* If it could have been a local minimum, it's not any more,
3578 since it's now neighbor to cval, so it may have to push
3579 to it. Conversely, if it wouldn't have prevailed over
3580 val, then whatever mark it has is fine: if it was to
3581 push, it will now push to a more canonical node, but if
3582 it wasn't, then it has already pushed any values it might
3583 have to. */
3584 VALUE_RECURSED_INTO (node->loc) = true;
3585 /* Make sure we visit node->loc by ensuring cval is
3586 visited too. */
3587 VALUE_RECURSED_INTO (cval) = true;
3589 else if (!VALUE_RECURSED_INTO (node->loc))
3590 /* If we have no need to "recurse" into this node, it's
3591 already "canonicalized", so drop the link to the old
3592 parent. */
3593 clobber_variable_part (set, cval, ndv, 0, NULL);
3595 else if (GET_CODE (node->loc) == REG)
3597 attrs list = set->regs[REGNO (node->loc)], *listp;
3599 /* Change an existing attribute referring to dv so that it
3600 refers to cdv, removing any duplicate this might
3601 introduce, and checking that no previous duplicates
3602 existed, all in a single pass. */
3604 while (list)
3606 if (list->offset == 0
3607 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3608 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3609 break;
3611 list = list->next;
3614 gcc_assert (list);
3615 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3617 list->dv = cdv;
3618 for (listp = &list->next; (list = *listp); listp = &list->next)
3620 if (list->offset)
3621 continue;
3623 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3625 *listp = list->next;
3626 pool_free (attrs_pool, list);
3627 list = *listp;
3628 break;
3631 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3634 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3636 for (listp = &list->next; (list = *listp); listp = &list->next)
3638 if (list->offset)
3639 continue;
3641 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3643 *listp = list->next;
3644 pool_free (attrs_pool, list);
3645 list = *listp;
3646 break;
3649 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3652 else
3653 gcc_unreachable ();
3655 #if ENABLE_CHECKING
3656 while (list)
3658 if (list->offset == 0
3659 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3660 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3661 gcc_unreachable ();
3663 list = list->next;
3665 #endif
3669 if (val)
3670 set_slot_part (set, val, cslot, cdv, 0,
3671 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3673 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3675 /* Variable may have been unshared. */
3676 var = (variable)*slot;
3677 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3678 && var->var_part[0].loc_chain->next == NULL);
3680 if (VALUE_RECURSED_INTO (cval))
3681 goto restart_with_cval;
3683 return 1;
3686 /* Bind one-part variables to the canonical value in an equivalence
3687 set. Not doing this causes dataflow convergence failure in rare
3688 circumstances, see PR42873. Unfortunately we can't do this
3689 efficiently as part of canonicalize_values_star, since we may not
3690 have determined or even seen the canonical value of a set when we
3691 get to a variable that references another member of the set. */
3693 static int
3694 canonicalize_vars_star (void **slot, void *data)
3696 dataflow_set *set = (dataflow_set *)data;
3697 variable var = (variable) *slot;
3698 decl_or_value dv = var->dv;
3699 location_chain node;
3700 rtx cval;
3701 decl_or_value cdv;
3702 void **cslot;
3703 variable cvar;
3704 location_chain cnode;
3706 if (!var->onepart || var->onepart == ONEPART_VALUE)
3707 return 1;
3709 gcc_assert (var->n_var_parts == 1);
3711 node = var->var_part[0].loc_chain;
3713 if (GET_CODE (node->loc) != VALUE)
3714 return 1;
3716 gcc_assert (!node->next);
3717 cval = node->loc;
3719 /* Push values to the canonical one. */
3720 cdv = dv_from_value (cval);
3721 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3722 if (!cslot)
3723 return 1;
3724 cvar = (variable)*cslot;
3725 gcc_assert (cvar->n_var_parts == 1);
3727 cnode = cvar->var_part[0].loc_chain;
3729 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3730 that are not "more canonical" than it. */
3731 if (GET_CODE (cnode->loc) != VALUE
3732 || !canon_value_cmp (cnode->loc, cval))
3733 return 1;
3735 /* CVAL was found to be non-canonical. Change the variable to point
3736 to the canonical VALUE. */
3737 gcc_assert (!cnode->next);
3738 cval = cnode->loc;
3740 slot = set_slot_part (set, cval, slot, dv, 0,
3741 node->init, node->set_src);
3742 clobber_slot_part (set, cval, slot, 0, node->set_src);
3744 return 1;
3747 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3748 corresponding entry in DSM->src. Multi-part variables are combined
3749 with variable_union, whereas onepart dvs are combined with
3750 intersection. */
3752 static int
3753 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3755 dataflow_set *dst = dsm->dst;
3756 void **dstslot;
3757 variable s2var, dvar = NULL;
3758 decl_or_value dv = s1var->dv;
3759 onepart_enum_t onepart = s1var->onepart;
3760 rtx val;
3761 hashval_t dvhash;
3762 location_chain node, *nodep;
3764 /* If the incoming onepart variable has an empty location list, then
3765 the intersection will be just as empty. For other variables,
3766 it's always union. */
3767 gcc_checking_assert (s1var->n_var_parts
3768 && s1var->var_part[0].loc_chain);
3770 if (!onepart)
3771 return variable_union (s1var, dst);
3773 gcc_checking_assert (s1var->n_var_parts == 1);
3775 dvhash = dv_htab_hash (dv);
3776 if (dv_is_value_p (dv))
3777 val = dv_as_value (dv);
3778 else
3779 val = NULL;
3781 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3782 if (!s2var)
3784 dst_can_be_shared = false;
3785 return 1;
3788 dsm->src_onepart_cnt--;
3789 gcc_assert (s2var->var_part[0].loc_chain
3790 && s2var->onepart == onepart
3791 && s2var->n_var_parts == 1);
3793 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3794 if (dstslot)
3796 dvar = (variable)*dstslot;
3797 gcc_assert (dvar->refcount == 1
3798 && dvar->onepart == onepart
3799 && dvar->n_var_parts == 1);
3800 nodep = &dvar->var_part[0].loc_chain;
3802 else
3804 nodep = &node;
3805 node = NULL;
3808 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3810 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3811 dvhash, INSERT);
3812 *dstslot = dvar = s2var;
3813 dvar->refcount++;
3815 else
3817 dst_can_be_shared = false;
3819 intersect_loc_chains (val, nodep, dsm,
3820 s1var->var_part[0].loc_chain, s2var);
3822 if (!dstslot)
3824 if (node)
3826 dvar = (variable) pool_alloc (onepart_pool (onepart));
3827 dvar->dv = dv;
3828 dvar->refcount = 1;
3829 dvar->n_var_parts = 1;
3830 dvar->onepart = onepart;
3831 dvar->in_changed_variables = false;
3832 dvar->var_part[0].loc_chain = node;
3833 dvar->var_part[0].cur_loc = NULL;
3834 if (onepart)
3835 VAR_LOC_1PAUX (dvar) = NULL;
3836 else
3837 VAR_PART_OFFSET (dvar, 0) = 0;
3839 dstslot
3840 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3841 INSERT);
3842 gcc_assert (!*dstslot);
3843 *dstslot = dvar;
3845 else
3846 return 1;
3850 nodep = &dvar->var_part[0].loc_chain;
3851 while ((node = *nodep))
3853 location_chain *nextp = &node->next;
3855 if (GET_CODE (node->loc) == REG)
3857 attrs list;
3859 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3860 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3861 && dv_is_value_p (list->dv))
3862 break;
3864 if (!list)
3865 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3866 dv, 0, node->loc);
3867 /* If this value became canonical for another value that had
3868 this register, we want to leave it alone. */
3869 else if (dv_as_value (list->dv) != val)
3871 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3872 dstslot, dv, 0,
3873 node->init, NULL_RTX);
3874 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3876 /* Since nextp points into the removed node, we can't
3877 use it. The pointer to the next node moved to nodep.
3878 However, if the variable we're walking is unshared
3879 during our walk, we'll keep walking the location list
3880 of the previously-shared variable, in which case the
3881 node won't have been removed, and we'll want to skip
3882 it. That's why we test *nodep here. */
3883 if (*nodep != node)
3884 nextp = nodep;
3887 else
3888 /* Canonicalization puts registers first, so we don't have to
3889 walk it all. */
3890 break;
3891 nodep = nextp;
3894 if (dvar != (variable)*dstslot)
3895 dvar = (variable)*dstslot;
3896 nodep = &dvar->var_part[0].loc_chain;
3898 if (val)
3900 /* Mark all referenced nodes for canonicalization, and make sure
3901 we have mutual equivalence links. */
3902 VALUE_RECURSED_INTO (val) = true;
3903 for (node = *nodep; node; node = node->next)
3904 if (GET_CODE (node->loc) == VALUE)
3906 VALUE_RECURSED_INTO (node->loc) = true;
3907 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3908 node->init, NULL, INSERT);
3911 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3912 gcc_assert (*dstslot == dvar);
3913 canonicalize_values_star (dstslot, dst);
3914 gcc_checking_assert (dstslot
3915 == shared_hash_find_slot_noinsert_1 (dst->vars,
3916 dv, dvhash));
3917 dvar = (variable)*dstslot;
3919 else
3921 bool has_value = false, has_other = false;
3923 /* If we have one value and anything else, we're going to
3924 canonicalize this, so make sure all values have an entry in
3925 the table and are marked for canonicalization. */
3926 for (node = *nodep; node; node = node->next)
3928 if (GET_CODE (node->loc) == VALUE)
3930 /* If this was marked during register canonicalization,
3931 we know we have to canonicalize values. */
3932 if (has_value)
3933 has_other = true;
3934 has_value = true;
3935 if (has_other)
3936 break;
3938 else
3940 has_other = true;
3941 if (has_value)
3942 break;
3946 if (has_value && has_other)
3948 for (node = *nodep; node; node = node->next)
3950 if (GET_CODE (node->loc) == VALUE)
3952 decl_or_value dv = dv_from_value (node->loc);
3953 void **slot = NULL;
3955 if (shared_hash_shared (dst->vars))
3956 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3957 if (!slot)
3958 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3959 INSERT);
3960 if (!*slot)
3962 variable var = (variable) pool_alloc (onepart_pool
3963 (ONEPART_VALUE));
3964 var->dv = dv;
3965 var->refcount = 1;
3966 var->n_var_parts = 1;
3967 var->onepart = ONEPART_VALUE;
3968 var->in_changed_variables = false;
3969 var->var_part[0].loc_chain = NULL;
3970 var->var_part[0].cur_loc = NULL;
3971 VAR_LOC_1PAUX (var) = NULL;
3972 *slot = var;
3975 VALUE_RECURSED_INTO (node->loc) = true;
3979 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3980 gcc_assert (*dstslot == dvar);
3981 canonicalize_values_star (dstslot, dst);
3982 gcc_checking_assert (dstslot
3983 == shared_hash_find_slot_noinsert_1 (dst->vars,
3984 dv, dvhash));
3985 dvar = (variable)*dstslot;
3989 if (!onepart_variable_different_p (dvar, s2var))
3991 variable_htab_free (dvar);
3992 *dstslot = dvar = s2var;
3993 dvar->refcount++;
3995 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3997 variable_htab_free (dvar);
3998 *dstslot = dvar = s1var;
3999 dvar->refcount++;
4000 dst_can_be_shared = false;
4002 else
4003 dst_can_be_shared = false;
4005 return 1;
4008 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4009 multi-part variable. Unions of multi-part variables and
4010 intersections of one-part ones will be handled in
4011 variable_merge_over_cur(). */
4013 static int
4014 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4016 dataflow_set *dst = dsm->dst;
4017 decl_or_value dv = s2var->dv;
4019 if (!s2var->onepart)
4021 void **dstp = shared_hash_find_slot (dst->vars, dv);
4022 *dstp = s2var;
4023 s2var->refcount++;
4024 return 1;
4027 dsm->src_onepart_cnt++;
4028 return 1;
4031 /* Combine dataflow set information from SRC2 into DST. */
4034 static void
4035 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4037 dataflow_set cur = *dst;
4038 dataflow_set *src1 = &cur;
4039 struct dfset_merge dsm;
4040 int i;
4041 size_t src1_elems, src2_elems;
4042 htab_iterator hi;
4043 variable var;
4045 src1_elems = htab_elements (shared_hash_htab (src1->vars));
4046 src2_elems = htab_elements (shared_hash_htab (src2->vars));
4047 dataflow_set_init (dst);
4048 dst->stack_adjust = cur.stack_adjust;
4049 shared_hash_destroy (dst->vars);
4050 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4051 dst->vars->refcount = 1;
4052 dst->vars->htab
4053 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
4054 variable_htab_eq, variable_htab_free);
4056 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4057 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4059 dsm.dst = dst;
4060 dsm.src = src2;
4061 dsm.cur = src1;
4062 dsm.src_onepart_cnt = 0;
4064 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
4065 variable_merge_over_src (var, &dsm);
4066 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
4067 variable_merge_over_cur (var, &dsm);
4069 if (dsm.src_onepart_cnt)
4070 dst_can_be_shared = false;
4072 dataflow_set_destroy (src1);
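/* E.g. at a CFG join point (illustrative), merging the OUT sets of
   two predecessors: multi-part variables are unioned, whereas a
   one-part variable or value keeps only the locations present,
   directly or through value equivalences, on both incoming
   edges.  */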
4075 /* Mark register equivalences. */
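/* For each register, the most canonical VALUE bound at offset 0 is
   chosen per mode (illustrative example: if ax holds both V5 and the
   more canonical V2 in SImode, V2 becomes the canon), the other
   values are linked to it both ways, and the affected equivalence
   sets are then star-canonicalized.  */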
4077 static void
4078 dataflow_set_equiv_regs (dataflow_set *set)
4080 int i;
4081 attrs list, *listp;
4083 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4085 rtx canon[NUM_MACHINE_MODES];
4087 /* If the list is empty or has only one entry, there is nothing to
4088 canonicalize. */
4089 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4090 continue;
4092 memset (canon, 0, sizeof (canon));
4094 for (list = set->regs[i]; list; list = list->next)
4095 if (list->offset == 0 && dv_is_value_p (list->dv))
4097 rtx val = dv_as_value (list->dv);
4098 rtx *cvalp = &canon[(int)GET_MODE (val)];
4099 rtx cval = *cvalp;
4101 if (canon_value_cmp (val, cval))
4102 *cvalp = val;
4105 for (list = set->regs[i]; list; list = list->next)
4106 if (list->offset == 0 && dv_onepart_p (list->dv))
4108 rtx cval = canon[(int)GET_MODE (list->loc)];
4110 if (!cval)
4111 continue;
4113 if (dv_is_value_p (list->dv))
4115 rtx val = dv_as_value (list->dv);
4117 if (val == cval)
4118 continue;
4120 VALUE_RECURSED_INTO (val) = true;
4121 set_variable_part (set, val, dv_from_value (cval), 0,
4122 VAR_INIT_STATUS_INITIALIZED,
4123 NULL, NO_INSERT);
4126 VALUE_RECURSED_INTO (cval) = true;
4127 set_variable_part (set, cval, list->dv, 0,
4128 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4131 for (listp = &set->regs[i]; (list = *listp);
4132 listp = list ? &list->next : listp)
4133 if (list->offset == 0 && dv_onepart_p (list->dv))
4135 rtx cval = canon[(int)GET_MODE (list->loc)];
4136 void **slot;
4138 if (!cval)
4139 continue;
4141 if (dv_is_value_p (list->dv))
4143 rtx val = dv_as_value (list->dv);
4144 if (!VALUE_RECURSED_INTO (val))
4145 continue;
4148 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4149 canonicalize_values_star (slot, set);
4150 if (*listp != list)
4151 list = NULL;
4156 /* Remove any redundant values in the location list of VAR, which must
4157 be unshared and 1-part. */
4159 static void
4160 remove_duplicate_values (variable var)
4162 location_chain node, *nodep;
4164 gcc_assert (var->onepart);
4165 gcc_assert (var->n_var_parts == 1);
4166 gcc_assert (var->refcount == 1);
4168 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4170 if (GET_CODE (node->loc) == VALUE)
4172 if (VALUE_RECURSED_INTO (node->loc))
4174 /* Remove duplicate value node. */
4175 *nodep = node->next;
4176 pool_free (loc_chain_pool, node);
4177 continue;
4179 else
4180 VALUE_RECURSED_INTO (node->loc) = true;
4182 nodep = &node->next;
4185 for (node = var->var_part[0].loc_chain; node; node = node->next)
4186 if (GET_CODE (node->loc) == VALUE)
4188 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4189 VALUE_RECURSED_INTO (node->loc) = false;
4194 /* Hash table iteration argument passed to variable_post_merge. */
4195 struct dfset_post_merge
4197 /* The new input set for the current block. */
4198 dataflow_set *set;
4199 /* Pointer to the permanent input set for the current block, or
4200 NULL. */
4201 dataflow_set **permp;
4204 /* Create values for incoming expressions associated with one-part
4205 variables that don't have value numbers for them. */
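/* For instance (editorial sketch): if decl D is known to live in
   register r with no VALUE attached after the merge, the traversal
   below either reuses a matching preserved VALUE from the permanent
   set or creates and preserves a fresh one for r, and then rewrites
   D's location to that VALUE so later rounds can find and reuse it.  */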
4207 static int
4208 variable_post_merge_new_vals (void **slot, void *info)
4210 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4211 dataflow_set *set = dfpm->set;
4212 variable var = (variable)*slot;
4213 location_chain node;
4215 if (!var->onepart || !var->n_var_parts)
4216 return 1;
4218 gcc_assert (var->n_var_parts == 1);
4220 if (dv_is_decl_p (var->dv))
4222 bool check_dupes = false;
4224 restart:
4225 for (node = var->var_part[0].loc_chain; node; node = node->next)
4227 if (GET_CODE (node->loc) == VALUE)
4228 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4229 else if (GET_CODE (node->loc) == REG)
4231 attrs att, *attp, *curp = NULL;
4233 if (var->refcount != 1)
4235 slot = unshare_variable (set, slot, var,
4236 VAR_INIT_STATUS_INITIALIZED);
4237 var = (variable)*slot;
4238 goto restart;
4241 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4242 attp = &att->next)
4243 if (att->offset == 0
4244 && GET_MODE (att->loc) == GET_MODE (node->loc))
4246 if (dv_is_value_p (att->dv))
4248 rtx cval = dv_as_value (att->dv);
4249 node->loc = cval;
4250 check_dupes = true;
4251 break;
4253 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4254 curp = attp;
4257 if (!curp)
4259 curp = attp;
4260 while (*curp)
4261 if ((*curp)->offset == 0
4262 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4263 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4264 break;
4265 else
4266 curp = &(*curp)->next;
4267 gcc_assert (*curp);
4270 if (!att)
4272 decl_or_value cdv;
4273 rtx cval;
4275 if (!*dfpm->permp)
4277 *dfpm->permp = XNEW (dataflow_set);
4278 dataflow_set_init (*dfpm->permp);
4281 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4282 att; att = att->next)
4283 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4285 gcc_assert (att->offset == 0
4286 && dv_is_value_p (att->dv));
4287 val_reset (set, att->dv);
4288 break;
4291 if (att)
4293 cdv = att->dv;
4294 cval = dv_as_value (cdv);
4296 else
4298 /* Create a unique value to hold this register,
4299 that ought to be found and reused in
4300 subsequent rounds. */
4301 cselib_val *v;
4302 gcc_assert (!cselib_lookup (node->loc,
4303 GET_MODE (node->loc), 0,
4304 VOIDmode));
4305 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4306 VOIDmode);
4307 cselib_preserve_value (v);
4308 cselib_invalidate_rtx (node->loc);
4309 cval = v->val_rtx;
4310 cdv = dv_from_value (cval);
4311 if (dump_file)
4312 fprintf (dump_file,
4313 "Created new value %u:%u for reg %i\n",
4314 v->uid, v->hash, REGNO (node->loc));
4317 var_reg_decl_set (*dfpm->permp, node->loc,
4318 VAR_INIT_STATUS_INITIALIZED,
4319 cdv, 0, NULL, INSERT);
4321 node->loc = cval;
4322 check_dupes = true;
4325 /* Remove the attribute referring to the decl, which now
4326 uses the value for the register, whether that value already
4327 existed or will be added when we bring the permanent set in. */
4328 att = *curp;
4329 *curp = att->next;
4330 pool_free (attrs_pool, att);
4334 if (check_dupes)
4335 remove_duplicate_values (var);
4338 return 1;
4341 /* Reset values in the permanent set that are not associated with the
4342 chosen expression. */
4344 static int
4345 variable_post_merge_perm_vals (void **pslot, void *info)
4347 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4348 dataflow_set *set = dfpm->set;
4349 variable pvar = (variable)*pslot, var;
4350 location_chain pnode;
4351 decl_or_value dv;
4352 attrs att;
4354 gcc_assert (dv_is_value_p (pvar->dv)
4355 && pvar->n_var_parts == 1);
4356 pnode = pvar->var_part[0].loc_chain;
4357 gcc_assert (pnode
4358 && !pnode->next
4359 && REG_P (pnode->loc));
4361 dv = pvar->dv;
4363 var = shared_hash_find (set->vars, dv);
4364 if (var)
4366 /* Although variable_post_merge_new_vals may have made decls
4367 non-star-canonical, values that pre-existed in canonical form
4368 remain canonical, and newly-created values reference a single
4369 REG, so they are canonical as well. Since VAR has the
4370 location list for a VALUE, using find_loc_in_1pdv for it is
4371 fine, since VALUEs don't map back to DECLs. */
4372 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4373 return 1;
4374 val_reset (set, dv);
4377 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4378 if (att->offset == 0
4379 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4380 && dv_is_value_p (att->dv))
4381 break;
4383 /* If there is a value associated with this register already, create
4384 an equivalence. */
4385 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4387 rtx cval = dv_as_value (att->dv);
4388 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4389 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4390 NULL, INSERT);
4392 else if (!att)
4394 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4395 dv, 0, pnode->loc);
4396 variable_union (pvar, set);
4399 return 1;
4402 /* After merging, create values for one-part variables in SET, reconcile
4403 them with the permanent set *PERMP, and re-canonicalize. */
4405 static void
4406 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4408 struct dfset_post_merge dfpm;
4410 dfpm.set = set;
4411 dfpm.permp = permp;
4413 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4414 &dfpm);
4415 if (*permp)
4416 htab_traverse (shared_hash_htab ((*permp)->vars),
4417 variable_post_merge_perm_vals, &dfpm);
4418 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4419 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4422 /* Return a node whose loc is a MEM that refers to EXPR in the
4423 location list of a one-part variable or value VAL, or in that of
4424 any values recursively mentioned in the location lists. */
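/* Note (editorial): the recursion below uses VALUE_RECURSED_INTO as a
   visited flag -- set on VAL before walking its location list and
   cleared afterwards -- so that cycles among mutually referencing
   VALUEs terminate.  */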
4426 static location_chain
4427 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4429 location_chain node;
4430 decl_or_value dv;
4431 variable var;
4432 location_chain where = NULL;
4434 if (!val)
4435 return NULL;
4437 gcc_assert (GET_CODE (val) == VALUE
4438 && !VALUE_RECURSED_INTO (val));
4440 dv = dv_from_value (val);
4441 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4443 if (!var)
4444 return NULL;
4446 gcc_assert (var->onepart);
4448 if (!var->n_var_parts)
4449 return NULL;
4451 VALUE_RECURSED_INTO (val) = true;
4453 for (node = var->var_part[0].loc_chain; node; node = node->next)
4454 if (MEM_P (node->loc)
4455 && MEM_EXPR (node->loc) == expr
4456 && INT_MEM_OFFSET (node->loc) == 0)
4458 where = node;
4459 break;
4461 else if (GET_CODE (node->loc) == VALUE
4462 && !VALUE_RECURSED_INTO (node->loc)
4463 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4464 break;
4466 VALUE_RECURSED_INTO (val) = false;
4468 return where;
4471 /* Return TRUE if the value of MEM may vary across a call. */
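/* For example (editorial sketch): a MEM based on a non-aliased local
   automatic is assumed to survive the call, whereas a MEM whose base
   decl may be aliased, or is a writable global, may be overwritten by
   the callee and is therefore considered to die here.  */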
4473 static bool
4474 mem_dies_at_call (rtx mem)
4476 tree expr = MEM_EXPR (mem);
4477 tree decl;
4479 if (!expr)
4480 return true;
4482 decl = get_base_address (expr);
4484 if (!decl)
4485 return true;
4487 if (!DECL_P (decl))
4488 return true;
4490 return (may_be_aliased (decl)
4491 || (!TREE_READONLY (decl) && is_global_var (decl)));
4494 /* Remove all MEMs from the location list of a hash table entry for a
4495 one-part variable, except those whose MEM attributes map back to
4496 the variable itself, directly or within a VALUE. */
4498 static int
4499 dataflow_set_preserve_mem_locs (void **slot, void *data)
4501 dataflow_set *set = (dataflow_set *) data;
4502 variable var = (variable) *slot;
4504 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4506 tree decl = dv_as_decl (var->dv);
4507 location_chain loc, *locp;
4508 bool changed = false;
4510 if (!var->n_var_parts)
4511 return 1;
4513 gcc_assert (var->n_var_parts == 1);
4515 if (shared_var_p (var, set->vars))
4517 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4519 /* We want to remove dying MEMs that don't refer to DECL. */
4520 if (GET_CODE (loc->loc) == MEM
4521 && (MEM_EXPR (loc->loc) != decl
4522 || INT_MEM_OFFSET (loc->loc) != 0)
4523 && !mem_dies_at_call (loc->loc))
4524 break;
4525 /* We want to move here the MEMs that do refer to DECL. */
4526 else if (GET_CODE (loc->loc) == VALUE
4527 && find_mem_expr_in_1pdv (decl, loc->loc,
4528 shared_hash_htab (set->vars)))
4529 break;
4532 if (!loc)
4533 return 1;
4535 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4536 var = (variable)*slot;
4537 gcc_assert (var->n_var_parts == 1);
4540 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4541 loc; loc = *locp)
4543 rtx old_loc = loc->loc;
4544 if (GET_CODE (old_loc) == VALUE)
4546 location_chain mem_node
4547 = find_mem_expr_in_1pdv (decl, loc->loc,
4548 shared_hash_htab (set->vars));
4550 /* ??? This picks up only one out of multiple MEMs that
4551 refer to the same variable. Do we ever need to be
4552 concerned about dealing with more than one, or, given
4553 that they should all map to the same variable
4554 location, will their addresses have been merged so
4555 that they are regarded as equivalent? */
4556 if (mem_node)
4558 loc->loc = mem_node->loc;
4559 loc->set_src = mem_node->set_src;
4560 loc->init = MIN (loc->init, mem_node->init);
4564 if (GET_CODE (loc->loc) != MEM
4565 || (MEM_EXPR (loc->loc) == decl
4566 && INT_MEM_OFFSET (loc->loc) == 0)
4567 || !mem_dies_at_call (loc->loc))
4569 if (old_loc != loc->loc && emit_notes)
4571 if (old_loc == var->var_part[0].cur_loc)
4573 changed = true;
4574 var->var_part[0].cur_loc = NULL;
4577 locp = &loc->next;
4578 continue;
4581 if (emit_notes)
4583 if (old_loc == var->var_part[0].cur_loc)
4585 changed = true;
4586 var->var_part[0].cur_loc = NULL;
4589 *locp = loc->next;
4590 pool_free (loc_chain_pool, loc);
4593 if (!var->var_part[0].loc_chain)
4595 var->n_var_parts--;
4596 changed = true;
4598 if (changed)
4599 variable_was_changed (var, set);
4602 return 1;
4605 /* Remove all MEMs from the location list of a hash table entry for a
4606 value. */
4608 static int
4609 dataflow_set_remove_mem_locs (void **slot, void *data)
4611 dataflow_set *set = (dataflow_set *) data;
4612 variable var = (variable) *slot;
4614 if (var->onepart == ONEPART_VALUE)
4616 location_chain loc, *locp;
4617 bool changed = false;
4618 rtx cur_loc;
4620 gcc_assert (var->n_var_parts == 1);
4622 if (shared_var_p (var, set->vars))
4624 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4625 if (GET_CODE (loc->loc) == MEM
4626 && mem_dies_at_call (loc->loc))
4627 break;
4629 if (!loc)
4630 return 1;
4632 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4633 var = (variable)*slot;
4634 gcc_assert (var->n_var_parts == 1);
4637 if (VAR_LOC_1PAUX (var))
4638 cur_loc = VAR_LOC_FROM (var);
4639 else
4640 cur_loc = var->var_part[0].cur_loc;
4642 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4643 loc; loc = *locp)
4645 if (GET_CODE (loc->loc) != MEM
4646 || !mem_dies_at_call (loc->loc))
4648 locp = &loc->next;
4649 continue;
4652 *locp = loc->next;
4653 /* If we have deleted the location that was last emitted,
4654 we have to emit a new location, so add the variable to the
4655 set of changed variables. */
4656 if (cur_loc == loc->loc)
4658 changed = true;
4659 var->var_part[0].cur_loc = NULL;
4660 if (VAR_LOC_1PAUX (var))
4661 VAR_LOC_FROM (var) = NULL;
4663 pool_free (loc_chain_pool, loc);
4666 if (!var->var_part[0].loc_chain)
4668 var->n_var_parts--;
4669 changed = true;
4671 if (changed)
4672 variable_was_changed (var, set);
4675 return 1;
4678 /* Remove all variable-location information about call-clobbered
4679 registers, as well as associations between MEMs and VALUEs. */
4681 static void
4682 dataflow_set_clear_at_call (dataflow_set *set)
4684 int r;
4686 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4687 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4688 var_regno_delete (set, r);
4690 if (MAY_HAVE_DEBUG_INSNS)
4692 set->traversed_vars = set->vars;
4693 htab_traverse (shared_hash_htab (set->vars),
4694 dataflow_set_preserve_mem_locs, set);
4695 set->traversed_vars = set->vars;
4696 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4697 set);
4698 set->traversed_vars = NULL;
4702 static bool
4703 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4705 location_chain lc1, lc2;
4707 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4709 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4711 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4713 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4714 break;
4716 if (rtx_equal_p (lc1->loc, lc2->loc))
4717 break;
4719 if (!lc2)
4720 return true;
4722 return false;
4725 /* Return true if one-part variables VAR1 and VAR2 are different.
4726 Their location chains must be in canonical order. */
4728 static bool
4729 onepart_variable_different_p (variable var1, variable var2)
4731 location_chain lc1, lc2;
4733 if (var1 == var2)
4734 return false;
4736 gcc_assert (var1->n_var_parts == 1
4737 && var2->n_var_parts == 1);
4739 lc1 = var1->var_part[0].loc_chain;
4740 lc2 = var2->var_part[0].loc_chain;
4742 gcc_assert (lc1 && lc2);
4744 while (lc1 && lc2)
4746 if (loc_cmp (lc1->loc, lc2->loc))
4747 return true;
4748 lc1 = lc1->next;
4749 lc2 = lc2->next;
4752 return lc1 != lc2;
4755 /* Return true if variables VAR1 and VAR2 are different. */
4757 static bool
4758 variable_different_p (variable var1, variable var2)
4760 int i;
4762 if (var1 == var2)
4763 return false;
4765 if (var1->onepart != var2->onepart)
4766 return true;
4768 if (var1->n_var_parts != var2->n_var_parts)
4769 return true;
4771 if (var1->onepart && var1->n_var_parts)
4773 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4774 && var1->n_var_parts == 1);
4775 /* One-part values have locations in a canonical order. */
4776 return onepart_variable_different_p (var1, var2);
4779 for (i = 0; i < var1->n_var_parts; i++)
4781 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4782 return true;
4783 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4784 return true;
4785 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4786 return true;
4788 return false;
4791 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4793 static bool
4794 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4796 htab_iterator hi;
4797 variable var1;
4799 if (old_set->vars == new_set->vars)
4800 return false;
4802 if (htab_elements (shared_hash_htab (old_set->vars))
4803 != htab_elements (shared_hash_htab (new_set->vars)))
4804 return true;
4806 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4808 htab_t htab = shared_hash_htab (new_set->vars);
4809 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4810 dv_htab_hash (var1->dv));
4811 if (!var2)
4813 if (dump_file && (dump_flags & TDF_DETAILS))
4815 fprintf (dump_file, "dataflow difference found: removal of:\n");
4816 dump_var (var1);
4818 return true;
4821 if (variable_different_p (var1, var2))
4823 if (dump_file && (dump_flags & TDF_DETAILS))
4825 fprintf (dump_file, "dataflow difference found: "
4826 "old and new follow:\n");
4827 dump_var (var1);
4828 dump_var (var2);
4830 return true;
4834 /* No need to traverse the second hashtab: if both have the same number
4835 of elements and every entry of the first was found in the second,
4836 then the second can't have any extra entries. */
4837 return false;
4840 /* Free the contents of dataflow set SET. */
4842 static void
4843 dataflow_set_destroy (dataflow_set *set)
4845 int i;
4847 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4848 attrs_list_clear (&set->regs[i]);
4850 shared_hash_destroy (set->vars);
4851 set->vars = NULL;
4854 /* Return true if RTL X contains a SYMBOL_REF. */
4856 static bool
4857 contains_symbol_ref (rtx x)
4859 const char *fmt;
4860 RTX_CODE code;
4861 int i;
4863 if (!x)
4864 return false;
4866 code = GET_CODE (x);
4867 if (code == SYMBOL_REF)
4868 return true;
4870 fmt = GET_RTX_FORMAT (code);
4871 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4873 if (fmt[i] == 'e')
4875 if (contains_symbol_ref (XEXP (x, i)))
4876 return true;
4878 else if (fmt[i] == 'E')
4880 int j;
4881 for (j = 0; j < XVECLEN (x, i); j++)
4882 if (contains_symbol_ref (XVECEXP (x, i, j)))
4883 return true;
4887 return false;
4890 /* Shall EXPR be tracked? */
4892 static bool
4893 track_expr_p (tree expr, bool need_rtl)
4895 rtx decl_rtl;
4896 tree realdecl;
4898 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4899 return DECL_RTL_SET_P (expr);
4901 /* If EXPR is not a parameter or a variable, do not track it. */
4902 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4903 return 0;
4905 /* It also must have a name... */
4906 if (!DECL_NAME (expr) && need_rtl)
4907 return 0;
4909 /* ... and an RTL assigned to it. */
4910 decl_rtl = DECL_RTL_IF_SET (expr);
4911 if (!decl_rtl && need_rtl)
4912 return 0;
4914 /* If this expression is really a debug alias of some other declaration, we
4915 don't need to track this expression if the ultimate declaration is
4916 ignored. */
4917 realdecl = expr;
4918 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4920 realdecl = DECL_DEBUG_EXPR (realdecl);
4921 if (realdecl == NULL_TREE)
4922 realdecl = expr;
4923 else if (!DECL_P (realdecl))
4925 if (handled_component_p (realdecl))
4927 HOST_WIDE_INT bitsize, bitpos, maxsize;
4928 tree innerdecl
4929 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4930 &maxsize);
4931 if (!DECL_P (innerdecl)
4932 || DECL_IGNORED_P (innerdecl)
4933 || TREE_STATIC (innerdecl)
4934 || bitsize <= 0
4935 || bitpos + bitsize > 256
4936 || bitsize != maxsize)
4937 return 0;
4938 else
4939 realdecl = expr;
4941 else
4942 return 0;
4946 /* Do not track EXPR if REALDECL should be ignored for debugging
4947 purposes. */
4948 if (DECL_IGNORED_P (realdecl))
4949 return 0;
4951 /* Do not track global variables until we are able to emit correct location
4952 lists for them. */
4953 if (TREE_STATIC (realdecl))
4954 return 0;
4956 /* When EXPR is a DECL for an alias of some variable (see example below)
4957 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
4958 DECL_RTL contains a SYMBOL_REF.
4960 Example:
4961 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4962 char **_dl_argv;
4964 if (decl_rtl && MEM_P (decl_rtl)
4965 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4966 return 0;
4968 /* If the RTL is a memory, it should not be very large (because that would
4969 mean it is an array or a struct). */
4970 if (decl_rtl && MEM_P (decl_rtl))
4972 /* Do not track structures and arrays. */
4973 if (GET_MODE (decl_rtl) == BLKmode
4974 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4975 return 0;
4976 if (MEM_SIZE_KNOWN_P (decl_rtl)
4977 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4978 return 0;
4981 DECL_CHANGED (expr) = 0;
4982 DECL_CHANGED (realdecl) = 0;
4983 return 1;
4986 /* Determine whether a given LOC refers to the same variable part as
4987 EXPR+OFFSET. */
4989 static bool
4990 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4992 tree expr2;
4993 HOST_WIDE_INT offset2;
4995 if (! DECL_P (expr))
4996 return false;
4998 if (REG_P (loc))
5000 expr2 = REG_EXPR (loc);
5001 offset2 = REG_OFFSET (loc);
5003 else if (MEM_P (loc))
5005 expr2 = MEM_EXPR (loc);
5006 offset2 = INT_MEM_OFFSET (loc);
5008 else
5009 return false;
5011 if (! expr2 || ! DECL_P (expr2))
5012 return false;
5014 expr = var_debug_decl (expr);
5015 expr2 = var_debug_decl (expr2);
5017 return (expr == expr2 && offset == offset2);
5020 /* LOC is a REG or MEM that we would like to track if possible.
5021 If EXPR is null, we don't know what expression LOC refers to,
5022 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5023 LOC is an lvalue register.
5025 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5026 is something we can track. When returning true, store the mode of
5027 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5028 from EXPR in *OFFSET_OUT (if nonnull). */
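/* A concrete case (editorial sketch): if LOC is a paradoxical SImode
   lowpart of a QImode EXPR, the logic below rewrites MODE to QImode
   (DECL_MODE (EXPR)) and OFFSET to 0, so the whole of EXPR is tracked
   rather than the oversized lowpart.  */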
5030 static bool
5031 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5032 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5034 enum machine_mode mode;
5036 if (expr == NULL || !track_expr_p (expr, true))
5037 return false;
5039 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5040 whole subreg, but only the old inner part is really relevant. */
5041 mode = GET_MODE (loc);
5042 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5044 enum machine_mode pseudo_mode;
5046 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5047 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5049 offset += byte_lowpart_offset (pseudo_mode, mode);
5050 mode = pseudo_mode;
5054 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5055 Do the same if we are storing to a register and EXPR occupies
5056 the whole of register LOC; in that case, the whole of EXPR is
5057 being changed. We exclude complex modes from the second case
5058 because the real and imaginary parts are represented as separate
5059 pseudo registers, even if the whole complex value fits into one
5060 hard register. */
5061 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5062 || (store_reg_p
5063 && !COMPLEX_MODE_P (DECL_MODE (expr))
5064 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5065 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5067 mode = DECL_MODE (expr);
5068 offset = 0;
5071 if (offset < 0 || offset >= MAX_VAR_PARTS)
5072 return false;
5074 if (mode_out)
5075 *mode_out = mode;
5076 if (offset_out)
5077 *offset_out = offset;
5078 return true;
5081 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5082 want to track. When returning nonnull, make sure that the attributes
5083 on the returned value are updated. */
5085 static rtx
5086 var_lowpart (enum machine_mode mode, rtx loc)
5088 unsigned int offset, reg_offset, regno;
5090 if (!REG_P (loc) && !MEM_P (loc))
5091 return NULL;
5093 if (GET_MODE (loc) == mode)
5094 return loc;
5096 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5098 if (MEM_P (loc))
5099 return adjust_address_nv (loc, mode, offset);
5101 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5102 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5103 reg_offset, mode);
5104 return gen_rtx_REG_offset (loc, mode, regno, offset);
5107 /* Carry information about uses and stores while walking rtx. */
5109 struct count_use_info
5111 /* The insn where the RTX is. */
5112 rtx insn;
5114 /* The basic block where insn is. */
5115 basic_block bb;
5117 /* The array of n_sets sets in the insn, as determined by cselib. */
5118 struct cselib_set *sets;
5119 int n_sets;
5121 /* True if we're counting stores, false otherwise. */
5122 bool store_p;
5125 /* Find a VALUE corresponding to X. */
5127 static inline cselib_val *
5128 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5130 int i;
5132 if (cui->sets)
5134 /* This is called after uses are set up and before stores are
5135 processed by cselib, so it's safe to look up srcs, but not
5136 dsts. So we look up expressions that appear in srcs or in
5137 dest expressions, but we search the sets array for dests of
5138 stores. */
5139 if (cui->store_p)
5141 /* Some targets represent memset and memcpy patterns
5142 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5143 (set (mem:BLK ...) (const_int ...)) or
5144 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5145 in that case, otherwise we end up with mode mismatches. */
5146 if (mode == BLKmode && MEM_P (x))
5147 return NULL;
5148 for (i = 0; i < cui->n_sets; i++)
5149 if (cui->sets[i].dest == x)
5150 return cui->sets[i].src_elt;
5152 else
5153 return cselib_lookup (x, mode, 0, VOIDmode);
5156 return NULL;
5159 /* Replace all registers and addresses in an expression with VALUE
5160 expressions that map back to them, unless the expression is a
5161 register. If no mapping was or could be performed, return NULL. */
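/* For instance (editorial sketch): (mem:SI (plus:SI (reg:SI sp)
   (const_int 4))) becomes (mem:SI v), with v the VALUE cselib has for
   the address, provided the address lookup succeeds; a top-level
   (reg:SI 1) yields NULL instead.  */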
5163 static rtx
5164 replace_expr_with_values (rtx loc)
5166 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5167 return NULL;
5168 else if (MEM_P (loc))
5170 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5171 get_address_mode (loc), 0,
5172 GET_MODE (loc));
5173 if (addr)
5174 return replace_equiv_address_nv (loc, addr->val_rtx);
5175 else
5176 return NULL;
5178 else
5179 return cselib_subst_to_values (loc, VOIDmode);
5182 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5183 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5184 RTX. */
5186 static int
5187 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5189 rtx loc = *x;
5191 return GET_CODE (loc) == DEBUG_EXPR;
5194 /* Determine what kind of micro operation to choose for a USE. Return
5195 MO_CLOBBER if no micro operation is to be generated. */
5197 static enum micro_operation_type
5198 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5200 tree expr;
5202 if (cui && cui->sets)
5204 if (GET_CODE (loc) == VAR_LOCATION)
5206 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5208 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5209 if (! VAR_LOC_UNKNOWN_P (ploc))
5211 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5212 VOIDmode);
5214 /* ??? flag_float_store and volatile mems are never
5215 given values, but we could in theory use them for
5216 locations. */
5217 gcc_assert (val || 1);
5219 return MO_VAL_LOC;
5221 else
5222 return MO_CLOBBER;
5225 if (REG_P (loc) || MEM_P (loc))
5227 if (modep)
5228 *modep = GET_MODE (loc);
5229 if (cui->store_p)
5231 if (REG_P (loc)
5232 || (find_use_val (loc, GET_MODE (loc), cui)
5233 && cselib_lookup (XEXP (loc, 0),
5234 get_address_mode (loc), 0,
5235 GET_MODE (loc))))
5236 return MO_VAL_SET;
5238 else
5240 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5242 if (val && !cselib_preserved_value_p (val))
5243 return MO_VAL_USE;
5248 if (REG_P (loc))
5250 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5252 if (loc == cfa_base_rtx)
5253 return MO_CLOBBER;
5254 expr = REG_EXPR (loc);
5256 if (!expr)
5257 return MO_USE_NO_VAR;
5258 else if (target_for_debug_bind (var_debug_decl (expr)))
5259 return MO_CLOBBER;
5260 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5261 false, modep, NULL))
5262 return MO_USE;
5263 else
5264 return MO_USE_NO_VAR;
5266 else if (MEM_P (loc))
5268 expr = MEM_EXPR (loc);
5270 if (!expr)
5271 return MO_CLOBBER;
5272 else if (target_for_debug_bind (var_debug_decl (expr)))
5273 return MO_CLOBBER;
5274 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5275 false, modep, NULL)
5276 /* Multi-part variables shouldn't refer to one-part
5277 variable names such as VALUEs (never happens) or
5278 DEBUG_EXPRs (only happens in the presence of debug
5279 insns). */
5280 && (!MAY_HAVE_DEBUG_INSNS
5281 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5282 return MO_USE;
5283 else
5284 return MO_CLOBBER;
5287 return MO_CLOBBER;
5290 /* Log to OUT information about micro-operation MOPT involving X in
5291 INSN of BB. */
5293 static inline void
5294 log_op_type (rtx x, basic_block bb, rtx insn,
5295 enum micro_operation_type mopt, FILE *out)
5297 fprintf (out, "bb %i op %i insn %i %s ",
5298 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
5299 INSN_UID (insn), micro_operation_type_name[mopt]);
5300 print_inline_rtx (out, x, 2);
5301 fputc ('\n', out);
5304 /* Tell whether the CONCAT used to hold a VALUE and its location
5305 needs value resolution, i.e., an attempt to map the location
5306 back to other incoming values. */
5307 #define VAL_NEEDS_RESOLUTION(x) \
5308 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5309 /* Whether the location in the CONCAT is a tracked expression, that
5310 should also be handled like a MO_USE. */
5311 #define VAL_HOLDS_TRACK_EXPR(x) \
5312 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5313 /* Whether the location in the CONCAT should be handled like a MO_COPY
5314 as well. */
5315 #define VAL_EXPR_IS_COPIED(x) \
5316 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5317 /* Whether the location in the CONCAT should be handled like a
5318 MO_CLOBBER as well. */
5319 #define VAL_EXPR_IS_CLOBBERED(x) \
5320 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5322 /* All preserved VALUEs. */
5323 static VEC (rtx, heap) *preserved_values;
5325 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5327 static void
5328 preserve_value (cselib_val *val)
5330 cselib_preserve_value (val);
5331 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5334 /* Helper function for MO_VAL_LOC handling. Return non-zero if we
5335 discover any rtxes that are not suitable for CONST use and have
5336 not been replaced by VALUEs. */
5338 static int
5339 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5341 if (*x == NULL_RTX)
5342 return 0;
5344 switch (GET_CODE (*x))
5346 case REG:
5347 case DEBUG_EXPR:
5348 case PC:
5349 case SCRATCH:
5350 case CC0:
5351 case ASM_INPUT:
5352 case ASM_OPERANDS:
5353 return 1;
5354 case MEM:
5355 return !MEM_READONLY_P (*x);
5356 default:
5357 return 0;
5361 /* Add the uses (register and memory references) in LOC, which will be
5362 tracked, to VTI (bb)->mos. INSN is the instruction that LOC is part of. */
5364 static int
5365 add_uses (rtx *ploc, void *data)
5367 rtx loc = *ploc;
5368 enum machine_mode mode = VOIDmode;
5369 struct count_use_info *cui = (struct count_use_info *)data;
5370 enum micro_operation_type type = use_type (loc, cui, &mode);
5372 if (type != MO_CLOBBER)
5374 basic_block bb = cui->bb;
5375 micro_operation mo;
5377 mo.type = type;
5378 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5379 mo.insn = cui->insn;
5381 if (type == MO_VAL_LOC)
5383 rtx oloc = loc;
5384 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5385 cselib_val *val;
5387 gcc_assert (cui->sets);
5389 if (MEM_P (vloc)
5390 && !REG_P (XEXP (vloc, 0))
5391 && !MEM_P (XEXP (vloc, 0)))
5393 rtx mloc = vloc;
5394 enum machine_mode address_mode = get_address_mode (mloc);
5395 cselib_val *val
5396 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5397 GET_MODE (mloc));
5399 if (val && !cselib_preserved_value_p (val))
5400 preserve_value (val);
5403 if (CONSTANT_P (vloc)
5404 && (GET_CODE (vloc) != CONST
5405 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5406 /* For constants don't look up any value. */;
5407 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5408 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5410 enum machine_mode mode2;
5411 enum micro_operation_type type2;
5412 rtx nloc = NULL;
5413 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5415 if (resolvable)
5416 nloc = replace_expr_with_values (vloc);
5418 if (nloc)
5420 oloc = shallow_copy_rtx (oloc);
5421 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5424 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5426 type2 = use_type (vloc, 0, &mode2);
5428 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5429 || type2 == MO_CLOBBER);
5431 if (type2 == MO_CLOBBER
5432 && !cselib_preserved_value_p (val))
5434 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5435 preserve_value (val);
5438 else if (!VAR_LOC_UNKNOWN_P (vloc))
5440 oloc = shallow_copy_rtx (oloc);
5441 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5444 mo.u.loc = oloc;
5446 else if (type == MO_VAL_USE)
5448 enum machine_mode mode2 = VOIDmode;
5449 enum micro_operation_type type2;
5450 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5451 rtx vloc, oloc = loc, nloc;
5453 gcc_assert (cui->sets);
5455 if (MEM_P (oloc)
5456 && !REG_P (XEXP (oloc, 0))
5457 && !MEM_P (XEXP (oloc, 0)))
5459 rtx mloc = oloc;
5460 enum machine_mode address_mode = get_address_mode (mloc);
5461 cselib_val *val
5462 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5463 GET_MODE (mloc));
5465 if (val && !cselib_preserved_value_p (val))
5466 preserve_value (val);
5469 type2 = use_type (loc, 0, &mode2);
5471 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5472 || type2 == MO_CLOBBER);
5474 if (type2 == MO_USE)
5475 vloc = var_lowpart (mode2, loc);
5476 else
5477 vloc = oloc;
5479 /* The loc of a MO_VAL_USE may have two forms:
5481 (concat val src): val is at src, a value-based
5482 representation.
5484 (concat (concat val use) src): same as above, with use as
5485 the MO_USE tracked value, if it differs from src.
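     For instance (editorial sketch): a use of (mem:SI (reg:SI 1))
     whose contents map to VALUE v and whose address maps to VALUE a
     could be recorded as
       (concat (concat v (mem:SI (reg:SI 1))) (mem:SI a)),
     matching the second form above.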
5489 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5490 nloc = replace_expr_with_values (loc);
5491 if (!nloc)
5492 nloc = oloc;
5494 if (vloc != nloc)
5495 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5496 else
5497 oloc = val->val_rtx;
5499 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5501 if (type2 == MO_USE)
5502 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5503 if (!cselib_preserved_value_p (val))
5505 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5506 preserve_value (val);
5509 else
5510 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5512 if (dump_file && (dump_flags & TDF_DETAILS))
5513 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5514 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5517 return 0;
5520 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5522 static void
5523 add_uses_1 (rtx *x, void *cui)
5525 for_each_rtx (x, add_uses, cui);
5528 /* This is the value used during expansion of locations. We want it
5529 to be unbounded, so that variables expanded deep in a recursion
5530 nest are fully evaluated, so that their values are cached
5531 correctly. We avoid recursion cycles through other means, and we
5532 don't unshare RTL, so excess complexity is not a problem. */
5533 #define EXPR_DEPTH (INT_MAX)
5534 /* We use this to keep too-complex expressions from being emitted as
5535 location notes, and from there into debug information. Users can trade
5536 compile time for ridiculously complex expressions, although they're
5537 seldom useful, and they may often have to be discarded as not
5538 representable anyway. */
5539 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5541 /* Attempt to reverse the EXPR operation in the debug info and record
5542 it in the cselib table. Say, for reg1 = reg2 + 6, even when reg2 is
5543 no longer live we can express its value as VAL - 6. */
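/* A schematic instance (editorial): for the pattern
   (set (reg:SI 1) (plus:SI (reg:SI 2) (const_int 6))), the code below
   records (minus:SI VAL (const_int 6)), with VAL the value of reg 1,
   as a permanent equivalence for the preserved value of reg 2.  */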
5545 static void
5546 reverse_op (rtx val, const_rtx expr, rtx insn)
5548 rtx src, arg, ret;
5549 cselib_val *v;
5550 struct elt_loc_list *l;
5551 enum rtx_code code;
5553 if (GET_CODE (expr) != SET)
5554 return;
5556 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5557 return;
5559 src = SET_SRC (expr);
5560 switch (GET_CODE (src))
5562 case PLUS:
5563 case MINUS:
5564 case XOR:
5565 case NOT:
5566 case NEG:
5567 if (!REG_P (XEXP (src, 0)))
5568 return;
5569 break;
5570 case SIGN_EXTEND:
5571 case ZERO_EXTEND:
5572 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5573 return;
5574 break;
5575 default:
5576 return;
5579 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5580 return;
5582 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5583 if (!v || !cselib_preserved_value_p (v))
5584 return;
5586 /* Use canonical V to avoid creating multiple redundant expressions
5587 for different VALUES equivalent to V. */
5588 v = canonical_cselib_val (v);
5590 /* Adding a reverse op isn't useful if V already has an always valid
5591 location. Ignore ENTRY_VALUE; while it is always constant, we should
5592 prefer non-ENTRY_VALUE locations whenever possible. */
5593 for (l = v->locs; l; l = l->next)
5594 if (CONSTANT_P (l->loc)
5595 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5596 return;
5598 switch (GET_CODE (src))
5600 case NOT:
5601 case NEG:
5602 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5603 return;
5604 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5605 break;
5606 case SIGN_EXTEND:
5607 case ZERO_EXTEND:
5608 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5609 break;
5610 case XOR:
5611 code = XOR;
5612 goto binary;
5613 case PLUS:
5614 code = MINUS;
5615 goto binary;
5616 case MINUS:
5617 code = PLUS;
5618 goto binary;
5619 binary:
5620 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5621 return;
5622 arg = XEXP (src, 1);
5623 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5625 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5626 if (arg == NULL_RTX)
5627 return;
5628 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5629 return;
5631 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5632 if (ret == val)
5633 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5634 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5635 breaks a lot of routines during var-tracking. */
5636 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5637 break;
5638 default:
5639 gcc_unreachable ();
5642 cselib_add_permanent_equiv (v, ret, insn);
5645 /* Add the stores (register and memory references) in LOC, which will be
5646 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing the
5647 store. CUIP->insn is the instruction that LOC is part of. */
5649 static void
5650 add_stores (rtx loc, const_rtx expr, void *cuip)
5652 enum machine_mode mode = VOIDmode, mode2;
5653 struct count_use_info *cui = (struct count_use_info *)cuip;
5654 basic_block bb = cui->bb;
5655 micro_operation mo;
5656 rtx oloc = loc, nloc, src = NULL;
5657 enum micro_operation_type type = use_type (loc, cui, &mode);
5658 bool track_p = false;
5659 cselib_val *v;
5660 bool resolve, preserve;
5662 if (type == MO_CLOBBER)
5663 return;
5665 mode2 = mode;
5667 if (REG_P (loc))
5669 gcc_assert (loc != cfa_base_rtx);
5670 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5671 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5672 || GET_CODE (expr) == CLOBBER)
5674 mo.type = MO_CLOBBER;
5675 mo.u.loc = loc;
5676 if (GET_CODE (expr) == SET
5677 && SET_DEST (expr) == loc
5678 && !unsuitable_loc (SET_SRC (expr))
5679 && find_use_val (loc, mode, cui))
5681 gcc_checking_assert (type == MO_VAL_SET);
5682 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5685 else
5687 if (GET_CODE (expr) == SET
5688 && SET_DEST (expr) == loc
5689 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5690 src = var_lowpart (mode2, SET_SRC (expr));
5691 loc = var_lowpart (mode2, loc);
5693 if (src == NULL)
5695 mo.type = MO_SET;
5696 mo.u.loc = loc;
5698 else
5700 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5701 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5702 mo.type = MO_COPY;
5703 else
5704 mo.type = MO_SET;
5705 mo.u.loc = xexpr;
5708 mo.insn = cui->insn;
5710 else if (MEM_P (loc)
5711 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5712 || cui->sets))
5714 if (MEM_P (loc) && type == MO_VAL_SET
5715 && !REG_P (XEXP (loc, 0))
5716 && !MEM_P (XEXP (loc, 0)))
5718 rtx mloc = loc;
5719 enum machine_mode address_mode = get_address_mode (mloc);
5720 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5721 address_mode, 0,
5722 GET_MODE (mloc));
5724 if (val && !cselib_preserved_value_p (val))
5725 preserve_value (val);
5728 if (GET_CODE (expr) == CLOBBER || !track_p)
5730 mo.type = MO_CLOBBER;
5731 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5733 else
5735 if (GET_CODE (expr) == SET
5736 && SET_DEST (expr) == loc
5737 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5738 src = var_lowpart (mode2, SET_SRC (expr));
5739 loc = var_lowpart (mode2, loc);
5741 if (src == NULL)
5743 mo.type = MO_SET;
5744 mo.u.loc = loc;
5746 else
5748 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5749 if (same_variable_part_p (SET_SRC (xexpr),
5750 MEM_EXPR (loc),
5751 INT_MEM_OFFSET (loc)))
5752 mo.type = MO_COPY;
5753 else
5754 mo.type = MO_SET;
5755 mo.u.loc = xexpr;
5758 mo.insn = cui->insn;
5760 else
5761 return;
5763 if (type != MO_VAL_SET)
5764 goto log_and_return;
5766 v = find_use_val (oloc, mode, cui);
5768 if (!v)
5769 goto log_and_return;
5771 resolve = preserve = !cselib_preserved_value_p (v);
5773 nloc = replace_expr_with_values (oloc);
5774 if (nloc)
5775 oloc = nloc;
5777 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5779 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5781 gcc_assert (oval != v);
5782 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5784 if (oval && !cselib_preserved_value_p (oval))
5786 micro_operation moa;
5788 preserve_value (oval);
5790 moa.type = MO_VAL_USE;
5791 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5792 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5793 moa.insn = cui->insn;
5795 if (dump_file && (dump_flags & TDF_DETAILS))
5796 log_op_type (moa.u.loc, cui->bb, cui->insn,
5797 moa.type, dump_file);
5798 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5801 resolve = false;
5803 else if (resolve && GET_CODE (mo.u.loc) == SET)
5805 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5806 nloc = replace_expr_with_values (SET_SRC (expr));
5807 else
5808 nloc = NULL_RTX;
5810 /* Avoid a mode mismatch between the lowpart SET in mo.u.loc and EXPR. */
5811 if (!nloc && mode != mode2)
5813 nloc = SET_SRC (expr);
5814 gcc_assert (oloc == SET_DEST (expr));
5817 if (nloc && nloc != SET_SRC (mo.u.loc))
5818 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5819 else
5821 if (oloc == SET_DEST (mo.u.loc))
5822 /* No point in duplicating. */
5823 oloc = mo.u.loc;
5824 if (!REG_P (SET_SRC (mo.u.loc)))
5825 resolve = false;
5828 else if (!resolve)
5830 if (GET_CODE (mo.u.loc) == SET
5831 && oloc == SET_DEST (mo.u.loc))
5832 /* No point in duplicating. */
5833 oloc = mo.u.loc;
5835 else
5836 resolve = false;
5838 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5840 if (mo.u.loc != oloc)
5841 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5843 /* The loc of a MO_VAL_SET may have various forms:
5845 (concat val dst): dst now holds val
5847 (concat val (set dst src)): dst now holds val, copied from src
5849 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5850 after replacing mems and non-top-level regs with values.
5852 (concat (concat val dstv) (set dst src)): dst now holds val,
5853 copied from src. dstv is a value-based representation of dst, if
5854 it differs from dst. If resolution is needed, src is a REG, and
5855 its mode is the same as that of val.
5857 (concat (concat val (set dstv srcv)) (set dst src)): src
5858 copied to dst, holding val. dstv and srcv are value-based
5859 representations of dst and src, respectively.
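     For instance (editorial sketch): (concat v (set (reg:SI 0)
     (reg:SI 1))) would record that reg 0 now holds VALUE v, copied
     from reg 1, matching the second form above.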
5863 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5864 reverse_op (v->val_rtx, expr, cui->insn);
5866 mo.u.loc = loc;
5868 if (track_p)
5869 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5870 if (preserve)
5872 VAL_NEEDS_RESOLUTION (loc) = resolve;
5873 preserve_value (v);
5875 if (mo.type == MO_CLOBBER)
5876 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5877 if (mo.type == MO_COPY)
5878 VAL_EXPR_IS_COPIED (loc) = 1;
5880 mo.type = MO_VAL_SET;
5882 log_and_return:
5883 if (dump_file && (dump_flags & TDF_DETAILS))
5884 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5885 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5888 /* Arguments to the call. */
5889 static rtx call_arguments;
5891 /* Compute call_arguments. */
5893 static void
5894 prepare_call_arguments (basic_block bb, rtx insn)
5896 rtx link, x;
5897 rtx prev, cur, next;
5898 rtx call = PATTERN (insn);
5899 rtx this_arg = NULL_RTX;
5900 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5901 tree obj_type_ref = NULL_TREE;
5902 CUMULATIVE_ARGS args_so_far_v;
5903 cumulative_args_t args_so_far;
5905 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5906 args_so_far = pack_cumulative_args (&args_so_far_v);
5907 if (GET_CODE (call) == PARALLEL)
5908 call = XVECEXP (call, 0, 0);
5909 if (GET_CODE (call) == SET)
5910 call = SET_SRC (call);
5911 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5913 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5915 rtx symbol = XEXP (XEXP (call, 0), 0);
5916 if (SYMBOL_REF_DECL (symbol))
5917 fndecl = SYMBOL_REF_DECL (symbol);
5919 if (fndecl == NULL_TREE)
5920 fndecl = MEM_EXPR (XEXP (call, 0));
5921 if (fndecl
5922 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5923 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5924 fndecl = NULL_TREE;
5925 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5926 type = TREE_TYPE (fndecl);
5927 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5929 if (TREE_CODE (fndecl) == INDIRECT_REF
5930 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5931 obj_type_ref = TREE_OPERAND (fndecl, 0);
5932 fndecl = NULL_TREE;
5934 if (type)
5936 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5937 t = TREE_CHAIN (t))
5938 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5939 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5940 break;
5941 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5942 type = NULL;
5943 else
5945 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5946 link = CALL_INSN_FUNCTION_USAGE (insn);
5947 #ifndef PCC_STATIC_STRUCT_RETURN
5948 if (aggregate_value_p (TREE_TYPE (type), type)
5949 && targetm.calls.struct_value_rtx (type, 0) == 0)
5951 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5952 enum machine_mode mode = TYPE_MODE (struct_addr);
5953 rtx reg;
5954 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5955 nargs + 1);
5956 reg = targetm.calls.function_arg (args_so_far, mode,
5957 struct_addr, true);
5958 targetm.calls.function_arg_advance (args_so_far, mode,
5959 struct_addr, true);
5960 if (reg == NULL_RTX)
5962 for (; link; link = XEXP (link, 1))
5963 if (GET_CODE (XEXP (link, 0)) == USE
5964 && MEM_P (XEXP (XEXP (link, 0), 0)))
5966 link = XEXP (link, 1);
5967 break;
5971 else
5972 #endif
5973 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5974 nargs);
5975 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5977 enum machine_mode mode;
5978 t = TYPE_ARG_TYPES (type);
5979 mode = TYPE_MODE (TREE_VALUE (t));
5980 this_arg = targetm.calls.function_arg (args_so_far, mode,
5981 TREE_VALUE (t), true);
5982 if (this_arg && !REG_P (this_arg))
5983 this_arg = NULL_RTX;
5984 else if (this_arg == NULL_RTX)
5986 for (; link; link = XEXP (link, 1))
5987 if (GET_CODE (XEXP (link, 0)) == USE
5988 && MEM_P (XEXP (XEXP (link, 0), 0)))
5990 this_arg = XEXP (XEXP (link, 0), 0);
5991 break;
5998 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6000 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6001 if (GET_CODE (XEXP (link, 0)) == USE)
6003 rtx item = NULL_RTX;
6004 x = XEXP (XEXP (link, 0), 0);
6005 if (GET_MODE (link) == VOIDmode
6006 || GET_MODE (link) == BLKmode
6007 || (GET_MODE (link) != GET_MODE (x)
6008 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6009 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6010 /* Can't do anything for these if the original type mode
6011 isn't known or can't be converted. */;
6012 else if (REG_P (x))
6014 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6015 if (val && cselib_preserved_value_p (val))
6016 item = val->val_rtx;
6017 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6019 enum machine_mode mode = GET_MODE (x);
6021 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6022 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6024 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6026 if (reg == NULL_RTX || !REG_P (reg))
6027 continue;
6028 val = cselib_lookup (reg, mode, 0, VOIDmode);
6029 if (val && cselib_preserved_value_p (val))
6031 item = val->val_rtx;
6032 break;
6037 else if (MEM_P (x))
6039 rtx mem = x;
6040 cselib_val *val;
6042 if (!frame_pointer_needed)
6044 struct adjust_mem_data amd;
6045 amd.mem_mode = VOIDmode;
6046 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6047 amd.side_effects = NULL_RTX;
6048 amd.store = true;
6049 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6050 &amd);
6051 gcc_assert (amd.side_effects == NULL_RTX);
6053 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6054 if (val && cselib_preserved_value_p (val))
6055 item = val->val_rtx;
6056 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6058 /* For non-integer stack arguments, also check whether they
6059 were initialized by integers. */
6060 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6061 if (imode != GET_MODE (mem) && imode != BLKmode)
6063 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6064 imode, 0, VOIDmode);
6065 if (val && cselib_preserved_value_p (val))
6066 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6067 imode);
6071 if (item)
6073 rtx x2 = x;
6074 if (GET_MODE (item) != GET_MODE (link))
6075 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6076 if (GET_MODE (x2) != GET_MODE (link))
6077 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6078 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6079 call_arguments
6080 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6082 if (t && t != void_list_node)
6084 tree argtype = TREE_VALUE (t);
6085 enum machine_mode mode = TYPE_MODE (argtype);
6086 rtx reg;
6087 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6089 argtype = build_pointer_type (argtype);
6090 mode = TYPE_MODE (argtype);
6092 reg = targetm.calls.function_arg (args_so_far, mode,
6093 argtype, true);
6094 if (TREE_CODE (argtype) == REFERENCE_TYPE
6095 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6096 && reg
6097 && REG_P (reg)
6098 && GET_MODE (reg) == mode
6099 && GET_MODE_CLASS (mode) == MODE_INT
6100 && REG_P (x)
6101 && REGNO (x) == REGNO (reg)
6102 && GET_MODE (x) == mode
6103 && item)
6105 enum machine_mode indmode
6106 = TYPE_MODE (TREE_TYPE (argtype));
6107 rtx mem = gen_rtx_MEM (indmode, x);
6108 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6109 if (val && cselib_preserved_value_p (val))
6111 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6112 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6113 call_arguments);
6115 else
6117 struct elt_loc_list *l;
6118 tree initial;
6120 /* Try harder: when passing the address of a constant
6121 pool integer, it can be easily read back. */
6122 item = XEXP (item, 1);
6123 if (GET_CODE (item) == SUBREG)
6124 item = SUBREG_REG (item);
6125 gcc_assert (GET_CODE (item) == VALUE);
6126 val = CSELIB_VAL_PTR (item);
6127 for (l = val->locs; l; l = l->next)
6128 if (GET_CODE (l->loc) == SYMBOL_REF
6129 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6130 && SYMBOL_REF_DECL (l->loc)
6131 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6133 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6134 if (host_integerp (initial, 0))
6136 item = GEN_INT (tree_low_cst (initial, 0));
6137 item = gen_rtx_CONCAT (indmode, mem, item);
6138 call_arguments
6139 = gen_rtx_EXPR_LIST (VOIDmode, item,
6140 call_arguments);
6142 break;
6146 targetm.calls.function_arg_advance (args_so_far, mode,
6147 argtype, true);
6148 t = TREE_CHAIN (t);
6152 /* Add debug arguments. */
6153 if (fndecl
6154 && TREE_CODE (fndecl) == FUNCTION_DECL
6155 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6157 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
6158 if (debug_args)
6160 unsigned int ix;
6161 tree param;
6162 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
6164 rtx item;
6165 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
6166 enum machine_mode mode = DECL_MODE (dtemp);
6167 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6168 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6169 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6170 call_arguments);
6175 /* Reverse call_arguments chain. */
6176 prev = NULL_RTX;
6177 for (cur = call_arguments; cur; cur = next)
6179 next = XEXP (cur, 1);
6180 XEXP (cur, 1) = prev;
6181 prev = cur;
6183 call_arguments = prev;
6185 x = PATTERN (insn);
6186 if (GET_CODE (x) == PARALLEL)
6187 x = XVECEXP (x, 0, 0);
6188 if (GET_CODE (x) == SET)
6189 x = SET_SRC (x);
6190 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
6192 x = XEXP (XEXP (x, 0), 0);
6193 if (GET_CODE (x) == SYMBOL_REF)
6194 /* Don't record anything. */;
6195 else if (CONSTANT_P (x))
6197 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6198 pc_rtx, x);
6199 call_arguments
6200 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6202 else
6204 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6205 if (val && cselib_preserved_value_p (val))
6207 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6208 call_arguments
6209 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6213 if (this_arg)
6215 enum machine_mode mode
6216 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6217 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6218 HOST_WIDE_INT token
6219 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
6220 if (token)
6221 clobbered = plus_constant (mode, clobbered,
6222 token * GET_MODE_SIZE (mode));
6223 clobbered = gen_rtx_MEM (mode, clobbered);
6224 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6225 call_arguments
6226 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6230 /* Callback for cselib_record_sets_hook. Records the uses and stores
6231 in an insn as micro operations, after cselib_record_sets has
6232 analyzed the sets in the insn but before it modifies the stored
6233 values in its internal tables. It may also be called directly
6234 rather than through the hook (when we're not doing cselib in the
6235 first place), in which case sets and n_sets will be 0. */
6237 static void
6238 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6240 basic_block bb = BLOCK_FOR_INSN (insn);
6241 int n1, n2;
6242 struct count_use_info cui;
6243 micro_operation *mos;
6245 cselib_hook_called = true;
6247 cui.insn = insn;
6248 cui.bb = bb;
6249 cui.sets = sets;
6250 cui.n_sets = n_sets;
6252 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6253 cui.store_p = false;
6254 note_uses (&PATTERN (insn), add_uses_1, &cui);
6255 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6256 mos = VEC_address (micro_operation, VTI (bb)->mos);
6258 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6259 MO_VAL_LOC last. */
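/* (Editorial note: the loop below is an in-place two-pointer
   partition -- advance past MO_USEs from the front, retreat past
   non-MO_USEs from the back, and swap the mismatched pair -- so the
   relative order within each class is not preserved.)  */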
6260 while (n1 < n2)
6262 while (n1 < n2 && mos[n1].type == MO_USE)
6263 n1++;
6264 while (n1 < n2 && mos[n2].type != MO_USE)
6265 n2--;
6266 if (n1 < n2)
6268 micro_operation sw;
6270 sw = mos[n1];
6271 mos[n1] = mos[n2];
6272 mos[n2] = sw;
6276 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6277 while (n1 < n2)
6279 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6280 n1++;
6281 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6282 n2--;
6283 if (n1 < n2)
6285 micro_operation sw;
6287 sw = mos[n1];
6288 mos[n1] = mos[n2];
6289 mos[n2] = sw;
6293 if (CALL_P (insn))
6295 micro_operation mo;
6297 mo.type = MO_CALL;
6298 mo.insn = insn;
6299 mo.u.loc = call_arguments;
6300 call_arguments = NULL_RTX;
6302 if (dump_file && (dump_flags & TDF_DETAILS))
6303 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6304 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6307 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6308 /* This will record NEXT_INSN (insn), such that we can
6309 insert notes before it without worrying about any
6310 notes that MO_USEs might emit after the insn. */
6311 cui.store_p = true;
6312 note_stores (PATTERN (insn), add_stores, &cui);
6313 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6314 mos = VEC_address (micro_operation, VTI (bb)->mos);
6316 /* Order the MO_VAL_USEs first (note_stores does nothing
6317 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6318 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6319 while (n1 < n2)
6321 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6322 n1++;
6323 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6324 n2--;
6325 if (n1 < n2)
6327 micro_operation sw;
6329 sw = mos[n1];
6330 mos[n1] = mos[n2];
6331 mos[n2] = sw;
6335 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6336 while (n1 < n2)
6338 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6339 n1++;
6340 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6341 n2--;
6342 if (n1 < n2)
6344 micro_operation sw;
6346 sw = mos[n1];
6347 mos[n1] = mos[n2];
6348 mos[n2] = sw;
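  /* After the partitions above, the micro operations for this insn are
     ordered: uses (MO_USE first, MO_VAL_LOC last), then MO_CALL for call
     insns, then stores (MO_VAL_USE first, then MO_CLOBBER, then
     MO_SET/MO_COPY/MO_VAL_SET). */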
6353 static enum var_init_status
6354 find_src_status (dataflow_set *in, rtx src)
6356 tree decl = NULL_TREE;
6357 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6359 if (! flag_var_tracking_uninit)
6360 status = VAR_INIT_STATUS_INITIALIZED;
6362 if (src && REG_P (src))
6363 decl = var_debug_decl (REG_EXPR (src));
6364 else if (src && MEM_P (src))
6365 decl = var_debug_decl (MEM_EXPR (src));
6367 if (src && decl)
6368 status = get_init_value (in, src, dv_from_decl (decl));
6370 return status;
6373 /* SRC is the source of an assignment. Use SET to try to find what
6374 was ultimately assigned to SRC. Return that value if known,
6375 otherwise return SRC itself. */
6377 static rtx
6378 find_src_set_src (dataflow_set *set, rtx src)
6380 tree decl = NULL_TREE; /* The variable being copied around. */
6381 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6382 variable var;
6383 location_chain nextp;
6384 int i;
6385 bool found;
6387 if (src && REG_P (src))
6388 decl = var_debug_decl (REG_EXPR (src));
6389 else if (src && MEM_P (src))
6390 decl = var_debug_decl (MEM_EXPR (src));
6392 if (src && decl)
6394 decl_or_value dv = dv_from_decl (decl);
6396 var = shared_hash_find (set->vars, dv);
6397 if (var)
6399 found = false;
6400 for (i = 0; i < var->n_var_parts && !found; i++)
6401 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6402 nextp = nextp->next)
6403 if (rtx_equal_p (nextp->loc, src))
6405 set_src = nextp->set_src;
6406 found = true;
6412 return set_src;
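  /* find_src_status and find_src_set_src are used when processing MO_COPY
     (and copied MO_VAL_SET) operations below, so that a copy propagates
     both the initialization status and the original assignment source of
     the variable being copied. */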
6415 /* Compute the changes of variable locations in the basic block BB. */
6417 static bool
6418 compute_bb_dataflow (basic_block bb)
6420 unsigned int i;
6421 micro_operation *mo;
6422 bool changed;
6423 dataflow_set old_out;
6424 dataflow_set *in = &VTI (bb)->in;
6425 dataflow_set *out = &VTI (bb)->out;
6427 dataflow_set_init (&old_out);
6428 dataflow_set_copy (&old_out, out);
6429 dataflow_set_copy (out, in);
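  /* OUT is recomputed from scratch: start from a copy of IN and apply
     this block's micro operations in order.  OLD_OUT is kept only to
     detect whether OUT changed, which drives the fixed-point iteration
     in vt_find_locations. */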
6431 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6433 rtx insn = mo->insn;
6435 switch (mo->type)
6437 case MO_CALL:
6438 dataflow_set_clear_at_call (out);
6439 break;
6441 case MO_USE:
6443 rtx loc = mo->u.loc;
6445 if (REG_P (loc))
6446 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6447 else if (MEM_P (loc))
6448 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6450 break;
6452 case MO_VAL_LOC:
6454 rtx loc = mo->u.loc;
6455 rtx val, vloc;
6456 tree var;
6458 if (GET_CODE (loc) == CONCAT)
6460 val = XEXP (loc, 0);
6461 vloc = XEXP (loc, 1);
6463 else
6465 val = NULL_RTX;
6466 vloc = loc;
6469 var = PAT_VAR_LOCATION_DECL (vloc);
6471 clobber_variable_part (out, NULL_RTX,
6472 dv_from_decl (var), 0, NULL_RTX);
6473 if (val)
6475 if (VAL_NEEDS_RESOLUTION (loc))
6476 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6477 set_variable_part (out, val, dv_from_decl (var), 0,
6478 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6479 INSERT);
6481 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6482 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6483 dv_from_decl (var), 0,
6484 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6485 INSERT);
6487 break;
6489 case MO_VAL_USE:
6491 rtx loc = mo->u.loc;
6492 rtx val, vloc, uloc;
6494 vloc = uloc = XEXP (loc, 1);
6495 val = XEXP (loc, 0);
6497 if (GET_CODE (val) == CONCAT)
6499 uloc = XEXP (val, 1);
6500 val = XEXP (val, 0);
6503 if (VAL_NEEDS_RESOLUTION (loc))
6504 val_resolve (out, val, vloc, insn);
6505 else
6506 val_store (out, val, uloc, insn, false);
6508 if (VAL_HOLDS_TRACK_EXPR (loc))
6510 if (GET_CODE (uloc) == REG)
6511 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6512 NULL);
6513 else if (GET_CODE (uloc) == MEM)
6514 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6515 NULL);
6518 break;
6520 case MO_VAL_SET:
6522 rtx loc = mo->u.loc;
6523 rtx val, vloc, uloc;
6524 rtx dstv, srcv;
6526 vloc = loc;
6527 uloc = XEXP (vloc, 1);
6528 val = XEXP (vloc, 0);
6529 vloc = uloc;
6531 if (GET_CODE (uloc) == SET)
6533 dstv = SET_DEST (uloc);
6534 srcv = SET_SRC (uloc);
6536 else
6538 dstv = uloc;
6539 srcv = NULL;
6542 if (GET_CODE (val) == CONCAT)
6544 dstv = vloc = XEXP (val, 1);
6545 val = XEXP (val, 0);
6548 if (GET_CODE (vloc) == SET)
6550 srcv = SET_SRC (vloc);
6552 gcc_assert (val != srcv);
6553 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6555 dstv = vloc = SET_DEST (vloc);
6557 if (VAL_NEEDS_RESOLUTION (loc))
6558 val_resolve (out, val, srcv, insn);
6560 else if (VAL_NEEDS_RESOLUTION (loc))
6562 gcc_assert (GET_CODE (uloc) == SET
6563 && GET_CODE (SET_SRC (uloc)) == REG);
6564 val_resolve (out, val, SET_SRC (uloc), insn);
6567 if (VAL_HOLDS_TRACK_EXPR (loc))
6569 if (VAL_EXPR_IS_CLOBBERED (loc))
6571 if (REG_P (uloc))
6572 var_reg_delete (out, uloc, true);
6573 else if (MEM_P (uloc))
6575 gcc_assert (MEM_P (dstv));
6576 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6577 var_mem_delete (out, dstv, true);
6580 else
6582 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6583 rtx src = NULL, dst = uloc;
6584 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6586 if (GET_CODE (uloc) == SET)
6588 src = SET_SRC (uloc);
6589 dst = SET_DEST (uloc);
6592 if (copied_p)
6594 if (flag_var_tracking_uninit)
6596 status = find_src_status (in, src);
6598 if (status == VAR_INIT_STATUS_UNKNOWN)
6599 status = find_src_status (out, src);
6602 src = find_src_set_src (in, src);
6605 if (REG_P (dst))
6606 var_reg_delete_and_set (out, dst, !copied_p,
6607 status, srcv);
6608 else if (MEM_P (dst))
6610 gcc_assert (MEM_P (dstv));
6611 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6612 var_mem_delete_and_set (out, dstv, !copied_p,
6613 status, srcv);
6617 else if (REG_P (uloc))
6618 var_regno_delete (out, REGNO (uloc));
6619 else if (MEM_P (uloc))
6620 clobber_overlapping_mems (out, uloc);
6622 val_store (out, val, dstv, insn, true);
6624 break;
6626 case MO_SET:
6628 rtx loc = mo->u.loc;
6629 rtx set_src = NULL;
6631 if (GET_CODE (loc) == SET)
6633 set_src = SET_SRC (loc);
6634 loc = SET_DEST (loc);
6637 if (REG_P (loc))
6638 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6639 set_src);
6640 else if (MEM_P (loc))
6641 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6642 set_src);
6644 break;
6646 case MO_COPY:
6648 rtx loc = mo->u.loc;
6649 enum var_init_status src_status;
6650 rtx set_src = NULL;
6652 if (GET_CODE (loc) == SET)
6654 set_src = SET_SRC (loc);
6655 loc = SET_DEST (loc);
6658 if (! flag_var_tracking_uninit)
6659 src_status = VAR_INIT_STATUS_INITIALIZED;
6660 else
6662 src_status = find_src_status (in, set_src);
6664 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6665 src_status = find_src_status (out, set_src);
6668 set_src = find_src_set_src (in, set_src);
6670 if (REG_P (loc))
6671 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6672 else if (MEM_P (loc))
6673 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6675 break;
6677 case MO_USE_NO_VAR:
6679 rtx loc = mo->u.loc;
6681 if (REG_P (loc))
6682 var_reg_delete (out, loc, false);
6683 else if (MEM_P (loc))
6684 var_mem_delete (out, loc, false);
6686 break;
6688 case MO_CLOBBER:
6690 rtx loc = mo->u.loc;
6692 if (REG_P (loc))
6693 var_reg_delete (out, loc, true);
6694 else if (MEM_P (loc))
6695 var_mem_delete (out, loc, true);
6697 break;
6699 case MO_ADJUST:
6700 out->stack_adjust += mo->u.adjust;
6701 break;
6705 if (MAY_HAVE_DEBUG_INSNS)
6707 dataflow_set_equiv_regs (out);
6708 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6709 out);
6710 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6711 out);
6712 #if ENABLE_CHECKING
6713 htab_traverse (shared_hash_htab (out->vars),
6714 canonicalize_loc_order_check, out);
6715 #endif
6717 changed = dataflow_set_different (&old_out, out);
6718 dataflow_set_destroy (&old_out);
6719 return changed;
6722 /* Find the locations of variables in the whole function. */
6724 static bool
6725 vt_find_locations (void)
6727 fibheap_t worklist, pending, fibheap_swap;
6728 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6729 basic_block bb;
6730 edge e;
6731 int *bb_order;
6732 int *rc_order;
6733 int i;
6734 int htabsz = 0;
6735 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6736 bool success = true;
6738 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6739 /* Compute the reverse completion order of a depth-first search of the
6740 CFG so that the data-flow analysis converges faster. */
6741 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6742 bb_order = XNEWVEC (int, last_basic_block);
6743 pre_and_rev_post_order_compute (NULL, rc_order, false);
6744 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6745 bb_order[rc_order[i]] = i;
6746 free (rc_order);
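  /* BB_ORDER now maps a basic block index to its position in the reverse
     completion order; it is used below as the key for the worklist
     fibonacci heaps, so blocks are always processed in an order that
     tends to converge quickly. */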
6748 worklist = fibheap_new ();
6749 pending = fibheap_new ();
6750 visited = sbitmap_alloc (last_basic_block);
6751 in_worklist = sbitmap_alloc (last_basic_block);
6752 in_pending = sbitmap_alloc (last_basic_block);
6753 sbitmap_zero (in_worklist);
6755 FOR_EACH_BB (bb)
6756 fibheap_insert (pending, bb_order[bb->index], bb);
6757 sbitmap_ones (in_pending);
6759 while (success && !fibheap_empty (pending))
6761 fibheap_swap = pending;
6762 pending = worklist;
6763 worklist = fibheap_swap;
6764 sbitmap_swap = in_pending;
6765 in_pending = in_worklist;
6766 in_worklist = sbitmap_swap;
6768 sbitmap_zero (visited);
6770 while (!fibheap_empty (worklist))
6772 bb = (basic_block) fibheap_extract_min (worklist);
6773 RESET_BIT (in_worklist, bb->index);
6774 gcc_assert (!TEST_BIT (visited, bb->index));
6775 if (!TEST_BIT (visited, bb->index))
6777 bool changed;
6778 edge_iterator ei;
6779 int oldinsz, oldoutsz;
6781 SET_BIT (visited, bb->index);
6783 if (VTI (bb)->in.vars)
6785 htabsz
6786 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6787 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6788 oldinsz
6789 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6790 oldoutsz
6791 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6793 else
6794 oldinsz = oldoutsz = 0;
6796 if (MAY_HAVE_DEBUG_INSNS)
6798 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6799 bool first = true, adjust = false;
6801 /* Calculate the IN set as the intersection of
6802 predecessor OUT sets. */
6804 dataflow_set_clear (in);
6805 dst_can_be_shared = true;
6807 FOR_EACH_EDGE (e, ei, bb->preds)
6808 if (!VTI (e->src)->flooded)
6809 gcc_assert (bb_order[bb->index]
6810 <= bb_order[e->src->index]);
6811 else if (first)
6813 dataflow_set_copy (in, &VTI (e->src)->out);
6814 first_out = &VTI (e->src)->out;
6815 first = false;
6817 else
6819 dataflow_set_merge (in, &VTI (e->src)->out);
6820 adjust = true;
6823 if (adjust)
6825 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6826 #if ENABLE_CHECKING
6827 /* Merge and merge_adjust should keep entries in
6828 canonical order. */
6829 htab_traverse (shared_hash_htab (in->vars),
6830 canonicalize_loc_order_check,
6831 in);
6832 #endif
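              /* If the merge left IN equivalent to the first predecessor's
                 OUT set, drop the table just built and share that set's
                 hash table instead (shared_hash_copy only bumps a
                 reference count), keeping straight-line code cheap. */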
6833 if (dst_can_be_shared)
6835 shared_hash_destroy (in->vars);
6836 in->vars = shared_hash_copy (first_out->vars);
6840 VTI (bb)->flooded = true;
6842 else
6844 /* Calculate the IN set as the union of predecessor OUT sets. */
6845 dataflow_set_clear (&VTI (bb)->in);
6846 FOR_EACH_EDGE (e, ei, bb->preds)
6847 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6850 changed = compute_bb_dataflow (bb);
6851 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6852 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6854 if (htabmax && htabsz > htabmax)
6856 if (MAY_HAVE_DEBUG_INSNS)
6857 inform (DECL_SOURCE_LOCATION (cfun->decl),
6858 "variable tracking size limit exceeded with "
6859 "-fvar-tracking-assignments, retrying without");
6860 else
6861 inform (DECL_SOURCE_LOCATION (cfun->decl),
6862 "variable tracking size limit exceeded");
6863 success = false;
6864 break;
6867 if (changed)
6869 FOR_EACH_EDGE (e, ei, bb->succs)
6871 if (e->dest == EXIT_BLOCK_PTR)
6872 continue;
6874 if (TEST_BIT (visited, e->dest->index))
6876 if (!TEST_BIT (in_pending, e->dest->index))
6878 /* Send E->DEST to next round. */
6879 SET_BIT (in_pending, e->dest->index);
6880 fibheap_insert (pending,
6881 bb_order[e->dest->index],
6882 e->dest);
6885 else if (!TEST_BIT (in_worklist, e->dest->index))
6887 /* Add E->DEST to current round. */
6888 SET_BIT (in_worklist, e->dest->index);
6889 fibheap_insert (worklist, bb_order[e->dest->index],
6890 e->dest);
6895 if (dump_file)
6896 fprintf (dump_file,
6897 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6898 bb->index,
6899 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6900 oldinsz,
6901 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6902 oldoutsz,
6903 (int)worklist->nodes, (int)pending->nodes, htabsz);
6905 if (dump_file && (dump_flags & TDF_DETAILS))
6907 fprintf (dump_file, "BB %i IN:\n", bb->index);
6908 dump_dataflow_set (&VTI (bb)->in);
6909 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6910 dump_dataflow_set (&VTI (bb)->out);
6916 if (success && MAY_HAVE_DEBUG_INSNS)
6917 FOR_EACH_BB (bb)
6918 gcc_assert (VTI (bb)->flooded);
6920 free (bb_order);
6921 fibheap_delete (worklist);
6922 fibheap_delete (pending);
6923 sbitmap_free (visited);
6924 sbitmap_free (in_worklist);
6925 sbitmap_free (in_pending);
6927 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6928 return success;
6931 /* Print the content of the LIST to dump file. */
6933 static void
6934 dump_attrs_list (attrs list)
6936 for (; list; list = list->next)
6938 if (dv_is_decl_p (list->dv))
6939 print_mem_expr (dump_file, dv_as_decl (list->dv));
6940 else
6941 print_rtl_single (dump_file, dv_as_value (list->dv));
6942 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6944 fprintf (dump_file, "\n");
6947 /* Print the information about variable *SLOT to dump file. */
6949 static int
6950 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6952 variable var = (variable) *slot;
6954 dump_var (var);
6956 /* Continue traversing the hash table. */
6957 return 1;
6960 /* Print the information about variable VAR to dump file. */
6962 static void
6963 dump_var (variable var)
6965 int i;
6966 location_chain node;
6968 if (dv_is_decl_p (var->dv))
6970 const_tree decl = dv_as_decl (var->dv);
6972 if (DECL_NAME (decl))
6974 fprintf (dump_file, " name: %s",
6975 IDENTIFIER_POINTER (DECL_NAME (decl)));
6976 if (dump_flags & TDF_UID)
6977 fprintf (dump_file, "D.%u", DECL_UID (decl));
6979 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6980 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6981 else
6982 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6983 fprintf (dump_file, "\n");
6985 else
6987 fputc (' ', dump_file);
6988 print_rtl_single (dump_file, dv_as_value (var->dv));
6991 for (i = 0; i < var->n_var_parts; i++)
6993 fprintf (dump_file, " offset %ld\n",
6994 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
6995 for (node = var->var_part[i].loc_chain; node; node = node->next)
6997 fprintf (dump_file, " ");
6998 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6999 fprintf (dump_file, "[uninit]");
7000 print_rtl_single (dump_file, node->loc);
7005 /* Print the information about variables from hash table VARS to dump file. */
7007 static void
7008 dump_vars (htab_t vars)
7010 if (htab_elements (vars) > 0)
7012 fprintf (dump_file, "Variables:\n");
7013 htab_traverse (vars, dump_var_slot, NULL);
7017 /* Print the dataflow set SET to dump file. */
7019 static void
7020 dump_dataflow_set (dataflow_set *set)
7022 int i;
7024 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7025 set->stack_adjust);
7026 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7028 if (set->regs[i])
7030 fprintf (dump_file, "Reg %d:", i);
7031 dump_attrs_list (set->regs[i]);
7034 dump_vars (shared_hash_htab (set->vars));
7035 fprintf (dump_file, "\n");
7038 /* Print the IN and OUT sets for each basic block to dump file. */
7040 static void
7041 dump_dataflow_sets (void)
7043 basic_block bb;
7045 FOR_EACH_BB (bb)
7047 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7048 fprintf (dump_file, "IN:\n");
7049 dump_dataflow_set (&VTI (bb)->in);
7050 fprintf (dump_file, "OUT:\n");
7051 dump_dataflow_set (&VTI (bb)->out);
7055 /* Return the variable for DV in dropped_values, inserting one if
7056 requested with INSERT. */
7058 static inline variable
7059 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7061 void **slot;
7062 variable empty_var;
7063 onepart_enum_t onepart;
7065 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
7066 insert);
7068 if (!slot)
7069 return NULL;
7071 if (*slot)
7072 return (variable) *slot;
7074 gcc_checking_assert (insert == INSERT);
7076 onepart = dv_onepart_p (dv);
7078 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7080 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7081 empty_var->dv = dv;
7082 empty_var->refcount = 1;
7083 empty_var->n_var_parts = 0;
7084 empty_var->onepart = onepart;
7085 empty_var->in_changed_variables = false;
7086 empty_var->var_part[0].loc_chain = NULL;
7087 empty_var->var_part[0].cur_loc = NULL;
7088 VAR_LOC_1PAUX (empty_var) = NULL;
7089 set_dv_changed (dv, true);
7091 *slot = empty_var;
7093 return empty_var;
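  /* The entry created here carries no locations (n_var_parts is 0);
     dropped_values exists so that the one-part auxiliary data -- the
     dependency vector and back-links -- of values dropped from the live
     tables can be parked and later recovered, see recover_dropped_1paux
     below. */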
7096 /* Recover the one-part aux from dropped_values. */
7098 static struct onepart_aux *
7099 recover_dropped_1paux (variable var)
7101 variable dvar;
7103 gcc_checking_assert (var->onepart);
7105 if (VAR_LOC_1PAUX (var))
7106 return VAR_LOC_1PAUX (var);
7108 if (var->onepart == ONEPART_VDECL)
7109 return NULL;
7111 dvar = variable_from_dropped (var->dv, NO_INSERT);
7113 if (!dvar)
7114 return NULL;
7116 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7117 VAR_LOC_1PAUX (dvar) = NULL;
7119 return VAR_LOC_1PAUX (var);
7122 /* Add variable VAR to the hash table of changed variables and
7123 if it has no locations delete it from SET's hash table. */
7125 static void
7126 variable_was_changed (variable var, dataflow_set *set)
7128 hashval_t hash = dv_htab_hash (var->dv);
7130 if (emit_notes)
7132 void **slot;
7134 /* Remember this decl or VALUE has been added to changed_variables. */
7135 set_dv_changed (var->dv, true);
7137 slot = htab_find_slot_with_hash (changed_variables,
7138 var->dv,
7139 hash, INSERT);
7141 if (*slot)
7143 variable old_var = (variable) *slot;
7144 gcc_assert (old_var->in_changed_variables);
7145 old_var->in_changed_variables = false;
7146 if (var != old_var && var->onepart)
7148 /* Restore the auxiliary info from an empty variable
7149 previously created for changed_variables, so it is
7150 not lost. */
7151 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7152 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7153 VAR_LOC_1PAUX (old_var) = NULL;
7155 variable_htab_free (*slot);
7158 if (set && var->n_var_parts == 0)
7160 onepart_enum_t onepart = var->onepart;
7161 variable empty_var = NULL;
7162 void **dslot = NULL;
7164 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7166 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
7167 dv_htab_hash (var->dv),
7168 INSERT);
7169 empty_var = (variable) *dslot;
7171 if (empty_var)
7173 gcc_checking_assert (!empty_var->in_changed_variables);
7174 if (!VAR_LOC_1PAUX (var))
7176 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7177 VAR_LOC_1PAUX (empty_var) = NULL;
7179 else
7180 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7184 if (!empty_var)
7186 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7187 empty_var->dv = var->dv;
7188 empty_var->refcount = 1;
7189 empty_var->n_var_parts = 0;
7190 empty_var->onepart = onepart;
7191 if (dslot)
7193 empty_var->refcount++;
7194 *dslot = empty_var;
7197 else
7198 empty_var->refcount++;
7199 empty_var->in_changed_variables = true;
7200 *slot = empty_var;
7201 if (onepart)
7203 empty_var->var_part[0].loc_chain = NULL;
7204 empty_var->var_part[0].cur_loc = NULL;
7205 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7206 VAR_LOC_1PAUX (var) = NULL;
7208 goto drop_var;
7210 else
7212 if (var->onepart && !VAR_LOC_1PAUX (var))
7213 recover_dropped_1paux (var);
7214 var->refcount++;
7215 var->in_changed_variables = true;
7216 *slot = var;
7219 else
7221 gcc_assert (set);
7222 if (var->n_var_parts == 0)
7224 void **slot;
7226 drop_var:
7227 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7228 if (slot)
7230 if (shared_hash_shared (set->vars))
7231 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7232 NO_INSERT);
7233 htab_clear_slot (shared_hash_htab (set->vars), slot);
7239 /* Look for the index in VAR->var_part corresponding to OFFSET.
7240 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7241 referenced int will be set to the index that the part has or should
7242 have, if it should be inserted. */
7244 static inline int
7245 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7246 int *insertion_point)
7248 int pos, low, high;
7250 if (var->onepart)
7252 if (offset != 0)
7253 return -1;
7255 if (insertion_point)
7256 *insertion_point = 0;
7258 return var->n_var_parts - 1;
7261 /* Find the location part. */
7262 low = 0;
7263 high = var->n_var_parts;
7264 while (low != high)
7266 pos = (low + high) / 2;
7267 if (VAR_PART_OFFSET (var, pos) < offset)
7268 low = pos + 1;
7269 else
7270 high = pos;
7272 pos = low;
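      /* LOW == HIGH is now the smallest index whose offset is >= OFFSET,
         i.e. the position the part occupies if present, and otherwise the
         position where it would have to be inserted. */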
7274 if (insertion_point)
7275 *insertion_point = pos;
7277 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7278 return pos;
7280 return -1;
7283 static void **
7284 set_slot_part (dataflow_set *set, rtx loc, void **slot,
7285 decl_or_value dv, HOST_WIDE_INT offset,
7286 enum var_init_status initialized, rtx set_src)
7288 int pos;
7289 location_chain node, next;
7290 location_chain *nextp;
7291 variable var;
7292 onepart_enum_t onepart;
7294 var = (variable) *slot;
7296 if (var)
7297 onepart = var->onepart;
7298 else
7299 onepart = dv_onepart_p (dv);
7301 gcc_checking_assert (offset == 0 || !onepart);
7302 gcc_checking_assert (loc != dv_as_opaque (dv));
7304 if (! flag_var_tracking_uninit)
7305 initialized = VAR_INIT_STATUS_INITIALIZED;
7307 if (!var)
7309 /* Create new variable information. */
7310 var = (variable) pool_alloc (onepart_pool (onepart));
7311 var->dv = dv;
7312 var->refcount = 1;
7313 var->n_var_parts = 1;
7314 var->onepart = onepart;
7315 var->in_changed_variables = false;
7316 if (var->onepart)
7317 VAR_LOC_1PAUX (var) = NULL;
7318 else
7319 VAR_PART_OFFSET (var, 0) = offset;
7320 var->var_part[0].loc_chain = NULL;
7321 var->var_part[0].cur_loc = NULL;
7322 *slot = var;
7323 pos = 0;
7324 nextp = &var->var_part[0].loc_chain;
7326 else if (onepart)
7328 int r = -1, c = 0;
7330 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7332 pos = 0;
7334 if (GET_CODE (loc) == VALUE)
7336 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7337 nextp = &node->next)
7338 if (GET_CODE (node->loc) == VALUE)
7340 if (node->loc == loc)
7342 r = 0;
7343 break;
7345 if (canon_value_cmp (node->loc, loc))
7346 c++;
7347 else
7349 r = 1;
7350 break;
7353 else if (REG_P (node->loc) || MEM_P (node->loc))
7354 c++;
7355 else
7357 r = 1;
7358 break;
7361 else if (REG_P (loc))
7363 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7364 nextp = &node->next)
7365 if (REG_P (node->loc))
7367 if (REGNO (node->loc) < REGNO (loc))
7368 c++;
7369 else
7371 if (REGNO (node->loc) == REGNO (loc))
7372 r = 0;
7373 else
7374 r = 1;
7375 break;
7378 else
7380 r = 1;
7381 break;
7384 else if (MEM_P (loc))
7386 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7387 nextp = &node->next)
7388 if (REG_P (node->loc))
7389 c++;
7390 else if (MEM_P (node->loc))
7392 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7393 break;
7394 else
7395 c++;
7397 else
7399 r = 1;
7400 break;
7403 else
7404 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7405 nextp = &node->next)
7406 if ((r = loc_cmp (node->loc, loc)) >= 0)
7407 break;
7408 else
7409 c++;
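      /* At this point R is 0 if an identical location is already in the
         chain (nothing to do), 1 if the insertion point has been found,
         or still -1 if LOC sorts after every existing node; C counts the
         nodes preceding the insertion point so that NEXTP can be re-found
         after unsharing below.  The canonical chain order kept by these
         scans is: registers (by REGNO), then MEMs (by address), then
         VALUEs (in canon_value_cmp order), then all other expressions (in
         loc_cmp order). */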
7411 if (r == 0)
7412 return slot;
7414 if (shared_var_p (var, set->vars))
7416 slot = unshare_variable (set, slot, var, initialized);
7417 var = (variable)*slot;
7418 for (nextp = &var->var_part[0].loc_chain; c;
7419 nextp = &(*nextp)->next)
7420 c--;
7421 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7424 else
7426 int inspos = 0;
7428 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7430 pos = find_variable_location_part (var, offset, &inspos);
7432 if (pos >= 0)
7434 node = var->var_part[pos].loc_chain;
7436 if (node
7437 && ((REG_P (node->loc) && REG_P (loc)
7438 && REGNO (node->loc) == REGNO (loc))
7439 || rtx_equal_p (node->loc, loc)))
7441 /* LOC is at the beginning of the chain, so there is nothing
7442 to do. */
7443 if (node->init < initialized)
7444 node->init = initialized;
7445 if (set_src != NULL)
7446 node->set_src = set_src;
7448 return slot;
7450 else
7452 /* We have to make a copy of a shared variable. */
7453 if (shared_var_p (var, set->vars))
7455 slot = unshare_variable (set, slot, var, initialized);
7456 var = (variable)*slot;
7460 else
7462 /* We have not found the location part, so a new one will be created. */
7464 /* We have to make a copy of the shared variable. */
7465 if (shared_var_p (var, set->vars))
7467 slot = unshare_variable (set, slot, var, initialized);
7468 var = (variable)*slot;
7471 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7472 thus there are at most MAX_VAR_PARTS different offsets. */
7473 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7474 && (!var->n_var_parts || !onepart));
7476 /* We have to move the elements of the array starting at index
7477 inspos up by one position, to make room for the new part. */
7478 for (pos = var->n_var_parts; pos > inspos; pos--)
7479 var->var_part[pos] = var->var_part[pos - 1];
7481 var->n_var_parts++;
7482 gcc_checking_assert (!onepart);
7483 VAR_PART_OFFSET (var, pos) = offset;
7484 var->var_part[pos].loc_chain = NULL;
7485 var->var_part[pos].cur_loc = NULL;
7488 /* Delete the location from the list. */
7489 nextp = &var->var_part[pos].loc_chain;
7490 for (node = var->var_part[pos].loc_chain; node; node = next)
7492 next = node->next;
7493 if ((REG_P (node->loc) && REG_P (loc)
7494 && REGNO (node->loc) == REGNO (loc))
7495 || rtx_equal_p (node->loc, loc))
7497 /* Save these values, to assign to the new node, before
7498 deleting this one. */
7499 if (node->init > initialized)
7500 initialized = node->init;
7501 if (node->set_src != NULL && set_src == NULL)
7502 set_src = node->set_src;
7503 if (var->var_part[pos].cur_loc == node->loc)
7504 var->var_part[pos].cur_loc = NULL;
7505 pool_free (loc_chain_pool, node);
7506 *nextp = next;
7507 break;
7509 else
7510 nextp = &node->next;
7513 nextp = &var->var_part[pos].loc_chain;
7516 /* Add the location to the beginning. */
7517 node = (location_chain) pool_alloc (loc_chain_pool);
7518 node->loc = loc;
7519 node->init = initialized;
7520 node->set_src = set_src;
7521 node->next = *nextp;
7522 *nextp = node;
7524 /* If no location for this part has been emitted yet, mark the variable as changed so a note will be emitted. */
7525 if (var->var_part[pos].cur_loc == NULL)
7526 variable_was_changed (var, set);
7528 return slot;
7531 /* Set the part of variable's location in the dataflow set SET. The
7532 variable part is specified by variable's declaration in DV and
7533 offset OFFSET and the part's location by LOC. IOPT should be
7534 NO_INSERT if the variable is known to be in SET already and the
7535 variable hash table must not be resized, and INSERT otherwise. */
7537 static void
7538 set_variable_part (dataflow_set *set, rtx loc,
7539 decl_or_value dv, HOST_WIDE_INT offset,
7540 enum var_init_status initialized, rtx set_src,
7541 enum insert_option iopt)
7543 void **slot;
7545 if (iopt == NO_INSERT)
7546 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7547 else
7549 slot = shared_hash_find_slot (set->vars, dv);
7550 if (!slot)
7551 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7553 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7556 /* Remove all recorded register locations for the given variable part
7557 from dataflow set SET, except for those that are identical to LOC.
7558 The variable part is specified by variable's declaration or value
7559 DV and offset OFFSET. */
7561 static void **
7562 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7563 HOST_WIDE_INT offset, rtx set_src)
7565 variable var = (variable) *slot;
7566 int pos = find_variable_location_part (var, offset, NULL);
7568 if (pos >= 0)
7570 location_chain node, next;
7572 /* Remove the register locations from the dataflow set. */
7573 next = var->var_part[pos].loc_chain;
7574 for (node = next; node; node = next)
7576 next = node->next;
7577 if (node->loc != loc
7578 && (!flag_var_tracking_uninit
7579 || !set_src
7580 || MEM_P (set_src)
7581 || !rtx_equal_p (set_src, node->set_src)))
7583 if (REG_P (node->loc))
7585 attrs anode, anext;
7586 attrs *anextp;
7588 /* Remove the variable part from the register's
7589 list, but preserve any other variable parts
7590 that might be regarded as live in that same
7591 register. */
7592 anextp = &set->regs[REGNO (node->loc)];
7593 for (anode = *anextp; anode; anode = anext)
7595 anext = anode->next;
7596 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7597 && anode->offset == offset)
7599 pool_free (attrs_pool, anode);
7600 *anextp = anext;
7602 else
7603 anextp = &anode->next;
7607 slot = delete_slot_part (set, node->loc, slot, offset);
7612 return slot;
7615 /* Remove all recorded register locations for the given variable part
7616 from dataflow set SET, except for those that are identical to LOC.
7617 The variable part is specified by variable's declaration or value
7618 DV and offset OFFSET. */
7620 static void
7621 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7622 HOST_WIDE_INT offset, rtx set_src)
7624 void **slot;
7626 if (!dv_as_opaque (dv)
7627 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7628 return;
7630 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7631 if (!slot)
7632 return;
7634 clobber_slot_part (set, loc, slot, offset, set_src);
7637 /* Delete the part of variable's location from dataflow set SET. The
7638 variable part is specified by its SET->vars slot SLOT and offset
7639 OFFSET and the part's location by LOC. */
7641 static void **
7642 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7643 HOST_WIDE_INT offset)
7645 variable var = (variable) *slot;
7646 int pos = find_variable_location_part (var, offset, NULL);
7648 if (pos >= 0)
7650 location_chain node, next;
7651 location_chain *nextp;
7652 bool changed;
7653 rtx cur_loc;
7655 if (shared_var_p (var, set->vars))
7657 /* If the shared variable actually contains the location
7658 being deleted, we have to make a private copy of it first. */
7659 for (node = var->var_part[pos].loc_chain; node;
7660 node = node->next)
7662 if ((REG_P (node->loc) && REG_P (loc)
7663 && REGNO (node->loc) == REGNO (loc))
7664 || rtx_equal_p (node->loc, loc))
7666 slot = unshare_variable (set, slot, var,
7667 VAR_INIT_STATUS_UNKNOWN);
7668 var = (variable)*slot;
7669 break;
7674 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7675 cur_loc = VAR_LOC_FROM (var);
7676 else
7677 cur_loc = var->var_part[pos].cur_loc;
7679 /* Delete the location part. */
7680 changed = false;
7681 nextp = &var->var_part[pos].loc_chain;
7682 for (node = *nextp; node; node = next)
7684 next = node->next;
7685 if ((REG_P (node->loc) && REG_P (loc)
7686 && REGNO (node->loc) == REGNO (loc))
7687 || rtx_equal_p (node->loc, loc))
7689 /* If we have deleted the location which was last emitted,
7690 we have to emit a new location, so add the variable to the
7691 set of changed variables. */
7692 if (cur_loc == node->loc)
7694 changed = true;
7695 var->var_part[pos].cur_loc = NULL;
7696 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7697 VAR_LOC_FROM (var) = NULL;
7699 pool_free (loc_chain_pool, node);
7700 *nextp = next;
7701 break;
7703 else
7704 nextp = &node->next;
7707 if (var->var_part[pos].loc_chain == NULL)
7709 changed = true;
7710 var->n_var_parts--;
7711 while (pos < var->n_var_parts)
7713 var->var_part[pos] = var->var_part[pos + 1];
7714 pos++;
7717 if (changed)
7718 variable_was_changed (var, set);
7721 return slot;
7724 /* Delete the part of variable's location from dataflow set SET. The
7725 variable part is specified by variable's declaration or value DV
7726 and offset OFFSET and the part's location by LOC. */
7728 static void
7729 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7730 HOST_WIDE_INT offset)
7732 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7733 if (!slot)
7734 return;
7736 delete_slot_part (set, loc, slot, offset);
7739 DEF_VEC_P (variable);
7740 DEF_VEC_ALLOC_P (variable, heap);
7742 DEF_VEC_ALLOC_P_STACK (rtx);
7743 #define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
7745 /* Structure for passing some other parameters to function
7746 vt_expand_loc_callback. */
7747 struct expand_loc_callback_data
7749 /* The variables and values active at this point. */
7750 htab_t vars;
7752 /* Stack of values and debug_exprs under expansion, and their
7753 children. */
7754 VEC (rtx, stack) *expanding;
7756 /* Stack of values and debug_exprs whose expansion hit recursion
7757 cycles. They will have VALUE_RECURSED_INTO marked when added to
7758 this list. This flag will be cleared if any of its dependencies
7759 resolves to a valid location. So, if the flag remains set at the
7760 end of the search, we know no valid location for this one can
7761 possibly exist. */
7762 VEC (rtx, stack) *pending;
7764 /* The maximum depth among the sub-expressions under expansion.
7765 Zero indicates no expansion so far. */
7766 expand_depth depth;
7769 /* Allocate the one-part auxiliary data structure for VAR, with enough
7770 room for COUNT dependencies. */
7772 static void
7773 loc_exp_dep_alloc (variable var, int count)
7775 size_t allocsize;
7777 gcc_checking_assert (var->onepart);
7779 /* We can be called with COUNT == 0 to allocate the data structure
7780 without any dependencies, e.g. for the backlinks only. However,
7781 if we are specifying a COUNT, then the dependency list must have
7782 been emptied before. It would be possible to adjust pointers or
7783 force it empty here, but this is better done at an earlier point
7784 in the algorithm, so we instead leave an assertion to catch
7785 errors. */
7786 gcc_checking_assert (!count
7787 || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7789 if (VAR_LOC_1PAUX (var)
7790 && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
7791 return;
7793 allocsize = offsetof (struct onepart_aux, deps)
7794 + VEC_embedded_size (loc_exp_dep, count);
7796 if (VAR_LOC_1PAUX (var))
7798 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7799 VAR_LOC_1PAUX (var), allocsize);
7800 /* If the reallocation moves the onepaux structure, the
7801 back-pointer to BACKLINKS in the first list member will still
7802 point to its old location. Adjust it. */
7803 if (VAR_LOC_DEP_LST (var))
7804 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7806 else
7808 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7809 *VAR_LOC_DEP_LSTP (var) = NULL;
7810 VAR_LOC_FROM (var) = NULL;
7811 VAR_LOC_DEPTH (var).complexity = 0;
7812 VAR_LOC_DEPTH (var).entryvals = 0;
7814 VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
7817 /* Remove all entries from the vector of active dependencies of VAR,
7818 removing them from the back-links lists too. */
7820 static void
7821 loc_exp_dep_clear (variable var)
7823 while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
7825 loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7826 if (led->next)
7827 led->next->pprev = led->pprev;
7828 if (led->pprev)
7829 *led->pprev = led->next;
7830 VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7834 /* Insert an active dependency from VAR on X to the vector of
7835 dependencies, and add the corresponding back-link to X's list of
7836 back-links in VARS. */
7838 static void
7839 loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7841 decl_or_value dv;
7842 variable xvar;
7843 loc_exp_dep *led;
7845 dv = dv_from_rtx (x);
7847 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7848 an additional look up? */
7849 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7851 if (!xvar)
7853 xvar = variable_from_dropped (dv, NO_INSERT);
7854 gcc_checking_assert (xvar);
7857 /* No point in adding the same backlink more than once. This may
7858 arise if, say, the same value appears in two complex expressions in
7859 the same loc_list, or even more than once in a single
7860 expression. */
7861 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7862 return;
7864 if (var->onepart == NOT_ONEPART)
7865 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
7866 else
7868 VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
7869 led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
7871 led->dv = var->dv;
7872 led->value = x;
7874 loc_exp_dep_alloc (xvar, 0);
7875 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7876 led->next = *led->pprev;
7877 if (led->next)
7878 led->next->pprev = &led->next;
7879 *led->pprev = led;
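  /* The back-links form an intrusive doubly-linked list threaded through
     the loc_exp_dep entries themselves: PPREV points at the forward
     pointer of the predecessor (or at the list head in XVAR's one-part
     aux data), which makes unlinking an entry O(1) without knowing which
     variable owns it. */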
7882 /* Create active dependencies of VAR on COUNT values starting at
7883 VALUE, and corresponding back-links to the entries in VARS. Return
7884 true if we found any pending-recursion results. */
7886 static bool
7887 loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7889 bool pending_recursion = false;
7891 gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
7893 /* Set up dependencies on the COUNT values starting at VALUE,
7894 adding the corresponding back-links in VARS. */
7895 loc_exp_dep_alloc (var, count);
7897 while (count--)
7899 rtx x = *value++;
7901 if (!pending_recursion)
7902 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7904 loc_exp_insert_dep (var, x, vars);
7907 return pending_recursion;
7910 /* Notify the back-links of IVAR that are pending recursion that we
7911 have found a non-NIL value for it, so they are cleared for another
7912 attempt to compute a current location. */
7914 static void
7915 notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7917 loc_exp_dep *led, *next;
7919 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7921 decl_or_value dv = led->dv;
7922 variable var;
7924 next = led->next;
7926 if (dv_is_value_p (dv))
7928 rtx value = dv_as_value (dv);
7930 /* If we have already resolved it, leave it alone. */
7931 if (!VALUE_RECURSED_INTO (value))
7932 continue;
7934 /* Check that VALUE_RECURSED_INTO, true from the test above,
7935 implies NO_LOC_P. */
7936 gcc_checking_assert (NO_LOC_P (value));
7938 /* We won't notify variables that are being expanded,
7939 because their dependency list is cleared before
7940 recursing. */
7941 NO_LOC_P (value) = false;
7942 VALUE_RECURSED_INTO (value) = false;
7944 gcc_checking_assert (dv_changed_p (dv));
7946 else
7948 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
7949 if (!dv_changed_p (dv))
7950 continue;
7953 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7955 if (!var)
7956 var = variable_from_dropped (dv, NO_INSERT);
7958 if (var)
7959 notify_dependents_of_resolved_value (var, vars);
7961 if (next)
7962 next->pprev = led->pprev;
7963 if (led->pprev)
7964 *led->pprev = next;
7965 led->next = NULL;
7966 led->pprev = NULL;
7970 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7971 int max_depth, void *data);
7973 /* Return the combined depth, when one sub-expression evaluated to
7974 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7976 static inline expand_depth
7977 update_depth (expand_depth saved_depth, expand_depth best_depth)
7979 /* If we didn't find anything, stick with what we had. */
7980 if (!best_depth.complexity)
7981 return saved_depth;
7983 /* If we hadn't found anything before, use the depth of the current
7984 expression. Do NOT add one extra level; we want to compute the
7985 maximum depth among sub-expressions. We'll increment it later,
7986 if appropriate. */
7987 if (!saved_depth.complexity)
7988 return best_depth;
7990 /* Combine the entryval count so that regardless of which one we
7991 return, the entryval count is accurate. */
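  /* For example, combining a SAVED_DEPTH of { complexity 2, entryvals 1 }
     with a BEST_DEPTH of { complexity 3, entryvals 0 } yields
     { complexity 3, entryvals 1 }: the entryval counts accumulate, while
     the larger complexity wins below. */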
7992 best_depth.entryvals = saved_depth.entryvals
7993 = best_depth.entryvals + saved_depth.entryvals;
7995 if (saved_depth.complexity < best_depth.complexity)
7996 return best_depth;
7997 else
7998 return saved_depth;
8001 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8002 DATA for cselib expand callback. If PENDRECP is given, indicate in
8003 it whether any sub-expression couldn't be fully evaluated because
8004 it is pending recursion resolution. */
8006 static inline rtx
8007 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8009 struct expand_loc_callback_data *elcd
8010 = (struct expand_loc_callback_data *) data;
8011 location_chain loc, next;
8012 rtx result = NULL;
8013 int first_child, result_first_child, last_child;
8014 bool pending_recursion;
8015 rtx loc_from = NULL;
8016 struct elt_loc_list *cloc = NULL;
8017 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8018 int wanted_entryvals, found_entryvals = 0;
8020 /* Clear all backlinks pointing at this, so that we're not notified
8021 while we're active. */
8022 loc_exp_dep_clear (var);
8024 retry:
8025 if (var->onepart == ONEPART_VALUE)
8027 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8029 gcc_checking_assert (cselib_preserved_value_p (val));
8031 cloc = val->locs;
8034 first_child = result_first_child = last_child
8035 = VEC_length (rtx, elcd->expanding);
8037 wanted_entryvals = found_entryvals;
8039 /* Attempt to expand each available location in turn. */
8040 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8041 loc || cloc; loc = next)
8043 result_first_child = last_child;
8045 if (!loc)
8047 loc_from = cloc->loc;
8048 next = loc;
8049 cloc = cloc->next;
8050 if (unsuitable_loc (loc_from))
8051 continue;
8053 else
8055 loc_from = loc->loc;
8056 next = loc->next;
8059 gcc_checking_assert (!unsuitable_loc (loc_from));
8061 elcd->depth.complexity = elcd->depth.entryvals = 0;
8062 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8063 vt_expand_loc_callback, data);
8064 last_child = VEC_length (rtx, elcd->expanding);
8066 if (result)
8068 depth = elcd->depth;
8070 gcc_checking_assert (depth.complexity
8071 || result_first_child == last_child);
8073 if (last_child - result_first_child != 1)
8075 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8076 depth.entryvals++;
8077 depth.complexity++;
8080 if (depth.complexity <= EXPR_USE_DEPTH)
8082 if (depth.entryvals <= wanted_entryvals)
8083 break;
8084 else if (!found_entryvals || depth.entryvals < found_entryvals)
8085 found_entryvals = depth.entryvals;
8088 result = NULL;
8091 /* Set it up in case we leave the loop. */
8092 depth.complexity = depth.entryvals = 0;
8093 loc_from = NULL;
8094 result_first_child = first_child;
8097 if (!loc_from && wanted_entryvals < found_entryvals)
8099 /* We found entries with ENTRY_VALUEs and skipped them. Since
8100 we could not find any expansions without ENTRY_VALUEs, but we
8101 found at least one with them, go back and get an entry with
8102 the minimum ENTRY_VALUE count that we found. We could
8103 avoid looping, but since each sub-loc is already resolved,
8104 the re-expansion should be trivial. ??? Should we record all
8105 attempted locs as dependencies, so that we retry the
8106 expansion should any of them change, in the hope it can give
8107 us a new entry without an ENTRY_VALUE? */
8108 VEC_truncate (rtx, elcd->expanding, first_child);
8109 goto retry;
8112 /* Register all encountered dependencies as active. */
8113 pending_recursion = loc_exp_dep_set
8114 (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
8115 last_child - result_first_child, elcd->vars);
8117 VEC_truncate (rtx, elcd->expanding, first_child);
8119 /* Record where the expansion came from. */
8120 gcc_checking_assert (!result || !pending_recursion);
8121 VAR_LOC_FROM (var) = loc_from;
8122 VAR_LOC_DEPTH (var) = depth;
8124 gcc_checking_assert (!depth.complexity == !result);
8126 elcd->depth = update_depth (saved_depth, depth);
8128 /* Indicate whether any of the dependencies are pending recursion
8129 resolution. */
8130 if (pendrecp)
8131 *pendrecp = pending_recursion;
8133 if (!pendrecp || !pending_recursion)
8134 var->var_part[0].cur_loc = result;
8136 return result;
8139 /* Callback for cselib_expand_value, that looks for expressions
8140 holding the value in the var-tracking hash tables. Return X for
8141 standard processing, anything else is to be used as-is. */
8143 static rtx
8144 vt_expand_loc_callback (rtx x, bitmap regs,
8145 int max_depth ATTRIBUTE_UNUSED,
8146 void *data)
8148 struct expand_loc_callback_data *elcd
8149 = (struct expand_loc_callback_data *) data;
8150 decl_or_value dv;
8151 variable var;
8152 rtx result, subreg;
8153 bool pending_recursion = false;
8154 bool from_empty = false;
8156 switch (GET_CODE (x))
8158 case SUBREG:
8159 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8160 EXPR_DEPTH,
8161 vt_expand_loc_callback, data);
8163 if (!subreg)
8164 return NULL;
8166 result = simplify_gen_subreg (GET_MODE (x), subreg,
8167 GET_MODE (SUBREG_REG (x)),
8168 SUBREG_BYTE (x));
8170 /* Invalid SUBREGs are ok in debug info. ??? We could try
8171 alternate expansions for the VALUE as well. */
8172 if (!result)
8173 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8175 return result;
8177 case DEBUG_EXPR:
8178 case VALUE:
8179 dv = dv_from_rtx (x);
8180 break;
8182 default:
8183 return x;
8186 VEC_safe_push (rtx, stack, elcd->expanding, x);
8188 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8189 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8191 if (NO_LOC_P (x))
8193 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8194 return NULL;
8197 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
8199 if (!var)
8201 from_empty = true;
8202 var = variable_from_dropped (dv, INSERT);
8205 gcc_checking_assert (var);
8207 if (!dv_changed_p (dv))
8209 gcc_checking_assert (!NO_LOC_P (x));
8210 gcc_checking_assert (var->var_part[0].cur_loc);
8211 gcc_checking_assert (VAR_LOC_1PAUX (var));
8212 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8214 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8216 return var->var_part[0].cur_loc;
8219 VALUE_RECURSED_INTO (x) = true;
8220 /* This is tentative, but it makes some tests simpler. */
8221 NO_LOC_P (x) = true;
8223 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8225 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8227 if (pending_recursion)
8229 gcc_checking_assert (!result);
8230 VEC_safe_push (rtx, stack, elcd->pending, x);
8232 else
8234 NO_LOC_P (x) = !result;
8235 VALUE_RECURSED_INTO (x) = false;
8236 set_dv_changed (dv, false);
8238 if (result)
8239 notify_dependents_of_resolved_value (var, elcd->vars);
8242 return result;
8245 /* While expanding variables, we may encounter recursion cycles
8246 because of mutual (possibly indirect) dependencies between two
8247 particular variables (or values), say A and B. If we're trying to
8248 expand A when we get to B, which in turn attempts to expand A, if
8249 we can't find any other expansion for B, we'll add B to this
8250 pending-recursion stack, and tentatively return NULL for its
8251 location. This tentative value will be used for any other
8252 occurrences of B, unless A gets some other location, in which case
8253 it will notify B that it is worth another try at computing a
8254 location for it, and it will use the location computed for A then.
8255 At the end of the expansion, the tentative NULL locations become
8256 final for all members of PENDING that didn't get a notification.
8257 This function performs this finalization of NULL locations. */
8259 static void
8260 resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
8262 while (!VEC_empty (rtx, pending))
8264 rtx x = VEC_pop (rtx, pending);
8265 decl_or_value dv;
8267 if (!VALUE_RECURSED_INTO (x))
8268 continue;
8270 gcc_checking_assert (NO_LOC_P (x));
8271 VALUE_RECURSED_INTO (x) = false;
8272 dv = dv_from_rtx (x);
8273 gcc_checking_assert (dv_changed_p (dv));
8274 set_dv_changed (dv, false);
8278 /* Initialize expand_loc_callback_data D with variable hash table V.
8279 It must be a macro because of alloca (VEC stack). */
8280 #define INIT_ELCD(d, v) \
8281 do \
8283 (d).vars = (v); \
8284 (d).expanding = VEC_alloc (rtx, stack, 4); \
8285 (d).pending = VEC_alloc (rtx, stack, 4); \
8286 (d).depth.complexity = (d).depth.entryvals = 0; \
8288 while (0)
8289 /* Finalize expand_loc_callback_data D, resolved to location L. */
8290 #define FINI_ELCD(d, l) \
8291 do \
8293 resolve_expansions_pending_recursion ((d).pending); \
8294 VEC_free (rtx, stack, (d).pending); \
8295 VEC_free (rtx, stack, (d).expanding); \
8297 if ((l) && MEM_P (l)) \
8298 (l) = targetm.delegitimize_address (l); \
8300 while (0)
8302 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8303 equivalences in VARS, updating their CUR_LOCs in the process. */
8305 static rtx
8306 vt_expand_loc (rtx loc, htab_t vars)
8308 struct expand_loc_callback_data data;
8309 rtx result;
8311 if (!MAY_HAVE_DEBUG_INSNS)
8312 return loc;
8314 INIT_ELCD (data, vars);
8316 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8317 vt_expand_loc_callback, &data);
8319 FINI_ELCD (data, result);
8321 return result;
8324 /* Expand the one-part VARiable to a location, using the equivalences
8325 in VARS, updating their CUR_LOCs in the process. */
8327 static rtx
8328 vt_expand_1pvar (variable var, htab_t vars)
8330 struct expand_loc_callback_data data;
8331 rtx loc;
8333 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8335 if (!dv_changed_p (var->dv))
8336 return var->var_part[0].cur_loc;
8338 INIT_ELCD (data, vars);
8340 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8342 gcc_checking_assert (VEC_empty (rtx, data.expanding));
8344 FINI_ELCD (data, loc);
8346 return loc;
8349 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8350 additional parameters: WHERE specifies whether the note shall be emitted
8351 before or after instruction INSN. */
8353 static int
8354 emit_note_insn_var_location (void **varp, void *data)
8356 variable var = (variable) *varp;
8357 rtx insn = ((emit_note_data *)data)->insn;
8358 enum emit_note_where where = ((emit_note_data *)data)->where;
8359 htab_t vars = ((emit_note_data *)data)->vars;
8360 rtx note, note_vl;
8361 int i, j, n_var_parts;
8362 bool complete;
8363 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8364 HOST_WIDE_INT last_limit;
8365 tree type_size_unit;
8366 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8367 rtx loc[MAX_VAR_PARTS];
8368 tree decl;
8369 location_chain lc;
8371 gcc_checking_assert (var->onepart == NOT_ONEPART
8372 || var->onepart == ONEPART_VDECL);
8374 decl = dv_as_decl (var->dv);
8376 complete = true;
8377 last_limit = 0;
8378 n_var_parts = 0;
8379 if (!var->onepart)
8380 for (i = 0; i < var->n_var_parts; i++)
8381 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8382 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8383 for (i = 0; i < var->n_var_parts; i++)
8385 enum machine_mode mode, wider_mode;
8386 rtx loc2;
8387 HOST_WIDE_INT offset;
8389 if (i == 0 && var->onepart)
8391 gcc_checking_assert (var->n_var_parts == 1);
8392 offset = 0;
8393 initialized = VAR_INIT_STATUS_INITIALIZED;
8394 loc2 = vt_expand_1pvar (var, vars);
8396 else
8398 if (last_limit < VAR_PART_OFFSET (var, i))
8400 complete = false;
8401 break;
8403 else if (last_limit > VAR_PART_OFFSET (var, i))
8404 continue;
8405 offset = VAR_PART_OFFSET (var, i);
8406 loc2 = var->var_part[i].cur_loc;
8407 if (loc2 && GET_CODE (loc2) == MEM
8408 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8410 rtx depval = XEXP (loc2, 0);
8412 loc2 = vt_expand_loc (loc2, vars);
8414 if (loc2)
8415 loc_exp_insert_dep (var, depval, vars);
8417 if (!loc2)
8419 complete = false;
8420 continue;
8422 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8423 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8424 if (var->var_part[i].cur_loc == lc->loc)
8426 initialized = lc->init;
8427 break;
8429 gcc_assert (lc);
8432 offsets[n_var_parts] = offset;
8433 if (!loc2)
8435 complete = false;
8436 continue;
8438 loc[n_var_parts] = loc2;
8439 mode = GET_MODE (var->var_part[i].cur_loc);
8440 if (mode == VOIDmode && var->onepart)
8441 mode = DECL_MODE (decl);
8442 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8444 /* Attempt to merge adjacent registers or memory. */
8445 wider_mode = GET_MODE_WIDER_MODE (mode);
8446 for (j = i + 1; j < var->n_var_parts; j++)
8447 if (last_limit <= VAR_PART_OFFSET (var, j))
8448 break;
8449 if (j < var->n_var_parts
8450 && wider_mode != VOIDmode
8451 && var->var_part[j].cur_loc
8452 && mode == GET_MODE (var->var_part[j].cur_loc)
8453 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8454 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8455 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8456 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8458 rtx new_loc = NULL;
8460 if (REG_P (loc[n_var_parts])
8461 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8462 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8463 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8464 == REGNO (loc2))
8466 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8467 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8468 mode, 0);
8469 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8470 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8471 if (new_loc)
8473 if (!REG_P (new_loc)
8474 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8475 new_loc = NULL;
8476 else
8477 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8480 else if (MEM_P (loc[n_var_parts])
8481 && GET_CODE (XEXP (loc2, 0)) == PLUS
8482 && REG_P (XEXP (XEXP (loc2, 0), 0))
8483 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8485 if ((REG_P (XEXP (loc[n_var_parts], 0))
8486 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8487 XEXP (XEXP (loc2, 0), 0))
8488 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8489 == GET_MODE_SIZE (mode))
8490 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8491 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8492 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8493 XEXP (XEXP (loc2, 0), 0))
8494 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8495 + GET_MODE_SIZE (mode)
8496 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8497 new_loc = adjust_address_nv (loc[n_var_parts],
8498 wider_mode, 0);
8501 if (new_loc)
8503 loc[n_var_parts] = new_loc;
8504 mode = wider_mode;
8505 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8506 i = j;
8509 ++n_var_parts;
8511 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8512 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8513 complete = false;
8515 if (! flag_var_tracking_uninit)
8516 initialized = VAR_INIT_STATUS_INITIALIZED;
8518 note_vl = NULL_RTX;
8519 if (!complete)
8520 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8521 (int) initialized);
8522 else if (n_var_parts == 1)
8524 rtx expr_list;
8526 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8527 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8528 else
8529 expr_list = loc[0];
8531 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8532 (int) initialized);
8534 else if (n_var_parts)
8536 rtx parallel;
8538 for (i = 0; i < n_var_parts; i++)
8539 loc[i]
8540 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8542 parallel = gen_rtx_PARALLEL (VOIDmode,
8543 gen_rtvec_v (n_var_parts, loc));
8544 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8545 parallel, (int) initialized);
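/* So the NOTE_VAR_LOCATION payload built above is roughly one of:
     (var_location DECL (nil))                     <- location unknown
     (var_location DECL LOC)                       <- one part at offset 0
     (var_location DECL (expr_list LOC (const_int OFF)))
     (var_location DECL (parallel [(expr_list ...) ...]))  <- several parts  */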
8548 if (where != EMIT_NOTE_BEFORE_INSN)
8550 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8551 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8552 NOTE_DURING_CALL_P (note) = true;
8554 else
8556 /* Make sure that the call-related notes come first. */
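/* I.e., keep the insn stream shaped roughly like
     (call_insn ...)
     NOTE_INSN_CALL_ARG_LOCATION
     NOTE_INSN_VAR_LOCATION  [during call]
     NOTE_INSN_VAR_LOCATION  [this note]
   by skipping past any call-related notes before emitting.  */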
8557 while (NEXT_INSN (insn)
8558 && NOTE_P (insn)
8559 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8560 && NOTE_DURING_CALL_P (insn))
8561 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8562 insn = NEXT_INSN (insn);
8563 if (NOTE_P (insn)
8564 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8565 && NOTE_DURING_CALL_P (insn))
8566 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8567 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8568 else
8569 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8571 NOTE_VAR_LOCATION (note) = note_vl;
8573 set_dv_changed (var->dv, false);
8574 gcc_assert (var->in_changed_variables);
8575 var->in_changed_variables = false;
8576 htab_clear_slot (changed_variables, varp);
8578 /* Continue traversing the hash table. */
8579 return 1;
8582 /* While traversing changed_variables, push onto DATA (a stack of RTX
8583 values) entries that aren't user variables. */
8585 static int
8586 values_to_stack (void **slot, void *data)
8588 VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
8589 variable var = (variable) *slot;
8591 if (var->onepart == ONEPART_VALUE)
8592 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
8593 else if (var->onepart == ONEPART_DEXPR)
8594 VEC_safe_push (rtx, stack, *changed_values_stack,
8595 DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8597 return 1;
8600 /* Remove from changed_variables the entry whose DV corresponds to
8601 value or debug_expr VAL. */
8602 static void
8603 remove_value_from_changed_variables (rtx val)
8605 decl_or_value dv = dv_from_rtx (val);
8606 void **slot;
8607 variable var;
8609 slot = htab_find_slot_with_hash (changed_variables,
8610 dv, dv_htab_hash (dv), NO_INSERT);
8611 var = (variable) *slot;
8612 var->in_changed_variables = false;
8613 htab_clear_slot (changed_variables, slot);
8616 /* If VAL (a value or debug_expr) has backlinks to variables actively
8617 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8618 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8619 have dependencies of their own to notify. */
8621 static void
8622 notify_dependents_of_changed_value (rtx val, htab_t htab,
8623 VEC (rtx, stack) **changed_values_stack)
8625 void **slot;
8626 variable var;
8627 loc_exp_dep *led;
8628 decl_or_value dv = dv_from_rtx (val);
8630 slot = htab_find_slot_with_hash (changed_variables,
8631 dv, dv_htab_hash (dv), NO_INSERT);
8632 if (!slot)
8633 slot = htab_find_slot_with_hash (htab,
8634 dv, dv_htab_hash (dv), NO_INSERT);
8635 if (!slot)
8636 slot = htab_find_slot_with_hash (dropped_values,
8637 dv, dv_htab_hash (dv), NO_INSERT);
8638 var = (variable) *slot;
8640 while ((led = VAR_LOC_DEP_LST (var)))
8642 decl_or_value ldv = led->dv;
8643 variable ivar;
8645 /* Deactivate and remove the backlink, as it was "used up". It
8646 makes no sense to attempt to notify the same entity again:
8647 either it will be recomputed and re-register an active
8648 dependency, or it will still have the changed mark. */
8649 if (led->next)
8650 led->next->pprev = led->pprev;
8651 if (led->pprev)
8652 *led->pprev = led->next;
8653 led->next = NULL;
8654 led->pprev = NULL;
8656 if (dv_changed_p (ldv))
8657 continue;
8659 switch (dv_onepart_p (ldv))
8661 case ONEPART_VALUE:
8662 case ONEPART_DEXPR:
8663 set_dv_changed (ldv, true);
8664 VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
8665 break;
8667 case ONEPART_VDECL:
8668 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8669 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8670 variable_was_changed (ivar, NULL);
8671 break;
8673 case NOT_ONEPART:
8674 pool_free (loc_exp_dep_pool, led);
8675 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8676 if (ivar)
8678 int i = ivar->n_var_parts;
8679 while (i--)
8681 rtx loc = ivar->var_part[i].cur_loc;
8683 if (loc && GET_CODE (loc) == MEM
8684 && XEXP (loc, 0) == val)
8686 variable_was_changed (ivar, NULL);
8687 break;
8691 break;
8693 default:
8694 gcc_unreachable ();
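/* The backlink removal above follows the "pprev" doubly-linked list
   idiom: each node records the address of the pointer that points at
   it, so a node can unlink itself in O(1) without knowing the list
   head.  A minimal self-contained sketch of the idiom (illustration
   only; 'node' and 'unlink_node' are not part of this file):  */
#if 0
struct node
{
  struct node *next;
  struct node **pprev;  /* Address of the preceding 'next' field
                           (or of the list head) that points at us.  */
};
static void
unlink_node (struct node *n)
{
  if (n->next)
    n->next->pprev = n->pprev;
  if (n->pprev)
    *n->pprev = n->next;
  n->next = NULL;
  n->pprev = NULL;
}
#endif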
8699 /* Take out of changed_variables any entries that don't refer to user
8700 variables. Back-propagate change notifications from values and
8701 debug_exprs to their active dependencies in HTAB or in
8702 CHANGED_VARIABLES. */
8704 static void
8705 process_changed_values (htab_t htab)
8707 int i, n;
8708 rtx val;
8709 VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
8711 /* Move values from changed_variables to changed_values_stack. */
8712 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
8714 /* Back-propagate change notifications in values while popping
8715 them from the stack. */
8716 for (n = i = VEC_length (rtx, changed_values_stack);
8717 i > 0; i = VEC_length (rtx, changed_values_stack))
8719 val = VEC_pop (rtx, changed_values_stack);
8720 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8722 /* This condition will hold when visiting each of the entries
8723 originally in changed_variables. We can't remove them
8724 earlier because this could drop the backlinks before we got a
8725 chance to use them. */
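/* Worked example: with originals [A, B] on the stack, where
   notifying B pushes a dependent C: i=n=2, pop B, push C, B is
   removed (n=1); then i=2>n, pop C, nothing removed; then i=n=1,
   pop A, A is removed (n=0).  The test below thus fires exactly
   for the originally queued entries.  */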
8726 if (i == n)
8728 remove_value_from_changed_variables (val);
8729 n--;
8733 VEC_free (rtx, stack, changed_values_stack);
8736 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
8737 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8738 the notes shall be emitted before or after instruction INSN. */
8740 static void
8741 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8742 shared_hash vars)
8744 emit_note_data data;
8745 htab_t htab = shared_hash_htab (vars);
8747 if (!htab_elements (changed_variables))
8748 return;
8750 if (MAY_HAVE_DEBUG_INSNS)
8751 process_changed_values (htab);
8753 data.insn = insn;
8754 data.where = where;
8755 data.vars = htab;
8757 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8760 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8761 same variable in hash table DATA or is not there at all. */
8763 static int
8764 emit_notes_for_differences_1 (void **slot, void *data)
8766 htab_t new_vars = (htab_t) data;
8767 variable old_var, new_var;
8769 old_var = (variable) *slot;
8770 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8771 dv_htab_hash (old_var->dv));
8773 if (!new_var)
8775 /* Variable has disappeared. */
8776 variable empty_var = NULL;
8778 if (old_var->onepart == ONEPART_VALUE
8779 || old_var->onepart == ONEPART_DEXPR)
8781 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8782 if (empty_var)
8784 gcc_checking_assert (!empty_var->in_changed_variables);
8785 if (!VAR_LOC_1PAUX (old_var))
8787 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8788 VAR_LOC_1PAUX (empty_var) = NULL;
8790 else
8791 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8795 if (!empty_var)
8797 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8798 empty_var->dv = old_var->dv;
8799 empty_var->refcount = 0;
8800 empty_var->n_var_parts = 0;
8801 empty_var->onepart = old_var->onepart;
8802 empty_var->in_changed_variables = false;
8805 if (empty_var->onepart)
8807 /* Propagate the auxiliary data to (ultimately)
8808 changed_variables. */
8809 empty_var->var_part[0].loc_chain = NULL;
8810 empty_var->var_part[0].cur_loc = NULL;
8811 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8812 VAR_LOC_1PAUX (old_var) = NULL;
8814 variable_was_changed (empty_var, NULL);
8815 /* Continue traversing the hash table. */
8816 return 1;
8818 /* Update cur_loc and one-part auxiliary data, before new_var goes
8819 through variable_was_changed. */
8820 if (old_var != new_var && new_var->onepart)
8822 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8823 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8824 VAR_LOC_1PAUX (old_var) = NULL;
8825 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8827 if (variable_different_p (old_var, new_var))
8828 variable_was_changed (new_var, NULL);
8830 /* Continue traversing the hash table. */
8831 return 1;
8834 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8835 table DATA. */
8837 static int
8838 emit_notes_for_differences_2 (void **slot, void *data)
8840 htab_t old_vars = (htab_t) data;
8841 variable old_var, new_var;
8843 new_var = (variable) *slot;
8844 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8845 dv_htab_hash (new_var->dv));
8846 if (!old_var)
8848 int i;
8849 for (i = 0; i < new_var->n_var_parts; i++)
8850 new_var->var_part[i].cur_loc = NULL;
8851 variable_was_changed (new_var, NULL);
8854 /* Continue traversing the hash table. */
8855 return 1;
8858 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8859 NEW_SET. */
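/* The first traversal below flags variables that disappeared or
   changed between OLD_SET and NEW_SET; the second flags variables
   that appear only in NEW_SET.  */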
8861 static void
8862 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8863 dataflow_set *new_set)
8865 htab_traverse (shared_hash_htab (old_set->vars),
8866 emit_notes_for_differences_1,
8867 shared_hash_htab (new_set->vars));
8868 htab_traverse (shared_hash_htab (new_set->vars),
8869 emit_notes_for_differences_2,
8870 shared_hash_htab (old_set->vars));
8871 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8874 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8876 static rtx
8877 next_non_note_insn_var_location (rtx insn)
8879 while (insn)
8881 insn = NEXT_INSN (insn);
8882 if (insn == 0
8883 || !NOTE_P (insn)
8884 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8885 break;
8888 return insn;
8891 /* Emit the notes for changes of location parts in the basic block BB. */
8893 static void
8894 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8896 unsigned int i;
8897 micro_operation *mo;
8899 dataflow_set_clear (set);
8900 dataflow_set_copy (set, &VTI (bb)->in);
8902 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8904 rtx insn = mo->insn;
8905 rtx next_insn = next_non_note_insn_var_location (insn);
8907 switch (mo->type)
8909 case MO_CALL:
8910 dataflow_set_clear_at_call (set);
8911 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8913 rtx arguments = mo->u.loc, *p = &arguments, note;
8914 while (*p)
8916 XEXP (XEXP (*p, 0), 1)
8917 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8918 shared_hash_htab (set->vars));
8919 /* If expansion is successful, keep it in the list. */
8920 if (XEXP (XEXP (*p, 0), 1))
8921 p = &XEXP (*p, 1);
8922 /* Otherwise, if the following item is the data value for it,
8923 drop it too. */
8924 else if (XEXP (*p, 1)
8925 && REG_P (XEXP (XEXP (*p, 0), 0))
8926 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8927 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8929 && REGNO (XEXP (XEXP (*p, 0), 0))
8930 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8931 0), 0)))
8932 *p = XEXP (XEXP (*p, 1), 1);
8933 /* Just drop this item. */
8934 else
8935 *p = XEXP (*p, 1);
8937 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8938 NOTE_VAR_LOCATION (note) = arguments;
8940 break;
8942 case MO_USE:
8944 rtx loc = mo->u.loc;
8946 if (REG_P (loc))
8947 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8948 else
8949 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8951 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8953 break;
8955 case MO_VAL_LOC:
8957 rtx loc = mo->u.loc;
8958 rtx val, vloc;
8959 tree var;
8961 if (GET_CODE (loc) == CONCAT)
8963 val = XEXP (loc, 0);
8964 vloc = XEXP (loc, 1);
8966 else
8968 val = NULL_RTX;
8969 vloc = loc;
8972 var = PAT_VAR_LOCATION_DECL (vloc);
8974 clobber_variable_part (set, NULL_RTX,
8975 dv_from_decl (var), 0, NULL_RTX);
8976 if (val)
8978 if (VAL_NEEDS_RESOLUTION (loc))
8979 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8980 set_variable_part (set, val, dv_from_decl (var), 0,
8981 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8982 INSERT);
8984 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8985 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8986 dv_from_decl (var), 0,
8987 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8988 INSERT);
8990 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8992 break;
8994 case MO_VAL_USE:
8996 rtx loc = mo->u.loc;
8997 rtx val, vloc, uloc;
8999 vloc = uloc = XEXP (loc, 1);
9000 val = XEXP (loc, 0);
9002 if (GET_CODE (val) == CONCAT)
9004 uloc = XEXP (val, 1);
9005 val = XEXP (val, 0);
9008 if (VAL_NEEDS_RESOLUTION (loc))
9009 val_resolve (set, val, vloc, insn);
9010 else
9011 val_store (set, val, uloc, insn, false);
9013 if (VAL_HOLDS_TRACK_EXPR (loc))
9015 if (GET_CODE (uloc) == REG)
9016 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9017 NULL);
9018 else if (GET_CODE (uloc) == MEM)
9019 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9020 NULL);
9023 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9025 break;
9027 case MO_VAL_SET:
9029 rtx loc = mo->u.loc;
9030 rtx val, vloc, uloc;
9031 rtx dstv, srcv;
9033 vloc = loc;
9034 uloc = XEXP (vloc, 1);
9035 val = XEXP (vloc, 0);
9036 vloc = uloc;
9038 if (GET_CODE (uloc) == SET)
9040 dstv = SET_DEST (uloc);
9041 srcv = SET_SRC (uloc);
9043 else
9045 dstv = uloc;
9046 srcv = NULL;
9049 if (GET_CODE (val) == CONCAT)
9051 dstv = vloc = XEXP (val, 1);
9052 val = XEXP (val, 0);
9055 if (GET_CODE (vloc) == SET)
9057 srcv = SET_SRC (vloc);
9059 gcc_assert (val != srcv);
9060 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9062 dstv = vloc = SET_DEST (vloc);
9064 if (VAL_NEEDS_RESOLUTION (loc))
9065 val_resolve (set, val, srcv, insn);
9067 else if (VAL_NEEDS_RESOLUTION (loc))
9069 gcc_assert (GET_CODE (uloc) == SET
9070 && GET_CODE (SET_SRC (uloc)) == REG);
9071 val_resolve (set, val, SET_SRC (uloc), insn);
9074 if (VAL_HOLDS_TRACK_EXPR (loc))
9076 if (VAL_EXPR_IS_CLOBBERED (loc))
9078 if (REG_P (uloc))
9079 var_reg_delete (set, uloc, true);
9080 else if (MEM_P (uloc))
9082 gcc_assert (MEM_P (dstv));
9083 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9084 var_mem_delete (set, dstv, true);
9087 else
9089 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9090 rtx src = NULL, dst = uloc;
9091 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9093 if (GET_CODE (uloc) == SET)
9095 src = SET_SRC (uloc);
9096 dst = SET_DEST (uloc);
9099 if (copied_p)
9101 status = find_src_status (set, src);
9103 src = find_src_set_src (set, src);
9106 if (REG_P (dst))
9107 var_reg_delete_and_set (set, dst, !copied_p,
9108 status, srcv);
9109 else if (MEM_P (dst))
9111 gcc_assert (MEM_P (dstv));
9112 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9113 var_mem_delete_and_set (set, dstv, !copied_p,
9114 status, srcv);
9118 else if (REG_P (uloc))
9119 var_regno_delete (set, REGNO (uloc));
9120 else if (MEM_P (uloc))
9121 clobber_overlapping_mems (set, uloc);
9123 val_store (set, val, dstv, insn, true);
9125 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9126 set->vars);
9128 break;
9130 case MO_SET:
9132 rtx loc = mo->u.loc;
9133 rtx set_src = NULL;
9135 if (GET_CODE (loc) == SET)
9137 set_src = SET_SRC (loc);
9138 loc = SET_DEST (loc);
9141 if (REG_P (loc))
9142 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9143 set_src);
9144 else
9145 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9146 set_src);
9148 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9149 set->vars);
9151 break;
9153 case MO_COPY:
9155 rtx loc = mo->u.loc;
9156 enum var_init_status src_status;
9157 rtx set_src = NULL;
9159 if (GET_CODE (loc) == SET)
9161 set_src = SET_SRC (loc);
9162 loc = SET_DEST (loc);
9165 src_status = find_src_status (set, set_src);
9166 set_src = find_src_set_src (set, set_src);
9168 if (REG_P (loc))
9169 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9170 else
9171 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9173 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9174 set->vars);
9176 break;
9178 case MO_USE_NO_VAR:
9180 rtx loc = mo->u.loc;
9182 if (REG_P (loc))
9183 var_reg_delete (set, loc, false);
9184 else
9185 var_mem_delete (set, loc, false);
9187 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9189 break;
9191 case MO_CLOBBER:
9193 rtx loc = mo->u.loc;
9195 if (REG_P (loc))
9196 var_reg_delete (set, loc, true);
9197 else
9198 var_mem_delete (set, loc, true);
9200 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9201 set->vars);
9203 break;
9205 case MO_ADJUST:
9206 set->stack_adjust += mo->u.adjust;
9207 break;
9212 /* Emit notes for the whole function. */
9214 static void
9215 vt_emit_notes (void)
9217 basic_block bb;
9218 dataflow_set cur;
9220 gcc_assert (!htab_elements (changed_variables));
9222 /* Free memory occupied by the out hash tables, as they aren't used
9223 anymore. */
9224 FOR_EACH_BB (bb)
9225 dataflow_set_clear (&VTI (bb)->out);
9227 /* Enable emitting notes by functions (mainly by set_variable_part and
9228 delete_variable_part). */
9229 emit_notes = true;
9231 if (MAY_HAVE_DEBUG_INSNS)
9233 dropped_values = htab_create (cselib_get_next_uid () * 2,
9234 variable_htab_hash, variable_htab_eq,
9235 variable_htab_free);
9236 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9237 sizeof (loc_exp_dep), 64);
9240 dataflow_set_init (&cur);
9242 FOR_EACH_BB (bb)
9244 /* Emit the notes for changes of variable locations between two
9245 consecutive basic blocks. */
9246 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9248 /* Emit the notes for the changes in the basic block itself. */
9249 emit_notes_in_bb (bb, &cur);
9251 /* Free memory occupied by the in hash table, we won't need it
9252 again. */
9253 dataflow_set_clear (&VTI (bb)->in);
9255 #ifdef ENABLE_CHECKING
9256 htab_traverse (shared_hash_htab (cur.vars),
9257 emit_notes_for_differences_1,
9258 shared_hash_htab (empty_shared_hash));
9259 #endif
9260 dataflow_set_destroy (&cur);
9262 if (MAY_HAVE_DEBUG_INSNS)
9264 free_alloc_pool (loc_exp_dep_pool);
9265 loc_exp_dep_pool = NULL;
9266 htab_delete (dropped_values);
9269 emit_notes = false;
9272 /* If there is a declaration and offset associated with register/memory RTL,
9273 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
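/* E.g., a REG whose REG_ATTRS record decl x at offset 4 yields
   *DECLP == x and *OFFSETP == 4; for a MEM the same data comes from
   its MEM_ATTRS (MEM_EXPR and INT_MEM_OFFSET).  */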
9275 static bool
9276 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9278 if (REG_P (rtl))
9280 if (REG_ATTRS (rtl))
9282 *declp = REG_EXPR (rtl);
9283 *offsetp = REG_OFFSET (rtl);
9284 return true;
9287 else if (MEM_P (rtl))
9289 if (MEM_ATTRS (rtl))
9291 *declp = MEM_EXPR (rtl);
9292 *offsetp = INT_MEM_OFFSET (rtl);
9293 return true;
9296 return false;
9299 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9300 of VAL. */
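/* E.g., for a parameter arriving in a register REG this records an
   (entry_value REG) rtx as a permanent equivalence of VAL, which
   later allows DWARF references to the value REG had on function
   entry (DW_OP_GNU_entry_value).  */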
9302 static void
9303 record_entry_value (cselib_val *val, rtx rtl)
9305 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9307 ENTRY_VALUE_EXP (ev) = rtl;
9309 cselib_add_permanent_equiv (val, ev, get_insns ());
9312 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9314 static void
9315 vt_add_function_parameter (tree parm)
9317 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9318 rtx incoming = DECL_INCOMING_RTL (parm);
9319 tree decl;
9320 enum machine_mode mode;
9321 HOST_WIDE_INT offset;
9322 dataflow_set *out;
9323 decl_or_value dv;
9325 if (TREE_CODE (parm) != PARM_DECL)
9326 return;
9328 if (!decl_rtl || !incoming)
9329 return;
9331 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9332 return;
9334 /* If there is a DRAP register, rewrite the incoming location of parameters
9335 passed on the stack into MEMs based on the argument pointer, as the DRAP
9336 register can be reused for other purposes and we do not track locations
9337 based on generic registers. But the prerequisite is that this argument
9338 pointer be also the virtual CFA pointer, see vt_initialize. */
9339 if (MEM_P (incoming)
9340 && stack_realign_drap
9341 && arg_pointer_rtx == cfa_base_rtx
9342 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9343 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9344 && XEXP (XEXP (incoming, 0), 0)
9345 == crtl->args.internal_arg_pointer
9346 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9348 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9349 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9350 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9351 incoming
9352 = replace_equiv_address_nv (incoming,
9353 plus_constant (Pmode,
9354 arg_pointer_rtx, off));
9357 #ifdef HAVE_window_save
9358 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9359 If the target machine has an explicit window save instruction, the
9360 actual entry value is the corresponding OUTGOING_REGNO instead. */
9361 if (REG_P (incoming)
9362 && HARD_REGISTER_P (incoming)
9363 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9365 parm_reg_t *p
9366 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9367 p->incoming = incoming;
9368 incoming
9369 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9370 OUTGOING_REGNO (REGNO (incoming)), 0);
9371 p->outgoing = incoming;
9373 else if (MEM_P (incoming)
9374 && REG_P (XEXP (incoming, 0))
9375 && HARD_REGISTER_P (XEXP (incoming, 0)))
9377 rtx reg = XEXP (incoming, 0);
9378 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9380 parm_reg_t *p
9381 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
9382 p->incoming = reg;
9383 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9384 p->outgoing = reg;
9385 incoming = replace_equiv_address_nv (incoming, reg);
9388 #endif
9390 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9392 if (REG_P (incoming) || MEM_P (incoming))
9394 /* This means the argument is passed by invisible reference. */
9395 offset = 0;
9396 decl = parm;
9397 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
9399 else
9401 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9402 return;
9403 offset += byte_lowpart_offset (GET_MODE (incoming),
9404 GET_MODE (decl_rtl));
9408 if (!decl)
9409 return;
9411 if (parm != decl)
9413 /* Assume that DECL_RTL was a pseudo that got spilled to
9414 memory. The spill slot sharing code will force the
9415 memory to reference spill_slot_decl (%sfp), so we don't
9416 match above. That's ok, the pseudo must have referenced
9417 the entire parameter, so just reset OFFSET. */
9418 gcc_assert (decl == get_spill_slot_decl (false));
9419 offset = 0;
9422 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9423 return;
9425 out = &VTI (ENTRY_BLOCK_PTR)->out;
9427 dv = dv_from_decl (parm);
9429 if (target_for_debug_bind (parm)
9430 /* We can't deal with these right now, because this kind of
9431 variable is single-part. ??? We could handle parallels
9432 that describe multiple locations for the same single
9433 value, but ATM we don't. */
9434 && GET_CODE (incoming) != PARALLEL)
9436 cselib_val *val;
9438 /* ??? We shouldn't ever hit this, but it may happen because
9439 arguments passed by invisible reference aren't dealt with
9440 above: incoming-rtl will have Pmode rather than the
9441 expected mode for the type. */
9442 if (offset)
9443 return;
9445 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
9446 VOIDmode, get_insns ());
9448 /* ??? Float-typed values in memory are not handled by
9449 cselib. */
9450 if (val)
9452 preserve_value (val);
9453 set_variable_part (out, val->val_rtx, dv, offset,
9454 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9455 dv = dv_from_value (val->val_rtx);
9459 if (REG_P (incoming))
9461 incoming = var_lowpart (mode, incoming);
9462 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9463 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9464 incoming);
9465 set_variable_part (out, incoming, dv, offset,
9466 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9467 if (dv_is_value_p (dv))
9469 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9470 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9471 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9473 enum machine_mode indmode
9474 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9475 rtx mem = gen_rtx_MEM (indmode, incoming);
9476 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9477 VOIDmode,
9478 get_insns ());
9479 if (val)
9481 preserve_value (val);
9482 record_entry_value (val, mem);
9483 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9484 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9489 else if (MEM_P (incoming))
9491 incoming = var_lowpart (mode, incoming);
9492 set_variable_part (out, incoming, dv, offset,
9493 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9497 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9499 static void
9500 vt_add_function_parameters (void)
9502 tree parm;
9504 for (parm = DECL_ARGUMENTS (current_function_decl);
9505 parm; parm = DECL_CHAIN (parm))
9506 vt_add_function_parameter (parm);
9508 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9510 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9512 if (TREE_CODE (vexpr) == INDIRECT_REF)
9513 vexpr = TREE_OPERAND (vexpr, 0);
9515 if (TREE_CODE (vexpr) == PARM_DECL
9516 && DECL_ARTIFICIAL (vexpr)
9517 && !DECL_IGNORED_P (vexpr)
9518 && DECL_NAMELESS (vexpr))
9519 vt_add_function_parameter (vexpr);
9523 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
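/* E.g., a typical prologue insn (set (reg fp) (reg sp)) qualifies,
   whether the SET appears directly in the pattern, inside a
   PARALLEL, or as the REG_FRAME_RELATED_EXPR note of the insn.  */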
9525 static bool
9526 fp_setter (rtx insn)
9528 rtx pat = PATTERN (insn);
9529 if (RTX_FRAME_RELATED_P (insn))
9531 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9532 if (expr)
9533 pat = XEXP (expr, 0);
9535 if (GET_CODE (pat) == SET)
9536 return SET_DEST (pat) == hard_frame_pointer_rtx;
9537 else if (GET_CODE (pat) == PARALLEL)
9539 int i;
9540 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9541 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9542 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9543 return true;
9545 return false;
9548 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9549 ensure it isn't flushed during cselib_reset_table.
9550 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9551 has been eliminated. */
9553 static void
9554 vt_init_cfa_base (void)
9556 cselib_val *val;
9558 #ifdef FRAME_POINTER_CFA_OFFSET
9559 cfa_base_rtx = frame_pointer_rtx;
9560 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9561 #else
9562 cfa_base_rtx = arg_pointer_rtx;
9563 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9564 #endif
9565 if (cfa_base_rtx == hard_frame_pointer_rtx
9566 || !fixed_regs[REGNO (cfa_base_rtx)])
9568 cfa_base_rtx = NULL_RTX;
9569 return;
9571 if (!MAY_HAVE_DEBUG_INSNS)
9572 return;
9574 /* Tell alias analysis that cfa_base_rtx should share
9575 find_base_term value with stack pointer or hard frame pointer. */
9576 if (!frame_pointer_needed)
9577 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9578 else if (!crtl->stack_realign_tried)
9579 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9581 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9582 VOIDmode, get_insns ());
9583 preserve_value (val);
9584 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9587 /* Allocate and initialize the data structures for variable tracking
9588 and parse the RTL to get the micro operations. */
9590 static bool
9591 vt_initialize (void)
9593 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
9594 HOST_WIDE_INT fp_cfa_offset = -1;
9596 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9598 attrs_pool = create_alloc_pool ("attrs_def pool",
9599 sizeof (struct attrs_def), 1024);
9600 var_pool = create_alloc_pool ("variable_def pool",
9601 sizeof (struct variable_def)
9602 + (MAX_VAR_PARTS - 1)
9603 * sizeof (((variable)NULL)->var_part[0]), 64);
9604 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9605 sizeof (struct location_chain_def),
9606 1024);
9607 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9608 sizeof (struct shared_hash_def), 256);
9609 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9610 empty_shared_hash->refcount = 1;
9611 empty_shared_hash->htab
9612 = htab_create (1, variable_htab_hash, variable_htab_eq,
9613 variable_htab_free);
9614 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9615 variable_htab_free);
9617 /* Init the IN and OUT sets. */
9618 FOR_ALL_BB (bb)
9620 VTI (bb)->visited = false;
9621 VTI (bb)->flooded = false;
9622 dataflow_set_init (&VTI (bb)->in);
9623 dataflow_set_init (&VTI (bb)->out);
9624 VTI (bb)->permp = NULL;
9627 if (MAY_HAVE_DEBUG_INSNS)
9629 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9630 scratch_regs = BITMAP_ALLOC (NULL);
9631 valvar_pool = create_alloc_pool ("small variable_def pool",
9632 sizeof (struct variable_def), 256);
9633 preserved_values = VEC_alloc (rtx, heap, 256);
9635 else
9637 scratch_regs = NULL;
9638 valvar_pool = NULL;
9641 if (MAY_HAVE_DEBUG_INSNS)
9643 rtx reg, expr;
9644 int ofst;
9645 cselib_val *val;
9647 #ifdef FRAME_POINTER_CFA_OFFSET
9648 reg = frame_pointer_rtx;
9649 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9650 #else
9651 reg = arg_pointer_rtx;
9652 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9653 #endif
9655 ofst -= INCOMING_FRAME_SP_OFFSET;
9657 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9658 VOIDmode, get_insns ());
9659 preserve_value (val);
9660 cselib_preserve_cfa_base_value (val, REGNO (reg));
9661 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9662 stack_pointer_rtx, -ofst);
9663 cselib_add_permanent_equiv (val, expr, get_insns ());
9665 if (ofst)
9667 val = cselib_lookup_from_insn (stack_pointer_rtx,
9668 GET_MODE (stack_pointer_rtx), 1,
9669 VOIDmode, get_insns ());
9670 preserve_value (val);
9671 expr = plus_constant (GET_MODE (reg), reg, ofst);
9672 cselib_add_permanent_equiv (val, expr, get_insns ());
9676 /* In order to factor out the adjustments made to the stack pointer or to
9677 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9678 instead of individual location lists, we're going to rewrite MEMs based
9679 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9680 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9681 resp. arg_pointer_rtx. We can do this either when there is no frame
9682 pointer in the function and stack adjustments are consistent for all
9683 basic blocks or when there is a frame pointer and no stack realignment.
9684 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9685 has been eliminated. */
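/* E.g., a stack slot addressed as (plus sp 12) before a push and as
   (plus sp 8) after it is really one CFA-relative slot; expressing
   it through the virtual CFA pointer gives the variable a single
   DW_OP_fbreg location instead of a location list split at every
   stack adjustment.  */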
9686 if (!frame_pointer_needed)
9688 rtx reg, elim;
9690 if (!vt_stack_adjustments ())
9691 return false;
9693 #ifdef FRAME_POINTER_CFA_OFFSET
9694 reg = frame_pointer_rtx;
9695 #else
9696 reg = arg_pointer_rtx;
9697 #endif
9698 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9699 if (elim != reg)
9701 if (GET_CODE (elim) == PLUS)
9702 elim = XEXP (elim, 0);
9703 if (elim == stack_pointer_rtx)
9704 vt_init_cfa_base ();
9707 else if (!crtl->stack_realign_tried)
9709 rtx reg, elim;
9711 #ifdef FRAME_POINTER_CFA_OFFSET
9712 reg = frame_pointer_rtx;
9713 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9714 #else
9715 reg = arg_pointer_rtx;
9716 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9717 #endif
9718 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9719 if (elim != reg)
9721 if (GET_CODE (elim) == PLUS)
9723 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9724 elim = XEXP (elim, 0);
9726 if (elim != hard_frame_pointer_rtx)
9727 fp_cfa_offset = -1;
9729 else
9730 fp_cfa_offset = -1;
9733 /* If the stack is realigned and a DRAP register is used, we're going to
9734 rewrite MEMs based on it representing incoming locations of parameters
9735 passed on the stack into MEMs based on the argument pointer. Although
9736 we aren't going to rewrite other MEMs, we still need to initialize the
9737 virtual CFA pointer in order to ensure that the argument pointer will
9738 be seen as a constant throughout the function.
9740 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9741 else if (stack_realign_drap)
9743 rtx reg, elim;
9745 #ifdef FRAME_POINTER_CFA_OFFSET
9746 reg = frame_pointer_rtx;
9747 #else
9748 reg = arg_pointer_rtx;
9749 #endif
9750 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9751 if (elim != reg)
9753 if (GET_CODE (elim) == PLUS)
9754 elim = XEXP (elim, 0);
9755 if (elim == hard_frame_pointer_rtx)
9756 vt_init_cfa_base ();
9760 hard_frame_pointer_adjustment = -1;
9762 vt_add_function_parameters ();
9764 FOR_EACH_BB (bb)
9766 rtx insn;
9767 HOST_WIDE_INT pre, post = 0;
9768 basic_block first_bb, last_bb;
9770 if (MAY_HAVE_DEBUG_INSNS)
9772 cselib_record_sets_hook = add_with_sets;
9773 if (dump_file && (dump_flags & TDF_DETAILS))
9774 fprintf (dump_file, "first value: %i\n",
9775 cselib_get_next_uid ());
9778 first_bb = bb;
9779 for (;;)
9781 edge e;
9782 if (bb->next_bb == EXIT_BLOCK_PTR
9783 || ! single_pred_p (bb->next_bb))
9784 break;
9785 e = find_edge (bb, bb->next_bb);
9786 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9787 break;
9788 bb = bb->next_bb;
9790 last_bb = bb;
9792 /* Add the micro-operations to the vector. */
9793 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9795 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9796 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9797 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9798 insn = NEXT_INSN (insn))
9800 if (INSN_P (insn))
9802 if (!frame_pointer_needed)
9804 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9805 if (pre)
9807 micro_operation mo;
9808 mo.type = MO_ADJUST;
9809 mo.u.adjust = pre;
9810 mo.insn = insn;
9811 if (dump_file && (dump_flags & TDF_DETAILS))
9812 log_op_type (PATTERN (insn), bb, insn,
9813 MO_ADJUST, dump_file);
9814 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9815 &mo);
9816 VTI (bb)->out.stack_adjust += pre;
9820 cselib_hook_called = false;
9821 adjust_insn (bb, insn);
9822 if (MAY_HAVE_DEBUG_INSNS)
9824 if (CALL_P (insn))
9825 prepare_call_arguments (bb, insn);
9826 cselib_process_insn (insn);
9827 if (dump_file && (dump_flags & TDF_DETAILS))
9829 print_rtl_single (dump_file, insn);
9830 dump_cselib_table (dump_file);
9833 if (!cselib_hook_called)
9834 add_with_sets (insn, 0, 0);
9835 cancel_changes (0);
9837 if (!frame_pointer_needed && post)
9839 micro_operation mo;
9840 mo.type = MO_ADJUST;
9841 mo.u.adjust = post;
9842 mo.insn = insn;
9843 if (dump_file && (dump_flags & TDF_DETAILS))
9844 log_op_type (PATTERN (insn), bb, insn,
9845 MO_ADJUST, dump_file);
9846 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9847 &mo);
9848 VTI (bb)->out.stack_adjust += post;
9851 if (bb == prologue_bb
9852 && fp_cfa_offset != -1
9853 && hard_frame_pointer_adjustment == -1
9854 && RTX_FRAME_RELATED_P (insn)
9855 && fp_setter (insn))
9857 vt_init_cfa_base ();
9858 hard_frame_pointer_adjustment = fp_cfa_offset;
9862 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9865 bb = last_bb;
9867 if (MAY_HAVE_DEBUG_INSNS)
9869 cselib_preserve_only_values ();
9870 cselib_reset_table (cselib_get_next_uid ());
9871 cselib_record_sets_hook = NULL;
9875 hard_frame_pointer_adjustment = -1;
9876 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9877 cfa_base_rtx = NULL_RTX;
9878 return true;
9881 /* This is *not* reset after each function. It gives each
9882 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
9883 a unique label number. */
9885 static int debug_label_num = 1;
9887 /* Get rid of all debug insns from the insn stream. */
9889 static void
9890 delete_debug_insns (void)
9892 basic_block bb;
9893 rtx insn, next;
9895 if (!MAY_HAVE_DEBUG_INSNS)
9896 return;
9898 FOR_EACH_BB (bb)
9900 FOR_BB_INSNS_SAFE (bb, insn, next)
9901 if (DEBUG_INSN_P (insn))
9903 tree decl = INSN_VAR_LOCATION_DECL (insn);
9904 if (TREE_CODE (decl) == LABEL_DECL
9905 && DECL_NAME (decl)
9906 && !DECL_RTL_SET_P (decl))
9908 PUT_CODE (insn, NOTE);
9909 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9910 NOTE_DELETED_LABEL_NAME (insn)
9911 = IDENTIFIER_POINTER (DECL_NAME (decl));
9912 SET_DECL_RTL (decl, insn);
9913 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9915 else
9916 delete_insn (insn);
9921 /* Run a fast, BB-local only version of var tracking, to take care of
9922 information that we don't do global analysis on, such that not all
9923 information is lost. If SKIPPED holds, we're skipping the global
9924 pass entirely, so we should try to use information it would have
9925 handled as well. */
9927 static void
9928 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9930 /* ??? Just skip it all for now. */
9931 delete_debug_insns ();
9934 /* Free the data structures needed for variable tracking. */
9936 static void
9937 vt_finalize (void)
9939 basic_block bb;
9941 FOR_EACH_BB (bb)
9943 VEC_free (micro_operation, heap, VTI (bb)->mos);
9946 FOR_ALL_BB (bb)
9948 dataflow_set_destroy (&VTI (bb)->in);
9949 dataflow_set_destroy (&VTI (bb)->out);
9950 if (VTI (bb)->permp)
9952 dataflow_set_destroy (VTI (bb)->permp);
9953 XDELETE (VTI (bb)->permp);
9956 free_aux_for_blocks ();
9957 htab_delete (empty_shared_hash->htab);
9958 htab_delete (changed_variables);
9959 free_alloc_pool (attrs_pool);
9960 free_alloc_pool (var_pool);
9961 free_alloc_pool (loc_chain_pool);
9962 free_alloc_pool (shared_hash_pool);
9964 if (MAY_HAVE_DEBUG_INSNS)
9966 free_alloc_pool (valvar_pool);
9967 VEC_free (rtx, heap, preserved_values);
9968 cselib_finish ();
9969 BITMAP_FREE (scratch_regs);
9970 scratch_regs = NULL;
9973 #ifdef HAVE_window_save
9974 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9975 #endif
9977 if (vui_vec)
9978 XDELETEVEC (vui_vec);
9979 vui_vec = NULL;
9980 vui_allocated = 0;
9983 /* The entry point to variable tracking pass. */
9985 static inline unsigned int
9986 variable_tracking_main_1 (void)
9988 bool success;
9990 if (flag_var_tracking_assignments < 0)
9992 delete_debug_insns ();
9993 return 0;
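/* Punt on very large, densely connected CFGs: global dataflow
   propagation would be too expensive there, so only the cheap
   BB-local handling is attempted.  */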
9996 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9998 vt_debug_insns_local (true);
9999 return 0;
10002 mark_dfs_back_edges ();
10003 if (!vt_initialize ())
10005 vt_finalize ();
10006 vt_debug_insns_local (true);
10007 return 0;
10010 success = vt_find_locations ();
10012 if (!success && flag_var_tracking_assignments > 0)
10014 vt_finalize ();
10016 delete_debug_insns ();
10018 /* This is later restored by our caller. */
10019 flag_var_tracking_assignments = 0;
10021 success = vt_initialize ();
10022 gcc_assert (success);
10024 success = vt_find_locations ();
10027 if (!success)
10029 vt_finalize ();
10030 vt_debug_insns_local (false);
10031 return 0;
10034 if (dump_file && (dump_flags & TDF_DETAILS))
10036 dump_dataflow_sets ();
10037 dump_flow_info (dump_file, dump_flags);
10040 timevar_push (TV_VAR_TRACKING_EMIT);
10041 vt_emit_notes ();
10042 timevar_pop (TV_VAR_TRACKING_EMIT);
10044 vt_finalize ();
10045 vt_debug_insns_local (false);
10046 return 0;
10049 unsigned int
10050 variable_tracking_main (void)
10052 unsigned int ret;
10053 int save = flag_var_tracking_assignments;
10055 ret = variable_tracking_main_1 ();
10057 flag_var_tracking_assignments = save;
10059 return ret;
10062 static bool
10063 gate_handle_var_tracking (void)
10065 return (flag_var_tracking && !targetm.delay_vartrack);
10070 struct rtl_opt_pass pass_variable_tracking =
10073 RTL_PASS,
10074 "vartrack", /* name */
10075 gate_handle_var_tracking, /* gate */
10076 variable_tracking_main, /* execute */
10077 NULL, /* sub */
10078 NULL, /* next */
10079 0, /* static_pass_number */
10080 TV_VAR_TRACKING, /* tv_id */
10081 0, /* properties_required */
10082 0, /* properties_provided */
10083 0, /* properties_destroyed */
10084 0, /* todo_flags_start */
10085 TODO_verify_rtl_sharing /* todo_flags_finish */