/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (in which registers or where in memory) at each
   position in the instruction stream and emits notes describing the
   locations.  Debug information (DWARF2 location lists) is finally
   generated from these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
   < clobber < set < post-modifying stack adjustment
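
   For instance (an illustrative decomposition; the details depend on
   what is tracked), a push insn such as
     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))
   yields a pre-modifying MO_ADJUST for the stack pointer change, then
   a use of register 0 (MO_USE if the register is associated with a
   tracked variable, MO_USE_NO_VAR otherwise), and then the store into
   the stack slot, in exactly the order given above.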

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and a linked list for each physical register.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so this is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such note describes
   the location of one variable at the point in the instruction stream where
   the note is.  There is no need to emit a note for each variable before each
   instruction, we only emit these notes where the location of a variable
   changes (this means that we also emit notes for changes between the OUT
   set of the previous block and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "fibheap.h"
#include "hashtab.h"
#include "regs.h"
#include "expr.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "recog.h"
#include "alias.h"

/* var-tracking.c assumes that the tree code with the same value as the VALUE
   rtx code has no chance to appear in REG_EXPRs/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile-time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];

/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,		/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,	/* Use location which is not associated with a variable
			   or the variable is not trackable.  */
  MO_VAL_USE,		/* Use location which is associated with a value.  */
  MO_VAL_LOC,		/* Use location which appears in a debug insn.  */
  MO_VAL_SET,		/* Set location associated with a value.  */
  MO_SET,		/* Set location.  */
  MO_COPY,		/* Copy the same portion of a variable from one
			   location to another.  */
  MO_CLOBBER,		/* Clobber location.  */
  MO_CALL,		/* Call insn.  */
  MO_ADJUST		/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about a micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;


/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx insn;

  /* Where the note will be emitted (before/after insn).  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  htab_t vars;
} emit_note_data;

/* Description of location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus a
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  htab_t htab;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Variables that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;

/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;


/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};

/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;

/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		  \
			      ? VAR_LOC_1PAUX (var)->backlinks	  \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		  \
			       ? &VAR_LOC_1PAUX (var)->backlinks  \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		  \
			      ? &VAR_LOC_1PAUX (var)->deps	  \
			      : NULL)

/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static htab_t changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static htab_t dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static void **unshare_variable (dataflow_set *set, void **slot, variable var,
				enum var_init_status);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static int dump_var_slot (void **, void *);
static void dump_var (variable);
static void dump_vars (htab_t);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static void **set_slot_part (dataflow_set *, rtx, void **,
			     decl_or_value, HOST_WIDE_INT,
			     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static void **clobber_slot_part (dataflow_set *, rtx,
				 void **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static int emit_note_insn_var_location (void **, void *);
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
static int emit_notes_for_differences_1 (void **, void *);
static int emit_notes_for_differences_2 (void **, void *);
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Given a SET, calculate the amount of stack adjustment it contains,
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */
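
/* For example (illustrative): given the pattern
     (set (reg sp) (plus (reg sp) (const_int -16)))
   this increases *POST by 16 (the adjustment takes effect after the
   insn), whereas
     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))
   increases *PRE by 4 on a target whose SImode is 4 bytes wide.  */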

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
    }
  else if (MEM_P (dest))
    {
      /* (set (mem (pre_dec (reg sp))) (foo)) */
      src = XEXP (dest, 0);
      code = GET_CODE (src);

      switch (code)
	{
	case PRE_MODIFY:
	case POST_MODIFY:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      rtx val = XEXP (XEXP (src, 1), 1);
	      /* We handle only adjustments by constant amount.  */
	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
			  && CONST_INT_P (val));

	      if (code == PRE_MODIFY)
		*pre -= INTVAL (val);
	      else
		*post -= INTVAL (val);
	      break;
	    }
	  return;

	case PRE_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case PRE_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	default:
	  return;
	}
    }
}

/* Given an INSN, calculate the amount of stack adjustment it contains,
   PRE- and POST-modifying the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing the DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR)->visited = true;
  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR)
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* Check whether the adjustments on the edges are the same.  */
	  if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* The register (arg_pointer_rtx or frame_pointer_rtx) that
   stack_pointer_rtx resp. hard_frame_pointer_rtx is being mapped to,
   and the offset used in the mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  enum machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx side_effects;
};

/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
   transformation of wider mode arithmetic to narrower mode,
   -1 if it is suitable and subexpressions shouldn't be
   traversed and 0 if it is suitable and subexpressions should
   be traversed.  Called through for_each_rtx.  */

static int
use_narrower_mode_test (rtx *loc, void *data)
{
  rtx subreg = (rtx) data;

  if (CONSTANT_P (*loc))
    return -1;
  switch (GET_CODE (*loc))
    {
    case REG:
      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	return 1;
      if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
			    *loc, subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (*loc))))
	return 1;
      return -1;
    case PLUS:
    case MINUS:
    case MULT:
      return 0;
    case ASHIFT:
      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
	return 1;
      else
	return -1;
    default:
      return 1;
    }
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
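
/* E.g. (illustrative): narrowing (plus:DI (reg:DI x) (const_int 1))
   to SImode yields
     (plus:SI (subreg:SI (reg:DI x) 0) (const_int 1))
   on a little-endian target, provided use_narrower_mode_test already
   accepted every register operand in the expression.  */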

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  enum machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   GEN_INT (GET_CODE (loc) == PRE_INC
				    ? GET_MODE_SIZE (amd->mem_mode)
				    : -GET_MODE_SIZE (amd->mem_mode)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  GEN_INT ((GET_CODE (loc) == PRE_INC
				    || GET_CODE (loc) == POST_INC)
				   ? GET_MODE_SIZE (amd->mem_mode)
				   : -GET_MODE_SIZE (amd->mem_mode)));
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							XEXP (loc, 1)),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	  && GET_MODE_SIZE (GET_MODE (tem))
	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */
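
/* E.g. (illustrative): a push like
     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))
   is rewritten so that the MEM address uses the already-adjusted
   stack pointer, with the auto-modification turned into an explicit
   extra set:
     (parallel [(set (mem:SI (plus:SI (reg sp) (const_int -4)))
		     (reg:SI 0))
		(set (reg sp) (plus:SI (reg sp) (const_int -4)))])  */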

static void
adjust_insn (basic_block bb, rtx insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL_RTX;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline alloc_pool
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a decl_or_value: hash its uid.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

static hashval_t
variable_htab_hash (const void *x)
{
  const_variable const v = (const_variable) x;

  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable X with declaration Y.  */

static int
variable_htab_eq (const void *x, const void *y)
{
  const_variable const v = (const_variable) x;
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      pool_free (attrs_pool, list);
    }
  *listp = NULL;
}

/* Return the member of LIST matching the pair DV and OFFSET, or NULL.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DV, OFFSET, LOC into the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list;

  list = (attrs) pool_alloc (attrs_pool);
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline htab_t
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, possibly because VARS as a whole is
   shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab
    = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
		   variable_htab_eq, variable_htab_free);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      htab_delete (vars->htab);
      pool_free (shared_hash_pool, vars);
    }
}

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline void **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
}

static inline void **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline void **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
				   shared_hash_shared (vars)
				   ? NO_INSERT : INSERT);
}

static inline void **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline void **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
				   NO_INSERT);
}

static inline void **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
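
/* E.g. given two VALUEs with cselib uids 5 and 7, canon_value_cmp
   (tval = uid 5, cval = uid 7) returns true: the lower-numbered
   (i.e. earlier-created) VALUE is the preferred canonical value.  */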
1677 static bool dst_can_be_shared;
1679 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1681 static void **
1682 unshare_variable (dataflow_set *set, void **slot, variable var,
1683 enum var_init_status initialized)
1685 variable new_var;
1686 int i;
1688 new_var = (variable) pool_alloc (onepart_pool (var->onepart));
1689 new_var->dv = var->dv;
1690 new_var->refcount = 1;
1691 var->refcount--;
1692 new_var->n_var_parts = var->n_var_parts;
1693 new_var->onepart = var->onepart;
1694 new_var->in_changed_variables = false;
1696 if (! flag_var_tracking_uninit)
1697 initialized = VAR_INIT_STATUS_INITIALIZED;
1699 for (i = 0; i < var->n_var_parts; i++)
1701 location_chain node;
1702 location_chain *nextp;
1704 if (i == 0 && var->onepart)
1706 /* One-part auxiliary data is only used while emitting
1707 notes, so propagate it to the new variable in the active
1708 dataflow set. If we're not emitting notes, this will be
1709 a no-op. */
1710 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1711 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1712 VAR_LOC_1PAUX (var) = NULL;
1714 else
1715 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1716 nextp = &new_var->var_part[i].loc_chain;
1717 for (node = var->var_part[i].loc_chain; node; node = node->next)
1719 location_chain new_lc;
1721 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1722 new_lc->next = NULL;
1723 if (node->init > initialized)
1724 new_lc->init = node->init;
1725 else
1726 new_lc->init = initialized;
1727 if (node->set_src && !(MEM_P (node->set_src)))
1728 new_lc->set_src = node->set_src;
1729 else
1730 new_lc->set_src = NULL;
1731 new_lc->loc = node->loc;
1733 *nextp = new_lc;
1734 nextp = &new_lc->next;
1737 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1740 dst_can_be_shared = false;
1741 if (shared_hash_shared (set->vars))
1742 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1743 else if (set->traversed_vars && set->vars != set->traversed_vars)
1744 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1745 *slot = new_var;
1746 if (var->in_changed_variables)
1748 void **cslot
1749 = htab_find_slot_with_hash (changed_variables, var->dv,
1750 dv_htab_hash (var->dv), NO_INSERT);
1751 gcc_assert (*cslot == (void *) var);
1752 var->in_changed_variables = false;
1753 variable_htab_free (var);
1754 *cslot = new_var;
1755 new_var->in_changed_variables = true;
1757 return slot;
1760 /* Copy all variables from hash table SRC to hash table DST. */
1762 static void
1763 vars_copy (htab_t dst, htab_t src)
1765 htab_iterator hi;
1766 variable var;
1768 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1770 void **dstp;
1771 var->refcount++;
1772 dstp = htab_find_slot_with_hash (dst, var->dv,
1773 dv_htab_hash (var->dv),
1774 INSERT);
1775 *dstp = var;
1779 /* Map a decl to its main debug decl. */
1781 static inline tree
1782 var_debug_decl (tree decl)
1784 if (decl && DECL_P (decl)
1785 && DECL_DEBUG_EXPR_IS_FROM (decl))
1787 tree debugdecl = DECL_DEBUG_EXPR (decl);
1788 if (debugdecl && DECL_P (debugdecl))
1789 decl = debugdecl;
1792 return decl;
1795 /* Set the register LOC to contain DV, OFFSET. */
1797 static void
1798 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1799 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1800 enum insert_option iopt)
1802 attrs node;
1803 bool decl_p = dv_is_decl_p (dv);
1805 if (decl_p)
1806 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1808 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1809 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1810 && node->offset == offset)
1811 break;
1812 if (!node)
1813 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1814 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1817 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1819 static void
1820 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1821 rtx set_src)
1823 tree decl = REG_EXPR (loc);
1824 HOST_WIDE_INT offset = REG_OFFSET (loc);
1826 var_reg_decl_set (set, loc, initialized,
1827 dv_from_decl (decl), offset, set_src, INSERT);
1830 static enum var_init_status
1831 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1833 variable var;
1834 int i;
1835 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1837 if (! flag_var_tracking_uninit)
1838 return VAR_INIT_STATUS_INITIALIZED;
1840 var = shared_hash_find (set->vars, dv);
1841 if (var)
1843 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1845 location_chain nextp;
1846 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1847 if (rtx_equal_p (nextp->loc, loc))
1849 ret_val = nextp->init;
1850 break;
1855 return ret_val;
1858 /* Delete current content of register LOC in dataflow set SET and set
1859 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1860 MODIFY is true, any other live copies of the same variable part are
1861 also deleted from the dataflow set, otherwise the variable part is
1862 assumed to be copied from another location holding the same
1863 part. */
1865 static void
1866 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1867 enum var_init_status initialized, rtx set_src)
1869 tree decl = REG_EXPR (loc);
1870 HOST_WIDE_INT offset = REG_OFFSET (loc);
1871 attrs node, next;
1872 attrs *nextp;
1874 decl = var_debug_decl (decl);
1876 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1877 initialized = get_init_value (set, loc, dv_from_decl (decl));
1879 nextp = &set->regs[REGNO (loc)];
1880 for (node = *nextp; node; node = next)
1882 next = node->next;
1883 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1885 delete_variable_part (set, node->loc, node->dv, node->offset);
1886 pool_free (attrs_pool, node);
1887 *nextp = next;
1889 else
1891 node->loc = loc;
1892 nextp = &node->next;
1895 if (modify)
1896 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1897 var_reg_set (set, loc, initialized, set_src);
1900 /* Delete the association of register LOC in dataflow set SET with any
1901 variables that aren't onepart. If CLOBBER is true, also delete any
1902 other live copies of the same variable part, and delete the
1903 association with onepart dvs too. */
1905 static void
1906 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1908 attrs *nextp = &set->regs[REGNO (loc)];
1909 attrs node, next;
1911 if (clobber)
1913 tree decl = REG_EXPR (loc);
1914 HOST_WIDE_INT offset = REG_OFFSET (loc);
1916 decl = var_debug_decl (decl);
1918 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1921 for (node = *nextp; node; node = next)
1923 next = node->next;
1924 if (clobber || !dv_onepart_p (node->dv))
1926 delete_variable_part (set, node->loc, node->dv, node->offset);
1927 pool_free (attrs_pool, node);
1928 *nextp = next;
1930 else
1931 nextp = &node->next;
1935 /* Delete content of register with number REGNO in dataflow set SET. */
1937 static void
1938 var_regno_delete (dataflow_set *set, int regno)
1940 attrs *reg = &set->regs[regno];
1941 attrs node, next;
1943 for (node = *reg; node; node = next)
1945 next = node->next;
1946 delete_variable_part (set, node->loc, node->dv, node->offset);
1947 pool_free (attrs_pool, node);
1949 *reg = NULL;
1952 /* Strip constant offsets and alignments off of LOC. Return the base
1953 expression. */
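/* E.g. an offset-and-aligned stack address such as
   (and (plus (value V) (const_int 8)) (const_int -16)) strips down to
   the hypothetical base value V; an AND with a non-negative constant
   is left alone, since it does not look like an alignment mask.  */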
1955 static rtx
1956 vt_get_canonicalize_base (rtx loc)
1958 while ((GET_CODE (loc) == PLUS
1959 || GET_CODE (loc) == AND)
1960 && GET_CODE (XEXP (loc, 1)) == CONST_INT
1961 && (GET_CODE (loc) != AND
1962 || INTVAL (XEXP (loc, 1)) < 0))
1963 loc = XEXP (loc, 0);
1965 return loc;
1968 /* Canonicalize LOC using equivalences from SET in addition to those
1969 in the cselib static table. */
1971 static rtx
1972 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
1974 HOST_WIDE_INT ofst = 0;
1975 enum machine_mode mode = GET_MODE (oloc);
1976 rtx loc = canon_rtx (get_addr (oloc));
1978 /* Try to substitute a base VALUE for equivalent expressions as much
1979 as possible. The goal here is to expand stack-related addresses
1980 to one of the stack base registers, so that we can compare
1981 addresses for overlaps. */
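  /* Sketch of the effect, with a hypothetical value V1 known to equal
     (plus (reg sp) (const_int 8)): the address (plus (value V1)
     (const_int 4)) canonicalizes to (plus (reg sp) (const_int 12)), so
     overlap checks against other stack addresses reduce to comparing
     constant offsets from the same base register.  */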
1982 while (GET_CODE (vt_get_canonicalize_base (loc)) == VALUE)
1984 rtx x;
1985 decl_or_value dv;
1986 variable var;
1987 location_chain l;
1989 while (GET_CODE (loc) == PLUS)
1991 ofst += INTVAL (XEXP (loc, 1));
1992 loc = XEXP (loc, 0);
1993 continue;
1996 /* Alignment operations can't normally be combined, so just
1997 canonicalize the base and we're done. We'll normally have
1998 only one stack alignment anyway. */
1999 if (GET_CODE (loc) == AND)
2001 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2002 if (x != XEXP (loc, 0))
2003 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2004 loc = canon_rtx (get_addr (loc));
2005 break;
2008 x = canon_rtx (get_addr (loc));
2010 /* We've made progress! Start over. */
2011 if (x != loc || GET_CODE (x) != VALUE)
2013 loc = x;
2014 continue;
2017 dv = dv_from_rtx (x);
2018 var = (variable) htab_find_with_hash (shared_hash_htab (set->vars),
2019 dv, dv_htab_hash (dv));
2020 if (!var)
2021 break;
2023 /* Look for an improved equivalent expression. */
2024 for (l = var->var_part[0].loc_chain; l; l = l->next)
2026 rtx base = vt_get_canonicalize_base (l->loc);
2027 if (GET_CODE (base) == REG
2028 || (GET_CODE (base) == VALUE
2029 && canon_value_cmp (base, loc)))
2031 loc = l->loc;
2032 break;
2036 /* No luck with the dataflow set, so we're done. */
2037 if (!l)
2038 break;
2041 /* Add OFST back in. */
2042 if (ofst)
2044 /* Don't build new RTL if we can help it. */
2045 if (GET_CODE (oloc) == PLUS
2046 && XEXP (oloc, 0) == loc
2047 && INTVAL (XEXP (oloc, 1)) == ofst)
2048 return oloc;
2050 loc = plus_constant (mode, loc, ofst);
2053 return loc;
2056 /* Return true iff ADDR has a stack register as the base address. */
2058 static inline bool
2059 vt_stack_offset_p (rtx addr)
2061 rtx base = vt_get_canonicalize_base (addr);
2063 if (GET_CODE (base) != REG)
2064 return false;
2066 return REGNO_PTR_FRAME_P (REGNO (base));
2069 /* Return true iff there's a true dependence between MLOC and LOC.
2070 MADDR must be a canonicalized version of MLOC's address. */
2072 static inline bool
2073 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2075 if (GET_CODE (loc) != MEM)
2076 return false;
2078 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, NULL))
2079 return false;
2081 if (!MEM_EXPR (loc) && vt_stack_offset_p (maddr))
2083 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2084 return canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr);
2087 return true;
2090 /* Hold parameters for the hashtab traversal function
2091 drop_overlapping_mem_locs, see below. */
2093 struct overlapping_mems
2095 dataflow_set *set;
2096 rtx loc, addr;
2099 /* Remove all MEMs that overlap with COMS->LOC from the location list
2100 of a hash table entry for a value. COMS->ADDR must be a
2101 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2102 canonicalized itself. */
2104 static int
2105 drop_overlapping_mem_locs (void **slot, void *data)
2107 struct overlapping_mems *coms = (struct overlapping_mems *)data;
2108 dataflow_set *set = coms->set;
2109 rtx mloc = coms->loc, addr = coms->addr;
2110 variable var = (variable) *slot;
2112 if (var->onepart == ONEPART_VALUE)
2114 location_chain loc, *locp;
2115 bool changed = false;
2116 rtx cur_loc;
2118 gcc_assert (var->n_var_parts == 1);
2120 if (shared_var_p (var, set->vars))
2122 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2123 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2124 break;
2126 if (!loc)
2127 return 1;
2129 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2130 var = (variable)*slot;
2131 gcc_assert (var->n_var_parts == 1);
2134 if (VAR_LOC_1PAUX (var))
2135 cur_loc = VAR_LOC_FROM (var);
2136 else
2137 cur_loc = var->var_part[0].cur_loc;
2139 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2140 loc; loc = *locp)
2142 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2144 locp = &loc->next;
2145 continue;
2148 *locp = loc->next;
2149 /* If we have deleted the location which was last emitted
2150 we have to emit a new location, so add the variable to the
2151 set of changed variables. */
2152 if (cur_loc == loc->loc)
2154 changed = true;
2155 var->var_part[0].cur_loc = NULL;
2156 if (VAR_LOC_1PAUX (var))
2157 VAR_LOC_FROM (var) = NULL;
2159 pool_free (loc_chain_pool, loc);
2162 if (!var->var_part[0].loc_chain)
2164 var->n_var_parts--;
2165 changed = true;
2167 if (changed)
2168 variable_was_changed (var, set);
2171 return 1;
2174 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2176 static void
2177 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2179 struct overlapping_mems coms;
2181 coms.set = set;
2182 coms.loc = canon_rtx (loc);
2183 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2185 set->traversed_vars = set->vars;
2186 htab_traverse (shared_hash_htab (set->vars),
2187 drop_overlapping_mem_locs, &coms);
2188 set->traversed_vars = NULL;
2191 /* Set the location of DV, OFFSET as the MEM LOC. */
2193 static void
2194 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2195 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2196 enum insert_option iopt)
2198 if (dv_is_decl_p (dv))
2199 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2201 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2204 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2205 SET to LOC.
2206 Adjust the address first if it is stack pointer based. */
2208 static void
2209 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2210 rtx set_src)
2212 tree decl = MEM_EXPR (loc);
2213 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2215 var_mem_decl_set (set, loc, initialized,
2216 dv_from_decl (decl), offset, set_src, INSERT);
2219 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2220 dataflow set SET to LOC. If MODIFY is true, any other live copies
2221 of the same variable part are also deleted from the dataflow set,
2222 otherwise the variable part is assumed to be copied from another
2223 location holding the same part.
2224 Adjust the address first if it is stack pointer based. */
2226 static void
2227 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2228 enum var_init_status initialized, rtx set_src)
2230 tree decl = MEM_EXPR (loc);
2231 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2233 clobber_overlapping_mems (set, loc);
2234 decl = var_debug_decl (decl);
2236 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2237 initialized = get_init_value (set, loc, dv_from_decl (decl));
2239 if (modify)
2240 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2241 var_mem_set (set, loc, initialized, set_src);
2244 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2245 true, also delete any other live copies of the same variable part.
2246 Adjust the address first if it is stack pointer based. */
2248 static void
2249 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2251 tree decl = MEM_EXPR (loc);
2252 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2254 clobber_overlapping_mems (set, loc);
2255 decl = var_debug_decl (decl);
2256 if (clobber)
2257 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2258 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2261 /* Return true if LOC should not be expanded for location expressions,
2262 or used in them. */
2264 static inline bool
2265 unsuitable_loc (rtx loc)
2267 switch (GET_CODE (loc))
2269 case PC:
2270 case SCRATCH:
2271 case CC0:
2272 case ASM_INPUT:
2273 case ASM_OPERANDS:
2274 return true;
2276 default:
2277 return false;
2281 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2282 bound to it. */
2284 static inline void
2285 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2287 if (REG_P (loc))
2289 if (modified)
2290 var_regno_delete (set, REGNO (loc));
2291 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2292 dv_from_value (val), 0, NULL_RTX, INSERT);
2294 else if (MEM_P (loc))
2296 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2298 if (modified)
2299 clobber_overlapping_mems (set, loc);
2301 if (l && GET_CODE (l->loc) == VALUE)
2302 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2304 /* If this MEM is a global constant, we don't need it in the
2305 dynamic tables. ??? We should test this before emitting the
2306 micro-op in the first place. */
2307 while (l)
2308 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2309 break;
2310 else
2311 l = l->next;
2313 if (!l)
2314 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2315 dv_from_value (val), 0, NULL_RTX, INSERT);
2317 else
2319 /* Other kinds of equivalences are necessarily static, at least
2320 so long as we do not perform substitutions while merging
2321 expressions. */
2322 gcc_unreachable ();
2323 set_variable_part (set, loc, dv_from_value (val), 0,
2324 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2328 /* Bind a value to a location it was just stored in. If MODIFIED
2329 holds, assume the location was modified, detaching it from any
2330 values bound to it. */
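/* In dumps this shows up roughly as "42: (reg:SI 0 ax) evaluates to
   (value:SI 5:5 ...)", followed by the value's previously known
   locations, one per setting insn; the numbers here are made up for
   illustration.  */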
2332 static void
2333 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
2335 cselib_val *v = CSELIB_VAL_PTR (val);
2337 gcc_assert (cselib_preserved_value_p (v));
2339 if (dump_file)
2341 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2342 print_inline_rtx (dump_file, loc, 0);
2343 fprintf (dump_file, " evaluates to ");
2344 print_inline_rtx (dump_file, val, 0);
2345 if (v->locs)
2347 struct elt_loc_list *l;
2348 for (l = v->locs; l; l = l->next)
2350 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2351 print_inline_rtx (dump_file, l->loc, 0);
2354 fprintf (dump_file, "\n");
2357 gcc_checking_assert (!unsuitable_loc (loc));
2359 val_bind (set, val, loc, modified);
2362 /* Reset this node, detaching all its equivalences. */
2365 static void
2366 val_reset (dataflow_set *set, decl_or_value dv)
2368 variable var = shared_hash_find (set->vars, dv);
2369 location_chain node;
2370 rtx cval;
2372 if (!var || !var->n_var_parts)
2373 return;
2375 gcc_assert (var->n_var_parts == 1);
2377 cval = NULL;
2378 for (node = var->var_part[0].loc_chain; node; node = node->next)
2379 if (GET_CODE (node->loc) == VALUE
2380 && canon_value_cmp (node->loc, cval))
2381 cval = node->loc;
2383 for (node = var->var_part[0].loc_chain; node; node = node->next)
2384 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2386 /* Redirect the equivalence link to the new canonical
2387 value, or simply remove it if it would point at
2388 itself. */
2389 if (cval)
2390 set_variable_part (set, cval, dv_from_value (node->loc),
2391 0, node->init, node->set_src, NO_INSERT);
2392 delete_variable_part (set, dv_as_value (dv),
2393 dv_from_value (node->loc), 0);
2396 if (cval)
2398 decl_or_value cdv = dv_from_value (cval);
2400 /* Keep the remaining values connected, accumulating links
2401 in the canonical value. */
2402 for (node = var->var_part[0].loc_chain; node; node = node->next)
2404 if (node->loc == cval)
2405 continue;
2406 else if (GET_CODE (node->loc) == REG)
2407 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2408 node->set_src, NO_INSERT);
2409 else if (GET_CODE (node->loc) == MEM)
2410 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2411 node->set_src, NO_INSERT);
2412 else
2413 set_variable_part (set, node->loc, cdv, 0,
2414 node->init, node->set_src, NO_INSERT);
2418 /* We remove this last, to make sure that the canonical value is not
2419 removed to the point of requiring reinsertion. */
2420 if (cval)
2421 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2423 clobber_variable_part (set, NULL, dv, 0, NULL);
2426 /* Find the values in a given location and map the val to another
2427 value, if it is unique, or add the location as one holding the
2428 value. */
2430 static void
2431 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2433 decl_or_value dv = dv_from_value (val);
2435 if (dump_file && (dump_flags & TDF_DETAILS))
2437 if (insn)
2438 fprintf (dump_file, "%i: ", INSN_UID (insn));
2439 else
2440 fprintf (dump_file, "head: ");
2441 print_inline_rtx (dump_file, val, 0);
2442 fputs (" is at ", dump_file);
2443 print_inline_rtx (dump_file, loc, 0);
2444 fputc ('\n', dump_file);
2447 val_reset (set, dv);
2449 gcc_checking_assert (!unsuitable_loc (loc));
2451 if (REG_P (loc))
2453 attrs node, found = NULL;
2455 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2456 if (dv_is_value_p (node->dv)
2457 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2459 found = node;
2461 /* Map incoming equivalences. ??? Wouldn't it be nice if
2462 we just started sharing the location lists? Maybe a
2463 circular list ending at the value itself or some
2464 such. */
2465 set_variable_part (set, dv_as_value (node->dv),
2466 dv_from_value (val), node->offset,
2467 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2468 set_variable_part (set, val, node->dv, node->offset,
2469 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2472 /* If we didn't find any equivalence, we need to remember that
2473 this value is held in the named register. */
2474 if (found)
2475 return;
2477 /* ??? Attempt to find and merge equivalent MEMs or other
2478 expressions too. */
2480 val_bind (set, val, loc, false);
2483 /* Initialize dataflow set SET to be empty. */
2486 static void
2487 dataflow_set_init (dataflow_set *set)
2489 init_attrs_list_set (set->regs);
2490 set->vars = shared_hash_copy (empty_shared_hash);
2491 set->stack_adjust = 0;
2492 set->traversed_vars = NULL;
2495 /* Delete the contents of dataflow set SET. */
2497 static void
2498 dataflow_set_clear (dataflow_set *set)
2500 int i;
2502 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2503 attrs_list_clear (&set->regs[i]);
2505 shared_hash_destroy (set->vars);
2506 set->vars = shared_hash_copy (empty_shared_hash);
2509 /* Copy the contents of dataflow set SRC to DST. */
2511 static void
2512 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2514 int i;
2516 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2517 attrs_list_copy (&dst->regs[i], src->regs[i]);
2519 shared_hash_destroy (dst->vars);
2520 dst->vars = shared_hash_copy (src->vars);
2521 dst->stack_adjust = src->stack_adjust;
2524 /* Information for merging lists of locations for a given offset of a variable. */
2526 struct variable_union_info
2528 /* Node of the location chain. */
2529 location_chain lc;
2531 /* The sum of positions in the input chains. */
2532 int pos;
2534 /* The position in the chain of DST dataflow set. */
2535 int pos_dst;
2538 /* Buffer for location list sorting and its allocated size. */
2539 static struct variable_union_info *vui_vec;
2540 static int vui_allocated;
2542 /* Compare function for qsort, order the structures by POS element. */
2544 static int
2545 variable_union_info_cmp_pos (const void *n1, const void *n2)
2547 const struct variable_union_info *const i1 =
2548 (const struct variable_union_info *) n1;
2549 const struct variable_union_info *const i2 =
2550 (const struct variable_union_info *) n2;
2552 if (i1->pos != i2->pos)
2553 return i1->pos - i2->pos;
2555 return (i1->pos_dst - i2->pos_dst);
2558 /* Compute union of location parts of variable *SLOT and the same variable
2559 from hash table DATA. Compute "sorted" union of the location chains
2560 for common offsets, i.e. the locations of a variable part are sorted by
2561 a priority where the priority is the sum of the positions in the 2 chains
2562 (if a location is only in one list, the position in the second list is
2563 defined to be larger than the length of the chains).
2564 When we are updating the location parts, the newest location is at the
2565 beginning of the chain, so when we do the described "sorted" union
2566 we keep the newest locations at the beginning. */
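/* A made-up example: for DST chain (A B) and SRC chain (C B), the
   shared location B gets priority 1 + 1 = 2, while A and C occur in
   only one chain each and get the out-of-range priority
   0 + src_l + dst_l = 4, so the union comes out as (B A C): common
   locations first, then the remaining DST and then SRC locations.  */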
2568 static int
2569 variable_union (variable src, dataflow_set *set)
2571 variable dst;
2572 void **dstp;
2573 int i, j, k;
2575 dstp = shared_hash_find_slot (set->vars, src->dv);
2576 if (!dstp || !*dstp)
2578 src->refcount++;
2580 dst_can_be_shared = false;
2581 if (!dstp)
2582 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2584 *dstp = src;
2586 /* Continue traversing the hash table. */
2587 return 1;
2589 else
2590 dst = (variable) *dstp;
2592 gcc_assert (src->n_var_parts);
2593 gcc_checking_assert (src->onepart == dst->onepart);
2595 /* We can combine one-part variables very efficiently, because their
2596 entries are in canonical order. */
2597 if (src->onepart)
2599 location_chain *nodep, dnode, snode;
2601 gcc_assert (src->n_var_parts == 1
2602 && dst->n_var_parts == 1);
2604 snode = src->var_part[0].loc_chain;
2605 gcc_assert (snode);
2607 restart_onepart_unshared:
2608 nodep = &dst->var_part[0].loc_chain;
2609 dnode = *nodep;
2610 gcc_assert (dnode);
2612 while (snode)
2614 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2616 if (r > 0)
2618 location_chain nnode;
2620 if (shared_var_p (dst, set->vars))
2622 dstp = unshare_variable (set, dstp, dst,
2623 VAR_INIT_STATUS_INITIALIZED);
2624 dst = (variable)*dstp;
2625 goto restart_onepart_unshared;
2628 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2629 nnode->loc = snode->loc;
2630 nnode->init = snode->init;
2631 if (!snode->set_src || MEM_P (snode->set_src))
2632 nnode->set_src = NULL;
2633 else
2634 nnode->set_src = snode->set_src;
2635 nnode->next = dnode;
2636 dnode = nnode;
2638 else if (r == 0)
2639 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2641 if (r >= 0)
2642 snode = snode->next;
2644 nodep = &dnode->next;
2645 dnode = *nodep;
2648 return 1;
2651 gcc_checking_assert (!src->onepart);
2653 /* Count the number of location parts, result is K. */
2654 for (i = 0, j = 0, k = 0;
2655 i < src->n_var_parts && j < dst->n_var_parts; k++)
2657 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2659 i++;
2660 j++;
2662 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2663 i++;
2664 else
2665 j++;
2667 k += src->n_var_parts - i;
2668 k += dst->n_var_parts - j;
2670 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2671 thus there are at most MAX_VAR_PARTS different offsets. */
2672 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2674 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2676 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2677 dst = (variable)*dstp;
2680 i = src->n_var_parts - 1;
2681 j = dst->n_var_parts - 1;
2682 dst->n_var_parts = k;
2684 for (k--; k >= 0; k--)
2686 location_chain node, node2;
2688 if (i >= 0 && j >= 0
2689 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2691 /* Compute the "sorted" union of the chains, i.e. the locations which
2692 are in both chains go first, they are sorted by the sum of
2693 positions in the chains. */
2694 int dst_l, src_l;
2695 int ii, jj, n;
2696 struct variable_union_info *vui;
2698 /* If DST is shared compare the location chains.
2699 If they are different we will modify the chain in DST with
2700 high probability so make a copy of DST. */
2701 if (shared_var_p (dst, set->vars))
2703 for (node = src->var_part[i].loc_chain,
2704 node2 = dst->var_part[j].loc_chain; node && node2;
2705 node = node->next, node2 = node2->next)
2707 if (!((REG_P (node2->loc)
2708 && REG_P (node->loc)
2709 && REGNO (node2->loc) == REGNO (node->loc))
2710 || rtx_equal_p (node2->loc, node->loc)))
2712 if (node2->init < node->init)
2713 node2->init = node->init;
2714 break;
2717 if (node || node2)
2719 dstp = unshare_variable (set, dstp, dst,
2720 VAR_INIT_STATUS_UNKNOWN);
2721 dst = (variable)*dstp;
2725 src_l = 0;
2726 for (node = src->var_part[i].loc_chain; node; node = node->next)
2727 src_l++;
2728 dst_l = 0;
2729 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2730 dst_l++;
2732 if (dst_l == 1)
2734 /* The most common case, much simpler, no qsort is needed. */
2735 location_chain dstnode = dst->var_part[j].loc_chain;
2736 dst->var_part[k].loc_chain = dstnode;
2737 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
2738 node2 = dstnode;
2739 for (node = src->var_part[i].loc_chain; node; node = node->next)
2740 if (!((REG_P (dstnode->loc)
2741 && REG_P (node->loc)
2742 && REGNO (dstnode->loc) == REGNO (node->loc))
2743 || rtx_equal_p (dstnode->loc, node->loc)))
2745 location_chain new_node;
2747 /* Copy the location from SRC. */
2748 new_node = (location_chain) pool_alloc (loc_chain_pool);
2749 new_node->loc = node->loc;
2750 new_node->init = node->init;
2751 if (!node->set_src || MEM_P (node->set_src))
2752 new_node->set_src = NULL;
2753 else
2754 new_node->set_src = node->set_src;
2755 node2->next = new_node;
2756 node2 = new_node;
2758 node2->next = NULL;
2760 else
2762 if (src_l + dst_l > vui_allocated)
2764 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2765 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2766 vui_allocated);
2768 vui = vui_vec;
2770 /* Fill in the locations from DST. */
2771 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2772 node = node->next, jj++)
2774 vui[jj].lc = node;
2775 vui[jj].pos_dst = jj;
2777 /* Pos plus value larger than a sum of 2 valid positions. */
2778 vui[jj].pos = jj + src_l + dst_l;
2781 /* Fill in the locations from SRC. */
2782 n = dst_l;
2783 for (node = src->var_part[i].loc_chain, ii = 0; node;
2784 node = node->next, ii++)
2786 /* Find location from NODE. */
2787 for (jj = 0; jj < dst_l; jj++)
2789 if ((REG_P (vui[jj].lc->loc)
2790 && REG_P (node->loc)
2791 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2792 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2794 vui[jj].pos = jj + ii;
2795 break;
2798 if (jj >= dst_l) /* The location has not been found. */
2800 location_chain new_node;
2802 /* Copy the location from SRC. */
2803 new_node = (location_chain) pool_alloc (loc_chain_pool);
2804 new_node->loc = node->loc;
2805 new_node->init = node->init;
2806 if (!node->set_src || MEM_P (node->set_src))
2807 new_node->set_src = NULL;
2808 else
2809 new_node->set_src = node->set_src;
2810 vui[n].lc = new_node;
2811 vui[n].pos_dst = src_l + dst_l;
2812 vui[n].pos = ii + src_l + dst_l;
2813 n++;
2817 if (dst_l == 2)
2819 /* A special case that is still very common. For dst_l == 2
2820 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2821 vui[i].pos == i + src_l + dst_l. */
2822 if (vui[0].pos > vui[1].pos)
2824 /* Order should be 1, 0, 2... */
2825 dst->var_part[k].loc_chain = vui[1].lc;
2826 vui[1].lc->next = vui[0].lc;
2827 if (n >= 3)
2829 vui[0].lc->next = vui[2].lc;
2830 vui[n - 1].lc->next = NULL;
2832 else
2833 vui[0].lc->next = NULL;
2834 ii = 3;
2836 else
2838 dst->var_part[k].loc_chain = vui[0].lc;
2839 if (n >= 3 && vui[2].pos < vui[1].pos)
2841 /* Order should be 0, 2, 1, 3... */
2842 vui[0].lc->next = vui[2].lc;
2843 vui[2].lc->next = vui[1].lc;
2844 if (n >= 4)
2846 vui[1].lc->next = vui[3].lc;
2847 vui[n - 1].lc->next = NULL;
2849 else
2850 vui[1].lc->next = NULL;
2851 ii = 4;
2853 else
2855 /* Order should be 0, 1, 2... */
2856 ii = 1;
2857 vui[n - 1].lc->next = NULL;
2860 for (; ii < n; ii++)
2861 vui[ii - 1].lc->next = vui[ii].lc;
2863 else
2865 qsort (vui, n, sizeof (struct variable_union_info),
2866 variable_union_info_cmp_pos);
2868 /* Reconnect the nodes in sorted order. */
2869 for (ii = 1; ii < n; ii++)
2870 vui[ii - 1].lc->next = vui[ii].lc;
2871 vui[n - 1].lc->next = NULL;
2872 dst->var_part[k].loc_chain = vui[0].lc;
2875 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2877 i--;
2878 j--;
2880 else if ((i >= 0 && j >= 0
2881 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2882 || i < 0)
2884 dst->var_part[k] = dst->var_part[j];
2885 j--;
2887 else if ((i >= 0 && j >= 0
2888 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
2889 || j < 0)
2891 location_chain *nextp;
2893 /* Copy the chain from SRC. */
2894 nextp = &dst->var_part[k].loc_chain;
2895 for (node = src->var_part[i].loc_chain; node; node = node->next)
2897 location_chain new_lc;
2899 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2900 new_lc->next = NULL;
2901 new_lc->init = node->init;
2902 if (!node->set_src || MEM_P (node->set_src))
2903 new_lc->set_src = NULL;
2904 else
2905 new_lc->set_src = node->set_src;
2906 new_lc->loc = node->loc;
2908 *nextp = new_lc;
2909 nextp = &new_lc->next;
2912 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
2913 i--;
2915 dst->var_part[k].cur_loc = NULL;
2918 if (flag_var_tracking_uninit)
2919 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2921 location_chain node, node2;
2922 for (node = src->var_part[i].loc_chain; node; node = node->next)
2923 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2924 if (rtx_equal_p (node->loc, node2->loc))
2926 if (node->init > node2->init)
2927 node2->init = node->init;
2931 /* Continue traversing the hash table. */
2932 return 1;
2935 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2937 static void
2938 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2940 int i;
2942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2943 attrs_list_union (&dst->regs[i], src->regs[i]);
2945 if (dst->vars == empty_shared_hash)
2947 shared_hash_destroy (dst->vars);
2948 dst->vars = shared_hash_copy (src->vars);
2950 else
2952 htab_iterator hi;
2953 variable var;
2955 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2956 variable_union (var, dst);
2960 /* Whether the value is currently being expanded. */
2961 #define VALUE_RECURSED_INTO(x) \
2962 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2964 /* Whether no expansion was found, saving useless lookups.
2965 It must only be set when VALUE_CHANGED is clear. */
2966 #define NO_LOC_P(x) \
2967 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
2969 /* Whether cur_loc in the value needs to be (re)computed. */
2970 #define VALUE_CHANGED(x) \
2971 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2972 /* Whether cur_loc in the decl needs to be (re)computed. */
2973 #define DECL_CHANGED(x) TREE_VISITED (x)
2975 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
2976 user DECLs, this means they're in changed_variables. Values and
2977 debug exprs may be left with this flag set if no user variable
2978 requires them to be evaluated. */
2980 static inline void
2981 set_dv_changed (decl_or_value dv, bool newv)
2983 switch (dv_onepart_p (dv))
2985 case ONEPART_VALUE:
2986 if (newv)
2987 NO_LOC_P (dv_as_value (dv)) = false;
2988 VALUE_CHANGED (dv_as_value (dv)) = newv;
2989 break;
2991 case ONEPART_DEXPR:
2992 if (newv)
2993 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
2994 /* Fall through... */
2996 default:
2997 DECL_CHANGED (dv_as_decl (dv)) = newv;
2998 break;
3002 /* Return true if DV needs to have its cur_loc recomputed. */
3004 static inline bool
3005 dv_changed_p (decl_or_value dv)
3007 return (dv_is_value_p (dv)
3008 ? VALUE_CHANGED (dv_as_value (dv))
3009 : DECL_CHANGED (dv_as_decl (dv)));
3012 /* Return a location list node whose loc is rtx_equal to LOC, in the
3013 location list of a one-part variable or value VAR, or in that of
3014 any values recursively mentioned in the location lists. VARS must
3015 be in star-canonical form. */
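/* In star-canonical form every equivalence list points directly at
   the canonical VALUE of its set, so at most one VALUE link needs to
   be followed here: e.g. with hypothetical values V1 (canonical) and
   V2, V2's chain holds just V1, and every concrete location of the
   set is found on V1's own chain.  */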
3017 static location_chain
3018 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
3020 location_chain node;
3021 enum rtx_code loc_code;
3023 if (!var)
3024 return NULL;
3026 gcc_checking_assert (var->onepart);
3028 if (!var->n_var_parts)
3029 return NULL;
3031 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3033 loc_code = GET_CODE (loc);
3034 for (node = var->var_part[0].loc_chain; node; node = node->next)
3036 decl_or_value dv;
3037 variable rvar;
3039 if (GET_CODE (node->loc) != loc_code)
3041 if (GET_CODE (node->loc) != VALUE)
3042 continue;
3044 else if (loc == node->loc)
3045 return node;
3046 else if (loc_code != VALUE)
3048 if (rtx_equal_p (loc, node->loc))
3049 return node;
3050 continue;
3053 /* Since we're in star-canonical form, we don't need to visit
3054 non-canonical nodes: one-part variables and non-canonical
3055 values would only point back to the canonical node. */
3056 if (dv_is_value_p (var->dv)
3057 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3059 /* Skip all subsequent VALUEs. */
3060 while (node->next && GET_CODE (node->next->loc) == VALUE)
3062 node = node->next;
3063 gcc_checking_assert (!canon_value_cmp (node->loc,
3064 dv_as_value (var->dv)));
3065 if (loc == node->loc)
3066 return node;
3068 continue;
3071 gcc_checking_assert (node == var->var_part[0].loc_chain);
3072 gcc_checking_assert (!node->next);
3074 dv = dv_from_value (node->loc);
3075 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3076 return find_loc_in_1pdv (loc, rvar, vars);
3079 /* ??? Gotta look in cselib_val locations too. */
3081 return NULL;
3084 /* Hash table iteration argument passed to the variable merge functions. */
3085 struct dfset_merge
3087 /* The set in which the merge is to be inserted. */
3088 dataflow_set *dst;
3089 /* The set that we're iterating in. */
3090 dataflow_set *cur;
3091 /* The set that may contain the other dv we are to merge with. */
3092 dataflow_set *src;
3093 /* Number of onepart dvs in src. */
3094 int src_onepart_cnt;
3097 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
3098 loc_cmp order, and it is maintained as such. */
3100 static void
3101 insert_into_intersection (location_chain *nodep, rtx loc,
3102 enum var_init_status status)
3104 location_chain node;
3105 int r;
3107 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3108 if ((r = loc_cmp (node->loc, loc)) == 0)
3110 node->init = MIN (node->init, status);
3111 return;
3113 else if (r > 0)
3114 break;
3116 node = (location_chain) pool_alloc (loc_chain_pool);
3118 node->loc = loc;
3119 node->set_src = NULL;
3120 node->init = status;
3121 node->next = *nodep;
3122 *nodep = node;
3125 /* Insert in DEST the intersection of the locations present in both
3126 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3127 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3128 DSM->dst. */
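/* E.g. (locations hypothetical) if S1NODE's chain is
   ((reg ax) (mem M) (value V2)) and S2VAR's locations resolve to
   ((mem M) (value V2)), only (mem M) and (value V2) land in DEST,
   each with the minimum initialization status of its two sources.  */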
3130 static void
3131 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3132 location_chain s1node, variable s2var)
3134 dataflow_set *s1set = dsm->cur;
3135 dataflow_set *s2set = dsm->src;
3136 location_chain found;
3138 if (s2var)
3140 location_chain s2node;
3142 gcc_checking_assert (s2var->onepart);
3144 if (s2var->n_var_parts)
3146 s2node = s2var->var_part[0].loc_chain;
3148 for (; s1node && s2node;
3149 s1node = s1node->next, s2node = s2node->next)
3150 if (s1node->loc != s2node->loc)
3151 break;
3152 else if (s1node->loc == val)
3153 continue;
3154 else
3155 insert_into_intersection (dest, s1node->loc,
3156 MIN (s1node->init, s2node->init));
3160 for (; s1node; s1node = s1node->next)
3162 if (s1node->loc == val)
3163 continue;
3165 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3166 shared_hash_htab (s2set->vars))))
3168 insert_into_intersection (dest, s1node->loc,
3169 MIN (s1node->init, found->init));
3170 continue;
3173 if (GET_CODE (s1node->loc) == VALUE
3174 && !VALUE_RECURSED_INTO (s1node->loc))
3176 decl_or_value dv = dv_from_value (s1node->loc);
3177 variable svar = shared_hash_find (s1set->vars, dv);
3178 if (svar)
3180 if (svar->n_var_parts == 1)
3182 VALUE_RECURSED_INTO (s1node->loc) = true;
3183 intersect_loc_chains (val, dest, dsm,
3184 svar->var_part[0].loc_chain,
3185 s2var);
3186 VALUE_RECURSED_INTO (s1node->loc) = false;
3191 /* ??? gotta look in cselib_val locations too. */
3193 /* ??? if the location is equivalent to any location in src,
3194 searched recursively
3196 add to dst the values needed to represent the equivalence
3198 telling whether location S is equivalent to another dv's
3199 location list:
3201 for each location D in the list
3203 if S and D satisfy rtx_equal_p, then it is present
3205 else if D is a value, recurse without cycles
3207 else if S and D have the same CODE and MODE
3209 for each operand oS and the corresponding oD
3211 if oS and oD are not equivalent, then S and D are not equivalent
3213 else if they are RTX vectors
3215 if any vector oS element is not equivalent to its respective oD,
3216 then S and D are not equivalent
3224 /* Return -1 if X should be before Y in a location list for a 1-part
3225 variable, 1 if Y should be before X, and 0 if they're equivalent
3226 and should not appear in the list. */
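/* The resulting total order puts registers first (by REGNO), then
   MEMs (recursively by address), then VALUEs (by canon_value_cmp),
   then the remaining codes by GET_CODE and operands, with
   ENTRY_VALUEs sorting last as the least preferable expressions.  */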
3228 static int
3229 loc_cmp (rtx x, rtx y)
3231 int i, j, r;
3232 RTX_CODE code = GET_CODE (x);
3233 const char *fmt;
3235 if (x == y)
3236 return 0;
3238 if (REG_P (x))
3240 if (!REG_P (y))
3241 return -1;
3242 gcc_assert (GET_MODE (x) == GET_MODE (y));
3243 if (REGNO (x) == REGNO (y))
3244 return 0;
3245 else if (REGNO (x) < REGNO (y))
3246 return -1;
3247 else
3248 return 1;
3251 if (REG_P (y))
3252 return 1;
3254 if (MEM_P (x))
3256 if (!MEM_P (y))
3257 return -1;
3258 gcc_assert (GET_MODE (x) == GET_MODE (y));
3259 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3262 if (MEM_P (y))
3263 return 1;
3265 if (GET_CODE (x) == VALUE)
3267 if (GET_CODE (y) != VALUE)
3268 return -1;
3269 /* Don't assert the modes are the same, that is true only
3270 when not recursing. (subreg:QI (value:SI 1:1) 0)
3271 and (subreg:QI (value:DI 2:2) 0) can be compared,
3272 even when the modes are different. */
3273 if (canon_value_cmp (x, y))
3274 return -1;
3275 else
3276 return 1;
3279 if (GET_CODE (y) == VALUE)
3280 return 1;
3282 /* Entry value is the least preferable kind of expression. */
3283 if (GET_CODE (x) == ENTRY_VALUE)
3285 if (GET_CODE (y) != ENTRY_VALUE)
3286 return 1;
3287 gcc_assert (GET_MODE (x) == GET_MODE (y));
3288 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3291 if (GET_CODE (y) == ENTRY_VALUE)
3292 return -1;
3294 if (GET_CODE (x) == GET_CODE (y))
3295 /* Compare operands below. */;
3296 else if (GET_CODE (x) < GET_CODE (y))
3297 return -1;
3298 else
3299 return 1;
3301 gcc_assert (GET_MODE (x) == GET_MODE (y));
3303 if (GET_CODE (x) == DEBUG_EXPR)
3305 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3306 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3307 return -1;
3308 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3309 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3310 return 1;
3313 fmt = GET_RTX_FORMAT (code);
3314 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3315 switch (fmt[i])
3317 case 'w':
3318 if (XWINT (x, i) == XWINT (y, i))
3319 break;
3320 else if (XWINT (x, i) < XWINT (y, i))
3321 return -1;
3322 else
3323 return 1;
3325 case 'n':
3326 case 'i':
3327 if (XINT (x, i) == XINT (y, i))
3328 break;
3329 else if (XINT (x, i) < XINT (y, i))
3330 return -1;
3331 else
3332 return 1;
3334 case 'V':
3335 case 'E':
3336 /* Compare the vector length first. */
3337 if (XVECLEN (x, i) == XVECLEN (y, i))
3338 /* Compare the vectors elements. */;
3339 else if (XVECLEN (x, i) < XVECLEN (y, i))
3340 return -1;
3341 else
3342 return 1;
3344 for (j = 0; j < XVECLEN (x, i); j++)
3345 if ((r = loc_cmp (XVECEXP (x, i, j),
3346 XVECEXP (y, i, j))))
3347 return r;
3348 break;
3350 case 'e':
3351 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3352 return r;
3353 break;
3355 case 'S':
3356 case 's':
3357 if (XSTR (x, i) == XSTR (y, i))
3358 break;
3359 if (!XSTR (x, i))
3360 return -1;
3361 if (!XSTR (y, i))
3362 return 1;
3363 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3364 break;
3365 else if (r < 0)
3366 return -1;
3367 else
3368 return 1;
3370 case 'u':
3371 /* These are just backpointers, so they don't matter. */
3372 break;
3374 case '0':
3375 case 't':
3376 break;
3378 /* It is believed that rtx's at this level will never
3379 contain anything but integers and other rtx's,
3380 except for within LABEL_REFs and SYMBOL_REFs. */
3381 default:
3382 gcc_unreachable ();
3385 return 0;
3388 #if ENABLE_CHECKING
3389 /* Check the order of entries in one-part variables. */
3391 static int
3392 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3394 variable var = (variable) *slot;
3395 location_chain node, next;
3397 #ifdef ENABLE_RTL_CHECKING
3398 int i;
3399 for (i = 0; i < var->n_var_parts; i++)
3400 gcc_assert (var->var_part[0].cur_loc == NULL);
3401 gcc_assert (!var->in_changed_variables);
3402 #endif
3404 if (!var->onepart)
3405 return 1;
3407 gcc_assert (var->n_var_parts == 1);
3408 node = var->var_part[0].loc_chain;
3409 gcc_assert (node);
3411 while ((next = node->next))
3413 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3414 node = next;
3417 return 1;
3419 #endif
3421 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3422 more likely to be chosen as canonical for an equivalence set.
3423 Ensure less likely values can reach more likely neighbors, making
3424 the connections bidirectional. */
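/* E.g. (values hypothetical) if a non-canonical V2 lists the more
   canonical V1, V2 itself gets marked; if instead V1 lists V2, V1 is
   added onto V2's own list, making the link bidirectional, and V2 is
   marked so the star pass below revisits it.  */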
3426 static int
3427 canonicalize_values_mark (void **slot, void *data)
3429 dataflow_set *set = (dataflow_set *)data;
3430 variable var = (variable) *slot;
3431 decl_or_value dv = var->dv;
3432 rtx val;
3433 location_chain node;
3435 if (!dv_is_value_p (dv))
3436 return 1;
3438 gcc_checking_assert (var->n_var_parts == 1);
3440 val = dv_as_value (dv);
3442 for (node = var->var_part[0].loc_chain; node; node = node->next)
3443 if (GET_CODE (node->loc) == VALUE)
3445 if (canon_value_cmp (node->loc, val))
3446 VALUE_RECURSED_INTO (val) = true;
3447 else
3449 decl_or_value odv = dv_from_value (node->loc);
3450 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3452 set_slot_part (set, val, oslot, odv, 0,
3453 node->init, NULL_RTX);
3455 VALUE_RECURSED_INTO (node->loc) = true;
3459 return 1;
3462 /* Remove redundant entries from equivalence lists in onepart
3463 variables, canonicalizing equivalence sets into star shapes. */
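/* E.g. a chain of equivalences V3 -> V2 -> V1, with V1 the most
   canonical, is reshaped so that V2 and V3 each link directly to V1
   and V1's list collects the concrete locations; the value names are
   purely illustrative.  */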
3465 static int
3466 canonicalize_values_star (void **slot, void *data)
3468 dataflow_set *set = (dataflow_set *)data;
3469 variable var = (variable) *slot;
3470 decl_or_value dv = var->dv;
3471 location_chain node;
3472 decl_or_value cdv;
3473 rtx val, cval;
3474 void **cslot;
3475 bool has_value;
3476 bool has_marks;
3478 if (!var->onepart)
3479 return 1;
3481 gcc_checking_assert (var->n_var_parts == 1);
3483 if (dv_is_value_p (dv))
3485 cval = dv_as_value (dv);
3486 if (!VALUE_RECURSED_INTO (cval))
3487 return 1;
3488 VALUE_RECURSED_INTO (cval) = false;
3490 else
3491 cval = NULL_RTX;
3493 restart:
3494 val = cval;
3495 has_value = false;
3496 has_marks = false;
3498 gcc_assert (var->n_var_parts == 1);
3500 for (node = var->var_part[0].loc_chain; node; node = node->next)
3501 if (GET_CODE (node->loc) == VALUE)
3503 has_value = true;
3504 if (VALUE_RECURSED_INTO (node->loc))
3505 has_marks = true;
3506 if (canon_value_cmp (node->loc, cval))
3507 cval = node->loc;
3510 if (!has_value)
3511 return 1;
3513 if (cval == val)
3515 if (!has_marks || dv_is_decl_p (dv))
3516 return 1;
3518 /* Keep it marked so that we revisit it, either after visiting a
3519 child node, or after visiting a new parent that might be
3520 found out. */
3521 VALUE_RECURSED_INTO (val) = true;
3523 for (node = var->var_part[0].loc_chain; node; node = node->next)
3524 if (GET_CODE (node->loc) == VALUE
3525 && VALUE_RECURSED_INTO (node->loc))
3527 cval = node->loc;
3528 restart_with_cval:
3529 VALUE_RECURSED_INTO (cval) = false;
3530 dv = dv_from_value (cval);
3531 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3532 if (!slot)
3534 gcc_assert (dv_is_decl_p (var->dv));
3535 /* The canonical value was reset and dropped.
3536 Remove it. */
3537 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3538 return 1;
3540 var = (variable)*slot;
3541 gcc_assert (dv_is_value_p (var->dv));
3542 if (var->n_var_parts == 0)
3543 return 1;
3544 gcc_assert (var->n_var_parts == 1);
3545 goto restart;
3548 VALUE_RECURSED_INTO (val) = false;
3550 return 1;
3553 /* Push values to the canonical one. */
3554 cdv = dv_from_value (cval);
3555 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3557 for (node = var->var_part[0].loc_chain; node; node = node->next)
3558 if (node->loc != cval)
3560 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3561 node->init, NULL_RTX);
3562 if (GET_CODE (node->loc) == VALUE)
3564 decl_or_value ndv = dv_from_value (node->loc);
3566 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3567 NO_INSERT);
3569 if (canon_value_cmp (node->loc, val))
3571 /* If it could have been a local minimum, it's not any more,
3572 since it's now neighbor to cval, so it may have to push
3573 to it. Conversely, if it wouldn't have prevailed over
3574 val, then whatever mark it has is fine: if it was to
3575 push, it will now push to a more canonical node, but if
3576 it wasn't, then it has already pushed any values it might
3577 have to. */
3578 VALUE_RECURSED_INTO (node->loc) = true;
3579 /* Make sure we visit node->loc by ensuring that cval is
3580 visited too. */
3581 VALUE_RECURSED_INTO (cval) = true;
3583 else if (!VALUE_RECURSED_INTO (node->loc))
3584 /* If we have no need to "recurse" into this node, it's
3585 already "canonicalized", so drop the link to the old
3586 parent. */
3587 clobber_variable_part (set, cval, ndv, 0, NULL);
3589 else if (GET_CODE (node->loc) == REG)
3591 attrs list = set->regs[REGNO (node->loc)], *listp;
3593 /* Change an existing attribute referring to dv so that it
3594 refers to cdv, removing any duplicate this might
3595 introduce, and checking that no previous duplicates
3596 existed, all in a single pass. */
3598 while (list)
3600 if (list->offset == 0
3601 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3602 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3603 break;
3605 list = list->next;
3608 gcc_assert (list);
3609 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3611 list->dv = cdv;
3612 for (listp = &list->next; (list = *listp); listp = &list->next)
3614 if (list->offset)
3615 continue;
3617 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3619 *listp = list->next;
3620 pool_free (attrs_pool, list);
3621 list = *listp;
3622 break;
3625 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3628 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3630 for (listp = &list->next; (list = *listp); listp = &list->next)
3632 if (list->offset)
3633 continue;
3635 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3637 *listp = list->next;
3638 pool_free (attrs_pool, list);
3639 list = *listp;
3640 break;
3643 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3646 else
3647 gcc_unreachable ();
3649 #if ENABLE_CHECKING
3650 while (list)
3652 if (list->offset == 0
3653 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3654 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3655 gcc_unreachable ();
3657 list = list->next;
3659 #endif
3663 if (val)
3664 set_slot_part (set, val, cslot, cdv, 0,
3665 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3667 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3669 /* Variable may have been unshared. */
3670 var = (variable)*slot;
3671 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3672 && var->var_part[0].loc_chain->next == NULL);
3674 if (VALUE_RECURSED_INTO (cval))
3675 goto restart_with_cval;
3677 return 1;
3680 /* Bind one-part variables to the canonical value in an equivalence
3681 set. Not doing this causes dataflow convergence failure in rare
3682 circumstances, see PR42873. Unfortunately we can't do this
3683 efficiently as part of canonicalize_values_star, since we may not
3684 have determined or even seen the canonical value of a set when we
3685 get to a variable that references another member of the set. */
3687 static int
3688 canonicalize_vars_star (void **slot, void *data)
3690 dataflow_set *set = (dataflow_set *)data;
3691 variable var = (variable) *slot;
3692 decl_or_value dv = var->dv;
3693 location_chain node;
3694 rtx cval;
3695 decl_or_value cdv;
3696 void **cslot;
3697 variable cvar;
3698 location_chain cnode;
3700 if (!var->onepart || var->onepart == ONEPART_VALUE)
3701 return 1;
3703 gcc_assert (var->n_var_parts == 1);
3705 node = var->var_part[0].loc_chain;
3707 if (GET_CODE (node->loc) != VALUE)
3708 return 1;
3710 gcc_assert (!node->next);
3711 cval = node->loc;
3713 /* Push values to the canonical one. */
3714 cdv = dv_from_value (cval);
3715 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3716 if (!cslot)
3717 return 1;
3718 cvar = (variable)*cslot;
3719 gcc_assert (cvar->n_var_parts == 1);
3721 cnode = cvar->var_part[0].loc_chain;
3723 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3724 that are not more canonical than it. */
3725 if (GET_CODE (cnode->loc) != VALUE
3726 || !canon_value_cmp (cnode->loc, cval))
3727 return 1;
3729 /* CVAL was found to be non-canonical. Change the variable to point
3730 to the canonical VALUE. */
3731 gcc_assert (!cnode->next);
3732 cval = cnode->loc;
3734 slot = set_slot_part (set, cval, slot, dv, 0,
3735 node->init, node->set_src);
3736 clobber_slot_part (set, cval, slot, 0, node->set_src);
3738 return 1;
3741 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3742 corresponding entry in DSM->src. Multi-part variables are combined
3743 with variable_union, whereas onepart dvs are combined with
3744 intersection. */
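/* E.g. at a confluence of two blocks, a multi-part variable known in
   both is unioned as usual, but a hypothetical one-part variable
   living in (reg ax) in one block and in both (reg ax) and (mem M)
   in the other intersects down to just (reg ax).  */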
3746 static int
3747 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3749 dataflow_set *dst = dsm->dst;
3750 void **dstslot;
3751 variable s2var, dvar = NULL;
3752 decl_or_value dv = s1var->dv;
3753 onepart_enum_t onepart = s1var->onepart;
3754 rtx val;
3755 hashval_t dvhash;
3756 location_chain node, *nodep;
3758 /* If the incoming onepart variable has an empty location list, then
3759 the intersection will be just as empty. For other variables,
3760 it's always union. */
3761 gcc_checking_assert (s1var->n_var_parts
3762 && s1var->var_part[0].loc_chain);
3764 if (!onepart)
3765 return variable_union (s1var, dst);
3767 gcc_checking_assert (s1var->n_var_parts == 1);
3769 dvhash = dv_htab_hash (dv);
3770 if (dv_is_value_p (dv))
3771 val = dv_as_value (dv);
3772 else
3773 val = NULL;
3775 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3776 if (!s2var)
3778 dst_can_be_shared = false;
3779 return 1;
3782 dsm->src_onepart_cnt--;
3783 gcc_assert (s2var->var_part[0].loc_chain
3784 && s2var->onepart == onepart
3785 && s2var->n_var_parts == 1);
3787 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3788 if (dstslot)
3790 dvar = (variable)*dstslot;
3791 gcc_assert (dvar->refcount == 1
3792 && dvar->onepart == onepart
3793 && dvar->n_var_parts == 1);
3794 nodep = &dvar->var_part[0].loc_chain;
3796 else
3798 nodep = &node;
3799 node = NULL;
3802 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3804 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3805 dvhash, INSERT);
3806 *dstslot = dvar = s2var;
3807 dvar->refcount++;
3809 else
3811 dst_can_be_shared = false;
3813 intersect_loc_chains (val, nodep, dsm,
3814 s1var->var_part[0].loc_chain, s2var);
3816 if (!dstslot)
3818 if (node)
3820 dvar = (variable) pool_alloc (onepart_pool (onepart));
3821 dvar->dv = dv;
3822 dvar->refcount = 1;
3823 dvar->n_var_parts = 1;
3824 dvar->onepart = onepart;
3825 dvar->in_changed_variables = false;
3826 dvar->var_part[0].loc_chain = node;
3827 dvar->var_part[0].cur_loc = NULL;
3828 if (onepart)
3829 VAR_LOC_1PAUX (dvar) = NULL;
3830 else
3831 VAR_PART_OFFSET (dvar, 0) = 0;
3833 dstslot
3834 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3835 INSERT);
3836 gcc_assert (!*dstslot);
3837 *dstslot = dvar;
3839 else
3840 return 1;
3844 nodep = &dvar->var_part[0].loc_chain;
3845 while ((node = *nodep))
3847 location_chain *nextp = &node->next;
3849 if (GET_CODE (node->loc) == REG)
3851 attrs list;
3853 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3854 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3855 && dv_is_value_p (list->dv))
3856 break;
3858 if (!list)
3859 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3860 dv, 0, node->loc);
3861 /* If this value became canonical for another value that had
3862 this register, we want to leave it alone. */
3863 else if (dv_as_value (list->dv) != val)
3865 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3866 dstslot, dv, 0,
3867 node->init, NULL_RTX);
3868 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3870 /* Since nextp points into the removed node, we can't
3871 use it. The pointer to the next node moved to nodep.
3872 However, if the variable we're walking is unshared
3873 during our walk, we'll keep walking the location list
3874 of the previously-shared variable, in which case the
3875 node won't have been removed, and we'll want to skip
3876 it. That's why we test *nodep here. */
3877 if (*nodep != node)
3878 nextp = nodep;
3881 else
3882 /* Canonicalization puts registers first, so we don't have to
3883 walk it all. */
3884 break;
3885 nodep = nextp;
3888 if (dvar != (variable)*dstslot)
3889 dvar = (variable)*dstslot;
3890 nodep = &dvar->var_part[0].loc_chain;
3892 if (val)
3894 /* Mark all referenced nodes for canonicalization, and make sure
3895 we have mutual equivalence links. */
3896 VALUE_RECURSED_INTO (val) = true;
3897 for (node = *nodep; node; node = node->next)
3898 if (GET_CODE (node->loc) == VALUE)
3900 VALUE_RECURSED_INTO (node->loc) = true;
3901 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3902 node->init, NULL, INSERT);
3905 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3906 gcc_assert (*dstslot == dvar);
3907 canonicalize_values_star (dstslot, dst);
3908 gcc_checking_assert (dstslot
3909 == shared_hash_find_slot_noinsert_1 (dst->vars,
3910 dv, dvhash));
3911 dvar = (variable)*dstslot;
3913 else
3915 bool has_value = false, has_other = false;
3917 /* If we have one value and anything else, we're going to
3918 canonicalize this, so make sure all values have an entry in
3919 the table and are marked for canonicalization. */
3920 for (node = *nodep; node; node = node->next)
3922 if (GET_CODE (node->loc) == VALUE)
3924 /* If this was marked during register canonicalization,
3925 we know we have to canonicalize values. */
3926 if (has_value)
3927 has_other = true;
3928 has_value = true;
3929 if (has_other)
3930 break;
3932 else
3934 has_other = true;
3935 if (has_value)
3936 break;
3940 if (has_value && has_other)
3942 for (node = *nodep; node; node = node->next)
3944 if (GET_CODE (node->loc) == VALUE)
3946 decl_or_value dv = dv_from_value (node->loc);
3947 void **slot = NULL;
3949 if (shared_hash_shared (dst->vars))
3950 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3951 if (!slot)
3952 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3953 INSERT);
3954 if (!*slot)
3956 variable var = (variable) pool_alloc (onepart_pool
3957 (ONEPART_VALUE));
3958 var->dv = dv;
3959 var->refcount = 1;
3960 var->n_var_parts = 1;
3961 var->onepart = ONEPART_VALUE;
3962 var->in_changed_variables = false;
3963 var->var_part[0].loc_chain = NULL;
3964 var->var_part[0].cur_loc = NULL;
3965 VAR_LOC_1PAUX (var) = NULL;
3966 *slot = var;
3969 VALUE_RECURSED_INTO (node->loc) = true;
3973 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3974 gcc_assert (*dstslot == dvar);
3975 canonicalize_values_star (dstslot, dst);
3976 gcc_checking_assert (dstslot
3977 == shared_hash_find_slot_noinsert_1 (dst->vars,
3978 dv, dvhash));
3979 dvar = (variable)*dstslot;
3983 if (!onepart_variable_different_p (dvar, s2var))
3985 variable_htab_free (dvar);
3986 *dstslot = dvar = s2var;
3987 dvar->refcount++;
3989 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3991 variable_htab_free (dvar);
3992 *dstslot = dvar = s1var;
3993 dvar->refcount++;
3994 dst_can_be_shared = false;
3996 else
3997 dst_can_be_shared = false;
3999 return 1;
4002 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
4003 multi-part variable. Unions of multi-part variables and
4004 intersections of one-part ones will be handled in
4005 variable_merge_over_cur(). */
4007 static int
4008 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4010 dataflow_set *dst = dsm->dst;
4011 decl_or_value dv = s2var->dv;
4013 if (!s2var->onepart)
4015 void **dstp = shared_hash_find_slot (dst->vars, dv);
4016 *dstp = s2var;
4017 s2var->refcount++;
4018 return 1;
4021 dsm->src_onepart_cnt++;
4022 return 1;
4025 /* Combine dataflow set information from SRC2 into DST. */
4028 static void
4029 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4031 dataflow_set cur = *dst;
4032 dataflow_set *src1 = &cur;
4033 struct dfset_merge dsm;
4034 int i;
4035 size_t src1_elems, src2_elems;
4036 htab_iterator hi;
4037 variable var;
4039 src1_elems = htab_elements (shared_hash_htab (src1->vars));
4040 src2_elems = htab_elements (shared_hash_htab (src2->vars));
4041 dataflow_set_init (dst);
4042 dst->stack_adjust = cur.stack_adjust;
4043 shared_hash_destroy (dst->vars);
4044 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
4045 dst->vars->refcount = 1;
4046 dst->vars->htab
4047 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
4048 variable_htab_eq, variable_htab_free);
4050 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4051 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4053 dsm.dst = dst;
4054 dsm.src = src2;
4055 dsm.cur = src1;
4056 dsm.src_onepart_cnt = 0;
4058 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
4059 variable_merge_over_src (var, &dsm);
4060 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
4061 variable_merge_over_cur (var, &dsm);
4063 if (dsm.src_onepart_cnt)
4064 dst_can_be_shared = false;
4066 dataflow_set_destroy (src1);
4069 /* Mark register equivalences. */
4071 static void
4072 dataflow_set_equiv_regs (dataflow_set *set)
4074 int i;
4075 attrs list, *listp;
4077 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4079 rtx canon[NUM_MACHINE_MODES];
4081 /* If the list is empty or one entry, no need to canonicalize
4082 anything. */
4083 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4084 continue;
4086 memset (canon, 0, sizeof (canon));
4088 for (list = set->regs[i]; list; list = list->next)
4089 if (list->offset == 0 && dv_is_value_p (list->dv))
4091 rtx val = dv_as_value (list->dv);
4092 rtx *cvalp = &canon[(int)GET_MODE (val)];
4093 rtx cval = *cvalp;
4095 if (canon_value_cmp (val, cval))
4096 *cvalp = val;
4099 for (list = set->regs[i]; list; list = list->next)
4100 if (list->offset == 0 && dv_onepart_p (list->dv))
4102 rtx cval = canon[(int)GET_MODE (list->loc)];
4104 if (!cval)
4105 continue;
4107 if (dv_is_value_p (list->dv))
4109 rtx val = dv_as_value (list->dv);
4111 if (val == cval)
4112 continue;
4114 VALUE_RECURSED_INTO (val) = true;
4115 set_variable_part (set, val, dv_from_value (cval), 0,
4116 VAR_INIT_STATUS_INITIALIZED,
4117 NULL, NO_INSERT);
4120 VALUE_RECURSED_INTO (cval) = true;
4121 set_variable_part (set, cval, list->dv, 0,
4122 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4125 for (listp = &set->regs[i]; (list = *listp);
4126 listp = list ? &list->next : listp)
4127 if (list->offset == 0 && dv_onepart_p (list->dv))
4129 rtx cval = canon[(int)GET_MODE (list->loc)];
4130 void **slot;
4132 if (!cval)
4133 continue;
4135 if (dv_is_value_p (list->dv))
4137 rtx val = dv_as_value (list->dv);
4138 if (!VALUE_RECURSED_INTO (val))
4139 continue;
4142 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4143 canonicalize_values_star (slot, set);
4144 if (*listp != list)
4145 list = NULL;
4150 /* Remove any redundant values in the location list of VAR, which must
4151 be unshared and one-part. */
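/* Sketch of the two passes below (the chain contents are only an
   assumed example): VALUE_RECURSED_INTO serves as a "seen" mark, so
   a location chain (v1, r2, v1) loses its second v1 in the first
   pass, and the second pass clears the marks again. */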
4153 static void
4154 remove_duplicate_values (variable var)
4156 location_chain node, *nodep;
4158 gcc_assert (var->onepart);
4159 gcc_assert (var->n_var_parts == 1);
4160 gcc_assert (var->refcount == 1);
4162 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4164 if (GET_CODE (node->loc) == VALUE)
4166 if (VALUE_RECURSED_INTO (node->loc))
4168 /* Remove duplicate value node. */
4169 *nodep = node->next;
4170 pool_free (loc_chain_pool, node);
4171 continue;
4173 else
4174 VALUE_RECURSED_INTO (node->loc) = true;
4176 nodep = &node->next;
4179 for (node = var->var_part[0].loc_chain; node; node = node->next)
4180 if (GET_CODE (node->loc) == VALUE)
4182 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4183 VALUE_RECURSED_INTO (node->loc) = false;
4188 /* Hash table iteration argument passed to variable_post_merge. */
4189 struct dfset_post_merge
4191 /* The new input set for the current block. */
4192 dataflow_set *set;
4193 /* Pointer to the permanent input set for the current block, or
4194 NULL. */
4195 dataflow_set **permp;
4198 /* Create values for incoming expressions associated with one-part
4199 variables that don't have value numbers for them. */
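/* An assumed example: if decl D's location chain contains (reg:SI 0)
   and no VALUE is known for that register, the code below creates a
   fresh cselib VALUE v, preserves it, records r0 = v in the
   permanent set, and rewrites D's location to v so that later
   blocks can find D by value. */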
4201 static int
4202 variable_post_merge_new_vals (void **slot, void *info)
4204 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4205 dataflow_set *set = dfpm->set;
4206 variable var = (variable)*slot;
4207 location_chain node;
4209 if (!var->onepart || !var->n_var_parts)
4210 return 1;
4212 gcc_assert (var->n_var_parts == 1);
4214 if (dv_is_decl_p (var->dv))
4216 bool check_dupes = false;
4218 restart:
4219 for (node = var->var_part[0].loc_chain; node; node = node->next)
4221 if (GET_CODE (node->loc) == VALUE)
4222 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4223 else if (GET_CODE (node->loc) == REG)
4225 attrs att, *attp, *curp = NULL;
4227 if (var->refcount != 1)
4229 slot = unshare_variable (set, slot, var,
4230 VAR_INIT_STATUS_INITIALIZED);
4231 var = (variable)*slot;
4232 goto restart;
4235 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4236 attp = &att->next)
4237 if (att->offset == 0
4238 && GET_MODE (att->loc) == GET_MODE (node->loc))
4240 if (dv_is_value_p (att->dv))
4242 rtx cval = dv_as_value (att->dv);
4243 node->loc = cval;
4244 check_dupes = true;
4245 break;
4247 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4248 curp = attp;
4251 if (!curp)
4253 curp = attp;
4254 while (*curp)
4255 if ((*curp)->offset == 0
4256 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4257 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4258 break;
4259 else
4260 curp = &(*curp)->next;
4261 gcc_assert (*curp);
4264 if (!att)
4266 decl_or_value cdv;
4267 rtx cval;
4269 if (!*dfpm->permp)
4271 *dfpm->permp = XNEW (dataflow_set);
4272 dataflow_set_init (*dfpm->permp);
4275 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4276 att; att = att->next)
4277 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4279 gcc_assert (att->offset == 0
4280 && dv_is_value_p (att->dv));
4281 val_reset (set, att->dv);
4282 break;
4285 if (att)
4287 cdv = att->dv;
4288 cval = dv_as_value (cdv);
4290 else
4292 /* Create a unique value to hold this register,
4293 that ought to be found and reused in
4294 subsequent rounds. */
4295 cselib_val *v;
4296 gcc_assert (!cselib_lookup (node->loc,
4297 GET_MODE (node->loc), 0,
4298 VOIDmode));
4299 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4300 VOIDmode);
4301 cselib_preserve_value (v);
4302 cselib_invalidate_rtx (node->loc);
4303 cval = v->val_rtx;
4304 cdv = dv_from_value (cval);
4305 if (dump_file)
4306 fprintf (dump_file,
4307 "Created new value %u:%u for reg %i\n",
4308 v->uid, v->hash, REGNO (node->loc));
4311 var_reg_decl_set (*dfpm->permp, node->loc,
4312 VAR_INIT_STATUS_INITIALIZED,
4313 cdv, 0, NULL, INSERT);
4315 node->loc = cval;
4316 check_dupes = true;
4319 /* Remove the attribute referring to the decl, which now
4320 uses the value for the register, whether already existing
4321 or to be added when we bring perm in. */
4322 att = *curp;
4323 *curp = att->next;
4324 pool_free (attrs_pool, att);
4328 if (check_dupes)
4329 remove_duplicate_values (var);
4332 return 1;
4335 /* Reset values in the permanent set that are not associated with the
4336 chosen expression. */
4338 static int
4339 variable_post_merge_perm_vals (void **pslot, void *info)
4341 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4342 dataflow_set *set = dfpm->set;
4343 variable pvar = (variable)*pslot, var;
4344 location_chain pnode;
4345 decl_or_value dv;
4346 attrs att;
4348 gcc_assert (dv_is_value_p (pvar->dv)
4349 && pvar->n_var_parts == 1);
4350 pnode = pvar->var_part[0].loc_chain;
4351 gcc_assert (pnode
4352 && !pnode->next
4353 && REG_P (pnode->loc));
4355 dv = pvar->dv;
4357 var = shared_hash_find (set->vars, dv);
4358 if (var)
4360 /* Although variable_post_merge_new_vals may have made decls
4361 non-star-canonical, values that pre-existed in canonical form
4362 remain canonical, and newly-created values reference a single
4363 REG, so they are canonical as well. Since VAR has the
4364 location list for a VALUE, using find_loc_in_1pdv for it is
4365 fine, since VALUEs don't map back to DECLs. */
4366 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4367 return 1;
4368 val_reset (set, dv);
4371 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4372 if (att->offset == 0
4373 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4374 && dv_is_value_p (att->dv))
4375 break;
4377 /* If there is a value associated with this register already, create
4378 an equivalence. */
4379 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4381 rtx cval = dv_as_value (att->dv);
4382 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4383 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4384 NULL, INSERT);
4386 else if (!att)
4388 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4389 dv, 0, pnode->loc);
4390 variable_union (pvar, set);
4393 return 1;
4396 /* Adjust SET after merging: create VALUEs for incoming one-part
4397 expressions, reconcile SET with the permanent set *PERMP, and canonicalize the result. */
4399 static void
4400 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4402 struct dfset_post_merge dfpm;
4404 dfpm.set = set;
4405 dfpm.permp = permp;
4407 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4408 &dfpm);
4409 if (*permp)
4410 htab_traverse (shared_hash_htab ((*permp)->vars),
4411 variable_post_merge_perm_vals, &dfpm);
4412 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4413 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4416 /* Return a node whose loc is a MEM that refers to EXPR in the
4417 location list of the one-part variable or value VAL, or in that of
4418 any values recursively mentioned in the location lists. */
4420 static location_chain
4421 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4423 location_chain node;
4424 decl_or_value dv;
4425 variable var;
4426 location_chain where = NULL;
4428 if (!val)
4429 return NULL;
4431 gcc_assert (GET_CODE (val) == VALUE
4432 && !VALUE_RECURSED_INTO (val));
4434 dv = dv_from_value (val);
4435 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4437 if (!var)
4438 return NULL;
4440 gcc_assert (var->onepart);
4442 if (!var->n_var_parts)
4443 return NULL;
4445 VALUE_RECURSED_INTO (val) = true;
4447 for (node = var->var_part[0].loc_chain; node; node = node->next)
4448 if (MEM_P (node->loc)
4449 && MEM_EXPR (node->loc) == expr
4450 && INT_MEM_OFFSET (node->loc) == 0)
4452 where = node;
4453 break;
4455 else if (GET_CODE (node->loc) == VALUE
4456 && !VALUE_RECURSED_INTO (node->loc)
4457 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4458 break;
4460 VALUE_RECURSED_INTO (val) = false;
4462 return where;
4465 /* Return TRUE if the value of MEM may vary across a call. */
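/* Illustrative cases (hypothetical decls):

     int g;                    // global, writable: dies at a call
     static const int c = 1;   // read-only: survives
     int l;                    // local, never aliased: survives

   Only decls that may be aliased, or that are writable and global,
   can change behind our back during a call. */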
4467 static bool
4468 mem_dies_at_call (rtx mem)
4470 tree expr = MEM_EXPR (mem);
4471 tree decl;
4473 if (!expr)
4474 return true;
4476 decl = get_base_address (expr);
4478 if (!decl)
4479 return true;
4481 if (!DECL_P (decl))
4482 return true;
4484 return (may_be_aliased (decl)
4485 || (!TREE_READONLY (decl) && is_global_var (decl)));
4488 /* Remove all MEMs from the location list of a hash table entry for a
4489 one-part variable, except those whose MEM attributes map back to
4490 the variable itself, directly or within a VALUE. */
4492 static int
4493 dataflow_set_preserve_mem_locs (void **slot, void *data)
4495 dataflow_set *set = (dataflow_set *) data;
4496 variable var = (variable) *slot;
4498 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4500 tree decl = dv_as_decl (var->dv);
4501 location_chain loc, *locp;
4502 bool changed = false;
4504 if (!var->n_var_parts)
4505 return 1;
4507 gcc_assert (var->n_var_parts == 1);
4509 if (shared_var_p (var, set->vars))
4511 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4513 /* We want to remove dying MEMs that don't refer to DECL. */
4514 if (GET_CODE (loc->loc) == MEM
4515 && (MEM_EXPR (loc->loc) != decl
4516 || INT_MEM_OFFSET (loc->loc) != 0)
4517 && !mem_dies_at_call (loc->loc))
4518 break;
4519 /* We want to move MEMs that do refer to DECL here. */
4520 else if (GET_CODE (loc->loc) == VALUE
4521 && find_mem_expr_in_1pdv (decl, loc->loc,
4522 shared_hash_htab (set->vars)))
4523 break;
4526 if (!loc)
4527 return 1;
4529 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4530 var = (variable)*slot;
4531 gcc_assert (var->n_var_parts == 1);
4534 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4535 loc; loc = *locp)
4537 rtx old_loc = loc->loc;
4538 if (GET_CODE (old_loc) == VALUE)
4540 location_chain mem_node
4541 = find_mem_expr_in_1pdv (decl, loc->loc,
4542 shared_hash_htab (set->vars));
4544 /* ??? This picks up only one out of multiple MEMs that
4545 refer to the same variable. Do we ever need to be
4546 concerned about dealing with more than one, or, given
4547 that they should all map to the same variable
4548 location, their addresses will have been merged and
4549 they will be regarded as equivalent? */
4550 if (mem_node)
4552 loc->loc = mem_node->loc;
4553 loc->set_src = mem_node->set_src;
4554 loc->init = MIN (loc->init, mem_node->init);
4558 if (GET_CODE (loc->loc) != MEM
4559 || (MEM_EXPR (loc->loc) == decl
4560 && INT_MEM_OFFSET (loc->loc) == 0)
4561 || !mem_dies_at_call (loc->loc))
4563 if (old_loc != loc->loc && emit_notes)
4565 if (old_loc == var->var_part[0].cur_loc)
4567 changed = true;
4568 var->var_part[0].cur_loc = NULL;
4571 locp = &loc->next;
4572 continue;
4575 if (emit_notes)
4577 if (old_loc == var->var_part[0].cur_loc)
4579 changed = true;
4580 var->var_part[0].cur_loc = NULL;
4583 *locp = loc->next;
4584 pool_free (loc_chain_pool, loc);
4587 if (!var->var_part[0].loc_chain)
4589 var->n_var_parts--;
4590 changed = true;
4592 if (changed)
4593 variable_was_changed (var, set);
4596 return 1;
4599 /* Remove all MEMs from the location list of a hash table entry for a
4600 value. */
4602 static int
4603 dataflow_set_remove_mem_locs (void **slot, void *data)
4605 dataflow_set *set = (dataflow_set *) data;
4606 variable var = (variable) *slot;
4608 if (var->onepart == ONEPART_VALUE)
4610 location_chain loc, *locp;
4611 bool changed = false;
4612 rtx cur_loc;
4614 gcc_assert (var->n_var_parts == 1);
4616 if (shared_var_p (var, set->vars))
4618 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4619 if (GET_CODE (loc->loc) == MEM
4620 && mem_dies_at_call (loc->loc))
4621 break;
4623 if (!loc)
4624 return 1;
4626 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4627 var = (variable)*slot;
4628 gcc_assert (var->n_var_parts == 1);
4631 if (VAR_LOC_1PAUX (var))
4632 cur_loc = VAR_LOC_FROM (var);
4633 else
4634 cur_loc = var->var_part[0].cur_loc;
4636 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4637 loc; loc = *locp)
4639 if (GET_CODE (loc->loc) != MEM
4640 || !mem_dies_at_call (loc->loc))
4642 locp = &loc->next;
4643 continue;
4646 *locp = loc->next;
4647 /* If we have deleted the location that was last emitted,
4648 we have to emit a new location, so add the variable to the
4649 set of changed variables. */
4650 if (cur_loc == loc->loc)
4652 changed = true;
4653 var->var_part[0].cur_loc = NULL;
4654 if (VAR_LOC_1PAUX (var))
4655 VAR_LOC_FROM (var) = NULL;
4657 pool_free (loc_chain_pool, loc);
4660 if (!var->var_part[0].loc_chain)
4662 var->n_var_parts--;
4663 changed = true;
4665 if (changed)
4666 variable_was_changed (var, set);
4669 return 1;
4672 /* Remove all variable-location information about call-clobbered
4673 registers, as well as associations between MEMs and VALUEs. */
4675 static void
4676 dataflow_set_clear_at_call (dataflow_set *set)
4678 unsigned int r;
4679 hard_reg_set_iterator hrsi;
4681 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4682 var_regno_delete (set, r);
4684 if (MAY_HAVE_DEBUG_INSNS)
4686 set->traversed_vars = set->vars;
4687 htab_traverse (shared_hash_htab (set->vars),
4688 dataflow_set_preserve_mem_locs, set);
4689 set->traversed_vars = set->vars;
4690 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4691 set);
4692 set->traversed_vars = NULL;
4696 static bool
4697 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4699 location_chain lc1, lc2;
4701 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4703 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4705 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4707 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4708 break;
4710 if (rtx_equal_p (lc1->loc, lc2->loc))
4711 break;
4713 if (!lc2)
4714 return true;
4716 return false;
4719 /* Return true if one-part variables VAR1 and VAR2 are different.
4720 They must be in canonical order. */
4722 static bool
4723 onepart_variable_different_p (variable var1, variable var2)
4725 location_chain lc1, lc2;
4727 if (var1 == var2)
4728 return false;
4730 gcc_assert (var1->n_var_parts == 1
4731 && var2->n_var_parts == 1);
4733 lc1 = var1->var_part[0].loc_chain;
4734 lc2 = var2->var_part[0].loc_chain;
4736 gcc_assert (lc1 && lc2);
4738 while (lc1 && lc2)
4740 if (loc_cmp (lc1->loc, lc2->loc))
4741 return true;
4742 lc1 = lc1->next;
4743 lc2 = lc2->next;
4746 return lc1 != lc2;
4749 /* Return true if variables VAR1 and VAR2 are different. */
4751 static bool
4752 variable_different_p (variable var1, variable var2)
4754 int i;
4756 if (var1 == var2)
4757 return false;
4759 if (var1->onepart != var2->onepart)
4760 return true;
4762 if (var1->n_var_parts != var2->n_var_parts)
4763 return true;
4765 if (var1->onepart && var1->n_var_parts)
4767 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
4768 && var1->n_var_parts == 1);
4769 /* One-part values have locations in a canonical order. */
4770 return onepart_variable_different_p (var1, var2);
4773 for (i = 0; i < var1->n_var_parts; i++)
4775 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
4776 return true;
4777 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4778 return true;
4779 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4780 return true;
4782 return false;
4785 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4787 static bool
4788 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4790 htab_iterator hi;
4791 variable var1;
4793 if (old_set->vars == new_set->vars)
4794 return false;
4796 if (htab_elements (shared_hash_htab (old_set->vars))
4797 != htab_elements (shared_hash_htab (new_set->vars)))
4798 return true;
4800 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4802 htab_t htab = shared_hash_htab (new_set->vars);
4803 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4804 dv_htab_hash (var1->dv));
4805 if (!var2)
4807 if (dump_file && (dump_flags & TDF_DETAILS))
4809 fprintf (dump_file, "dataflow difference found: removal of:\n");
4810 dump_var (var1);
4812 return true;
4815 if (variable_different_p (var1, var2))
4817 if (dump_file && (dump_flags & TDF_DETAILS))
4819 fprintf (dump_file, "dataflow difference found: "
4820 "old and new follow:\n");
4821 dump_var (var1);
4822 dump_var (var2);
4824 return true;
4828 /* No need to traverse the second hashtab: if both have the same number
4829 of elements and every entry of the first was found in the second,
4830 then the second can't have any extra entries. */
4831 return false;
4834 /* Free the contents of dataflow set SET. */
4836 static void
4837 dataflow_set_destroy (dataflow_set *set)
4839 int i;
4841 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4842 attrs_list_clear (&set->regs[i]);
4844 shared_hash_destroy (set->vars);
4845 set->vars = NULL;
4848 /* Return true if RTL X contains a SYMBOL_REF. */
4850 static bool
4851 contains_symbol_ref (rtx x)
4853 const char *fmt;
4854 RTX_CODE code;
4855 int i;
4857 if (!x)
4858 return false;
4860 code = GET_CODE (x);
4861 if (code == SYMBOL_REF)
4862 return true;
4864 fmt = GET_RTX_FORMAT (code);
4865 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4867 if (fmt[i] == 'e')
4869 if (contains_symbol_ref (XEXP (x, i)))
4870 return true;
4872 else if (fmt[i] == 'E')
4874 int j;
4875 for (j = 0; j < XVECLEN (x, i); j++)
4876 if (contains_symbol_ref (XVECEXP (x, i, j)))
4877 return true;
4881 return false;
4884 /* Shall EXPR be tracked? */
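/* In rough terms: a named VAR_DECL or PARM_DECL with RTL assigned,
   not ignored for debugging, not TREE_STATIC, and small enough (no
   BLKmode or aggregate MEMs) is tracked; e.g. a local "int i" held
   in a register qualifies, a global array does not. This is only a
   summary of the checks spelled out below. */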
4886 static bool
4887 track_expr_p (tree expr, bool need_rtl)
4889 rtx decl_rtl;
4890 tree realdecl;
4892 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4893 return DECL_RTL_SET_P (expr);
4895 /* If EXPR is not a parameter or a variable, do not track it. */
4896 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4897 return 0;
4899 /* It also must have a name... */
4900 if (!DECL_NAME (expr) && need_rtl)
4901 return 0;
4903 /* ... and an RTL assigned to it. */
4904 decl_rtl = DECL_RTL_IF_SET (expr);
4905 if (!decl_rtl && need_rtl)
4906 return 0;
4908 /* If this expression is really a debug alias of some other declaration, we
4909 don't need to track this expression if the ultimate declaration is
4910 ignored. */
4911 realdecl = expr;
4912 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4914 realdecl = DECL_DEBUG_EXPR (realdecl);
4915 if (realdecl == NULL_TREE)
4916 realdecl = expr;
4917 else if (!DECL_P (realdecl))
4919 if (handled_component_p (realdecl)
4920 || (TREE_CODE (realdecl) == MEM_REF
4921 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
4923 HOST_WIDE_INT bitsize, bitpos, maxsize;
4924 tree innerdecl
4925 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4926 &maxsize);
4927 if (!DECL_P (innerdecl)
4928 || DECL_IGNORED_P (innerdecl)
4929 || TREE_STATIC (innerdecl)
4930 || bitsize <= 0
4931 || bitpos + bitsize > 256
4932 || bitsize != maxsize)
4933 return 0;
4934 else
4935 realdecl = expr;
4937 else
4938 return 0;
4942 /* Do not track EXPR if REALDECL should be ignored for debugging
4943 purposes. */
4944 if (DECL_IGNORED_P (realdecl))
4945 return 0;
4947 /* Do not track global variables until we are able to emit a correct
4948 location list for them. */
4949 if (TREE_STATIC (realdecl))
4950 return 0;
4952 /* When EXPR is a DECL that is an alias of some variable (see the
4953 example below), the TREE_STATIC flag is not used. Disable tracking
4954 of all DECLs whose DECL_RTL contains a SYMBOL_REF.
4956 Example:
4957 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4958 char **_dl_argv;
4960 if (decl_rtl && MEM_P (decl_rtl)
4961 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4962 return 0;
4964 /* If the RTL is a memory, it should not be very large (because that
4965 would be an array or a struct). */
4966 if (decl_rtl && MEM_P (decl_rtl))
4968 /* Do not track structures and arrays. */
4969 if (GET_MODE (decl_rtl) == BLKmode
4970 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4971 return 0;
4972 if (MEM_SIZE_KNOWN_P (decl_rtl)
4973 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4974 return 0;
4977 DECL_CHANGED (expr) = 0;
4978 DECL_CHANGED (realdecl) = 0;
4979 return 1;
4982 /* Determine whether a given LOC refers to the same variable part as
4983 EXPR+OFFSET. */
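/* E.g. (an assumed case): a (reg:SI 3) whose REG_EXPR is x and whose
   REG_OFFSET is 0 matches EXPR == x, OFFSET == 0; a MEM is matched
   through MEM_EXPR and INT_MEM_OFFSET instead, after both decls are
   mapped through var_debug_decl. */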
4985 static bool
4986 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4988 tree expr2;
4989 HOST_WIDE_INT offset2;
4991 if (! DECL_P (expr))
4992 return false;
4994 if (REG_P (loc))
4996 expr2 = REG_EXPR (loc);
4997 offset2 = REG_OFFSET (loc);
4999 else if (MEM_P (loc))
5001 expr2 = MEM_EXPR (loc);
5002 offset2 = INT_MEM_OFFSET (loc);
5004 else
5005 return false;
5007 if (! expr2 || ! DECL_P (expr2))
5008 return false;
5010 expr = var_debug_decl (expr);
5011 expr2 = var_debug_decl (expr2);
5013 return (expr == expr2 && offset == offset2);
5016 /* LOC is a REG or MEM that we would like to track if possible.
5017 If EXPR is null, we don't know what expression LOC refers to,
5018 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5019 LOC is an lvalue register.
5021 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5022 is something we can track. When returning true, store the mode of
5023 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5024 from EXPR in *OFFSET_OUT (if nonnull). */
5026 static bool
5027 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5028 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5030 enum machine_mode mode;
5032 if (expr == NULL || !track_expr_p (expr, true))
5033 return false;
5035 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5036 whole subreg, but only the old inner part is really relevant. */
5037 mode = GET_MODE (loc);
5038 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5040 enum machine_mode pseudo_mode;
5042 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5043 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5045 offset += byte_lowpart_offset (pseudo_mode, mode);
5046 mode = pseudo_mode;
5050 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5051 Do the same if we are storing to a register and EXPR occupies
5052 the whole of register LOC; in that case, the whole of EXPR is
5053 being changed. We exclude complex modes from the second case
5054 because the real and imaginary parts are represented as separate
5055 pseudo registers, even if the whole complex value fits into one
5056 hard register. */
5057 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5058 || (store_reg_p
5059 && !COMPLEX_MODE_P (DECL_MODE (expr))
5060 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5061 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5063 mode = DECL_MODE (expr);
5064 offset = 0;
5067 if (offset < 0 || offset >= MAX_VAR_PARTS)
5068 return false;
5070 if (mode_out)
5071 *mode_out = mode;
5072 if (offset_out)
5073 *offset_out = offset;
5074 return true;
5077 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5078 want to track. When returning nonnull, make sure that the attributes
5079 on the returned value are updated. */
5081 static rtx
5082 var_lowpart (enum machine_mode mode, rtx loc)
5084 unsigned int offset, reg_offset, regno;
5086 if (GET_MODE (loc) == mode)
5087 return loc;
5089 if (!REG_P (loc) && !MEM_P (loc))
5090 return NULL;
5092 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5094 if (MEM_P (loc))
5095 return adjust_address_nv (loc, mode, offset);
5097 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5098 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5099 reg_offset, mode);
5100 return gen_rtx_REG_offset (loc, mode, regno, offset);
5103 /* Carry information about uses and stores while walking rtx. */
5105 struct count_use_info
5107 /* The insn where the RTX is. */
5108 rtx insn;
5110 /* The basic block where insn is. */
5111 basic_block bb;
5113 /* The array of n_sets sets in the insn, as determined by cselib. */
5114 struct cselib_set *sets;
5115 int n_sets;
5117 /* True if we're counting stores, false otherwise. */
5118 bool store_p;
5121 /* Find a VALUE corresponding to X. */
5123 static inline cselib_val *
5124 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
5126 int i;
5128 if (cui->sets)
5130 /* This is called after uses are set up and before stores are
5131 processed by cselib, so it's safe to look up srcs, but not
5132 dsts. So we look up expressions that appear in srcs or in
5133 dest expressions, but we search the sets array for dests of
5134 stores. */
5135 if (cui->store_p)
5137 /* Some targets represent memset and memcpy patterns
5138 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5139 (set (mem:BLK ...) (const_int ...)) or
5140 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5141 in that case, otherwise we end up with mode mismatches. */
5142 if (mode == BLKmode && MEM_P (x))
5143 return NULL;
5144 for (i = 0; i < cui->n_sets; i++)
5145 if (cui->sets[i].dest == x)
5146 return cui->sets[i].src_elt;
5148 else
5149 return cselib_lookup (x, mode, 0, VOIDmode);
5152 return NULL;
5155 /* Replace all registers and addresses in an expression with VALUE
5156 expressions that map back to them, unless the expression is a
5157 register. If no mapping is or can be performed, returns NULL. */
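/* E.g. (RTL shapes assumed): (mem:SI (plus:SI (reg:SI 6)
   (const_int -4))) may become (mem:SI ...) with its address replaced
   by a known VALUE, while a bare (reg:SI 0) yields NULL because
   registers themselves are not replaced here. */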
5159 static rtx
5160 replace_expr_with_values (rtx loc)
5162 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5163 return NULL;
5164 else if (MEM_P (loc))
5166 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5167 get_address_mode (loc), 0,
5168 GET_MODE (loc));
5169 if (addr)
5170 return replace_equiv_address_nv (loc, addr->val_rtx);
5171 else
5172 return NULL;
5174 else
5175 return cselib_subst_to_values (loc, VOIDmode);
5178 /* Return true if *X is a DEBUG_EXPR. Usable as an argument to
5179 for_each_rtx to tell whether there are any DEBUG_EXPRs within
5180 RTX. */
5182 static int
5183 rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
5185 rtx loc = *x;
5187 return GET_CODE (loc) == DEBUG_EXPR;
5190 /* Determine what kind of micro operation to choose for a USE. Return
5191 MO_CLOBBER if no micro operation is to be generated. */
5193 static enum micro_operation_type
5194 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
5196 tree expr;
5198 if (cui && cui->sets)
5200 if (GET_CODE (loc) == VAR_LOCATION)
5202 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5204 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5205 if (! VAR_LOC_UNKNOWN_P (ploc))
5207 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5208 VOIDmode);
5210 /* ??? flag_float_store and volatile mems are never
5211 given values, but we could in theory use them for
5212 locations. */
5213 gcc_assert (val || 1);
5215 return MO_VAL_LOC;
5217 else
5218 return MO_CLOBBER;
5221 if (REG_P (loc) || MEM_P (loc))
5223 if (modep)
5224 *modep = GET_MODE (loc);
5225 if (cui->store_p)
5227 if (REG_P (loc)
5228 || (find_use_val (loc, GET_MODE (loc), cui)
5229 && cselib_lookup (XEXP (loc, 0),
5230 get_address_mode (loc), 0,
5231 GET_MODE (loc))))
5232 return MO_VAL_SET;
5234 else
5236 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5238 if (val && !cselib_preserved_value_p (val))
5239 return MO_VAL_USE;
5244 if (REG_P (loc))
5246 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5248 if (loc == cfa_base_rtx)
5249 return MO_CLOBBER;
5250 expr = REG_EXPR (loc);
5252 if (!expr)
5253 return MO_USE_NO_VAR;
5254 else if (target_for_debug_bind (var_debug_decl (expr)))
5255 return MO_CLOBBER;
5256 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5257 false, modep, NULL))
5258 return MO_USE;
5259 else
5260 return MO_USE_NO_VAR;
5262 else if (MEM_P (loc))
5264 expr = MEM_EXPR (loc);
5266 if (!expr)
5267 return MO_CLOBBER;
5268 else if (target_for_debug_bind (var_debug_decl (expr)))
5269 return MO_CLOBBER;
5270 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5271 false, modep, NULL)
5272 /* Multi-part variables shouldn't refer to one-part
5273 variable names such as VALUEs (never happens) or
5274 DEBUG_EXPRs (only happens in the presence of debug
5275 insns). */
5276 && (!MAY_HAVE_DEBUG_INSNS
5277 || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
5278 return MO_USE;
5279 else
5280 return MO_CLOBBER;
5283 return MO_CLOBBER;
5286 /* Log to OUT information about micro-operation MOPT involving X in
5287 INSN of BB. */
5289 static inline void
5290 log_op_type (rtx x, basic_block bb, rtx insn,
5291 enum micro_operation_type mopt, FILE *out)
5293 fprintf (out, "bb %i op %i insn %i %s ",
5294 bb->index, VTI (bb)->mos.length (),
5295 INSN_UID (insn), micro_operation_type_name[mopt]);
5296 print_inline_rtx (out, x, 2);
5297 fputc ('\n', out);
5300 /* Tell whether the CONCAT used to hold a VALUE and its location
5301 needs value resolution, i.e., an attempt at mapping the location
5302 back to other incoming values. */
5303 #define VAL_NEEDS_RESOLUTION(x) \
5304 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5305 /* Whether the location in the CONCAT is a tracked expression, that
5306 should also be handled like a MO_USE. */
5307 #define VAL_HOLDS_TRACK_EXPR(x) \
5308 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5309 /* Whether the location in the CONCAT should be handled like a MO_COPY
5310 as well. */
5311 #define VAL_EXPR_IS_COPIED(x) \
5312 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5313 /* Whether the location in the CONCAT should be handled like a
5314 MO_CLOBBER as well. */
5315 #define VAL_EXPR_IS_CLOBBERED(x) \
5316 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5318 /* All preserved VALUEs. */
5319 static vec<rtx> preserved_values;
5321 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5323 static void
5324 preserve_value (cselib_val *val)
5326 cselib_preserve_value (val);
5327 preserved_values.safe_push (val->val_rtx);
5330 /* Helper function for MO_VAL_LOC handling. Return nonzero if we
5331 discover any rtxes that are not suitable for use within a CONST
5332 and have not been replaced by VALUEs. */
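/* For instance: (const (plus (symbol_ref "x") (const_int 4))) is
   acceptable, but anything still containing a REG, a writable MEM
   or an ASM_OPERANDS is rejected, since such rtxes may change and
   therefore cannot appear in a constant location. (Illustrative
   examples only.) */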
5334 static int
5335 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5337 if (*x == NULL_RTX)
5338 return 0;
5340 switch (GET_CODE (*x))
5342 case REG:
5343 case DEBUG_EXPR:
5344 case PC:
5345 case SCRATCH:
5346 case CC0:
5347 case ASM_INPUT:
5348 case ASM_OPERANDS:
5349 return 1;
5350 case MEM:
5351 return !MEM_READONLY_P (*x);
5352 default:
5353 return 0;
5357 /* Add uses (register and memory references) LOC, which will be
5358 tracked, to VTI (bb)->mos. INSN is the instruction of which LOC is part. */
5360 static int
5361 add_uses (rtx *ploc, void *data)
5363 rtx loc = *ploc;
5364 enum machine_mode mode = VOIDmode;
5365 struct count_use_info *cui = (struct count_use_info *)data;
5366 enum micro_operation_type type = use_type (loc, cui, &mode);
5368 if (type != MO_CLOBBER)
5370 basic_block bb = cui->bb;
5371 micro_operation mo;
5373 mo.type = type;
5374 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5375 mo.insn = cui->insn;
5377 if (type == MO_VAL_LOC)
5379 rtx oloc = loc;
5380 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5381 cselib_val *val;
5383 gcc_assert (cui->sets);
5385 if (MEM_P (vloc)
5386 && !REG_P (XEXP (vloc, 0))
5387 && !MEM_P (XEXP (vloc, 0)))
5389 rtx mloc = vloc;
5390 enum machine_mode address_mode = get_address_mode (mloc);
5391 cselib_val *val
5392 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5393 GET_MODE (mloc));
5395 if (val && !cselib_preserved_value_p (val))
5396 preserve_value (val);
5399 if (CONSTANT_P (vloc)
5400 && (GET_CODE (vloc) != CONST
5401 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5402 /* For constants don't look up any value. */;
5403 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5404 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5406 enum machine_mode mode2;
5407 enum micro_operation_type type2;
5408 rtx nloc = NULL;
5409 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5411 if (resolvable)
5412 nloc = replace_expr_with_values (vloc);
5414 if (nloc)
5416 oloc = shallow_copy_rtx (oloc);
5417 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5420 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5422 type2 = use_type (vloc, 0, &mode2);
5424 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5425 || type2 == MO_CLOBBER);
5427 if (type2 == MO_CLOBBER
5428 && !cselib_preserved_value_p (val))
5430 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5431 preserve_value (val);
5434 else if (!VAR_LOC_UNKNOWN_P (vloc))
5436 oloc = shallow_copy_rtx (oloc);
5437 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5440 mo.u.loc = oloc;
5442 else if (type == MO_VAL_USE)
5444 enum machine_mode mode2 = VOIDmode;
5445 enum micro_operation_type type2;
5446 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5447 rtx vloc, oloc = loc, nloc;
5449 gcc_assert (cui->sets);
5451 if (MEM_P (oloc)
5452 && !REG_P (XEXP (oloc, 0))
5453 && !MEM_P (XEXP (oloc, 0)))
5455 rtx mloc = oloc;
5456 enum machine_mode address_mode = get_address_mode (mloc);
5457 cselib_val *val
5458 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5459 GET_MODE (mloc));
5461 if (val && !cselib_preserved_value_p (val))
5462 preserve_value (val);
5465 type2 = use_type (loc, 0, &mode2);
5467 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5468 || type2 == MO_CLOBBER);
5470 if (type2 == MO_USE)
5471 vloc = var_lowpart (mode2, loc);
5472 else
5473 vloc = oloc;
5475 /* The loc of a MO_VAL_USE may have two forms:
5477 (concat val src): val is at src, a value-based
5478 representation.
5480 (concat (concat val use) src): same as above, with use as
5481 the MO_USE tracked value, if it differs from src.
5485 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5486 nloc = replace_expr_with_values (loc);
5487 if (!nloc)
5488 nloc = oloc;
5490 if (vloc != nloc)
5491 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5492 else
5493 oloc = val->val_rtx;
5495 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5497 if (type2 == MO_USE)
5498 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5499 if (!cselib_preserved_value_p (val))
5501 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5502 preserve_value (val);
5505 else
5506 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5508 if (dump_file && (dump_flags & TDF_DETAILS))
5509 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5510 VTI (bb)->mos.safe_push (mo);
5513 return 0;
5516 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5518 static void
5519 add_uses_1 (rtx *x, void *cui)
5521 for_each_rtx (x, add_uses, cui);
5524 /* This is the depth limit used during expansion of locations. We want it
5525 to be unbounded, so that variables expanded deep in a recursion
5526 nest are fully evaluated, so that their values are cached
5527 correctly. We avoid recursion cycles through other means, and we
5528 don't unshare RTL, so excess complexity is not a problem. */
5529 #define EXPR_DEPTH (INT_MAX)
5530 /* We use this to keep too-complex expressions from being emitted as
5531 location notes, and thence into debug information. Users can trade
5532 compile time for ridiculously complex expressions, although they're
5533 seldom useful, and they may often have to be discarded as not
5534 representable anyway. */
5535 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5537 /* Attempt to reverse the EXPR operation in the debug info and record
5538 it in the cselib table. For example, for reg1 = reg2 + 6, even when
5539 reg2 is no longer live we can express its value as VAL - 6. */
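/* A sketch of the recorded equivalence (the RTL shapes are assumed
   for the example): given

     (set (reg:SI r1) (plus:SI (reg:SI r2) (const_int 6)))

   and a preserved VALUE v2 for r2, the code below adds the permanent
   equivalence v2 == VAL(r1) - 6, so r2's value stays expressible
   even after r2 is overwritten. */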
5541 static void
5542 reverse_op (rtx val, const_rtx expr, rtx insn)
5544 rtx src, arg, ret;
5545 cselib_val *v;
5546 struct elt_loc_list *l;
5547 enum rtx_code code;
5549 if (GET_CODE (expr) != SET)
5550 return;
5552 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5553 return;
5555 src = SET_SRC (expr);
5556 switch (GET_CODE (src))
5558 case PLUS:
5559 case MINUS:
5560 case XOR:
5561 case NOT:
5562 case NEG:
5563 if (!REG_P (XEXP (src, 0)))
5564 return;
5565 break;
5566 case SIGN_EXTEND:
5567 case ZERO_EXTEND:
5568 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5569 return;
5570 break;
5571 default:
5572 return;
5575 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5576 return;
5578 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5579 if (!v || !cselib_preserved_value_p (v))
5580 return;
5582 /* Use canonical V to avoid creating multiple redundant expressions
5583 for different VALUES equivalent to V. */
5584 v = canonical_cselib_val (v);
5586 /* Adding a reverse op isn't useful if V already has an always valid
5587 location. Ignore ENTRY_VALUE: while it is always constant, we should
5588 prefer non-ENTRY_VALUE locations whenever possible. */
5589 for (l = v->locs; l; l = l->next)
5590 if (CONSTANT_P (l->loc)
5591 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5592 return;
5594 switch (GET_CODE (src))
5596 case NOT:
5597 case NEG:
5598 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5599 return;
5600 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5601 break;
5602 case SIGN_EXTEND:
5603 case ZERO_EXTEND:
5604 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5605 break;
5606 case XOR:
5607 code = XOR;
5608 goto binary;
5609 case PLUS:
5610 code = MINUS;
5611 goto binary;
5612 case MINUS:
5613 code = PLUS;
5614 goto binary;
5615 binary:
5616 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5617 return;
5618 arg = XEXP (src, 1);
5619 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5621 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5622 if (arg == NULL_RTX)
5623 return;
5624 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5625 return;
5627 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5628 if (ret == val)
5629 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5630 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5631 breaks a lot of routines during var-tracking. */
5632 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5633 break;
5634 default:
5635 gcc_unreachable ();
5638 cselib_add_permanent_equiv (v, ret, insn);
5641 /* Add stores (register and memory references) LOC, which will be
5642 tracked, to VTI (bb)->mos. EXPR is the RTL expression containing the
5643 store. CUIP->insn is the instruction of which LOC is part. */
5645 static void
5646 add_stores (rtx loc, const_rtx expr, void *cuip)
5648 enum machine_mode mode = VOIDmode, mode2;
5649 struct count_use_info *cui = (struct count_use_info *)cuip;
5650 basic_block bb = cui->bb;
5651 micro_operation mo;
5652 rtx oloc = loc, nloc, src = NULL;
5653 enum micro_operation_type type = use_type (loc, cui, &mode);
5654 bool track_p = false;
5655 cselib_val *v;
5656 bool resolve, preserve;
5658 if (type == MO_CLOBBER)
5659 return;
5661 mode2 = mode;
5663 if (REG_P (loc))
5665 gcc_assert (loc != cfa_base_rtx);
5666 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5667 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5668 || GET_CODE (expr) == CLOBBER)
5670 mo.type = MO_CLOBBER;
5671 mo.u.loc = loc;
5672 if (GET_CODE (expr) == SET
5673 && SET_DEST (expr) == loc
5674 && !unsuitable_loc (SET_SRC (expr))
5675 && find_use_val (loc, mode, cui))
5677 gcc_checking_assert (type == MO_VAL_SET);
5678 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5681 else
5683 if (GET_CODE (expr) == SET
5684 && SET_DEST (expr) == loc
5685 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5686 src = var_lowpart (mode2, SET_SRC (expr));
5687 loc = var_lowpart (mode2, loc);
5689 if (src == NULL)
5691 mo.type = MO_SET;
5692 mo.u.loc = loc;
5694 else
5696 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5697 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5698 mo.type = MO_COPY;
5699 else
5700 mo.type = MO_SET;
5701 mo.u.loc = xexpr;
5704 mo.insn = cui->insn;
5706 else if (MEM_P (loc)
5707 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5708 || cui->sets))
5710 if (MEM_P (loc) && type == MO_VAL_SET
5711 && !REG_P (XEXP (loc, 0))
5712 && !MEM_P (XEXP (loc, 0)))
5714 rtx mloc = loc;
5715 enum machine_mode address_mode = get_address_mode (mloc);
5716 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5717 address_mode, 0,
5718 GET_MODE (mloc));
5720 if (val && !cselib_preserved_value_p (val))
5721 preserve_value (val);
5724 if (GET_CODE (expr) == CLOBBER || !track_p)
5726 mo.type = MO_CLOBBER;
5727 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5729 else
5731 if (GET_CODE (expr) == SET
5732 && SET_DEST (expr) == loc
5733 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5734 src = var_lowpart (mode2, SET_SRC (expr));
5735 loc = var_lowpart (mode2, loc);
5737 if (src == NULL)
5739 mo.type = MO_SET;
5740 mo.u.loc = loc;
5742 else
5744 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5745 if (same_variable_part_p (SET_SRC (xexpr),
5746 MEM_EXPR (loc),
5747 INT_MEM_OFFSET (loc)))
5748 mo.type = MO_COPY;
5749 else
5750 mo.type = MO_SET;
5751 mo.u.loc = xexpr;
5754 mo.insn = cui->insn;
5756 else
5757 return;
5759 if (type != MO_VAL_SET)
5760 goto log_and_return;
5762 v = find_use_val (oloc, mode, cui);
5764 if (!v)
5765 goto log_and_return;
5767 resolve = preserve = !cselib_preserved_value_p (v);
5769 if (loc == stack_pointer_rtx
5770 && hard_frame_pointer_adjustment != -1
5771 && preserve)
5772 cselib_set_value_sp_based (v);
5774 nloc = replace_expr_with_values (oloc);
5775 if (nloc)
5776 oloc = nloc;
5778 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5780 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5782 gcc_assert (oval != v);
5783 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5785 if (oval && !cselib_preserved_value_p (oval))
5787 micro_operation moa;
5789 preserve_value (oval);
5791 moa.type = MO_VAL_USE;
5792 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5793 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5794 moa.insn = cui->insn;
5796 if (dump_file && (dump_flags & TDF_DETAILS))
5797 log_op_type (moa.u.loc, cui->bb, cui->insn,
5798 moa.type, dump_file);
5799 VTI (bb)->mos.safe_push (moa);
5802 resolve = false;
5804 else if (resolve && GET_CODE (mo.u.loc) == SET)
5806 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
5807 nloc = replace_expr_with_values (SET_SRC (expr));
5808 else
5809 nloc = NULL_RTX;
5811 /* Avoid the mode mismatch between oexpr and expr. */
5812 if (!nloc && mode != mode2)
5814 nloc = SET_SRC (expr);
5815 gcc_assert (oloc == SET_DEST (expr));
5818 if (nloc && nloc != SET_SRC (mo.u.loc))
5819 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5820 else
5822 if (oloc == SET_DEST (mo.u.loc))
5823 /* No point in duplicating. */
5824 oloc = mo.u.loc;
5825 if (!REG_P (SET_SRC (mo.u.loc)))
5826 resolve = false;
5829 else if (!resolve)
5831 if (GET_CODE (mo.u.loc) == SET
5832 && oloc == SET_DEST (mo.u.loc))
5833 /* No point in duplicating. */
5834 oloc = mo.u.loc;
5836 else
5837 resolve = false;
5839 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5841 if (mo.u.loc != oloc)
5842 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5844 /* The loc of a MO_VAL_SET may have various forms:
5846 (concat val dst): dst now holds val
5848 (concat val (set dst src)): dst now holds val, copied from src
5850 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5851 after replacing mems and non-top-level regs with values.
5853 (concat (concat val dstv) (set dst src)): dst now holds val,
5854 copied from src. dstv is a value-based representation of dst, if
5855 it differs from dst. If resolution is needed, src is a REG, and
5856 its mode is the same as that of val.
5858 (concat (concat val (set dstv srcv)) (set dst src)): src
5859 copied to dst, holding val. dstv and srcv are value-based
5860 representations of dst and src, respectively.
5864 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5865 reverse_op (v->val_rtx, expr, cui->insn);
5867 mo.u.loc = loc;
5869 if (track_p)
5870 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5871 if (preserve)
5873 VAL_NEEDS_RESOLUTION (loc) = resolve;
5874 preserve_value (v);
5876 if (mo.type == MO_CLOBBER)
5877 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5878 if (mo.type == MO_COPY)
5879 VAL_EXPR_IS_COPIED (loc) = 1;
5881 mo.type = MO_VAL_SET;
5883 log_and_return:
5884 if (dump_file && (dump_flags & TDF_DETAILS))
5885 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5886 VTI (bb)->mos.safe_push (mo);
5889 /* Arguments to the call. */
5890 static rtx call_arguments;
5892 /* Compute call_arguments, an EXPR_LIST describing the arguments of the call in INSN of BB. */
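/* The list built below has, roughly, the following shape (registers,
   values and modes assumed for illustration):

     (expr_list (concat:SI (reg:SI 5) (value:SI v12))
       (expr_list (concat:DI (mem:DI ...) (value:DI v13))
         ...))

   i.e. one (location, value) CONCAT per argument we could resolve,
   restored to argument order by the final reversal. */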
5894 static void
5895 prepare_call_arguments (basic_block bb, rtx insn)
5897 rtx link, x, call;
5898 rtx prev, cur, next;
5899 rtx this_arg = NULL_RTX;
5900 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5901 tree obj_type_ref = NULL_TREE;
5902 CUMULATIVE_ARGS args_so_far_v;
5903 cumulative_args_t args_so_far;
5905 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5906 args_so_far = pack_cumulative_args (&args_so_far_v);
5907 call = get_call_rtx_from (insn);
5908 if (call)
5910 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5912 rtx symbol = XEXP (XEXP (call, 0), 0);
5913 if (SYMBOL_REF_DECL (symbol))
5914 fndecl = SYMBOL_REF_DECL (symbol);
5916 if (fndecl == NULL_TREE)
5917 fndecl = MEM_EXPR (XEXP (call, 0));
5918 if (fndecl
5919 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5920 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5921 fndecl = NULL_TREE;
5922 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5923 type = TREE_TYPE (fndecl);
5924 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5926 if (TREE_CODE (fndecl) == INDIRECT_REF
5927 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5928 obj_type_ref = TREE_OPERAND (fndecl, 0);
5929 fndecl = NULL_TREE;
5931 if (type)
5933 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5934 t = TREE_CHAIN (t))
5935 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5936 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5937 break;
5938 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5939 type = NULL;
5940 else
5942 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5943 link = CALL_INSN_FUNCTION_USAGE (insn);
5944 #ifndef PCC_STATIC_STRUCT_RETURN
5945 if (aggregate_value_p (TREE_TYPE (type), type)
5946 && targetm.calls.struct_value_rtx (type, 0) == 0)
5948 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5949 enum machine_mode mode = TYPE_MODE (struct_addr);
5950 rtx reg;
5951 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5952 nargs + 1);
5953 reg = targetm.calls.function_arg (args_so_far, mode,
5954 struct_addr, true);
5955 targetm.calls.function_arg_advance (args_so_far, mode,
5956 struct_addr, true);
5957 if (reg == NULL_RTX)
5959 for (; link; link = XEXP (link, 1))
5960 if (GET_CODE (XEXP (link, 0)) == USE
5961 && MEM_P (XEXP (XEXP (link, 0), 0)))
5963 link = XEXP (link, 1);
5964 break;
5968 else
5969 #endif
5970 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5971 nargs);
5972 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5974 enum machine_mode mode;
5975 t = TYPE_ARG_TYPES (type);
5976 mode = TYPE_MODE (TREE_VALUE (t));
5977 this_arg = targetm.calls.function_arg (args_so_far, mode,
5978 TREE_VALUE (t), true);
5979 if (this_arg && !REG_P (this_arg))
5980 this_arg = NULL_RTX;
5981 else if (this_arg == NULL_RTX)
5983 for (; link; link = XEXP (link, 1))
5984 if (GET_CODE (XEXP (link, 0)) == USE
5985 && MEM_P (XEXP (XEXP (link, 0), 0)))
5987 this_arg = XEXP (XEXP (link, 0), 0);
5988 break;
5995 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5997 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5998 if (GET_CODE (XEXP (link, 0)) == USE)
6000 rtx item = NULL_RTX;
6001 x = XEXP (XEXP (link, 0), 0);
6002 if (GET_MODE (link) == VOIDmode
6003 || GET_MODE (link) == BLKmode
6004 || (GET_MODE (link) != GET_MODE (x)
6005 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6006 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
6007 /* Can't do anything for these if the original type mode
6008 isn't known or can't be converted. */;
6009 else if (REG_P (x))
6011 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6012 if (val && cselib_preserved_value_p (val))
6013 item = val->val_rtx;
6014 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
6016 enum machine_mode mode = GET_MODE (x);
6018 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6019 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6021 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6023 if (reg == NULL_RTX || !REG_P (reg))
6024 continue;
6025 val = cselib_lookup (reg, mode, 0, VOIDmode);
6026 if (val && cselib_preserved_value_p (val))
6028 item = val->val_rtx;
6029 break;
6034 else if (MEM_P (x))
6036 rtx mem = x;
6037 cselib_val *val;
6039 if (!frame_pointer_needed)
6041 struct adjust_mem_data amd;
6042 amd.mem_mode = VOIDmode;
6043 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6044 amd.side_effects = NULL_RTX;
6045 amd.store = true;
6046 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6047 &amd);
6048 gcc_assert (amd.side_effects == NULL_RTX);
6050 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6051 if (val && cselib_preserved_value_p (val))
6052 item = val->val_rtx;
6053 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
6055 /* For a non-integer stack argument, also check whether it wasn't
6056 initialized by integers. */
6057 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6058 if (imode != GET_MODE (mem) && imode != BLKmode)
6060 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6061 imode, 0, VOIDmode);
6062 if (val && cselib_preserved_value_p (val))
6063 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6064 imode);
6068 if (item)
6070 rtx x2 = x;
6071 if (GET_MODE (item) != GET_MODE (link))
6072 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6073 if (GET_MODE (x2) != GET_MODE (link))
6074 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6075 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6076 call_arguments
6077 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6079 if (t && t != void_list_node)
6081 tree argtype = TREE_VALUE (t);
6082 enum machine_mode mode = TYPE_MODE (argtype);
6083 rtx reg;
6084 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6086 argtype = build_pointer_type (argtype);
6087 mode = TYPE_MODE (argtype);
6089 reg = targetm.calls.function_arg (args_so_far, mode,
6090 argtype, true);
6091 if (TREE_CODE (argtype) == REFERENCE_TYPE
6092 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6093 && reg
6094 && REG_P (reg)
6095 && GET_MODE (reg) == mode
6096 && GET_MODE_CLASS (mode) == MODE_INT
6097 && REG_P (x)
6098 && REGNO (x) == REGNO (reg)
6099 && GET_MODE (x) == mode
6100 && item)
6102 enum machine_mode indmode
6103 = TYPE_MODE (TREE_TYPE (argtype));
6104 rtx mem = gen_rtx_MEM (indmode, x);
6105 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6106 if (val && cselib_preserved_value_p (val))
6108 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6109 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6110 call_arguments);
6112 else
6114 struct elt_loc_list *l;
6115 tree initial;
6117 /* Try harder: when passing the address of a constant
6118 pool integer, it can be easily read back. */
6119 item = XEXP (item, 1);
6120 if (GET_CODE (item) == SUBREG)
6121 item = SUBREG_REG (item);
6122 gcc_assert (GET_CODE (item) == VALUE);
6123 val = CSELIB_VAL_PTR (item);
6124 for (l = val->locs; l; l = l->next)
6125 if (GET_CODE (l->loc) == SYMBOL_REF
6126 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6127 && SYMBOL_REF_DECL (l->loc)
6128 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6130 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6131 if (host_integerp (initial, 0))
6133 item = GEN_INT (tree_low_cst (initial, 0));
6134 item = gen_rtx_CONCAT (indmode, mem, item);
6135 call_arguments
6136 = gen_rtx_EXPR_LIST (VOIDmode, item,
6137 call_arguments);
6139 break;
6143 targetm.calls.function_arg_advance (args_so_far, mode,
6144 argtype, true);
6145 t = TREE_CHAIN (t);
6149 /* Add debug arguments. */
6150 if (fndecl
6151 && TREE_CODE (fndecl) == FUNCTION_DECL
6152 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6154 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6155 if (debug_args)
6157 unsigned int ix;
6158 tree param;
6159 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6161 rtx item;
6162 tree dtemp = (**debug_args)[ix + 1];
6163 enum machine_mode mode = DECL_MODE (dtemp);
6164 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6165 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6166 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6167 call_arguments);
6172 /* Reverse call_arguments chain. */
6173 prev = NULL_RTX;
6174 for (cur = call_arguments; cur; cur = next)
6176 next = XEXP (cur, 1);
6177 XEXP (cur, 1) = prev;
6178 prev = cur;
6180 call_arguments = prev;
6182 x = get_call_rtx_from (insn);
6183 if (x)
6185 x = XEXP (XEXP (x, 0), 0);
6186 if (GET_CODE (x) == SYMBOL_REF)
6187 /* Don't record anything. */;
6188 else if (CONSTANT_P (x))
6190 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6191 pc_rtx, x);
6192 call_arguments
6193 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6195 else
6197 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6198 if (val && cselib_preserved_value_p (val))
6200 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6201 call_arguments
6202 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6206 if (this_arg)
6208 enum machine_mode mode
6209 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6210 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6211 HOST_WIDE_INT token
6212 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
6213 if (token)
6214 clobbered = plus_constant (mode, clobbered,
6215 token * GET_MODE_SIZE (mode));
6216 clobbered = gen_rtx_MEM (mode, clobbered);
6217 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6218 call_arguments
6219 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6223 /* Callback for cselib_record_sets_hook, which records the uses and
6224 stores in an insn as micro operations after cselib_record_sets has
6225 analyzed the sets in the insn, but before it modifies the stored
6226 values in its internal tables. When it is called directly rather
6227 than from cselib_record_sets (perhaps because we're not doing cselib
6228 in the first place), SETS and N_SETS will be 0. */
6230 static void
6231 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
6233 basic_block bb = BLOCK_FOR_INSN (insn);
6234 int n1, n2;
6235 struct count_use_info cui;
6236 micro_operation *mos;
6238 cselib_hook_called = true;
6240 cui.insn = insn;
6241 cui.bb = bb;
6242 cui.sets = sets;
6243 cui.n_sets = n_sets;
6245 n1 = VTI (bb)->mos.length ();
6246 cui.store_p = false;
6247 note_uses (&PATTERN (insn), add_uses_1, &cui);
6248 n2 = VTI (bb)->mos.length () - 1;
6249 mos = VTI (bb)->mos.address ();
6251 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6252 MO_VAL_LOC last. */
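/* The two-pointer loops below are an in-place partition, as in
   quicksort's partition step: n1 scans forward for an element of
   the wrong class, n2 scans backward for one of the right class,
   and the two are swapped until the indices meet. */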
6253 while (n1 < n2)
6255 while (n1 < n2 && mos[n1].type == MO_USE)
6256 n1++;
6257 while (n1 < n2 && mos[n2].type != MO_USE)
6258 n2--;
6259 if (n1 < n2)
6261 micro_operation sw;
6263 sw = mos[n1];
6264 mos[n1] = mos[n2];
6265 mos[n2] = sw;
6269 n2 = VTI (bb)->mos.length () - 1;
6270 while (n1 < n2)
6272 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6273 n1++;
6274 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6275 n2--;
6276 if (n1 < n2)
6278 micro_operation sw;
6280 sw = mos[n1];
6281 mos[n1] = mos[n2];
6282 mos[n2] = sw;
6286 if (CALL_P (insn))
6288 micro_operation mo;
6290 mo.type = MO_CALL;
6291 mo.insn = insn;
6292 mo.u.loc = call_arguments;
6293 call_arguments = NULL_RTX;
6295 if (dump_file && (dump_flags & TDF_DETAILS))
6296 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6297 VTI (bb)->mos.safe_push (mo);
6300 n1 = VTI (bb)->mos.length ();
6301 /* This will record NEXT_INSN (insn), such that we can
6302 insert notes before it without worrying about any
6303 notes that MO_USEs might emit after the insn. */
6304 cui.store_p = true;
6305 note_stores (PATTERN (insn), add_stores, &cui);
6306 n2 = VTI (bb)->mos.length () - 1;
6307 mos = VTI (bb)->mos.address ();
6309 /* Order the MO_VAL_USEs first (note_stores does nothing
6310 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6311 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6312 while (n1 < n2)
6314 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6315 n1++;
6316 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6317 n2--;
6318 if (n1 < n2)
6320 micro_operation sw;
6322 sw = mos[n1];
6323 mos[n1] = mos[n2];
6324 mos[n2] = sw;
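/* Now move the MO_CLOBBERs in front of the remaining
MO_SET/MO_COPY/MO_VAL_SETs. */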
6328 n2 = VTI (bb)->mos.length () - 1;
6329 while (n1 < n2)
6331 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6332 n1++;
6333 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6334 n2--;
6335 if (n1 < n2)
6337 micro_operation sw;
6339 sw = mos[n1];
6340 mos[n1] = mos[n2];
6341 mos[n2] = sw;
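/* Return the initialization status, recorded in dataflow set IN, of
the variable part located in SRC (a REG or MEM). Without
flag_var_tracking_uninit everything is reported as initialized. */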
6346 static enum var_init_status
6347 find_src_status (dataflow_set *in, rtx src)
6349 tree decl = NULL_TREE;
6350 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6352 if (! flag_var_tracking_uninit)
6353 status = VAR_INIT_STATUS_INITIALIZED;
6355 if (src && REG_P (src))
6356 decl = var_debug_decl (REG_EXPR (src));
6357 else if (src && MEM_P (src))
6358 decl = var_debug_decl (MEM_EXPR (src));
6360 if (src && decl)
6361 status = get_init_value (in, src, dv_from_decl (decl));
6363 return status;
6366 /* SRC is the source of an assignment. Use SET to try to find what
6367 was ultimately assigned to SRC. Return that value if known,
6368 otherwise return SRC itself. */
6370 static rtx
6371 find_src_set_src (dataflow_set *set, rtx src)
6373 tree decl = NULL_TREE; /* The variable being copied around. */
6374 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6375 variable var;
6376 location_chain nextp;
6377 int i;
6378 bool found;
6380 if (src && REG_P (src))
6381 decl = var_debug_decl (REG_EXPR (src));
6382 else if (src && MEM_P (src))
6383 decl = var_debug_decl (MEM_EXPR (src));
6385 if (src && decl)
6387 decl_or_value dv = dv_from_decl (decl);
6389 var = shared_hash_find (set->vars, dv);
6390 if (var)
6392 found = false;
6393 for (i = 0; i < var->n_var_parts && !found; i++)
6394 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6395 nextp = nextp->next)
6396 if (rtx_equal_p (nextp->loc, src))
6398 set_src = nextp->set_src;
6399 found = true;
6405 return set_src;
6408 /* Compute the changes of variable locations in the basic block BB. */
6410 static bool
6411 compute_bb_dataflow (basic_block bb)
6413 unsigned int i;
6414 micro_operation *mo;
6415 bool changed;
6416 dataflow_set old_out;
6417 dataflow_set *in = &VTI (bb)->in;
6418 dataflow_set *out = &VTI (bb)->out;
6420 dataflow_set_init (&old_out);
6421 dataflow_set_copy (&old_out, out);
6422 dataflow_set_copy (out, in);
6424 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6426 rtx insn = mo->insn;
6428 switch (mo->type)
6430 case MO_CALL:
6431 dataflow_set_clear_at_call (out);
6432 break;
6434 case MO_USE:
6436 rtx loc = mo->u.loc;
6438 if (REG_P (loc))
6439 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6440 else if (MEM_P (loc))
6441 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6443 break;
6445 case MO_VAL_LOC:
6447 rtx loc = mo->u.loc;
6448 rtx val, vloc;
6449 tree var;
6451 if (GET_CODE (loc) == CONCAT)
6453 val = XEXP (loc, 0);
6454 vloc = XEXP (loc, 1);
6456 else
6458 val = NULL_RTX;
6459 vloc = loc;
6462 var = PAT_VAR_LOCATION_DECL (vloc);
6464 clobber_variable_part (out, NULL_RTX,
6465 dv_from_decl (var), 0, NULL_RTX);
6466 if (val)
6468 if (VAL_NEEDS_RESOLUTION (loc))
6469 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6470 set_variable_part (out, val, dv_from_decl (var), 0,
6471 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6472 INSERT);
6474 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6475 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6476 dv_from_decl (var), 0,
6477 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6478 INSERT);
6480 break;
6482 case MO_VAL_USE:
6484 rtx loc = mo->u.loc;
6485 rtx val, vloc, uloc;
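/* Decode the operand: LOC is (concat VAL VLOC) or
(concat (concat VAL ULOC) VLOC). */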
6487 vloc = uloc = XEXP (loc, 1);
6488 val = XEXP (loc, 0);
6490 if (GET_CODE (val) == CONCAT)
6492 uloc = XEXP (val, 1);
6493 val = XEXP (val, 0);
6496 if (VAL_NEEDS_RESOLUTION (loc))
6497 val_resolve (out, val, vloc, insn);
6498 else
6499 val_store (out, val, uloc, insn, false);
6501 if (VAL_HOLDS_TRACK_EXPR (loc))
6503 if (GET_CODE (uloc) == REG)
6504 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6505 NULL);
6506 else if (GET_CODE (uloc) == MEM)
6507 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6508 NULL);
6511 break;
6513 case MO_VAL_SET:
6515 rtx loc = mo->u.loc;
6516 rtx val, vloc, uloc;
6517 rtx dstv, srcv;
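/* Decode the operand: LOC is (concat VAL ULOC), where ULOC is the
annotated SET (or a bare location). */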
6519 vloc = loc;
6520 uloc = XEXP (vloc, 1);
6521 val = XEXP (vloc, 0);
6522 vloc = uloc;
6524 if (GET_CODE (uloc) == SET)
6526 dstv = SET_DEST (uloc);
6527 srcv = SET_SRC (uloc);
6529 else
6531 dstv = uloc;
6532 srcv = NULL;
6535 if (GET_CODE (val) == CONCAT)
6537 dstv = vloc = XEXP (val, 1);
6538 val = XEXP (val, 0);
6541 if (GET_CODE (vloc) == SET)
6543 srcv = SET_SRC (vloc);
6545 gcc_assert (val != srcv);
6546 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6548 dstv = vloc = SET_DEST (vloc);
6550 if (VAL_NEEDS_RESOLUTION (loc))
6551 val_resolve (out, val, srcv, insn);
6553 else if (VAL_NEEDS_RESOLUTION (loc))
6555 gcc_assert (GET_CODE (uloc) == SET
6556 && GET_CODE (SET_SRC (uloc)) == REG);
6557 val_resolve (out, val, SET_SRC (uloc), insn);
6560 if (VAL_HOLDS_TRACK_EXPR (loc))
6562 if (VAL_EXPR_IS_CLOBBERED (loc))
6564 if (REG_P (uloc))
6565 var_reg_delete (out, uloc, true);
6566 else if (MEM_P (uloc))
6568 gcc_assert (MEM_P (dstv));
6569 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6570 var_mem_delete (out, dstv, true);
6573 else
6575 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6576 rtx src = NULL, dst = uloc;
6577 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6579 if (GET_CODE (uloc) == SET)
6581 src = SET_SRC (uloc);
6582 dst = SET_DEST (uloc);
6585 if (copied_p)
6587 if (flag_var_tracking_uninit)
6589 status = find_src_status (in, src);
6591 if (status == VAR_INIT_STATUS_UNKNOWN)
6592 status = find_src_status (out, src);
6595 src = find_src_set_src (in, src);
6598 if (REG_P (dst))
6599 var_reg_delete_and_set (out, dst, !copied_p,
6600 status, srcv);
6601 else if (MEM_P (dst))
6603 gcc_assert (MEM_P (dstv));
6604 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6605 var_mem_delete_and_set (out, dstv, !copied_p,
6606 status, srcv);
6610 else if (REG_P (uloc))
6611 var_regno_delete (out, REGNO (uloc));
6612 else if (MEM_P (uloc))
6613 clobber_overlapping_mems (out, uloc);
6615 val_store (out, val, dstv, insn, true);
6617 break;
6619 case MO_SET:
6621 rtx loc = mo->u.loc;
6622 rtx set_src = NULL;
6624 if (GET_CODE (loc) == SET)
6626 set_src = SET_SRC (loc);
6627 loc = SET_DEST (loc);
6630 if (REG_P (loc))
6631 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6632 set_src);
6633 else if (MEM_P (loc))
6634 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6635 set_src);
6637 break;
6639 case MO_COPY:
6641 rtx loc = mo->u.loc;
6642 enum var_init_status src_status;
6643 rtx set_src = NULL;
6645 if (GET_CODE (loc) == SET)
6647 set_src = SET_SRC (loc);
6648 loc = SET_DEST (loc);
6651 if (! flag_var_tracking_uninit)
6652 src_status = VAR_INIT_STATUS_INITIALIZED;
6653 else
6655 src_status = find_src_status (in, set_src);
6657 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6658 src_status = find_src_status (out, set_src);
6661 set_src = find_src_set_src (in, set_src);
6663 if (REG_P (loc))
6664 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6665 else if (MEM_P (loc))
6666 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6668 break;
6670 case MO_USE_NO_VAR:
6672 rtx loc = mo->u.loc;
6674 if (REG_P (loc))
6675 var_reg_delete (out, loc, false);
6676 else if (MEM_P (loc))
6677 var_mem_delete (out, loc, false);
6679 break;
6681 case MO_CLOBBER:
6683 rtx loc = mo->u.loc;
6685 if (REG_P (loc))
6686 var_reg_delete (out, loc, true);
6687 else if (MEM_P (loc))
6688 var_mem_delete (out, loc, true);
6690 break;
6692 case MO_ADJUST:
6693 out->stack_adjust += mo->u.adjust;
6694 break;
6698 if (MAY_HAVE_DEBUG_INSNS)
6700 dataflow_set_equiv_regs (out);
6701 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6702 out);
6703 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6704 out);
6705 #if ENABLE_CHECKING
6706 htab_traverse (shared_hash_htab (out->vars),
6707 canonicalize_loc_order_check, out);
6708 #endif
6710 changed = dataflow_set_different (&old_out, out);
6711 dataflow_set_destroy (&old_out);
6712 return changed;
6715 /* Find the locations of variables in the whole function. */
6717 static bool
6718 vt_find_locations (void)
6720 fibheap_t worklist, pending, fibheap_swap;
6721 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6722 basic_block bb;
6723 edge e;
6724 int *bb_order;
6725 int *rc_order;
6726 int i;
6727 int htabsz = 0;
6728 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6729 bool success = true;
6731 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6732 /* Compute reverse completion order of depth-first search of the CFG
6733 so that the dataflow analysis runs faster. */
6734 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6735 bb_order = XNEWVEC (int, last_basic_block);
6736 pre_and_rev_post_order_compute (NULL, rc_order, false);
6737 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6738 bb_order[rc_order[i]] = i;
6739 free (rc_order);
6741 worklist = fibheap_new ();
6742 pending = fibheap_new ();
6743 visited = sbitmap_alloc (last_basic_block);
6744 in_worklist = sbitmap_alloc (last_basic_block);
6745 in_pending = sbitmap_alloc (last_basic_block);
6746 bitmap_clear (in_worklist);
6748 FOR_EACH_BB (bb)
6749 fibheap_insert (pending, bb_order[bb->index], bb);
6750 bitmap_ones (in_pending);
6752 while (success && !fibheap_empty (pending))
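/* Swap PENDING and WORKLIST (and their membership bitmaps), so that
the blocks queued for the next round become the current work list. */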
6754 fibheap_swap = pending;
6755 pending = worklist;
6756 worklist = fibheap_swap;
6757 sbitmap_swap = in_pending;
6758 in_pending = in_worklist;
6759 in_worklist = sbitmap_swap;
6761 bitmap_clear (visited);
6763 while (!fibheap_empty (worklist))
6765 bb = (basic_block) fibheap_extract_min (worklist);
6766 bitmap_clear_bit (in_worklist, bb->index);
6767 gcc_assert (!bitmap_bit_p (visited, bb->index));
6768 if (!bitmap_bit_p (visited, bb->index))
6770 bool changed;
6771 edge_iterator ei;
6772 int oldinsz, oldoutsz;
6774 bitmap_set_bit (visited, bb->index);
6776 if (VTI (bb)->in.vars)
6778 htabsz
6779 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6780 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6781 oldinsz
6782 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6783 oldoutsz
6784 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6786 else
6787 oldinsz = oldoutsz = 0;
6789 if (MAY_HAVE_DEBUG_INSNS)
6791 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6792 bool first = true, adjust = false;
6794 /* Calculate the IN set as the intersection of
6795 predecessor OUT sets. */
6797 dataflow_set_clear (in);
6798 dst_can_be_shared = true;
6800 FOR_EACH_EDGE (e, ei, bb->preds)
6801 if (!VTI (e->src)->flooded)
6802 gcc_assert (bb_order[bb->index]
6803 <= bb_order[e->src->index]);
6804 else if (first)
6806 dataflow_set_copy (in, &VTI (e->src)->out);
6807 first_out = &VTI (e->src)->out;
6808 first = false;
6810 else
6812 dataflow_set_merge (in, &VTI (e->src)->out);
6813 adjust = true;
6816 if (adjust)
6818 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6819 #if ENABLE_CHECKING
6820 /* Merge and merge_adjust should keep entries in
6821 canonical order. */
6822 htab_traverse (shared_hash_htab (in->vars),
6823 canonicalize_loc_order_check,
6824 in);
6825 #endif
6826 if (dst_can_be_shared)
6828 shared_hash_destroy (in->vars);
6829 in->vars = shared_hash_copy (first_out->vars);
6833 VTI (bb)->flooded = true;
6835 else
6837 /* Calculate the IN set as the union of predecessor OUT sets. */
6838 dataflow_set_clear (&VTI (bb)->in);
6839 FOR_EACH_EDGE (e, ei, bb->preds)
6840 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6843 changed = compute_bb_dataflow (bb);
6844 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6845 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
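/* Give up if the combined size of the IN/OUT hash tables has grown
past PARAM_MAX_VARTRACK_SIZE. */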
6847 if (htabmax && htabsz > htabmax)
6849 if (MAY_HAVE_DEBUG_INSNS)
6850 inform (DECL_SOURCE_LOCATION (cfun->decl),
6851 "variable tracking size limit exceeded with "
6852 "-fvar-tracking-assignments, retrying without");
6853 else
6854 inform (DECL_SOURCE_LOCATION (cfun->decl),
6855 "variable tracking size limit exceeded");
6856 success = false;
6857 break;
6860 if (changed)
6862 FOR_EACH_EDGE (e, ei, bb->succs)
6864 if (e->dest == EXIT_BLOCK_PTR)
6865 continue;
6867 if (bitmap_bit_p (visited, e->dest->index))
6869 if (!bitmap_bit_p (in_pending, e->dest->index))
6871 /* Send E->DEST to the next round. */
6872 bitmap_set_bit (in_pending, e->dest->index);
6873 fibheap_insert (pending,
6874 bb_order[e->dest->index],
6875 e->dest);
6878 else if (!bitmap_bit_p (in_worklist, e->dest->index))
6880 /* Add E->DEST to the current round. */
6881 bitmap_set_bit (in_worklist, e->dest->index);
6882 fibheap_insert (worklist, bb_order[e->dest->index],
6883 e->dest);
6888 if (dump_file)
6889 fprintf (dump_file,
6890 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6891 bb->index,
6892 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6893 oldinsz,
6894 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6895 oldoutsz,
6896 (int)worklist->nodes, (int)pending->nodes, htabsz);
6898 if (dump_file && (dump_flags & TDF_DETAILS))
6900 fprintf (dump_file, "BB %i IN:\n", bb->index);
6901 dump_dataflow_set (&VTI (bb)->in);
6902 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6903 dump_dataflow_set (&VTI (bb)->out);
6909 if (success && MAY_HAVE_DEBUG_INSNS)
6910 FOR_EACH_BB (bb)
6911 gcc_assert (VTI (bb)->flooded);
6913 free (bb_order);
6914 fibheap_delete (worklist);
6915 fibheap_delete (pending);
6916 sbitmap_free (visited);
6917 sbitmap_free (in_worklist);
6918 sbitmap_free (in_pending);
6920 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6921 return success;
6924 /* Print the content of the LIST to dump file. */
6926 static void
6927 dump_attrs_list (attrs list)
6929 for (; list; list = list->next)
6931 if (dv_is_decl_p (list->dv))
6932 print_mem_expr (dump_file, dv_as_decl (list->dv));
6933 else
6934 print_rtl_single (dump_file, dv_as_value (list->dv));
6935 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6937 fprintf (dump_file, "\n");
6940 /* Print the information about variable *SLOT to dump file. */
6942 static int
6943 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6945 variable var = (variable) *slot;
6947 dump_var (var);
6949 /* Continue traversing the hash table. */
6950 return 1;
6953 /* Print the information about variable VAR to dump file. */
6955 static void
6956 dump_var (variable var)
6958 int i;
6959 location_chain node;
6961 if (dv_is_decl_p (var->dv))
6963 const_tree decl = dv_as_decl (var->dv);
6965 if (DECL_NAME (decl))
6967 fprintf (dump_file, " name: %s",
6968 IDENTIFIER_POINTER (DECL_NAME (decl)));
6969 if (dump_flags & TDF_UID)
6970 fprintf (dump_file, "D.%u", DECL_UID (decl));
6972 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6973 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6974 else
6975 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6976 fprintf (dump_file, "\n");
6978 else
6980 fputc (' ', dump_file);
6981 print_rtl_single (dump_file, dv_as_value (var->dv));
6984 for (i = 0; i < var->n_var_parts; i++)
6986 fprintf (dump_file, " offset %ld\n",
6987 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
6988 for (node = var->var_part[i].loc_chain; node; node = node->next)
6990 fprintf (dump_file, " ");
6991 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6992 fprintf (dump_file, "[uninit]");
6993 print_rtl_single (dump_file, node->loc);
6998 /* Print the information about variables from hash table VARS to dump file. */
7000 static void
7001 dump_vars (htab_t vars)
7003 if (htab_elements (vars) > 0)
7005 fprintf (dump_file, "Variables:\n");
7006 htab_traverse (vars, dump_var_slot, NULL);
7010 /* Print the dataflow set SET to dump file. */
7012 static void
7013 dump_dataflow_set (dataflow_set *set)
7015 int i;
7017 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7018 set->stack_adjust);
7019 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7021 if (set->regs[i])
7023 fprintf (dump_file, "Reg %d:", i);
7024 dump_attrs_list (set->regs[i]);
7027 dump_vars (shared_hash_htab (set->vars));
7028 fprintf (dump_file, "\n");
7031 /* Print the IN and OUT sets for each basic block to dump file. */
7033 static void
7034 dump_dataflow_sets (void)
7036 basic_block bb;
7038 FOR_EACH_BB (bb)
7040 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7041 fprintf (dump_file, "IN:\n");
7042 dump_dataflow_set (&VTI (bb)->in);
7043 fprintf (dump_file, "OUT:\n");
7044 dump_dataflow_set (&VTI (bb)->out);
7048 /* Return the variable for DV in dropped_values, inserting one if
7049 requested with INSERT. */
7051 static inline variable
7052 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7054 void **slot;
7055 variable empty_var;
7056 onepart_enum_t onepart;
7058 slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
7059 insert);
7061 if (!slot)
7062 return NULL;
7064 if (*slot)
7065 return (variable) *slot;
7067 gcc_checking_assert (insert == INSERT);
7069 onepart = dv_onepart_p (dv);
7071 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7073 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7074 empty_var->dv = dv;
7075 empty_var->refcount = 1;
7076 empty_var->n_var_parts = 0;
7077 empty_var->onepart = onepart;
7078 empty_var->in_changed_variables = false;
7079 empty_var->var_part[0].loc_chain = NULL;
7080 empty_var->var_part[0].cur_loc = NULL;
7081 VAR_LOC_1PAUX (empty_var) = NULL;
7082 set_dv_changed (dv, true);
7084 *slot = empty_var;
7086 return empty_var;
7089 /* Recover the one-part aux from dropped_values. */
7091 static struct onepart_aux *
7092 recover_dropped_1paux (variable var)
7094 variable dvar;
7096 gcc_checking_assert (var->onepart);
7098 if (VAR_LOC_1PAUX (var))
7099 return VAR_LOC_1PAUX (var);
7101 if (var->onepart == ONEPART_VDECL)
7102 return NULL;
7104 dvar = variable_from_dropped (var->dv, NO_INSERT);
7106 if (!dvar)
7107 return NULL;
7109 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7110 VAR_LOC_1PAUX (dvar) = NULL;
7112 return VAR_LOC_1PAUX (var);
7115 /* Add variable VAR to the hash table of changed variables and,
7116 if it has no locations, delete it from SET's hash table. */
7118 static void
7119 variable_was_changed (variable var, dataflow_set *set)
7121 hashval_t hash = dv_htab_hash (var->dv);
7123 if (emit_notes)
7125 void **slot;
7127 /* Remember this decl or VALUE has been added to changed_variables. */
7128 set_dv_changed (var->dv, true);
7130 slot = htab_find_slot_with_hash (changed_variables,
7131 var->dv,
7132 hash, INSERT);
7134 if (*slot)
7136 variable old_var = (variable) *slot;
7137 gcc_assert (old_var->in_changed_variables);
7138 old_var->in_changed_variables = false;
7139 if (var != old_var && var->onepart)
7141 /* Restore the auxiliary info from an empty variable
7142 previously created for changed_variables, so it is
7143 not lost. */
7144 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7145 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7146 VAR_LOC_1PAUX (old_var) = NULL;
7148 variable_htab_free (*slot);
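/* If VAR has no location parts left, register an empty placeholder
variable in changed_variables (and, for VALUEs and DEBUG_EXPRs, in
dropped_values) and drop VAR from SET below. */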
7151 if (set && var->n_var_parts == 0)
7153 onepart_enum_t onepart = var->onepart;
7154 variable empty_var = NULL;
7155 void **dslot = NULL;
7157 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7159 dslot = htab_find_slot_with_hash (dropped_values, var->dv,
7160 dv_htab_hash (var->dv),
7161 INSERT);
7162 empty_var = (variable) *dslot;
7164 if (empty_var)
7166 gcc_checking_assert (!empty_var->in_changed_variables);
7167 if (!VAR_LOC_1PAUX (var))
7169 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7170 VAR_LOC_1PAUX (empty_var) = NULL;
7172 else
7173 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7177 if (!empty_var)
7179 empty_var = (variable) pool_alloc (onepart_pool (onepart));
7180 empty_var->dv = var->dv;
7181 empty_var->refcount = 1;
7182 empty_var->n_var_parts = 0;
7183 empty_var->onepart = onepart;
7184 if (dslot)
7186 empty_var->refcount++;
7187 *dslot = empty_var;
7190 else
7191 empty_var->refcount++;
7192 empty_var->in_changed_variables = true;
7193 *slot = empty_var;
7194 if (onepart)
7196 empty_var->var_part[0].loc_chain = NULL;
7197 empty_var->var_part[0].cur_loc = NULL;
7198 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7199 VAR_LOC_1PAUX (var) = NULL;
7201 goto drop_var;
7203 else
7205 if (var->onepart && !VAR_LOC_1PAUX (var))
7206 recover_dropped_1paux (var);
7207 var->refcount++;
7208 var->in_changed_variables = true;
7209 *slot = var;
7212 else
7214 gcc_assert (set);
7215 if (var->n_var_parts == 0)
7217 void **slot;
7219 drop_var:
7220 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7221 if (slot)
7223 if (shared_hash_shared (set->vars))
7224 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7225 NO_INSERT);
7226 htab_clear_slot (shared_hash_htab (set->vars), slot);
7232 /* Look for the index in VAR->var_part corresponding to OFFSET.
7233 Return -1 if not found. If INSERTION_POINT is non-NULL, the
7234 referenced int will be set to the index that the part has or should
7235 have, if it should be inserted. */
7237 static inline int
7238 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7239 int *insertion_point)
7241 int pos, low, high;
7243 if (var->onepart)
7245 if (offset != 0)
7246 return -1;
7248 if (insertion_point)
7249 *insertion_point = 0;
7251 return var->n_var_parts - 1;
7254 /* Find the location part. */
7255 low = 0;
7256 high = var->n_var_parts;
7257 while (low != high)
7259 pos = (low + high) / 2;
7260 if (VAR_PART_OFFSET (var, pos) < offset)
7261 low = pos + 1;
7262 else
7263 high = pos;
7265 pos = low;
7267 if (insertion_point)
7268 *insertion_point = pos;
7270 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7271 return pos;
7273 return -1;
7276 static void **
7277 set_slot_part (dataflow_set *set, rtx loc, void **slot,
7278 decl_or_value dv, HOST_WIDE_INT offset,
7279 enum var_init_status initialized, rtx set_src)
7281 int pos;
7282 location_chain node, next;
7283 location_chain *nextp;
7284 variable var;
7285 onepart_enum_t onepart;
7287 var = (variable) *slot;
7289 if (var)
7290 onepart = var->onepart;
7291 else
7292 onepart = dv_onepart_p (dv);
7294 gcc_checking_assert (offset == 0 || !onepart);
7295 gcc_checking_assert (loc != dv_as_opaque (dv));
7297 if (! flag_var_tracking_uninit)
7298 initialized = VAR_INIT_STATUS_INITIALIZED;
7300 if (!var)
7302 /* Create new variable information. */
7303 var = (variable) pool_alloc (onepart_pool (onepart));
7304 var->dv = dv;
7305 var->refcount = 1;
7306 var->n_var_parts = 1;
7307 var->onepart = onepart;
7308 var->in_changed_variables = false;
7309 if (var->onepart)
7310 VAR_LOC_1PAUX (var) = NULL;
7311 else
7312 VAR_PART_OFFSET (var, 0) = offset;
7313 var->var_part[0].loc_chain = NULL;
7314 var->var_part[0].cur_loc = NULL;
7315 *slot = var;
7316 pos = 0;
7317 nextp = &var->var_part[0].loc_chain;
7319 else if (onepart)
7321 int r = -1, c = 0;
7323 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7325 pos = 0;
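/* Keep the location chain in canonical order: REGs first, sorted by
REGNO, then MEMs sorted by address, then VALUEs in canon_value_cmp
order, then other expressions in loc_cmp order. R is set to 0 if LOC
is already present, to 1 once its insertion point is found; C counts
the nodes that precede it. */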
7327 if (GET_CODE (loc) == VALUE)
7329 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7330 nextp = &node->next)
7331 if (GET_CODE (node->loc) == VALUE)
7333 if (node->loc == loc)
7335 r = 0;
7336 break;
7338 if (canon_value_cmp (node->loc, loc))
7339 c++;
7340 else
7342 r = 1;
7343 break;
7346 else if (REG_P (node->loc) || MEM_P (node->loc))
7347 c++;
7348 else
7350 r = 1;
7351 break;
7354 else if (REG_P (loc))
7356 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7357 nextp = &node->next)
7358 if (REG_P (node->loc))
7360 if (REGNO (node->loc) < REGNO (loc))
7361 c++;
7362 else
7364 if (REGNO (node->loc) == REGNO (loc))
7365 r = 0;
7366 else
7367 r = 1;
7368 break;
7371 else
7373 r = 1;
7374 break;
7377 else if (MEM_P (loc))
7379 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7380 nextp = &node->next)
7381 if (REG_P (node->loc))
7382 c++;
7383 else if (MEM_P (node->loc))
7385 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7386 break;
7387 else
7388 c++;
7390 else
7392 r = 1;
7393 break;
7396 else
7397 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7398 nextp = &node->next)
7399 if ((r = loc_cmp (node->loc, loc)) >= 0)
7400 break;
7401 else
7402 c++;
7404 if (r == 0)
7405 return slot;
7407 if (shared_var_p (var, set->vars))
7409 slot = unshare_variable (set, slot, var, initialized);
7410 var = (variable)*slot;
7411 for (nextp = &var->var_part[0].loc_chain; c;
7412 nextp = &(*nextp)->next)
7413 c--;
7414 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7417 else
7419 int inspos = 0;
7421 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7423 pos = find_variable_location_part (var, offset, &inspos);
7425 if (pos >= 0)
7427 node = var->var_part[pos].loc_chain;
7429 if (node
7430 && ((REG_P (node->loc) && REG_P (loc)
7431 && REGNO (node->loc) == REGNO (loc))
7432 || rtx_equal_p (node->loc, loc)))
7434 /* LOC is at the beginning of the chain, so we have nothing
7435 to do. */
7436 if (node->init < initialized)
7437 node->init = initialized;
7438 if (set_src != NULL)
7439 node->set_src = set_src;
7441 return slot;
7443 else
7445 /* We have to make a copy of a shared variable. */
7446 if (shared_var_p (var, set->vars))
7448 slot = unshare_variable (set, slot, var, initialized);
7449 var = (variable)*slot;
7453 else
7455 /* We have not found the location part; a new one will be created. */
7457 /* We have to make a copy of the shared variable. */
7458 if (shared_var_p (var, set->vars))
7460 slot = unshare_variable (set, slot, var, initialized);
7461 var = (variable)*slot;
7464 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7465 thus there are at most MAX_VAR_PARTS different offsets. */
7466 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7467 && (!var->n_var_parts || !onepart));
7469 /* We have to move the elements of the array starting at index
7470 INSPOS to the next position. */
7471 for (pos = var->n_var_parts; pos > inspos; pos--)
7472 var->var_part[pos] = var->var_part[pos - 1];
7474 var->n_var_parts++;
7475 gcc_checking_assert (!onepart);
7476 VAR_PART_OFFSET (var, pos) = offset;
7477 var->var_part[pos].loc_chain = NULL;
7478 var->var_part[pos].cur_loc = NULL;
7481 /* Delete the location from the list. */
7482 nextp = &var->var_part[pos].loc_chain;
7483 for (node = var->var_part[pos].loc_chain; node; node = next)
7485 next = node->next;
7486 if ((REG_P (node->loc) && REG_P (loc)
7487 && REGNO (node->loc) == REGNO (loc))
7488 || rtx_equal_p (node->loc, loc))
7490 /* Save these values, to assign to the new node, before
7491 deleting this one. */
7492 if (node->init > initialized)
7493 initialized = node->init;
7494 if (node->set_src != NULL && set_src == NULL)
7495 set_src = node->set_src;
7496 if (var->var_part[pos].cur_loc == node->loc)
7497 var->var_part[pos].cur_loc = NULL;
7498 pool_free (loc_chain_pool, node);
7499 *nextp = next;
7500 break;
7502 else
7503 nextp = &node->next;
7506 nextp = &var->var_part[pos].loc_chain;
7509 /* Add the location to the beginning. */
7510 node = (location_chain) pool_alloc (loc_chain_pool);
7511 node->loc = loc;
7512 node->init = initialized;
7513 node->set_src = set_src;
7514 node->next = *nextp;
7515 *nextp = node;
7517 /* If no location has been emitted yet, register the change so a note gets emitted. */
7518 if (var->var_part[pos].cur_loc == NULL)
7519 variable_was_changed (var, set);
7521 return slot;
7524 /* Set the part of the variable's location in the dataflow set SET. The
7525 variable part is specified by the variable's declaration in DV and
7526 offset OFFSET, and the part's location by LOC. IOPT should be
7527 NO_INSERT if the variable is known to be in SET already and the
7528 variable hash table must not be resized, and INSERT otherwise. */
7530 static void
7531 set_variable_part (dataflow_set *set, rtx loc,
7532 decl_or_value dv, HOST_WIDE_INT offset,
7533 enum var_init_status initialized, rtx set_src,
7534 enum insert_option iopt)
7536 void **slot;
7538 if (iopt == NO_INSERT)
7539 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7540 else
7542 slot = shared_hash_find_slot (set->vars, dv);
7543 if (!slot)
7544 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7546 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7549 /* Remove all recorded register locations for the given variable part
7550 from dataflow set SET, except for those that are identical to LOC.
7551 The variable part is specified by its SET->vars slot SLOT and
7552 offset OFFSET. */
7554 static void **
7555 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7556 HOST_WIDE_INT offset, rtx set_src)
7558 variable var = (variable) *slot;
7559 int pos = find_variable_location_part (var, offset, NULL);
7561 if (pos >= 0)
7563 location_chain node, next;
7565 /* Remove the register locations from the dataflow set. */
7566 next = var->var_part[pos].loc_chain;
7567 for (node = next; node; node = next)
7569 next = node->next;
7570 if (node->loc != loc
7571 && (!flag_var_tracking_uninit
7572 || !set_src
7573 || MEM_P (set_src)
7574 || !rtx_equal_p (set_src, node->set_src)))
7576 if (REG_P (node->loc))
7578 attrs anode, anext;
7579 attrs *anextp;
7581 /* Remove the variable part from the register's
7582 list, but preserve any other variable parts
7583 that might be regarded as live in that same
7584 register. */
7585 anextp = &set->regs[REGNO (node->loc)];
7586 for (anode = *anextp; anode; anode = anext)
7588 anext = anode->next;
7589 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7590 && anode->offset == offset)
7592 pool_free (attrs_pool, anode);
7593 *anextp = anext;
7595 else
7596 anextp = &anode->next;
7600 slot = delete_slot_part (set, node->loc, slot, offset);
7605 return slot;
7608 /* Remove all recorded register locations for the given variable part
7609 from dataflow set SET, except for those that are identical to LOC.
7610 The variable part is specified by the variable's declaration or
7611 value DV and offset OFFSET. */
7613 static void
7614 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7615 HOST_WIDE_INT offset, rtx set_src)
7617 void **slot;
7619 if (!dv_as_opaque (dv)
7620 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7621 return;
7623 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7624 if (!slot)
7625 return;
7627 clobber_slot_part (set, loc, slot, offset, set_src);
7630 /* Delete the part of the variable's location from dataflow set SET. The
7631 variable part is specified by its SET->vars slot SLOT and offset
7632 OFFSET, and the part's location by LOC. */
7634 static void **
7635 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7636 HOST_WIDE_INT offset)
7638 variable var = (variable) *slot;
7639 int pos = find_variable_location_part (var, offset, NULL);
7641 if (pos >= 0)
7643 location_chain node, next;
7644 location_chain *nextp;
7645 bool changed;
7646 rtx cur_loc;
7648 if (shared_var_p (var, set->vars))
7650 /* If the variable contains the location part, we have to
7651 make a copy of the variable. */
7652 for (node = var->var_part[pos].loc_chain; node;
7653 node = node->next)
7655 if ((REG_P (node->loc) && REG_P (loc)
7656 && REGNO (node->loc) == REGNO (loc))
7657 || rtx_equal_p (node->loc, loc))
7659 slot = unshare_variable (set, slot, var,
7660 VAR_INIT_STATUS_UNKNOWN);
7661 var = (variable)*slot;
7662 break;
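/* For one-part variables, cur_loc holds the result of the expansion,
so the location last used for the note is found in VAR_LOC_FROM. */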
7667 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7668 cur_loc = VAR_LOC_FROM (var);
7669 else
7670 cur_loc = var->var_part[pos].cur_loc;
7672 /* Delete the location part. */
7673 changed = false;
7674 nextp = &var->var_part[pos].loc_chain;
7675 for (node = *nextp; node; node = next)
7677 next = node->next;
7678 if ((REG_P (node->loc) && REG_P (loc)
7679 && REGNO (node->loc) == REGNO (loc))
7680 || rtx_equal_p (node->loc, loc))
7682 /* If we have deleted the location which was last emitted,
7683 we have to emit a new location, so add the variable to the
7684 set of changed variables. */
7685 if (cur_loc == node->loc)
7687 changed = true;
7688 var->var_part[pos].cur_loc = NULL;
7689 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7690 VAR_LOC_FROM (var) = NULL;
7692 pool_free (loc_chain_pool, node);
7693 *nextp = next;
7694 break;
7696 else
7697 nextp = &node->next;
7700 if (var->var_part[pos].loc_chain == NULL)
7702 changed = true;
7703 var->n_var_parts--;
7704 while (pos < var->n_var_parts)
7706 var->var_part[pos] = var->var_part[pos + 1];
7707 pos++;
7710 if (changed)
7711 variable_was_changed (var, set);
7714 return slot;
7717 /* Delete the part of the variable's location from dataflow set SET. The
7718 variable part is specified by the variable's declaration or value DV
7719 and offset OFFSET, and the part's location by LOC. */
7721 static void
7722 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7723 HOST_WIDE_INT offset)
7725 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7726 if (!slot)
7727 return;
7729 delete_slot_part (set, loc, slot, offset);
7733 /* Structure for passing some other parameters to function
7734 vt_expand_loc_callback. */
7735 struct expand_loc_callback_data
7737 /* The variables and values active at this point. */
7738 htab_t vars;
7740 /* Stack of values and debug_exprs under expansion, and their
7741 children. */
7742 vec<rtx, va_stack> expanding;
7744 /* Stack of values and debug_exprs whose expansion hit recursion
7745 cycles. They will have VALUE_RECURSED_INTO marked when added to
7746 this list. This flag will be cleared if any of its dependencies
7747 resolves to a valid location. So, if the flag remains set at the
7748 end of the search, we know no valid location for this one can
7749 possibly exist. */
7750 vec<rtx, va_stack> pending;
7752 /* The maximum depth among the sub-expressions under expansion.
7753 Zero indicates no expansion so far. */
7754 expand_depth depth;
7757 /* Allocate the one-part auxiliary data structure for VAR, with enough
7758 room for COUNT dependencies. */
7760 static void
7761 loc_exp_dep_alloc (variable var, int count)
7763 size_t allocsize;
7765 gcc_checking_assert (var->onepart);
7767 /* We can be called with COUNT == 0 to allocate the data structure
7768 without any dependencies, e.g. for the backlinks only. However,
7769 if we are specifying a COUNT, then the dependency list must have
7770 been emptied before. It would be possible to adjust pointers or
7771 force it empty here, but this is better done at an earlier point
7772 in the algorithm, so we instead leave an assertion to catch
7773 errors. */
7774 gcc_checking_assert (!count
7775 || VAR_LOC_DEP_VEC (var) == NULL
7776 || VAR_LOC_DEP_VEC (var)->is_empty ());
7778 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
7779 return;
7781 allocsize = offsetof (struct onepart_aux, deps)
7782 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
7784 if (VAR_LOC_1PAUX (var))
7786 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
7787 VAR_LOC_1PAUX (var), allocsize);
7788 /* If the reallocation moves the onepaux structure, the
7789 back-pointer to BACKLINKS in the first list member will still
7790 point to its old location. Adjust it. */
7791 if (VAR_LOC_DEP_LST (var))
7792 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
7794 else
7796 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
7797 *VAR_LOC_DEP_LSTP (var) = NULL;
7798 VAR_LOC_FROM (var) = NULL;
7799 VAR_LOC_DEPTH (var).complexity = 0;
7800 VAR_LOC_DEPTH (var).entryvals = 0;
7802 VAR_LOC_DEP_VEC (var)->embedded_init (count);
7805 /* Remove all entries from the vector of active dependencies of VAR,
7806 removing them from the back-links lists too. */
7808 static void
7809 loc_exp_dep_clear (variable var)
7811 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
7813 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
7814 if (led->next)
7815 led->next->pprev = led->pprev;
7816 if (led->pprev)
7817 *led->pprev = led->next;
7818 VAR_LOC_DEP_VEC (var)->pop ();
7822 /* Insert an active dependency from VAR on X to the vector of
7823 dependencies, and add the corresponding back-link to X's list of
7824 back-links in VARS. */
7826 static void
7827 loc_exp_insert_dep (variable var, rtx x, htab_t vars)
7829 decl_or_value dv;
7830 variable xvar;
7831 loc_exp_dep *led;
7833 dv = dv_from_rtx (x);
7835 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
7836 an additional lookup? */
7837 xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7839 if (!xvar)
7841 xvar = variable_from_dropped (dv, NO_INSERT);
7842 gcc_checking_assert (xvar);
7845 /* No point in adding the same backlink more than once. This may
7846 arise if, say, the same value appears in two complex expressions in
7847 the same loc_list, or even more than once in a single
7848 expression. */
7849 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
7850 return;
7852 if (var->onepart == NOT_ONEPART)
7853 led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
7854 else
7856 loc_exp_dep empty;
7857 memset (&empty, 0, sizeof (empty));
7858 VAR_LOC_DEP_VEC (var)->quick_push (empty);
7859 led = &VAR_LOC_DEP_VEC (var)->last ();
7861 led->dv = var->dv;
7862 led->value = x;
7864 loc_exp_dep_alloc (xvar, 0);
7865 led->pprev = VAR_LOC_DEP_LSTP (xvar);
7866 led->next = *led->pprev;
7867 if (led->next)
7868 led->next->pprev = &led->next;
7869 *led->pprev = led;
7872 /* Create active dependencies of VAR on COUNT values starting at
7873 VALUE, and corresponding back-links to the entries in VARS. Return
7874 true if we found any pending-recursion results. */
7876 static bool
7877 loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
7879 bool pending_recursion = false;
7881 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
7882 || VAR_LOC_DEP_VEC (var)->is_empty ());
7884 /* Set up all dependencies from last_child (as set up at the end of
7885 the loop above) to the end. */
7886 loc_exp_dep_alloc (var, count);
7888 while (count--)
7890 rtx x = *value++;
7892 if (!pending_recursion)
7893 pending_recursion = !result && VALUE_RECURSED_INTO (x);
7895 loc_exp_insert_dep (var, x, vars);
7898 return pending_recursion;
7901 /* Notify the back-links of IVAR that are pending recursion that we
7902 have found a non-NIL value for it, so they are cleared for another
7903 attempt to compute a current location. */
7905 static void
7906 notify_dependents_of_resolved_value (variable ivar, htab_t vars)
7908 loc_exp_dep *led, *next;
7910 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
7912 decl_or_value dv = led->dv;
7913 variable var;
7915 next = led->next;
7917 if (dv_is_value_p (dv))
7919 rtx value = dv_as_value (dv);
7921 /* If we have already resolved it, leave it alone. */
7922 if (!VALUE_RECURSED_INTO (value))
7923 continue;
7925 /* Check that VALUE_RECURSED_INTO, true from the test above,
7926 implies NO_LOC_P. */
7927 gcc_checking_assert (NO_LOC_P (value));
7929 /* We won't notify variables that are being expanded,
7930 because their dependency list is cleared before
7931 recursing. */
7932 NO_LOC_P (value) = false;
7933 VALUE_RECURSED_INTO (value) = false;
7935 gcc_checking_assert (dv_changed_p (dv));
7937 else
7939 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
7940 if (!dv_changed_p (dv))
7941 continue;
7944 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
7946 if (!var)
7947 var = variable_from_dropped (dv, NO_INSERT);
7949 if (var)
7950 notify_dependents_of_resolved_value (var, vars);
7952 if (next)
7953 next->pprev = led->pprev;
7954 if (led->pprev)
7955 *led->pprev = next;
7956 led->next = NULL;
7957 led->pprev = NULL;
7961 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
7962 int max_depth, void *data);
7964 /* Return the combined depth, when one sub-expression evaluated to
7965 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
7967 static inline expand_depth
7968 update_depth (expand_depth saved_depth, expand_depth best_depth)
7970 /* If we didn't find anything, stick with what we had. */
7971 if (!best_depth.complexity)
7972 return saved_depth;
7974 /* If we hadn't found anything before, use the depth of the current
7975 expression. Do NOT add one extra level, we want to compute the
7976 maximum depth among sub-expressions. We'll increment it later,
7977 if appropriate. */
7978 if (!saved_depth.complexity)
7979 return best_depth;
7981 /* Combine the entryval count so that regardless of which one we
7982 return, the entryval count is accurate. */
7983 best_depth.entryvals = saved_depth.entryvals
7984 = best_depth.entryvals + saved_depth.entryvals;
7986 if (saved_depth.complexity < best_depth.complexity)
7987 return best_depth;
7988 else
7989 return saved_depth;
7992 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
7993 DATA for cselib expand callback. If PENDRECP is given, indicate in
7994 it whether any sub-expression couldn't be fully evaluated because
7995 it is pending recursion resolution. */
7997 static inline rtx
7998 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8000 struct expand_loc_callback_data *elcd
8001 = (struct expand_loc_callback_data *) data;
8002 location_chain loc, next;
8003 rtx result = NULL;
8004 int first_child, result_first_child, last_child;
8005 bool pending_recursion;
8006 rtx loc_from = NULL;
8007 struct elt_loc_list *cloc = NULL;
8008 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8009 int wanted_entryvals, found_entryvals = 0;
8011 /* Clear all backlinks pointing at this, so that we're not notified
8012 while we're active. */
8013 loc_exp_dep_clear (var);
8015 retry:
8016 if (var->onepart == ONEPART_VALUE)
8018 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8020 gcc_checking_assert (cselib_preserved_value_p (val));
8022 cloc = val->locs;
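/* For a VALUE, the loop below also walks the cselib-recorded
location list CLOC once the variable's own chain is exhausted. */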
8025 first_child = result_first_child = last_child
8026 = elcd->expanding.length ();
8028 wanted_entryvals = found_entryvals;
8030 /* Attempt to expand each available location in turn. */
8031 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8032 loc || cloc; loc = next)
8034 result_first_child = last_child;
8036 if (!loc)
8038 loc_from = cloc->loc;
8039 next = loc;
8040 cloc = cloc->next;
8041 if (unsuitable_loc (loc_from))
8042 continue;
8044 else
8046 loc_from = loc->loc;
8047 next = loc->next;
8050 gcc_checking_assert (!unsuitable_loc (loc_from));
8052 elcd->depth.complexity = elcd->depth.entryvals = 0;
8053 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8054 vt_expand_loc_callback, data);
8055 last_child = elcd->expanding.length ();
8057 if (result)
8059 depth = elcd->depth;
8061 gcc_checking_assert (depth.complexity
8062 || result_first_child == last_child);
8064 if (last_child - result_first_child != 1)
8066 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8067 depth.entryvals++;
8068 depth.complexity++;
8071 if (depth.complexity <= EXPR_USE_DEPTH)
8073 if (depth.entryvals <= wanted_entryvals)
8074 break;
8075 else if (!found_entryvals || depth.entryvals < found_entryvals)
8076 found_entryvals = depth.entryvals;
8079 result = NULL;
8082 /* Set it up in case we leave the loop. */
8083 depth.complexity = depth.entryvals = 0;
8084 loc_from = NULL;
8085 result_first_child = first_child;
8088 if (!loc_from && wanted_entryvals < found_entryvals)
8090 /* We found entries with ENTRY_VALUEs and skipped them. Since
8091 we could not find any expansions without ENTRY_VALUEs, but we
8092 found at least one with them, go back and get an entry with
8093 the minimum ENTRY_VALUE count that we found. We could
8094 avoid looping, but since each sub-loc is already resolved,
8095 the re-expansion should be trivial. ??? Should we record all
8096 attempted locs as dependencies, so that we retry the
8097 expansion should any of them change, in the hope it can give
8098 us a new entry without an ENTRY_VALUE? */
8099 elcd->expanding.truncate (first_child);
8100 goto retry;
8103 /* Register all encountered dependencies as active. */
8104 pending_recursion = loc_exp_dep_set
8105 (var, result, elcd->expanding.address () + result_first_child,
8106 last_child - result_first_child, elcd->vars);
8108 elcd->expanding.truncate (first_child);
8110 /* Record where the expansion came from. */
8111 gcc_checking_assert (!result || !pending_recursion);
8112 VAR_LOC_FROM (var) = loc_from;
8113 VAR_LOC_DEPTH (var) = depth;
8115 gcc_checking_assert (!depth.complexity == !result);
8117 elcd->depth = update_depth (saved_depth, depth);
8119 /* Indicate whether any of the dependencies are pending recursion
8120 resolution. */
8121 if (pendrecp)
8122 *pendrecp = pending_recursion;
8124 if (!pendrecp || !pending_recursion)
8125 var->var_part[0].cur_loc = result;
8127 return result;
8130 /* Callback for cselib_expand_value, that looks for expressions
8131 holding the value in the var-tracking hash tables. Return X for
8132 standard processing; anything else is to be used as-is. */
8134 static rtx
8135 vt_expand_loc_callback (rtx x, bitmap regs,
8136 int max_depth ATTRIBUTE_UNUSED,
8137 void *data)
8139 struct expand_loc_callback_data *elcd
8140 = (struct expand_loc_callback_data *) data;
8141 decl_or_value dv;
8142 variable var;
8143 rtx result, subreg;
8144 bool pending_recursion = false;
8145 bool from_empty = false;
8147 switch (GET_CODE (x))
8149 case SUBREG:
8150 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8151 EXPR_DEPTH,
8152 vt_expand_loc_callback, data);
8154 if (!subreg)
8155 return NULL;
8157 result = simplify_gen_subreg (GET_MODE (x), subreg,
8158 GET_MODE (SUBREG_REG (x)),
8159 SUBREG_BYTE (x));
8161 /* Invalid SUBREGs are ok in debug info. ??? We could try
8162 alternate expansions for the VALUE as well. */
8163 if (!result)
8164 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8166 return result;
8168 case DEBUG_EXPR:
8169 case VALUE:
8170 dv = dv_from_rtx (x);
8171 break;
8173 default:
8174 return x;
8177 elcd->expanding.safe_push (x);
8179 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8180 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8182 if (NO_LOC_P (x))
8184 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8185 return NULL;
8188 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
8190 if (!var)
8192 from_empty = true;
8193 var = variable_from_dropped (dv, INSERT);
8196 gcc_checking_assert (var);
8198 if (!dv_changed_p (dv))
8200 gcc_checking_assert (!NO_LOC_P (x));
8201 gcc_checking_assert (var->var_part[0].cur_loc);
8202 gcc_checking_assert (VAR_LOC_1PAUX (var));
8203 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8205 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8207 return var->var_part[0].cur_loc;
8210 VALUE_RECURSED_INTO (x) = true;
8211 /* This is tentative, but it makes some tests simpler. */
8212 NO_LOC_P (x) = true;
8214 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8216 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8218 if (pending_recursion)
8220 gcc_checking_assert (!result);
8221 elcd->pending.safe_push (x);
8223 else
8225 NO_LOC_P (x) = !result;
8226 VALUE_RECURSED_INTO (x) = false;
8227 set_dv_changed (dv, false);
8229 if (result)
8230 notify_dependents_of_resolved_value (var, elcd->vars);
8233 return result;
8236 /* While expanding variables, we may encounter recursion cycles
8237 because of mutual (possibly indirect) dependencies between two
8238 particular variables (or values), say A and B. Suppose that, while
8239 expanding A, we get to B, which in turn attempts to expand A; if
8240 we can't find any other expansion for B, we'll add B to this
8241 pending-recursion stack, and tentatively return NULL for its
8242 location. This tentative value will be used for any other
8243 occurrences of B, unless A gets some other location, in which case
8244 it will notify B that it is worth another try at computing a
8245 location for it, and it will use the location computed for A then.
8246 At the end of the expansion, the tentative NULL locations become
8247 final for all members of PENDING that didn't get a notification.
8248 This function performs this finalization of NULL locations. */
8250 static void
8251 resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
8253 while (!pending.is_empty ())
8255 rtx x = pending.pop ();
8256 decl_or_value dv;
8258 if (!VALUE_RECURSED_INTO (x))
8259 continue;
8261 gcc_checking_assert (NO_LOC_P (x));
8262 VALUE_RECURSED_INTO (x) = false;
8263 dv = dv_from_rtx (x);
8264 gcc_checking_assert (dv_changed_p (dv));
8265 set_dv_changed (dv, false);
8269 /* Initialize expand_loc_callback_data D with variable hash table V.
8270 It must be a macro because of alloca (vec stack). */
8271 #define INIT_ELCD(d, v) \
8272 do \
8274 (d).vars = (v); \
8275 vec_stack_alloc (rtx, (d).expanding, 4); \
8276 vec_stack_alloc (rtx, (d).pending, 4); \
8277 (d).depth.complexity = (d).depth.entryvals = 0; \
8279 while (0)
8280 /* Finalize expand_loc_callback_data D, resolved to location L. */
8281 #define FINI_ELCD(d, l) \
8282 do \
8284 resolve_expansions_pending_recursion ((d).pending); \
8285 (d).pending.release (); \
8286 (d).expanding.release (); \
8288 if ((l) && MEM_P (l)) \
8289 (l) = targetm.delegitimize_address (l); \
8291 while (0)
8293 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8294 equivalences in VARS, updating their CUR_LOCs in the process. */
8296 static rtx
8297 vt_expand_loc (rtx loc, htab_t vars)
8299 struct expand_loc_callback_data data;
8300 rtx result;
8302 if (!MAY_HAVE_DEBUG_INSNS)
8303 return loc;
8305 INIT_ELCD (data, vars);
8307 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8308 vt_expand_loc_callback, &data);
8310 FINI_ELCD (data, result);
8312 return result;
8315 /* Expand the one-part VARiable to a location, using the equivalences
8316 in VARS, updating their CUR_LOCs in the process. */
8318 static rtx
8319 vt_expand_1pvar (variable var, htab_t vars)
8321 struct expand_loc_callback_data data;
8322 rtx loc;
8324 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8326 if (!dv_changed_p (var->dv))
8327 return var->var_part[0].cur_loc;
8329 INIT_ELCD (data, vars);
8331 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8333 gcc_checking_assert (data.expanding.is_empty ());
8335 FINI_ELCD (data, loc);
8337 return loc;
8340 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8341 additional parameters: WHERE specifies whether the note shall be emitted
8342 before or after instruction INSN. */
8344 static int
8345 emit_note_insn_var_location (void **varp, void *data)
8347 variable var = (variable) *varp;
8348 rtx insn = ((emit_note_data *)data)->insn;
8349 enum emit_note_where where = ((emit_note_data *)data)->where;
8350 htab_t vars = ((emit_note_data *)data)->vars;
8351 rtx note, note_vl;
8352 int i, j, n_var_parts;
8353 bool complete;
8354 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8355 HOST_WIDE_INT last_limit;
8356 tree type_size_unit;
8357 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8358 rtx loc[MAX_VAR_PARTS];
8359 tree decl;
8360 location_chain lc;
8362 gcc_checking_assert (var->onepart == NOT_ONEPART
8363 || var->onepart == ONEPART_VDECL);
8365 decl = dv_as_decl (var->dv);
8367 complete = true;
8368 last_limit = 0;
8369 n_var_parts = 0;
8370 if (!var->onepart)
8371 for (i = 0; i < var->n_var_parts; i++)
8372 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8373 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8374 for (i = 0; i < var->n_var_parts; i++)
8376 enum machine_mode mode, wider_mode;
8377 rtx loc2;
8378 HOST_WIDE_INT offset;
8380 if (i == 0 && var->onepart)
8382 gcc_checking_assert (var->n_var_parts == 1);
8383 offset = 0;
8384 initialized = VAR_INIT_STATUS_INITIALIZED;
8385 loc2 = vt_expand_1pvar (var, vars);
8387 else
8389 if (last_limit < VAR_PART_OFFSET (var, i))
8391 complete = false;
8392 break;
8394 else if (last_limit > VAR_PART_OFFSET (var, i))
8395 continue;
8396 offset = VAR_PART_OFFSET (var, i);
8397 loc2 = var->var_part[i].cur_loc;
8398 if (loc2 && GET_CODE (loc2) == MEM
8399 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8401 rtx depval = XEXP (loc2, 0);
8403 loc2 = vt_expand_loc (loc2, vars);
8405 if (loc2)
8406 loc_exp_insert_dep (var, depval, vars);
8408 if (!loc2)
8410 complete = false;
8411 continue;
8413 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8414 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8415 if (var->var_part[i].cur_loc == lc->loc)
8417 initialized = lc->init;
8418 break;
8420 gcc_assert (lc);
8423 offsets[n_var_parts] = offset;
8424 if (!loc2)
8426 complete = false;
8427 continue;
8429 loc[n_var_parts] = loc2;
8430 mode = GET_MODE (var->var_part[i].cur_loc);
8431 if (mode == VOIDmode && var->onepart)
8432 mode = DECL_MODE (decl);
8433 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8435 /* Attempt to merge adjacent registers or memory. */
8436 wider_mode = GET_MODE_WIDER_MODE (mode);
8437 for (j = i + 1; j < var->n_var_parts; j++)
8438 if (last_limit <= VAR_PART_OFFSET (var, j))
8439 break;
8440 if (j < var->n_var_parts
8441 && wider_mode != VOIDmode
8442 && var->var_part[j].cur_loc
8443 && mode == GET_MODE (var->var_part[j].cur_loc)
8444 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8445 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8446 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8447 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8449 rtx new_loc = NULL;
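/* If the parts live in two contiguous hard registers that together
form a wider_mode register, merge them into one register reference. */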
8451 if (REG_P (loc[n_var_parts])
8452 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8453 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8454 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8455 == REGNO (loc2))
8457 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8458 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8459 mode, 0);
8460 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8461 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8462 if (new_loc)
8464 if (!REG_P (new_loc)
8465 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8466 new_loc = NULL;
8467 else
8468 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
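/* Likewise, two MEMs at consecutive addresses can be merged into a
single wider_mode MEM. */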
8471 else if (MEM_P (loc[n_var_parts])
8472 && GET_CODE (XEXP (loc2, 0)) == PLUS
8473 && REG_P (XEXP (XEXP (loc2, 0), 0))
8474 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8476 if ((REG_P (XEXP (loc[n_var_parts], 0))
8477 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8478 XEXP (XEXP (loc2, 0), 0))
8479 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8480 == GET_MODE_SIZE (mode))
8481 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8482 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8483 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8484 XEXP (XEXP (loc2, 0), 0))
8485 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8486 + GET_MODE_SIZE (mode)
8487 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8488 new_loc = adjust_address_nv (loc[n_var_parts],
8489 wider_mode, 0);
8492 if (new_loc)
8494 loc[n_var_parts] = new_loc;
8495 mode = wider_mode;
8496 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8497 i = j;
8500 ++n_var_parts;
8502 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8503 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8504 complete = false;
8506 if (! flag_var_tracking_uninit)
8507 initialized = VAR_INIT_STATUS_INITIALIZED;
8509 note_vl = NULL_RTX;
8510 if (!complete)
8511 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
8512 (int) initialized);
8513 else if (n_var_parts == 1)
8515 rtx expr_list;
8517 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8518 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8519 else
8520 expr_list = loc[0];
8522 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
8523 (int) initialized);
8525 else if (n_var_parts)
8527 rtx parallel;
8529 for (i = 0; i < n_var_parts; i++)
8530 loc[i]
8531 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8533 parallel = gen_rtx_PARALLEL (VOIDmode,
8534 gen_rtvec_v (n_var_parts, loc));
8535 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8536 parallel, (int) initialized);
8539 if (where != EMIT_NOTE_BEFORE_INSN)
8541 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8542 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8543 NOTE_DURING_CALL_P (note) = true;
8545 else
8547 /* Make sure that the call-related notes come first. */
8548 while (NEXT_INSN (insn)
8549 && NOTE_P (insn)
8550 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8551 && NOTE_DURING_CALL_P (insn))
8552 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8553 insn = NEXT_INSN (insn);
8554 if (NOTE_P (insn)
8555 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8556 && NOTE_DURING_CALL_P (insn))
8557 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8558 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8559 else
8560 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8562 NOTE_VAR_LOCATION (note) = note_vl;
8564 set_dv_changed (var->dv, false);
8565 gcc_assert (var->in_changed_variables);
8566 var->in_changed_variables = false;
8567 htab_clear_slot (changed_variables, varp);
8569 /* Continue traversing the hash table. */
8570 return 1;
8573 /* While traversing changed_variables, push onto DATA (a stack of RTX
8574 values) entries that aren't user variables. */
8576 static int
8577 values_to_stack (void **slot, void *data)
8579 vec<rtx, va_stack> *changed_values_stack = (vec<rtx, va_stack> *) data;
8580 variable var = (variable) *slot;
8582 if (var->onepart == ONEPART_VALUE)
8583 changed_values_stack->safe_push (dv_as_value (var->dv));
8584 else if (var->onepart == ONEPART_DEXPR)
8585 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8587 return 1;
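/* Editorial aside (illustrative sketch, not part of the pass):
   callbacks handed to htab_traverse, such as values_to_stack above,
   return nonzero to continue the walk and zero to stop it early.  A
   minimal self-contained model of that contract, with hypothetical
   names, kept disabled:  */
#if 0
#include <stddef.h>

typedef int (*walk_fn) (void **slot, void *data);

/* Call FN on every occupied slot; a zero return stops the walk.  */
static void
walk_slots (void **slots, size_t n, walk_fn fn, void *data)
{
  size_t i;
  for (i = 0; i < n; i++)
    if (slots[i] != NULL && fn (&slots[i], data) == 0)
      break;
}
#endif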
8590 /* Remove from changed_variables the entry whose DV corresponds to
8591 value or debug_expr VAL. */
8592 static void
8593 remove_value_from_changed_variables (rtx val)
8595 decl_or_value dv = dv_from_rtx (val);
8596 void **slot;
8597 variable var;
8599 slot = htab_find_slot_with_hash (changed_variables,
8600 dv, dv_htab_hash (dv), NO_INSERT);
8601 var = (variable) *slot;
8602 var->in_changed_variables = false;
8603 htab_clear_slot (changed_variables, slot);
8606 /* If VAL (a value or debug_expr) has backlinks to variables actively
8607 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8608 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8609 have dependencies of their own to notify. */
8611 static void
8612 notify_dependents_of_changed_value (rtx val, htab_t htab,
8613 vec<rtx, va_stack> *changed_values_stack)
8615 void **slot;
8616 variable var;
8617 loc_exp_dep *led;
8618 decl_or_value dv = dv_from_rtx (val);
8620 slot = htab_find_slot_with_hash (changed_variables,
8621 dv, dv_htab_hash (dv), NO_INSERT);
8622 if (!slot)
8623 slot = htab_find_slot_with_hash (htab,
8624 dv, dv_htab_hash (dv), NO_INSERT);
8625 if (!slot)
8626 slot = htab_find_slot_with_hash (dropped_values,
8627 dv, dv_htab_hash (dv), NO_INSERT);
8628 var = (variable) *slot;
8630 while ((led = VAR_LOC_DEP_LST (var)))
8632 decl_or_value ldv = led->dv;
8633 variable ivar;
8635 /* Deactivate and remove the backlink, as it was "used up". It
8636 makes no sense to attempt to notify the same entity again:
8637 either it will be recomputed and re-register an active
8638 dependency, or it will still have the changed mark. */
8639 if (led->next)
8640 led->next->pprev = led->pprev;
8641 if (led->pprev)
8642 *led->pprev = led->next;
8643 led->next = NULL;
8644 led->pprev = NULL;
8646 if (dv_changed_p (ldv))
8647 continue;
8649 switch (dv_onepart_p (ldv))
8651 case ONEPART_VALUE:
8652 case ONEPART_DEXPR:
8653 set_dv_changed (ldv, true);
8654 changed_values_stack->safe_push (dv_as_rtx (ldv));
8655 break;
8657 case ONEPART_VDECL:
8658 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8659 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8660 variable_was_changed (ivar, NULL);
8661 break;
8663 case NOT_ONEPART:
8664 pool_free (loc_exp_dep_pool, led);
8665 ivar = (variable) htab_find_with_hash (htab, ldv, dv_htab_hash (ldv));
8666 if (ivar)
8668 int i = ivar->n_var_parts;
8669 while (i--)
8671 rtx loc = ivar->var_part[i].cur_loc;
8673 if (loc && GET_CODE (loc) == MEM
8674 && XEXP (loc, 0) == val)
8676 variable_was_changed (ivar, NULL);
8677 break;
8681 break;
8683 default:
8684 gcc_unreachable ();
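/* Editorial aside (illustrative sketch, not part of the pass): the
   unlink above is O(1) because each backlink stores PPREV, the address
   of the pointer that points at it (the list head or the previous
   node's NEXT field).  A self-contained model of the idiom, with
   hypothetical names, kept disabled:  */
#if 0
struct node
{
  struct node *next;
  struct node **pprev;	/* Address of the pointer pointing at us.  */
};

/* Detach N from whatever list it is on, without knowing the head.  */
static void
unlink_node (struct node *n)
{
  if (n->next)
    n->next->pprev = n->pprev;
  if (n->pprev)
    *n->pprev = n->next;
  n->next = NULL;
  n->pprev = NULL;
}
#endif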
8689 /* Take out of changed_variables any entries that don't refer to user
8690 variables. Back-propagate change notifications from values and
8691 debug_exprs to their active dependencies in HTAB or in
8692 CHANGED_VARIABLES. */
8694 static void
8695 process_changed_values (htab_t htab)
8697 int i, n;
8698 rtx val;
8699 vec<rtx, va_stack> changed_values_stack;
8701 vec_stack_alloc (rtx, changed_values_stack, 20);
8703 /* Move values from changed_variables to changed_values_stack. */
8704 htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
8706 /* Back-propagate change notifications in values while popping
8707 them from the stack. */
8708 for (n = i = changed_values_stack.length ();
8709 i > 0; i = changed_values_stack.length ())
8711 val = changed_values_stack.pop ();
8712 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8714 /* This condition will hold when visiting each of the entries
8715 originally in changed_variables. We can't remove them
8716 earlier because this could drop the backlinks before we got a
8717 chance to use them. */
8718 if (i == n)
8720 remove_value_from_changed_variables (val);
8721 n--;
8725 changed_values_stack.release ();
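/* Editorial aside (illustrative sketch, not part of the pass): the
   loop above drains an explicit stack in which each pop may push
   further work, so change notifications propagate transitively without
   recursion.  A self-contained model with hypothetical names, kept
   disabled:  */
#if 0
#define WORKLIST_MAX 64

struct worklist
{
  int items[WORKLIST_MAX];
  int len;
};

static void
worklist_push (struct worklist *w, int item)
{
  if (w->len < WORKLIST_MAX)
    w->items[w->len++] = item;
}

/* Stand-in for notifying ITEM's dependents: visiting may enqueue
   follow-on work onto the very stack being drained.  */
static void
worklist_visit (struct worklist *w, int item)
{
  if (item > 0)
    worklist_push (w, item - 1);
}

static void
worklist_drain (struct worklist *w)
{
  while (w->len > 0)
    worklist_visit (w, w->items[--w->len]);
}
#endif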
8728 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
8729 CHANGED_VARIABLES and delete the chain. WHERE specifies whether
8730 the notes shall be emitted before or after instruction INSN. */
8732 static void
8733 emit_notes_for_changes (rtx insn, enum emit_note_where where,
8734 shared_hash vars)
8736 emit_note_data data;
8737 htab_t htab = shared_hash_htab (vars);
8739 if (!htab_elements (changed_variables))
8740 return;
8742 if (MAY_HAVE_DEBUG_INSNS)
8743 process_changed_values (htab);
8745 data.insn = insn;
8746 data.where = where;
8747 data.vars = htab;
8749 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
8752 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
8753 same variable in hash table DATA or is not there at all. */
8755 static int
8756 emit_notes_for_differences_1 (void **slot, void *data)
8758 htab_t new_vars = (htab_t) data;
8759 variable old_var, new_var;
8761 old_var = (variable) *slot;
8762 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
8763 dv_htab_hash (old_var->dv));
8765 if (!new_var)
8767 /* Variable has disappeared. */
8768 variable empty_var = NULL;
8770 if (old_var->onepart == ONEPART_VALUE
8771 || old_var->onepart == ONEPART_DEXPR)
8773 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
8774 if (empty_var)
8776 gcc_checking_assert (!empty_var->in_changed_variables);
8777 if (!VAR_LOC_1PAUX (old_var))
8779 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
8780 VAR_LOC_1PAUX (empty_var) = NULL;
8782 else
8783 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
8787 if (!empty_var)
8789 empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
8790 empty_var->dv = old_var->dv;
8791 empty_var->refcount = 0;
8792 empty_var->n_var_parts = 0;
8793 empty_var->onepart = old_var->onepart;
8794 empty_var->in_changed_variables = false;
8797 if (empty_var->onepart)
8799 /* Propagate the auxiliary data to (ultimately)
8800 changed_variables. */
8801 empty_var->var_part[0].loc_chain = NULL;
8802 empty_var->var_part[0].cur_loc = NULL;
8803 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
8804 VAR_LOC_1PAUX (old_var) = NULL;
8806 variable_was_changed (empty_var, NULL);
8807 /* Continue traversing the hash table. */
8808 return 1;
8810 /* Update cur_loc and one-part auxiliary data, before new_var goes
8811 through variable_was_changed. */
8812 if (old_var != new_var && new_var->onepart)
8814 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
8815 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
8816 VAR_LOC_1PAUX (old_var) = NULL;
8817 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
8819 if (variable_different_p (old_var, new_var))
8820 variable_was_changed (new_var, NULL);
8822 /* Continue traversing the hash table. */
8823 return 1;
8826 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8827 table DATA. */
8829 static int
8830 emit_notes_for_differences_2 (void **slot, void *data)
8832 htab_t old_vars = (htab_t) data;
8833 variable old_var, new_var;
8835 new_var = (variable) *slot;
8836 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8837 dv_htab_hash (new_var->dv));
8838 if (!old_var)
8840 int i;
8841 for (i = 0; i < new_var->n_var_parts; i++)
8842 new_var->var_part[i].cur_loc = NULL;
8843 variable_was_changed (new_var, NULL);
8846 /* Continue traversing the hash table. */
8847 return 1;
8850 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8851 NEW_SET. */
8853 static void
8854 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8855 dataflow_set *new_set)
8857 htab_traverse (shared_hash_htab (old_set->vars),
8858 emit_notes_for_differences_1,
8859 shared_hash_htab (new_set->vars));
8860 htab_traverse (shared_hash_htab (new_set->vars),
8861 emit_notes_for_differences_2,
8862 shared_hash_htab (old_set->vars));
8863 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
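/* Editorial aside (illustrative sketch, not part of the pass): the
   pair of traversals above diffs two keyed tables -- old against new
   catches entries that changed or vanished, new against old catches
   entries that appeared.  A self-contained model over flat arrays,
   with hypothetical names, kept disabled:  */
#if 0
#include <stddef.h>
#include <stdio.h>

struct entry { int key; int value; };

static struct entry *
find_entry (struct entry *tab, size_t n, int key)
{
  size_t i;
  for (i = 0; i < n; i++)
    if (tab[i].key == key)
      return &tab[i];
  return NULL;
}

static void
diff_tables (struct entry *oldt, size_t n_old,
	     struct entry *newt, size_t n_new)
{
  size_t i;
  /* Pass 1 (cf. emit_notes_for_differences_1): changed or vanished.  */
  for (i = 0; i < n_old; i++)
    {
      struct entry *e = find_entry (newt, n_new, oldt[i].key);
      if (e == NULL || e->value != oldt[i].value)
	printf ("changed: key %d\n", oldt[i].key);
    }
  /* Pass 2 (cf. emit_notes_for_differences_2): newly appeared.  */
  for (i = 0; i < n_new; i++)
    if (find_entry (oldt, n_old, newt[i].key) == NULL)
      printf ("changed: key %d\n", newt[i].key);
}
#endif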
8866 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8868 static rtx
8869 next_non_note_insn_var_location (rtx insn)
8871 while (insn)
8873 insn = NEXT_INSN (insn);
8874 if (insn == 0
8875 || !NOTE_P (insn)
8876 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8877 break;
8880 return insn;
8883 /* Emit the notes for changes of location parts in the basic block BB. */
8885 static void
8886 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8888 unsigned int i;
8889 micro_operation *mo;
8891 dataflow_set_clear (set);
8892 dataflow_set_copy (set, &VTI (bb)->in);
8894 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
8896 rtx insn = mo->insn;
8897 rtx next_insn = next_non_note_insn_var_location (insn);
8899 switch (mo->type)
8901 case MO_CALL:
8902 dataflow_set_clear_at_call (set);
8903 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8905 rtx arguments = mo->u.loc, *p = &arguments, note;
8906 while (*p)
8908 XEXP (XEXP (*p, 0), 1)
8909 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8910 shared_hash_htab (set->vars));
8911 /* If expansion is successful, keep it in the list. */
8912 if (XEXP (XEXP (*p, 0), 1))
8913 p = &XEXP (*p, 1);
8914 /* Otherwise, if the following item is the data_value for it,
8915 drop it too. */
8916 else if (XEXP (*p, 1)
8917 && REG_P (XEXP (XEXP (*p, 0), 0))
8918 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8919 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8921 && REGNO (XEXP (XEXP (*p, 0), 0))
8922 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8923 0), 0)))
8924 *p = XEXP (XEXP (*p, 1), 1);
8925 /* Just drop this item. */
8926 else
8927 *p = XEXP (*p, 1);
8929 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8930 NOTE_VAR_LOCATION (note) = arguments;
8932 break;
8934 case MO_USE:
8936 rtx loc = mo->u.loc;
8938 if (REG_P (loc))
8939 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8940 else
8941 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8943 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8945 break;
8947 case MO_VAL_LOC:
8949 rtx loc = mo->u.loc;
8950 rtx val, vloc;
8951 tree var;
8953 if (GET_CODE (loc) == CONCAT)
8955 val = XEXP (loc, 0);
8956 vloc = XEXP (loc, 1);
8958 else
8960 val = NULL_RTX;
8961 vloc = loc;
8964 var = PAT_VAR_LOCATION_DECL (vloc);
8966 clobber_variable_part (set, NULL_RTX,
8967 dv_from_decl (var), 0, NULL_RTX);
8968 if (val)
8970 if (VAL_NEEDS_RESOLUTION (loc))
8971 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8972 set_variable_part (set, val, dv_from_decl (var), 0,
8973 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8974 INSERT);
8976 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8977 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8978 dv_from_decl (var), 0,
8979 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8980 INSERT);
8982 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8984 break;
8986 case MO_VAL_USE:
8988 rtx loc = mo->u.loc;
8989 rtx val, vloc, uloc;
8991 vloc = uloc = XEXP (loc, 1);
8992 val = XEXP (loc, 0);
8994 if (GET_CODE (val) == CONCAT)
8996 uloc = XEXP (val, 1);
8997 val = XEXP (val, 0);
9000 if (VAL_NEEDS_RESOLUTION (loc))
9001 val_resolve (set, val, vloc, insn);
9002 else
9003 val_store (set, val, uloc, insn, false);
9005 if (VAL_HOLDS_TRACK_EXPR (loc))
9007 if (GET_CODE (uloc) == REG)
9008 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9009 NULL);
9010 else if (GET_CODE (uloc) == MEM)
9011 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9012 NULL);
9015 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9017 break;
9019 case MO_VAL_SET:
9021 rtx loc = mo->u.loc;
9022 rtx val, vloc, uloc;
9023 rtx dstv, srcv;
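/* LOC is (concat VAL ULOC): the VALUE being set and the expression
   that sets it (a SET, or a bare location); VAL may itself be a
   CONCAT carrying the tracked destination.  */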
9025 vloc = loc;
9026 uloc = XEXP (vloc, 1);
9027 val = XEXP (vloc, 0);
9028 vloc = uloc;
9030 if (GET_CODE (uloc) == SET)
9032 dstv = SET_DEST (uloc);
9033 srcv = SET_SRC (uloc);
9035 else
9037 dstv = uloc;
9038 srcv = NULL;
9041 if (GET_CODE (val) == CONCAT)
9043 dstv = vloc = XEXP (val, 1);
9044 val = XEXP (val, 0);
9047 if (GET_CODE (vloc) == SET)
9049 srcv = SET_SRC (vloc);
9051 gcc_assert (val != srcv);
9052 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9054 dstv = vloc = SET_DEST (vloc);
9056 if (VAL_NEEDS_RESOLUTION (loc))
9057 val_resolve (set, val, srcv, insn);
9059 else if (VAL_NEEDS_RESOLUTION (loc))
9061 gcc_assert (GET_CODE (uloc) == SET
9062 && GET_CODE (SET_SRC (uloc)) == REG);
9063 val_resolve (set, val, SET_SRC (uloc), insn);
9066 if (VAL_HOLDS_TRACK_EXPR (loc))
9068 if (VAL_EXPR_IS_CLOBBERED (loc))
9070 if (REG_P (uloc))
9071 var_reg_delete (set, uloc, true);
9072 else if (MEM_P (uloc))
9074 gcc_assert (MEM_P (dstv));
9075 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9076 var_mem_delete (set, dstv, true);
9079 else
9081 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9082 rtx src = NULL, dst = uloc;
9083 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9085 if (GET_CODE (uloc) == SET)
9087 src = SET_SRC (uloc);
9088 dst = SET_DEST (uloc);
9091 if (copied_p)
9093 status = find_src_status (set, src);
9095 src = find_src_set_src (set, src);
9098 if (REG_P (dst))
9099 var_reg_delete_and_set (set, dst, !copied_p,
9100 status, srcv);
9101 else if (MEM_P (dst))
9103 gcc_assert (MEM_P (dstv));
9104 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9105 var_mem_delete_and_set (set, dstv, !copied_p,
9106 status, srcv);
9110 else if (REG_P (uloc))
9111 var_regno_delete (set, REGNO (uloc));
9112 else if (MEM_P (uloc))
9113 clobber_overlapping_mems (set, uloc);
9115 val_store (set, val, dstv, insn, true);
9117 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9118 set->vars);
9120 break;
9122 case MO_SET:
9124 rtx loc = mo->u.loc;
9125 rtx set_src = NULL;
9127 if (GET_CODE (loc) == SET)
9129 set_src = SET_SRC (loc);
9130 loc = SET_DEST (loc);
9133 if (REG_P (loc))
9134 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9135 set_src);
9136 else
9137 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9138 set_src);
9140 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9141 set->vars);
9143 break;
9145 case MO_COPY:
9147 rtx loc = mo->u.loc;
9148 enum var_init_status src_status;
9149 rtx set_src = NULL;
9151 if (GET_CODE (loc) == SET)
9153 set_src = SET_SRC (loc);
9154 loc = SET_DEST (loc);
9157 src_status = find_src_status (set, set_src);
9158 set_src = find_src_set_src (set, set_src);
9160 if (REG_P (loc))
9161 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9162 else
9163 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9165 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9166 set->vars);
9168 break;
9170 case MO_USE_NO_VAR:
9172 rtx loc = mo->u.loc;
9174 if (REG_P (loc))
9175 var_reg_delete (set, loc, false);
9176 else
9177 var_mem_delete (set, loc, false);
9179 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9181 break;
9183 case MO_CLOBBER:
9185 rtx loc = mo->u.loc;
9187 if (REG_P (loc))
9188 var_reg_delete (set, loc, true);
9189 else
9190 var_mem_delete (set, loc, true);
9192 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9193 set->vars);
9195 break;
9197 case MO_ADJUST:
9198 set->stack_adjust += mo->u.adjust;
9199 break;
9204 /* Emit notes for the whole function. */
9206 static void
9207 vt_emit_notes (void)
9209 basic_block bb;
9210 dataflow_set cur;
9212 gcc_assert (!htab_elements (changed_variables));
9214 /* Free memory occupied by the out hash tables, as they aren't used
9215 anymore. */
9216 FOR_EACH_BB (bb)
9217 dataflow_set_clear (&VTI (bb)->out);
9219 /* Enable emitting notes by functions (mainly by set_variable_part and
9220 delete_variable_part). */
9221 emit_notes = true;
9223 if (MAY_HAVE_DEBUG_INSNS)
9225 dropped_values = htab_create (cselib_get_next_uid () * 2,
9226 variable_htab_hash, variable_htab_eq,
9227 variable_htab_free);
9228 loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
9229 sizeof (loc_exp_dep), 64);
9232 dataflow_set_init (&cur);
9234 FOR_EACH_BB (bb)
9236 /* Emit the notes for changes of variable locations between two
9237 consecutive basic blocks. */
9238 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9240 /* Emit the notes for the changes in the basic block itself. */
9241 emit_notes_in_bb (bb, &cur);
9243 /* Free memory occupied by the in hash table; we won't need it
9244 again. */
9245 dataflow_set_clear (&VTI (bb)->in);
9247 #ifdef ENABLE_CHECKING
9248 htab_traverse (shared_hash_htab (cur.vars),
9249 emit_notes_for_differences_1,
9250 shared_hash_htab (empty_shared_hash));
9251 #endif
9252 dataflow_set_destroy (&cur);
9254 if (MAY_HAVE_DEBUG_INSNS)
9255 htab_delete (dropped_values);
9257 emit_notes = false;
9260 /* If there is a declaration and offset associated with register/memory RTL,
9261 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9263 static bool
9264 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9266 if (REG_P (rtl))
9268 if (REG_ATTRS (rtl))
9270 *declp = REG_EXPR (rtl);
9271 *offsetp = REG_OFFSET (rtl);
9272 return true;
9275 else if (MEM_P (rtl))
9277 if (MEM_ATTRS (rtl))
9279 *declp = MEM_EXPR (rtl);
9280 *offsetp = INT_MEM_OFFSET (rtl);
9281 return true;
9284 return false;
9287 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9288 of VAL. */
9290 static void
9291 record_entry_value (cselib_val *val, rtx rtl)
9293 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9295 ENTRY_VALUE_EXP (ev) = rtl;
9297 cselib_add_permanent_equiv (val, ev, get_insns ());
9300 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
9302 static void
9303 vt_add_function_parameter (tree parm)
9305 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9306 rtx incoming = DECL_INCOMING_RTL (parm);
9307 tree decl;
9308 enum machine_mode mode;
9309 HOST_WIDE_INT offset;
9310 dataflow_set *out;
9311 decl_or_value dv;
9313 if (TREE_CODE (parm) != PARM_DECL)
9314 return;
9316 if (!decl_rtl || !incoming)
9317 return;
9319 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9320 return;
9322 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9323 rewrite the incoming location of parameters passed on the stack
9324 into MEMs based on the argument pointer, so that incoming doesn't
9325 depend on a pseudo. */
9326 if (MEM_P (incoming)
9327 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9328 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9329 && XEXP (XEXP (incoming, 0), 0)
9330 == crtl->args.internal_arg_pointer
9331 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9333 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9334 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9335 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9336 incoming
9337 = replace_equiv_address_nv (incoming,
9338 plus_constant (Pmode,
9339 arg_pointer_rtx, off));
9342 #ifdef HAVE_window_save
9343 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9344 If the target machine has an explicit window save instruction, the
9345 actual entry value is the corresponding OUTGOING_REGNO instead. */
9346 if (REG_P (incoming)
9347 && HARD_REGISTER_P (incoming)
9348 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9350 parm_reg_t p;
9351 p.incoming = incoming;
9352 incoming
9353 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9354 OUTGOING_REGNO (REGNO (incoming)), 0);
9355 p.outgoing = incoming;
9356 vec_safe_push (windowed_parm_regs, p);
9358 else if (MEM_P (incoming)
9359 && REG_P (XEXP (incoming, 0))
9360 && HARD_REGISTER_P (XEXP (incoming, 0)))
9362 rtx reg = XEXP (incoming, 0);
9363 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9365 parm_reg_t p;
9366 p.incoming = reg;
9367 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9368 p.outgoing = reg;
9369 vec_safe_push (windowed_parm_regs, p);
9370 incoming = replace_equiv_address_nv (incoming, reg);
9373 #endif
9375 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9377 if (REG_P (incoming) || MEM_P (incoming))
9379 /* This means argument is passed by invisible reference. */
9380 offset = 0;
9381 decl = parm;
9382 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
9384 else
9386 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9387 return;
9388 offset += byte_lowpart_offset (GET_MODE (incoming),
9389 GET_MODE (decl_rtl));
9393 if (!decl)
9394 return;
9396 if (parm != decl)
9398 /* If that DECL_RTL wasn't a pseudo that got spilled to
9399 memory, bail out. Otherwise, the spill slot sharing code
9400 will force the memory to reference spill_slot_decl (%sfp),
9401 so we don't match above. That's ok, the pseudo must have
9402 referenced the entire parameter, so just reset OFFSET. */
9403 if (decl != get_spill_slot_decl (false))
9404 return;
9405 offset = 0;
9408 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9409 return;
9411 out = &VTI (ENTRY_BLOCK_PTR)->out;
9413 dv = dv_from_decl (parm);
9415 if (target_for_debug_bind (parm)
9416 /* We can't deal with these right now, because this kind of
9417 variable is single-part. ??? We could handle parallels
9418 that describe multiple locations for the same single
9419 value, but ATM we don't. */
9420 && GET_CODE (incoming) != PARALLEL)
9422 cselib_val *val;
9423 rtx lowpart;
9425 /* ??? We shouldn't ever hit this, but it may happen because
9426 arguments passed by invisible reference aren't dealt with
9427 above: incoming-rtl will have Pmode rather than the
9428 expected mode for the type. */
9429 if (offset)
9430 return;
9432 lowpart = var_lowpart (mode, incoming);
9433 if (!lowpart)
9434 return;
9436 val = cselib_lookup_from_insn (lowpart, mode, true,
9437 VOIDmode, get_insns ());
9439 /* ??? Float-typed values in memory are not handled by
9440 cselib. */
9441 if (val)
9443 preserve_value (val);
9444 set_variable_part (out, val->val_rtx, dv, offset,
9445 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9446 dv = dv_from_value (val->val_rtx);
9449 if (MEM_P (incoming))
9451 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9452 VOIDmode, get_insns ());
9453 if (val)
9455 preserve_value (val);
9456 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9461 if (REG_P (incoming))
9463 incoming = var_lowpart (mode, incoming);
9464 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9465 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9466 incoming);
9467 set_variable_part (out, incoming, dv, offset,
9468 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9469 if (dv_is_value_p (dv))
9471 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9472 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9473 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9475 enum machine_mode indmode
9476 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9477 rtx mem = gen_rtx_MEM (indmode, incoming);
9478 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9479 VOIDmode,
9480 get_insns ());
9481 if (val)
9483 preserve_value (val);
9484 record_entry_value (val, mem);
9485 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9486 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9491 else if (MEM_P (incoming))
9493 incoming = var_lowpart (mode, incoming);
9494 set_variable_part (out, incoming, dv, offset,
9495 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9499 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
9501 static void
9502 vt_add_function_parameters (void)
9504 tree parm;
9506 for (parm = DECL_ARGUMENTS (current_function_decl);
9507 parm; parm = DECL_CHAIN (parm))
9508 vt_add_function_parameter (parm);
9510 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9512 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9514 if (TREE_CODE (vexpr) == INDIRECT_REF)
9515 vexpr = TREE_OPERAND (vexpr, 0);
9517 if (TREE_CODE (vexpr) == PARM_DECL
9518 && DECL_ARTIFICIAL (vexpr)
9519 && !DECL_IGNORED_P (vexpr)
9520 && DECL_NAMELESS (vexpr))
9521 vt_add_function_parameter (vexpr);
9525 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
9527 static bool
9528 fp_setter (rtx insn)
9530 rtx pat = PATTERN (insn);
9531 if (RTX_FRAME_RELATED_P (insn))
9533 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
9534 if (expr)
9535 pat = XEXP (expr, 0);
9537 if (GET_CODE (pat) == SET)
9539 if (SET_DEST (pat) != hard_frame_pointer_rtx)
9540 return false;
9542 else if (GET_CODE (pat) == PARALLEL)
9544 int i;
9545 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9546 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
9547 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
9548 break;
9549 if (i < 0)
9550 return false;
9552 else
9553 return false;
9554 if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
9555 return false;
9556 return true;
9559 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9560 ensure it isn't flushed during cselib_reset_table.
9561 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
9562 has been eliminated. */
9564 static void
9565 vt_init_cfa_base (void)
9567 cselib_val *val;
9569 #ifdef FRAME_POINTER_CFA_OFFSET
9570 cfa_base_rtx = frame_pointer_rtx;
9571 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9572 #else
9573 cfa_base_rtx = arg_pointer_rtx;
9574 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9575 #endif
9576 if (cfa_base_rtx == hard_frame_pointer_rtx
9577 || !fixed_regs[REGNO (cfa_base_rtx)])
9579 cfa_base_rtx = NULL_RTX;
9580 return;
9582 if (!MAY_HAVE_DEBUG_INSNS)
9583 return;
9585 /* Tell alias analysis that cfa_base_rtx should share
9586 find_base_term value with stack pointer or hard frame pointer. */
9587 if (!frame_pointer_needed)
9588 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9589 else if (!crtl->stack_realign_tried)
9590 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9592 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9593 VOIDmode, get_insns ());
9594 preserve_value (val);
9595 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9598 /* Allocate and initialize the data structures for variable tracking
9599 and parse the RTL to get the micro operations. */
9601 static bool
9602 vt_initialize (void)
9604 basic_block bb;
9605 HOST_WIDE_INT fp_cfa_offset = -1;
9607 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9609 attrs_pool = create_alloc_pool ("attrs_def pool",
9610 sizeof (struct attrs_def), 1024);
9611 var_pool = create_alloc_pool ("variable_def pool",
9612 sizeof (struct variable_def)
9613 + (MAX_VAR_PARTS - 1)
9614 * sizeof (((variable)NULL)->var_part[0]), 64);
9615 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
9616 sizeof (struct location_chain_def),
9617 1024);
9618 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
9619 sizeof (struct shared_hash_def), 256);
9620 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
9621 empty_shared_hash->refcount = 1;
9622 empty_shared_hash->htab
9623 = htab_create (1, variable_htab_hash, variable_htab_eq,
9624 variable_htab_free);
9625 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
9626 variable_htab_free);
9628 /* Init the IN and OUT sets. */
9629 FOR_ALL_BB (bb)
9631 VTI (bb)->visited = false;
9632 VTI (bb)->flooded = false;
9633 dataflow_set_init (&VTI (bb)->in);
9634 dataflow_set_init (&VTI (bb)->out);
9635 VTI (bb)->permp = NULL;
9638 if (MAY_HAVE_DEBUG_INSNS)
9640 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9641 scratch_regs = BITMAP_ALLOC (NULL);
9642 valvar_pool = create_alloc_pool ("small variable_def pool",
9643 sizeof (struct variable_def), 256);
9644 preserved_values.create (256);
9646 else
9648 scratch_regs = NULL;
9649 valvar_pool = NULL;
9652 if (MAY_HAVE_DEBUG_INSNS)
9654 rtx reg, expr;
9655 int ofst;
9656 cselib_val *val;
9658 #ifdef FRAME_POINTER_CFA_OFFSET
9659 reg = frame_pointer_rtx;
9660 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9661 #else
9662 reg = arg_pointer_rtx;
9663 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9664 #endif
9666 ofst -= INCOMING_FRAME_SP_OFFSET;
9668 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9669 VOIDmode, get_insns ());
9670 preserve_value (val);
9671 cselib_preserve_cfa_base_value (val, REGNO (reg));
9672 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9673 stack_pointer_rtx, -ofst);
9674 cselib_add_permanent_equiv (val, expr, get_insns ());
9676 if (ofst)
9678 val = cselib_lookup_from_insn (stack_pointer_rtx,
9679 GET_MODE (stack_pointer_rtx), 1,
9680 VOIDmode, get_insns ());
9681 preserve_value (val);
9682 expr = plus_constant (GET_MODE (reg), reg, ofst);
9683 cselib_add_permanent_equiv (val, expr, get_insns ());
9687 /* In order to factor out the adjustments made to the stack pointer or to
9688 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9689 instead of individual location lists, we're going to rewrite MEMs based
9690 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9691 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
9692 resp. arg_pointer_rtx. We can do this either when there is no frame
9693 pointer in the function and stack adjustments are consistent for all
9694 basic blocks or when there is a frame pointer and no stack realignment.
9695 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
9696 has been eliminated. */
9697 if (!frame_pointer_needed)
9699 rtx reg, elim;
9701 if (!vt_stack_adjustments ())
9702 return false;
9704 #ifdef FRAME_POINTER_CFA_OFFSET
9705 reg = frame_pointer_rtx;
9706 #else
9707 reg = arg_pointer_rtx;
9708 #endif
9709 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9710 if (elim != reg)
9712 if (GET_CODE (elim) == PLUS)
9713 elim = XEXP (elim, 0);
9714 if (elim == stack_pointer_rtx)
9715 vt_init_cfa_base ();
9718 else if (!crtl->stack_realign_tried)
9720 rtx reg, elim;
9722 #ifdef FRAME_POINTER_CFA_OFFSET
9723 reg = frame_pointer_rtx;
9724 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9725 #else
9726 reg = arg_pointer_rtx;
9727 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
9728 #endif
9729 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9730 if (elim != reg)
9732 if (GET_CODE (elim) == PLUS)
9734 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
9735 elim = XEXP (elim, 0);
9737 if (elim != hard_frame_pointer_rtx)
9738 fp_cfa_offset = -1;
9740 else
9741 fp_cfa_offset = -1;
9744 /* If the stack is realigned and a DRAP register is used, we're going to
9745 rewrite MEMs based on it representing incoming locations of parameters
9746 passed on the stack into MEMs based on the argument pointer. Although
9747 we aren't going to rewrite other MEMs, we still need to initialize the
9748 virtual CFA pointer in order to ensure that the argument pointer will
9749 be seen as a constant throughout the function.
9751 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
9752 else if (stack_realign_drap)
9754 rtx reg, elim;
9756 #ifdef FRAME_POINTER_CFA_OFFSET
9757 reg = frame_pointer_rtx;
9758 #else
9759 reg = arg_pointer_rtx;
9760 #endif
9761 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9762 if (elim != reg)
9764 if (GET_CODE (elim) == PLUS)
9765 elim = XEXP (elim, 0);
9766 if (elim == hard_frame_pointer_rtx)
9767 vt_init_cfa_base ();
9771 hard_frame_pointer_adjustment = -1;
9773 vt_add_function_parameters ();
9775 FOR_EACH_BB (bb)
9777 rtx insn;
9778 HOST_WIDE_INT pre, post = 0;
9779 basic_block first_bb, last_bb;
9781 if (MAY_HAVE_DEBUG_INSNS)
9783 cselib_record_sets_hook = add_with_sets;
9784 if (dump_file && (dump_flags & TDF_DETAILS))
9785 fprintf (dump_file, "first value: %i\n",
9786 cselib_get_next_uid ());
9789 first_bb = bb;
9790 for (;;)
9792 edge e;
9793 if (bb->next_bb == EXIT_BLOCK_PTR
9794 || ! single_pred_p (bb->next_bb))
9795 break;
9796 e = find_edge (bb, bb->next_bb);
9797 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9798 break;
9799 bb = bb->next_bb;
9801 last_bb = bb;
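/* [FIRST_BB, LAST_BB] now spans a maximal chain of blocks linked only
   by fallthru edges into single predecessors, scanned as one unit so
   the cselib table can be carried across the whole chain before it is
   reset below.  */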
9803 /* Add the micro-operations to the vector. */
9804 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
9806 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9807 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9808 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9809 insn = NEXT_INSN (insn))
9811 if (INSN_P (insn))
9813 if (!frame_pointer_needed)
9815 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9816 if (pre)
9818 micro_operation mo;
9819 mo.type = MO_ADJUST;
9820 mo.u.adjust = pre;
9821 mo.insn = insn;
9822 if (dump_file && (dump_flags & TDF_DETAILS))
9823 log_op_type (PATTERN (insn), bb, insn,
9824 MO_ADJUST, dump_file);
9825 VTI (bb)->mos.safe_push (mo);
9826 VTI (bb)->out.stack_adjust += pre;
9830 cselib_hook_called = false;
9831 adjust_insn (bb, insn);
9832 if (MAY_HAVE_DEBUG_INSNS)
9834 if (CALL_P (insn))
9835 prepare_call_arguments (bb, insn);
9836 cselib_process_insn (insn);
9837 if (dump_file && (dump_flags & TDF_DETAILS))
9839 print_rtl_single (dump_file, insn);
9840 dump_cselib_table (dump_file);
9843 if (!cselib_hook_called)
9844 add_with_sets (insn, 0, 0);
9845 cancel_changes (0);
9847 if (!frame_pointer_needed && post)
9849 micro_operation mo;
9850 mo.type = MO_ADJUST;
9851 mo.u.adjust = post;
9852 mo.insn = insn;
9853 if (dump_file && (dump_flags & TDF_DETAILS))
9854 log_op_type (PATTERN (insn), bb, insn,
9855 MO_ADJUST, dump_file);
9856 VTI (bb)->mos.safe_push (mo);
9857 VTI (bb)->out.stack_adjust += post;
9860 if (fp_cfa_offset != -1
9861 && hard_frame_pointer_adjustment == -1
9862 && RTX_FRAME_RELATED_P (insn)
9863 && fp_setter (insn))
9865 vt_init_cfa_base ();
9866 hard_frame_pointer_adjustment = fp_cfa_offset;
9867 /* Disassociate sp from fp now. */
9868 if (MAY_HAVE_DEBUG_INSNS)
9870 cselib_val *v;
9871 cselib_invalidate_rtx (stack_pointer_rtx);
9872 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
9873 VOIDmode);
9874 if (v && !cselib_preserved_value_p (v))
9876 cselib_set_value_sp_based (v);
9877 preserve_value (v);
9883 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9886 bb = last_bb;
9888 if (MAY_HAVE_DEBUG_INSNS)
9890 cselib_preserve_only_values ();
9891 cselib_reset_table (cselib_get_next_uid ());
9892 cselib_record_sets_hook = NULL;
9896 hard_frame_pointer_adjustment = -1;
9897 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9898 cfa_base_rtx = NULL_RTX;
9899 return true;
9902 /* This is *not* reset after each function. It gives each
9903 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
9904 a unique label number. */
9906 static int debug_label_num = 1;
9908 /* Get rid of all debug insns from the insn stream. */
9910 static void
9911 delete_debug_insns (void)
9913 basic_block bb;
9914 rtx insn, next;
9916 if (!MAY_HAVE_DEBUG_INSNS)
9917 return;
9919 FOR_EACH_BB (bb)
9921 FOR_BB_INSNS_SAFE (bb, insn, next)
9922 if (DEBUG_INSN_P (insn))
9924 tree decl = INSN_VAR_LOCATION_DECL (insn);
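/* A debug bind of a named label that never got RTL is turned into
   a NOTE_INSN_DELETED_DEBUG_LABEL so the label name survives for
   the debug info; every other debug insn is simply deleted.  */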
9925 if (TREE_CODE (decl) == LABEL_DECL
9926 && DECL_NAME (decl)
9927 && !DECL_RTL_SET_P (decl))
9929 PUT_CODE (insn, NOTE);
9930 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
9931 NOTE_DELETED_LABEL_NAME (insn)
9932 = IDENTIFIER_POINTER (DECL_NAME (decl));
9933 SET_DECL_RTL (decl, insn);
9934 CODE_LABEL_NUMBER (insn) = debug_label_num++;
9936 else
9937 delete_insn (insn);
9942 /* Run a fast, BB-local-only version of var tracking, to take care of
9943 information that we don't do global analysis on, so that not all
9944 information is lost. If SKIPPED holds, we're skipping the global
9945 pass entirely, so we should try to use information it would have
9946 handled as well. */
9948 static void
9949 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9951 /* ??? Just skip it all for now. */
9952 delete_debug_insns ();
9955 /* Free the data structures needed for variable tracking. */
9957 static void
9958 vt_finalize (void)
9960 basic_block bb;
9962 FOR_EACH_BB (bb)
9964 VTI (bb)->mos.release ();
9967 FOR_ALL_BB (bb)
9969 dataflow_set_destroy (&VTI (bb)->in);
9970 dataflow_set_destroy (&VTI (bb)->out);
9971 if (VTI (bb)->permp)
9973 dataflow_set_destroy (VTI (bb)->permp);
9974 XDELETE (VTI (bb)->permp);
9977 free_aux_for_blocks ();
9978 htab_delete (empty_shared_hash->htab);
9979 htab_delete (changed_variables);
9980 free_alloc_pool (attrs_pool);
9981 free_alloc_pool (var_pool);
9982 free_alloc_pool (loc_chain_pool);
9983 free_alloc_pool (shared_hash_pool);
9985 if (MAY_HAVE_DEBUG_INSNS)
9987 if (loc_exp_dep_pool)
9988 free_alloc_pool (loc_exp_dep_pool);
9989 loc_exp_dep_pool = NULL;
9990 free_alloc_pool (valvar_pool);
9991 preserved_values.release ();
9992 cselib_finish ();
9993 BITMAP_FREE (scratch_regs);
9994 scratch_regs = NULL;
9997 #ifdef HAVE_window_save
9998 vec_free (windowed_parm_regs);
9999 #endif
10001 if (vui_vec)
10002 XDELETEVEC (vui_vec);
10003 vui_vec = NULL;
10004 vui_allocated = 0;
10007 /* The entry point to variable tracking pass. */
10009 static inline unsigned int
10010 variable_tracking_main_1 (void)
10012 bool success;
10014 if (flag_var_tracking_assignments < 0)
10016 delete_debug_insns ();
10017 return 0;
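/* Bail out early on very large, dense CFGs, where the global
   dataflow iteration would be too expensive; only the BB-local
   fallback runs in that case.  */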
10020 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
10022 vt_debug_insns_local (true);
10023 return 0;
10026 mark_dfs_back_edges ();
10027 if (!vt_initialize ())
10029 vt_finalize ();
10030 vt_debug_insns_local (true);
10031 return 0;
10034 success = vt_find_locations ();
10036 if (!success && flag_var_tracking_assignments > 0)
10038 vt_finalize ();
10040 delete_debug_insns ();
10042 /* This is later restored by our caller. */
10043 flag_var_tracking_assignments = 0;
10045 success = vt_initialize ();
10046 gcc_assert (success);
10048 success = vt_find_locations ();
10051 if (!success)
10053 vt_finalize ();
10054 vt_debug_insns_local (false);
10055 return 0;
10058 if (dump_file && (dump_flags & TDF_DETAILS))
10060 dump_dataflow_sets ();
10061 dump_reg_info (dump_file);
10062 dump_flow_info (dump_file, dump_flags);
10065 timevar_push (TV_VAR_TRACKING_EMIT);
10066 vt_emit_notes ();
10067 timevar_pop (TV_VAR_TRACKING_EMIT);
10069 vt_finalize ();
10070 vt_debug_insns_local (false);
10071 return 0;
10074 unsigned int
10075 variable_tracking_main (void)
10077 unsigned int ret;
10078 int save = flag_var_tracking_assignments;
10080 ret = variable_tracking_main_1 ();
10082 flag_var_tracking_assignments = save;
10084 return ret;
10087 static bool
10088 gate_handle_var_tracking (void)
10090 return (flag_var_tracking && !targetm.delay_vartrack);
10095 struct rtl_opt_pass pass_variable_tracking =
10098 RTL_PASS,
10099 "vartrack", /* name */
10100 OPTGROUP_NONE, /* optinfo_flags */
10101 gate_handle_var_tracking, /* gate */
10102 variable_tracking_main, /* execute */
10103 NULL, /* sub */
10104 NULL, /* next */
10105 0, /* static_pass_number */
10106 TV_VAR_TRACKING, /* tv_id */
10107 0, /* properties_required */
10108 0, /* properties_provided */
10109 0, /* properties_destroyed */
10110 0, /* todo_flags_start */
10111 TODO_verify_rtl_sharing /* todo_flags_finish */