1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offsets of variables addressed using the stack pointer),
48 the table of structures describing the locations of parts of the variables,
49 and a linked list of attributes for each physical register.
50 Each such list describes the variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used to
53 efficiently delete the appropriate variable parts when we set or clobber the
54 register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a list is a good data structure here.
58 For example, in the following code the register allocator may assign the
59 same register to variables A and B, and both of them are stored in that
60 register at CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted at appropriate positions in the RTL code. Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long). */
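/* As an illustration (a hypothetical sketch, not text from the original
   sources): for a "long long x" split across two word-sized registers on a
   32-bit target, the location part of the emitted note could conceptually
   look like a PARALLEL of EXPR_LISTs pairing each piece with its byte offset:

     (var_location x (parallel [(expr_list (reg:SI 3) (const_int 0))
                                (expr_list (reg:SI 4) (const_int 4))]))

   whereas a simple "int i" held in one register would just be
   (var_location i (reg:SI 3)).  The register numbers here are made up.  */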
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
97 #include "flags.h"
98 #include "output.h"
99 #include "insn-config.h"
100 #include "reload.h"
101 #include "sbitmap.h"
102 #include "alloc-pool.h"
103 #include "fibheap.h"
104 #include "hashtab.h"
105 #include "regs.h"
106 #include "expr.h"
107 #include "timevar.h"
108 #include "tree-pass.h"
109 #include "cselib.h"
110 #include "target.h"
112 /* Type of micro operation. */
113 enum micro_operation_type
115 MO_USE, /* Use location (REG or MEM). */
116 MO_USE_NO_VAR,/* Use location which is not associated with a variable
117 or the variable is not trackable. */
118 MO_VAL_USE, /* Use location which is associated with a value. */
119 MO_VAL_LOC, /* Use location which appears in a debug insn. */
120 MO_VAL_SET, /* Set location associated with a value. */
121 MO_SET, /* Set location. */
122 MO_COPY, /* Copy the same portion of a variable from one
123 location to another. */
124 MO_CLOBBER, /* Clobber location. */
125 MO_CALL, /* Call insn. */
126 MO_ADJUST /* Adjust stack pointer. */
130 static const char * const ATTRIBUTE_UNUSED
131 micro_operation_type_name[] = {
132 "MO_USE",
133 "MO_USE_NO_VAR",
134 "MO_VAL_USE",
135 "MO_VAL_LOC",
136 "MO_VAL_SET",
137 "MO_SET",
138 "MO_COPY",
139 "MO_CLOBBER",
140 "MO_CALL",
141 "MO_ADJUST"
144 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
145 Notes emitted as AFTER_CALL are to take effect during the call,
146 rather than after the call. */
147 enum emit_note_where
149 EMIT_NOTE_BEFORE_INSN,
150 EMIT_NOTE_AFTER_INSN,
151 EMIT_NOTE_AFTER_CALL_INSN
154 /* Structure holding information about micro operation. */
155 typedef struct micro_operation_def
157 /* Type of micro operation. */
158 enum micro_operation_type type;
160 union {
161 /* Location. For MO_SET and MO_COPY, this is the SET that
162 performs the assignment, if known, otherwise it is the target
163 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
164 CONCAT of the VALUE and the LOC associated with it. For
165 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
166 associated with it. */
167 rtx loc;
169 /* Stack adjustment. */
170 HOST_WIDE_INT adjust;
171 } u;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
177 and MO_CLOBBER. */
178 rtx insn;
179 } micro_operation;
181 /* A declaration of a variable, or an RTL value being handled like a
182 declaration. */
183 typedef void *decl_or_value;
185 /* Structure for passing some other parameters to function
186 emit_note_insn_var_location. */
187 typedef struct emit_note_data_def
189 /* The instruction which the note will be emitted before/after. */
190 rtx insn;
192 /* Where the note will be emitted (before/after insn)? */
193 enum emit_note_where where;
195 /* The variables and values active at this point. */
196 htab_t vars;
197 } emit_note_data;
199 /* Description of the location of one part of a variable. The content of a
200 physical register is described by a chain of these structures.
201 The chains are pretty short (usually 1 or 2 elements) and thus a
202 chain is a good data structure here. */
203 typedef struct attrs_def
205 /* Pointer to next member of the list. */
206 struct attrs_def *next;
208 /* The rtx of register. */
209 rtx loc;
211 /* The declaration corresponding to LOC. */
212 decl_or_value dv;
214 /* Offset from start of DECL. */
215 HOST_WIDE_INT offset;
216 } *attrs;
218 /* Structure holding a refcounted hash table. If refcount > 1,
219 it must first be unshared before being modified. */
220 typedef struct shared_hash_def
222 /* Reference count. */
223 int refcount;
225 /* Actual hash table. */
226 htab_t htab;
227 } *shared_hash;
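/* Illustrative example (not from the original sources): when one dataflow
   set is copied into another (see dataflow_set_copy below), both sets end
   up pointing at the same shared_hash and its refcount becomes 2.  The
   first attempt to modify either set goes through shared_hash_unshare,
   which copies the hash table into a fresh shared_hash with refcount 1 and
   decrements the refcount of the original, so the other set keeps its data
   intact.  */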
229 /* Structure holding the IN or OUT set for a basic block. */
230 typedef struct dataflow_set_def
232 /* Adjustment of stack offset. */
233 HOST_WIDE_INT stack_adjust;
235 /* Attributes for registers (lists of attrs). */
236 attrs regs[FIRST_PSEUDO_REGISTER];
238 /* Variable locations. */
239 shared_hash vars;
241 /* Vars that are currently being traversed. */
242 shared_hash traversed_vars;
243 } dataflow_set;
245 /* The structure (one for each basic block) containing the information
246 needed for variable tracking. */
247 typedef struct variable_tracking_info_def
249 /* Number of micro operations stored in the MOS array. */
250 int n_mos;
252 /* The array of micro operations. */
253 micro_operation *mos;
255 /* The IN and OUT set for dataflow analysis. */
256 dataflow_set in;
257 dataflow_set out;
259 /* The permanent-in dataflow set for this block. This is used to
260 hold values for which we had to compute entry values. ??? This
261 should probably be dynamically allocated, to avoid using more
262 memory in non-debug builds. */
263 dataflow_set *permp;
265 /* Has the block been visited in DFS? */
266 bool visited;
268 /* Has the block been flooded in VTA? */
269 bool flooded;
271 } *variable_tracking_info;
273 /* Structure for chaining the locations. */
274 typedef struct location_chain_def
276 /* Next element in the chain. */
277 struct location_chain_def *next;
279 /* The location (REG, MEM or VALUE). */
280 rtx loc;
282 /* The "value" stored in this location. */
283 rtx set_src;
285 /* Initialized? */
286 enum var_init_status init;
287 } *location_chain;
289 /* Structure describing one part of variable. */
290 typedef struct variable_part_def
292 /* Chain of locations of the part. */
293 location_chain loc_chain;
295 /* Location which was last emitted to location list. */
296 rtx cur_loc;
298 /* The offset in the variable. */
299 HOST_WIDE_INT offset;
300 } variable_part;
302 /* Maximum number of location parts. */
303 #define MAX_VAR_PARTS 16
305 /* Structure describing where the variable is located. */
306 typedef struct variable_def
308 /* The declaration of the variable, or an RTL value being handled
309 like a declaration. */
310 decl_or_value dv;
312 /* Reference count. */
313 int refcount;
315 /* Number of variable parts. */
316 int n_var_parts;
318 /* The variable parts. */
319 variable_part var_part[1];
320 } *variable;
321 typedef const struct variable_def *const_variable;
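/* Note on the var_part[1] trailing member above: variables are allocated
   from the pools declared below, which reserve room for MAX_VAR_PARTS
   var_part entries (var_pool) or for a single entry (valvar_pool, used for
   one-part decl_or_value entries), so var_part effectively acts as a
   variable-length trailing array despite being declared with size 1.  */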
323 /* Structure for chaining backlinks from referenced VALUEs to
324 DVs that are referencing them. */
325 typedef struct value_chain_def
327 /* Next value_chain entry. */
328 struct value_chain_def *next;
330 /* The declaration of the variable, or an RTL value
331 being handled like a declaration, whose var_parts[0].loc_chain
332 references the VALUE owning this value_chain. */
333 decl_or_value dv;
335 /* Reference count. */
336 int refcount;
337 } *value_chain;
338 typedef const struct value_chain_def *const_value_chain;
340 /* Hash function for DECL for VARIABLE_HTAB. */
341 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
343 /* Pointer to the BB's information specific to variable tracking pass. */
344 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
346 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
347 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
349 /* Alloc pool for struct attrs_def. */
350 static alloc_pool attrs_pool;
352 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
353 static alloc_pool var_pool;
355 /* Alloc pool for struct variable_def with a single var_part entry. */
356 static alloc_pool valvar_pool;
358 /* Alloc pool for struct location_chain_def. */
359 static alloc_pool loc_chain_pool;
361 /* Alloc pool for struct shared_hash_def. */
362 static alloc_pool shared_hash_pool;
364 /* Alloc pool for struct value_chain_def. */
365 static alloc_pool value_chain_pool;
367 /* Changed variables, notes will be emitted for them. */
368 static htab_t changed_variables;
370 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
371 static htab_t value_chains;
373 /* Shall notes be emitted? */
374 static bool emit_notes;
376 /* Empty shared hashtable. */
377 static shared_hash empty_shared_hash;
379 /* Scratch register bitmap used by cselib_expand_value_rtx. */
380 static bitmap scratch_regs = NULL;
382 /* Variable used to tell whether cselib_process_insn called our hook. */
383 static bool cselib_hook_called;
385 /* Local function prototypes. */
386 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
387 HOST_WIDE_INT *);
388 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
389 HOST_WIDE_INT *);
390 static void bb_stack_adjust_offset (basic_block);
391 static bool vt_stack_adjustments (void);
392 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
393 static hashval_t variable_htab_hash (const void *);
394 static int variable_htab_eq (const void *, const void *);
395 static void variable_htab_free (void *);
397 static void init_attrs_list_set (attrs *);
398 static void attrs_list_clear (attrs *);
399 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
400 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
401 static void attrs_list_copy (attrs *, attrs);
402 static void attrs_list_union (attrs *, attrs);
404 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
405 enum var_init_status);
406 static int vars_copy_1 (void **, void *);
407 static void vars_copy (htab_t, htab_t);
408 static tree var_debug_decl (tree);
409 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
410 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
411 enum var_init_status, rtx);
412 static void var_reg_delete (dataflow_set *, rtx, bool);
413 static void var_regno_delete (dataflow_set *, int);
414 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
415 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
416 enum var_init_status, rtx);
417 static void var_mem_delete (dataflow_set *, rtx, bool);
419 static void dataflow_set_init (dataflow_set *);
420 static void dataflow_set_clear (dataflow_set *);
421 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
422 static int variable_union_info_cmp_pos (const void *, const void *);
423 static int variable_union (void **, void *);
424 static int variable_canonicalize (void **, void *);
425 static void dataflow_set_union (dataflow_set *, dataflow_set *);
426 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
427 static bool canon_value_cmp (rtx, rtx);
428 static int loc_cmp (rtx, rtx);
429 static bool variable_part_different_p (variable_part *, variable_part *);
430 static bool onepart_variable_different_p (variable, variable);
431 static bool variable_different_p (variable, variable, bool);
432 static int dataflow_set_different_1 (void **, void *);
433 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
434 static void dataflow_set_destroy (dataflow_set *);
436 static bool contains_symbol_ref (rtx);
437 static bool track_expr_p (tree, bool);
438 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
439 static int count_uses (rtx *, void *);
440 static void count_uses_1 (rtx *, void *);
441 static void count_stores (rtx, const_rtx, void *);
442 static int add_uses (rtx *, void *);
443 static void add_uses_1 (rtx *, void *);
444 static void add_stores (rtx, const_rtx, void *);
445 static bool compute_bb_dataflow (basic_block);
446 static void vt_find_locations (void);
448 static void dump_attrs_list (attrs);
449 static int dump_variable_slot (void **, void *);
450 static void dump_variable (variable);
451 static void dump_vars (htab_t);
452 static void dump_dataflow_set (dataflow_set *);
453 static void dump_dataflow_sets (void);
455 static void variable_was_changed (variable, dataflow_set *);
456 static void **set_slot_part (dataflow_set *, rtx, void **,
457 decl_or_value, HOST_WIDE_INT,
458 enum var_init_status, rtx);
459 static void set_variable_part (dataflow_set *, rtx,
460 decl_or_value, HOST_WIDE_INT,
461 enum var_init_status, rtx, enum insert_option);
462 static void **clobber_slot_part (dataflow_set *, rtx,
463 void **, HOST_WIDE_INT, rtx);
464 static void clobber_variable_part (dataflow_set *, rtx,
465 decl_or_value, HOST_WIDE_INT, rtx);
466 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
467 static void delete_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT);
469 static int emit_note_insn_var_location (void **, void *);
470 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
471 static int emit_notes_for_differences_1 (void **, void *);
472 static int emit_notes_for_differences_2 (void **, void *);
473 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
474 static void emit_notes_in_bb (basic_block, dataflow_set *);
475 static void vt_emit_notes (void);
477 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
478 static void vt_add_function_parameters (void);
479 static void vt_initialize (void);
480 static void vt_finalize (void);
482 /* Given a SET, calculate the amount of stack adjustment it contains
483 PRE- and POST-modifying stack pointer.
484 This function is similar to stack_adjust_offset. */
486 static void
487 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
488 HOST_WIDE_INT *post)
490 rtx src = SET_SRC (pattern);
491 rtx dest = SET_DEST (pattern);
492 enum rtx_code code;
494 if (dest == stack_pointer_rtx)
496 /* (set (reg sp) (plus (reg sp) (const_int))) */
497 code = GET_CODE (src);
498 if (! (code == PLUS || code == MINUS)
499 || XEXP (src, 0) != stack_pointer_rtx
500 || !CONST_INT_P (XEXP (src, 1)))
501 return;
503 if (code == MINUS)
504 *post += INTVAL (XEXP (src, 1));
505 else
506 *post -= INTVAL (XEXP (src, 1));
508 else if (MEM_P (dest))
510 /* (set (mem (pre_dec (reg sp))) (foo)) */
511 src = XEXP (dest, 0);
512 code = GET_CODE (src);
514 switch (code)
516 case PRE_MODIFY:
517 case POST_MODIFY:
518 if (XEXP (src, 0) == stack_pointer_rtx)
520 rtx val = XEXP (XEXP (src, 1), 1);
521 /* We handle only adjustments by constant amount. */
522 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
523 CONST_INT_P (val));
525 if (code == PRE_MODIFY)
526 *pre -= INTVAL (val);
527 else
528 *post -= INTVAL (val);
529 break;
531 return;
533 case PRE_DEC:
534 if (XEXP (src, 0) == stack_pointer_rtx)
536 *pre += GET_MODE_SIZE (GET_MODE (dest));
537 break;
539 return;
541 case POST_DEC:
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *post += GET_MODE_SIZE (GET_MODE (dest));
545 break;
547 return;
549 case PRE_INC:
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *pre -= GET_MODE_SIZE (GET_MODE (dest));
553 break;
555 return;
557 case POST_INC:
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *post -= GET_MODE_SIZE (GET_MODE (dest));
561 break;
563 return;
565 default:
566 return;
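/* A worked example (illustrative, not from the original sources): for
   (set (reg sp) (plus (reg sp) (const_int -16))) the PLUS branch above
   performs *post -= -16, i.e. records a post-adjustment of +16, while
   (set (mem:SI (pre_dec (reg sp))) (reg r0)) records *pre += 4 on a target
   where SImode is 4 bytes wide.  The sign convention is that a positive
   adjustment corresponds to the stack pointer being decremented.  */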
571 /* Given an INSN, calculate the amount of stack adjustment it contains
572 PRE- and POST-modifying stack pointer. */
574 static void
575 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
576 HOST_WIDE_INT *post)
578 rtx pattern;
580 *pre = 0;
581 *post = 0;
583 pattern = PATTERN (insn);
584 if (RTX_FRAME_RELATED_P (insn))
586 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
587 if (expr)
588 pattern = XEXP (expr, 0);
591 if (GET_CODE (pattern) == SET)
592 stack_adjust_offset_pre_post (pattern, pre, post);
593 else if (GET_CODE (pattern) == PARALLEL
594 || GET_CODE (pattern) == SEQUENCE)
596 int i;
598 /* There may be stack adjustments inside compound insns. Search
599 for them. */
600 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
601 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
602 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
606 /* Compute stack adjustment in basic block BB. */
608 static void
609 bb_stack_adjust_offset (basic_block bb)
611 HOST_WIDE_INT offset;
612 int i;
614 offset = VTI (bb)->in.stack_adjust;
615 for (i = 0; i < VTI (bb)->n_mos; i++)
617 if (VTI (bb)->mos[i].type == MO_ADJUST)
618 offset += VTI (bb)->mos[i].u.adjust;
619 else if (VTI (bb)->mos[i].type != MO_CALL)
621 if (MEM_P (VTI (bb)->mos[i].u.loc))
623 VTI (bb)->mos[i].u.loc
624 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
628 VTI (bb)->out.stack_adjust = offset;
631 /* Compute stack adjustments for all blocks by traversing DFS tree.
632 Return true when the adjustments on all incoming edges are consistent.
633 Heavily borrowed from pre_and_rev_post_order_compute. */
635 static bool
636 vt_stack_adjustments (void)
638 edge_iterator *stack;
639 int sp;
641 /* Initialize entry block. */
642 VTI (ENTRY_BLOCK_PTR)->visited = true;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
647 sp = 0;
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
652 while (sp)
654 edge_iterator ei;
655 basic_block src;
656 basic_block dest;
658 /* Look at the edge on the top of the stack. */
659 ei = stack[sp - 1];
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
666 VTI (dest)->visited = true;
667 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
668 bb_stack_adjust_offset (dest);
670 if (EDGE_COUNT (dest->succs) > 0)
671 /* Since the DEST node has been visited for the first
672 time, check its successors. */
673 stack[sp++] = ei_start (dest->succs);
675 else
677 /* Check whether the adjustments on the edges are the same. */
678 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
680 free (stack);
681 return false;
684 if (! ei_one_before_end_p (ei))
685 /* Go to the next edge. */
686 ei_next (&stack[sp - 1]);
687 else
688 /* Return to previous level if there are no more edges. */
689 sp--;
693 free (stack);
694 return true;
697 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
698 to the argument pointer. Return the new rtx. */
700 static rtx
701 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
703 rtx addr, cfa, tmp;
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
707 cfa = plus_constant (frame_pointer_rtx, adjustment);
708 #else
709 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
710 cfa = plus_constant (arg_pointer_rtx, adjustment);
711 #endif
713 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
714 tmp = simplify_rtx (addr);
715 if (tmp)
716 addr = tmp;
718 return replace_equiv_address_nv (mem, addr);
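/* For illustration only (hypothetical numbers, assuming a target without
   FRAME_POINTER_CFA_OFFSET and with an ARG_POINTER_CFA_OFFSET of 0): with
   an ADJUSTMENT of -12, a reference like (mem (plus (reg sp) (const_int 4)))
   has the stack pointer replaced by (plus (reg argp) (const_int -12)) and is
   then simplified to (mem (plus (reg argp) (const_int -8))).  */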
721 /* Return true if a decl_or_value DV is a DECL or NULL. */
722 static inline bool
723 dv_is_decl_p (decl_or_value dv)
725 if (!dv)
726 return false;
728 if (GET_CODE ((rtx)dv) == VALUE)
729 return false;
731 return true;
734 /* Return true if a decl_or_value is a VALUE rtl. */
735 static inline bool
736 dv_is_value_p (decl_or_value dv)
738 return dv && !dv_is_decl_p (dv);
741 /* Return the decl in the decl_or_value. */
742 static inline tree
743 dv_as_decl (decl_or_value dv)
745 gcc_assert (dv_is_decl_p (dv));
746 return (tree) dv;
749 /* Return the value in the decl_or_value. */
750 static inline rtx
751 dv_as_value (decl_or_value dv)
753 gcc_assert (dv_is_value_p (dv));
754 return (rtx)dv;
757 /* Return the opaque pointer in the decl_or_value. */
758 static inline void *
759 dv_as_opaque (decl_or_value dv)
761 return dv;
764 /* Return true if a decl_or_value must not have more than one variable
765 part. */
766 static inline bool
767 dv_onepart_p (decl_or_value dv)
769 tree decl;
771 if (!MAY_HAVE_DEBUG_INSNS)
772 return false;
774 if (dv_is_value_p (dv))
775 return true;
777 decl = dv_as_decl (dv);
779 if (!decl)
780 return true;
782 return (target_for_debug_bind (decl) != NULL_TREE);
785 /* Return the variable pool to be used for dv, depending on whether it
786 can have multiple parts or not. */
787 static inline alloc_pool
788 dv_pool (decl_or_value dv)
790 return dv_onepart_p (dv) ? valvar_pool : var_pool;
793 #define IS_DECL_CODE(C) ((C) == VAR_DECL || (C) == PARM_DECL \
794 || (C) == RESULT_DECL || (C) == COMPONENT_REF)
796 /* Check that VALUE won't ever look like a DECL. */
797 static char check_value_is_not_decl [(!IS_DECL_CODE ((enum tree_code)VALUE))
798 ? 1 : -1] ATTRIBUTE_UNUSED;
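/* This is the usual pre-C11 compile-time assertion idiom: if VALUE ever
   satisfied IS_DECL_CODE, the array above would be declared with size -1
   and the file would fail to compile, so an overlap between VALUE rtxes and
   decl codes would be caught at build time rather than by dv_is_decl_p
   misbehaving at run time.  */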
801 /* Build a decl_or_value out of a decl. */
802 static inline decl_or_value
803 dv_from_decl (tree decl)
805 decl_or_value dv;
806 gcc_assert (!decl || IS_DECL_CODE (TREE_CODE (decl)));
807 dv = decl;
808 return dv;
811 /* Build a decl_or_value out of a value. */
812 static inline decl_or_value
813 dv_from_value (rtx value)
815 decl_or_value dv;
816 gcc_assert (value);
817 dv = value;
818 return dv;
821 static inline hashval_t
822 dv_htab_hash (decl_or_value dv)
824 if (dv_is_value_p (dv))
825 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
826 else
827 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
830 /* The hash function for variable_htab, computes the hash value
831 from the declaration of variable X. */
833 static hashval_t
834 variable_htab_hash (const void *x)
836 const_variable const v = (const_variable) x;
838 return dv_htab_hash (v->dv);
841 /* Compare the declaration of variable X with declaration Y. */
843 static int
844 variable_htab_eq (const void *x, const void *y)
846 const_variable const v = (const_variable) x;
847 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
849 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
850 return true;
852 #if ENABLE_CHECKING
854 bool visv, dvisv;
856 visv = dv_is_value_p (v->dv);
857 dvisv = dv_is_value_p (dv);
859 if (visv != dvisv)
860 return false;
862 if (visv)
863 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
864 != CSELIB_VAL_PTR (dv_as_value (dv)));
865 else
866 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
867 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
869 #endif
871 return false;
874 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
876 static void
877 variable_htab_free (void *elem)
879 int i;
880 variable var = (variable) elem;
881 location_chain node, next;
883 gcc_assert (var->refcount > 0);
885 var->refcount--;
886 if (var->refcount > 0)
887 return;
889 for (i = 0; i < var->n_var_parts; i++)
891 for (node = var->var_part[i].loc_chain; node; node = next)
893 next = node->next;
894 pool_free (loc_chain_pool, node);
896 var->var_part[i].loc_chain = NULL;
898 pool_free (dv_pool (var->dv), var);
901 /* The hash function for value_chains htab, computes the hash value
902 from the VALUE. */
904 static hashval_t
905 value_chain_htab_hash (const void *x)
907 const_value_chain const v = (const_value_chain) x;
909 return dv_htab_hash (v->dv);
912 /* Compare the VALUE X with VALUE Y. */
914 static int
915 value_chain_htab_eq (const void *x, const void *y)
917 const_value_chain const v = (const_value_chain) x;
918 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
920 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
923 /* Initialize the set (array) SET of attrs to empty lists. */
925 static void
926 init_attrs_list_set (attrs *set)
928 int i;
930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
931 set[i] = NULL;
934 /* Make the list *LISTP empty. */
936 static void
937 attrs_list_clear (attrs *listp)
939 attrs list, next;
941 for (list = *listp; list; list = next)
943 next = list->next;
944 pool_free (attrs_pool, list);
946 *listp = NULL;
949 /* Return the node of LIST matching DV and OFFSET, or NULL if the pair is not a member of the list. */
951 static attrs
952 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
954 for (; list; list = list->next)
955 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
956 return list;
957 return NULL;
960 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
962 static void
963 attrs_list_insert (attrs *listp, decl_or_value dv,
964 HOST_WIDE_INT offset, rtx loc)
966 attrs list;
968 list = (attrs) pool_alloc (attrs_pool);
969 list->loc = loc;
970 list->dv = dv;
971 list->offset = offset;
972 list->next = *listp;
973 *listp = list;
976 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
978 static void
979 attrs_list_copy (attrs *dstp, attrs src)
981 attrs n;
983 attrs_list_clear (dstp);
984 for (; src; src = src->next)
986 n = (attrs) pool_alloc (attrs_pool);
987 n->loc = src->loc;
988 n->dv = src->dv;
989 n->offset = src->offset;
990 n->next = *dstp;
991 *dstp = n;
995 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
997 static void
998 attrs_list_union (attrs *dstp, attrs src)
1000 for (; src; src = src->next)
1002 if (!attrs_list_member (*dstp, src->dv, src->offset))
1003 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1007 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1008 *DSTP. */
1010 static void
1011 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1013 gcc_assert (!*dstp);
1014 for (; src; src = src->next)
1016 if (!dv_onepart_p (src->dv))
1017 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1019 for (src = src2; src; src = src->next)
1021 if (!dv_onepart_p (src->dv)
1022 && !attrs_list_member (*dstp, src->dv, src->offset))
1023 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1027 /* Shared hashtable support. */
1029 /* Return true if VARS is shared. */
1031 static inline bool
1032 shared_hash_shared (shared_hash vars)
1034 return vars->refcount > 1;
1037 /* Return the hash table for VARS. */
1039 static inline htab_t
1040 shared_hash_htab (shared_hash vars)
1042 return vars->htab;
1045 /* Copy variables into a new hash table. */
1047 static shared_hash
1048 shared_hash_unshare (shared_hash vars)
1050 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1051 gcc_assert (vars->refcount > 1);
1052 new_vars->refcount = 1;
1053 new_vars->htab
1054 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1055 variable_htab_eq, variable_htab_free);
1056 vars_copy (new_vars->htab, vars->htab);
1057 vars->refcount--;
1058 return new_vars;
1061 /* Increment reference counter on VARS and return it. */
1063 static inline shared_hash
1064 shared_hash_copy (shared_hash vars)
1066 vars->refcount++;
1067 return vars;
1070 /* Decrement reference counter and destroy hash table if not shared
1071 anymore. */
1073 static void
1074 shared_hash_destroy (shared_hash vars)
1076 gcc_assert (vars->refcount > 0);
1077 if (--vars->refcount == 0)
1079 htab_delete (vars->htab);
1080 pool_free (shared_hash_pool, vars);
1084 /* Unshare *PVARS if shared and return slot for DV. If INS is
1085 INSERT, insert it if not already present. */
1087 static inline void **
1088 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1089 hashval_t dvhash, enum insert_option ins)
1091 if (shared_hash_shared (*pvars))
1092 *pvars = shared_hash_unshare (*pvars);
1093 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1096 static inline void **
1097 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1098 enum insert_option ins)
1100 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1103 /* Return slot for DV, if it is already present in the hash table.
1104 If it is not present, insert it only if VARS is not shared, otherwise
1105 return NULL. */
1107 static inline void **
1108 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1110 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1111 shared_hash_shared (vars)
1112 ? NO_INSERT : INSERT);
1115 static inline void **
1116 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1118 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1121 /* Return slot for DV only if it is already present in the hash table. */
1123 static inline void **
1124 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1125 hashval_t dvhash)
1127 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1128 NO_INSERT);
1131 static inline void **
1132 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1134 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1137 /* Return variable for DV or NULL if not already present in the hash
1138 table. */
1140 static inline variable
1141 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1143 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1146 static inline variable
1147 shared_hash_find (shared_hash vars, decl_or_value dv)
1149 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1152 /* Determine a total order between two distinct pointers. Compare the
1153 pointers as integral types if size_t is wide enough, otherwise
1154 resort to bitwise memory compare. The actual order does not
1155 matter, we just need to be consistent, so endianness is
1156 irrelevant. */
1158 static int
1159 tie_break_pointers (const void *p1, const void *p2)
1161 gcc_assert (p1 != p2);
1163 if (sizeof (size_t) >= sizeof (void*))
1164 return (size_t)p1 < (size_t)p2 ? -1 : 1;
1165 else
1166 return memcmp (&p1, &p2, sizeof (p1));
1169 /* Return true if TVAL is better than CVAL as a canonical value. We
1170 choose lowest-numbered VALUEs, using the RTX address as a
1171 tie-breaker. The idea is to arrange them into a star topology,
1172 such that all of them are at most one step away from the canonical
1173 value, and the canonical value has backlinks to all of them, in
1174 addition to all the actual locations. We don't enforce this
1175 topology throughout the entire dataflow analysis, though. */
1178 static inline bool
1179 canon_value_cmp (rtx tval, rtx cval)
1181 return !cval
1182 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1183 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1184 && tie_break_pointers (tval, cval) < 0);
1187 static bool dst_can_be_shared;
1189 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1191 static void **
1192 unshare_variable (dataflow_set *set, void **slot, variable var,
1193 enum var_init_status initialized)
1195 variable new_var;
1196 int i;
1198 new_var = (variable) pool_alloc (dv_pool (var->dv));
1199 new_var->dv = var->dv;
1200 new_var->refcount = 1;
1201 var->refcount--;
1202 new_var->n_var_parts = var->n_var_parts;
1204 if (! flag_var_tracking_uninit)
1205 initialized = VAR_INIT_STATUS_INITIALIZED;
1207 for (i = 0; i < var->n_var_parts; i++)
1209 location_chain node;
1210 location_chain *nextp;
1212 new_var->var_part[i].offset = var->var_part[i].offset;
1213 nextp = &new_var->var_part[i].loc_chain;
1214 for (node = var->var_part[i].loc_chain; node; node = node->next)
1216 location_chain new_lc;
1218 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1219 new_lc->next = NULL;
1220 if (node->init > initialized)
1221 new_lc->init = node->init;
1222 else
1223 new_lc->init = initialized;
1224 if (node->set_src && !(MEM_P (node->set_src)))
1225 new_lc->set_src = node->set_src;
1226 else
1227 new_lc->set_src = NULL;
1228 new_lc->loc = node->loc;
1230 *nextp = new_lc;
1231 nextp = &new_lc->next;
1234 /* We are at the basic block boundary when copying variable description
1235 so set the CUR_LOC to be the first element of the chain. */
1236 if (new_var->var_part[i].loc_chain)
1237 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1238 else
1239 new_var->var_part[i].cur_loc = NULL;
1242 dst_can_be_shared = false;
1243 if (shared_hash_shared (set->vars))
1244 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1245 else if (set->traversed_vars && set->vars != set->traversed_vars)
1246 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1247 *slot = new_var;
1248 return slot;
1251 /* Add a variable from *SLOT to hash table DATA and increase its reference
1252 count. */
1254 static int
1255 vars_copy_1 (void **slot, void *data)
1257 htab_t dst = (htab_t) data;
1258 variable src;
1259 void **dstp;
1261 src = (variable) *slot;
1262 src->refcount++;
1264 dstp = htab_find_slot_with_hash (dst, src->dv,
1265 dv_htab_hash (src->dv),
1266 INSERT);
1267 *dstp = src;
1269 /* Continue traversing the hash table. */
1270 return 1;
1273 /* Copy all variables from hash table SRC to hash table DST. */
1275 static void
1276 vars_copy (htab_t dst, htab_t src)
1278 htab_traverse_noresize (src, vars_copy_1, dst);
1281 /* Map a decl to its main debug decl. */
1283 static inline tree
1284 var_debug_decl (tree decl)
1286 if (decl && DECL_P (decl)
1287 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1288 && DECL_P (DECL_DEBUG_EXPR (decl)))
1289 decl = DECL_DEBUG_EXPR (decl);
1291 return decl;
1294 /* Set the register LOC to contain DV, OFFSET. */
1296 static void
1297 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1298 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1299 enum insert_option iopt)
1301 attrs node;
1302 bool decl_p = dv_is_decl_p (dv);
1304 if (decl_p)
1305 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1307 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1308 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1309 && node->offset == offset)
1310 break;
1311 if (!node)
1312 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1313 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1316 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1318 static void
1319 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1320 rtx set_src)
1322 tree decl = REG_EXPR (loc);
1323 HOST_WIDE_INT offset = REG_OFFSET (loc);
1325 var_reg_decl_set (set, loc, initialized,
1326 dv_from_decl (decl), offset, set_src, INSERT);
1329 static enum var_init_status
1330 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1332 variable var;
1333 int i;
1334 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1336 if (! flag_var_tracking_uninit)
1337 return VAR_INIT_STATUS_INITIALIZED;
1339 var = shared_hash_find (set->vars, dv);
1340 if (var)
1342 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1344 location_chain nextp;
1345 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1346 if (rtx_equal_p (nextp->loc, loc))
1348 ret_val = nextp->init;
1349 break;
1354 return ret_val;
1357 /* Delete current content of register LOC in dataflow set SET and set
1358 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1359 MODIFY is true, any other live copies of the same variable part are
1360 also deleted from the dataflow set, otherwise the variable part is
1361 assumed to be copied from another location holding the same
1362 part. */
1364 static void
1365 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1366 enum var_init_status initialized, rtx set_src)
1368 tree decl = REG_EXPR (loc);
1369 HOST_WIDE_INT offset = REG_OFFSET (loc);
1370 attrs node, next;
1371 attrs *nextp;
1373 decl = var_debug_decl (decl);
1375 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1376 initialized = get_init_value (set, loc, dv_from_decl (decl));
1378 nextp = &set->regs[REGNO (loc)];
1379 for (node = *nextp; node; node = next)
1381 next = node->next;
1382 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1384 delete_variable_part (set, node->loc, node->dv, node->offset);
1385 pool_free (attrs_pool, node);
1386 *nextp = next;
1388 else
1390 node->loc = loc;
1391 nextp = &node->next;
1394 if (modify)
1395 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1396 var_reg_set (set, loc, initialized, set_src);
1399 /* Delete current content of register LOC in dataflow set SET. If
1400 CLOBBER is true, also delete any other live copies of the same
1401 variable part. */
1403 static void
1404 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1406 attrs *reg = &set->regs[REGNO (loc)];
1407 attrs node, next;
1409 if (clobber)
1411 tree decl = REG_EXPR (loc);
1412 HOST_WIDE_INT offset = REG_OFFSET (loc);
1414 decl = var_debug_decl (decl);
1416 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1419 for (node = *reg; node; node = next)
1421 next = node->next;
1422 delete_variable_part (set, node->loc, node->dv, node->offset);
1423 pool_free (attrs_pool, node);
1425 *reg = NULL;
1428 /* Delete content of register with number REGNO in dataflow set SET. */
1430 static void
1431 var_regno_delete (dataflow_set *set, int regno)
1433 attrs *reg = &set->regs[regno];
1434 attrs node, next;
1436 for (node = *reg; node; node = next)
1438 next = node->next;
1439 delete_variable_part (set, node->loc, node->dv, node->offset);
1440 pool_free (attrs_pool, node);
1442 *reg = NULL;
1445 /* Set the location of DV, OFFSET as the MEM LOC. */
1447 static void
1448 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1449 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1450 enum insert_option iopt)
1452 if (dv_is_decl_p (dv))
1453 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1455 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1458 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1459 SET to LOC.
1460 Adjust the address first if it is stack pointer based. */
1462 static void
1463 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1464 rtx set_src)
1466 tree decl = MEM_EXPR (loc);
1467 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1469 var_mem_decl_set (set, loc, initialized,
1470 dv_from_decl (decl), offset, set_src, INSERT);
1473 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1474 dataflow set SET to LOC. If MODIFY is true, any other live copies
1475 of the same variable part are also deleted from the dataflow set,
1476 otherwise the variable part is assumed to be copied from another
1477 location holding the same part.
1478 Adjust the address first if it is stack pointer based. */
1480 static void
1481 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1482 enum var_init_status initialized, rtx set_src)
1484 tree decl = MEM_EXPR (loc);
1485 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1487 decl = var_debug_decl (decl);
1489 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1490 initialized = get_init_value (set, loc, dv_from_decl (decl));
1492 if (modify)
1493 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1494 var_mem_set (set, loc, initialized, set_src);
1497 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1498 true, also delete any other live copies of the same variable part.
1499 Adjust the address first if it is stack pointer based. */
1501 static void
1502 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1504 tree decl = MEM_EXPR (loc);
1505 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1507 decl = var_debug_decl (decl);
1508 if (clobber)
1509 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1510 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1513 /* Map a value to a location it was just stored in. */
1515 static void
1516 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
1518 cselib_val *v = CSELIB_VAL_PTR (val);
1520 gcc_assert (cselib_preserved_value_p (v));
1522 if (dump_file)
1524 fprintf (dump_file, "%i: ", INSN_UID (insn));
1525 print_inline_rtx (dump_file, val, 0);
1526 fprintf (dump_file, " stored in ");
1527 print_inline_rtx (dump_file, loc, 0);
1528 if (v->locs)
1530 struct elt_loc_list *l;
1531 for (l = v->locs; l; l = l->next)
1533 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1534 print_inline_rtx (dump_file, l->loc, 0);
1537 fprintf (dump_file, "\n");
1540 if (REG_P (loc))
1542 var_regno_delete (set, REGNO (loc));
1543 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1544 dv_from_value (val), 0, NULL_RTX, INSERT);
1546 else if (MEM_P (loc))
1547 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1548 dv_from_value (val), 0, NULL_RTX, INSERT);
1549 else
1550 set_variable_part (set, loc, dv_from_value (val), 0,
1551 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1554 /* Reset this node, detaching all its equivalences. Return the slot
1555 in the variable hash table that holds dv, if there is one. */
1557 static void
1558 val_reset (dataflow_set *set, decl_or_value dv)
1560 variable var = shared_hash_find (set->vars, dv) ;
1561 location_chain node;
1562 rtx cval;
1564 if (!var || !var->n_var_parts)
1565 return;
1567 gcc_assert (var->n_var_parts == 1);
1569 cval = NULL;
1570 for (node = var->var_part[0].loc_chain; node; node = node->next)
1571 if (GET_CODE (node->loc) == VALUE
1572 && canon_value_cmp (node->loc, cval))
1573 cval = node->loc;
1575 for (node = var->var_part[0].loc_chain; node; node = node->next)
1576 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1578 /* Redirect the equivalence link to the new canonical
1579 value, or simply remove it if it would point at
1580 itself. */
1581 if (cval)
1582 set_variable_part (set, cval, dv_from_value (node->loc),
1583 0, node->init, node->set_src, NO_INSERT);
1584 delete_variable_part (set, dv_as_value (dv),
1585 dv_from_value (node->loc), 0);
1588 if (cval)
1590 decl_or_value cdv = dv_from_value (cval);
1592 /* Keep the remaining values connected, accumulating links
1593 in the canonical value. */
1594 for (node = var->var_part[0].loc_chain; node; node = node->next)
1596 if (node->loc == cval)
1597 continue;
1598 else if (GET_CODE (node->loc) == REG)
1599 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1600 node->set_src, NO_INSERT);
1601 else if (GET_CODE (node->loc) == MEM)
1602 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1603 node->set_src, NO_INSERT);
1604 else
1605 set_variable_part (set, node->loc, cdv, 0,
1606 node->init, node->set_src, NO_INSERT);
1610 /* We remove this last, to make sure that the canonical value is not
1611 removed to the point of requiring reinsertion. */
1612 if (cval)
1613 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1615 clobber_variable_part (set, NULL, dv, 0, NULL);
1617 /* ??? Should we make sure there aren't other available values or
1618 variables whose values involve this one other than by
1619 equivalence? E.g., at the very least we should reset MEMs, those
1620 shouldn't be too hard to find cselib-looking up the value as an
1621 address, then locating the resulting value in our own hash
1622 table. */
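/* An illustrative walk-through (hypothetical VALUE numbers, not from the
   original sources): suppose the value being reset, V10, currently lists
   V4, V7 and (reg r3) as locations, with V4 the most canonical VALUE among
   them.  val_reset then records V4 as a location of V7, attaches (reg r3)
   and V7 to V4's own chain, removes V10 from the chains of V4 and V7, and
   finally clobbers V10's own location chain, leaving V4 as the hub of the
   star topology described above for canon_value_cmp.  */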
1625 /* Find the values in a given location and map the val to another
1626 value, if it is unique, or add the location as one holding the
1627 value. */
1629 static void
1630 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1632 decl_or_value dv = dv_from_value (val);
1634 if (dump_file && (dump_flags & TDF_DETAILS))
1636 if (insn)
1637 fprintf (dump_file, "%i: ", INSN_UID (insn));
1638 else
1639 fprintf (dump_file, "head: ");
1640 print_inline_rtx (dump_file, val, 0);
1641 fputs (" is at ", dump_file);
1642 print_inline_rtx (dump_file, loc, 0);
1643 fputc ('\n', dump_file);
1646 val_reset (set, dv);
1648 if (REG_P (loc))
1650 attrs node, found = NULL;
1652 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1653 if (dv_is_value_p (node->dv)
1654 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1656 found = node;
1658 /* Map incoming equivalences. ??? Wouldn't it be nice if
1659 we just started sharing the location lists? Maybe a
1660 circular list ending at the value itself or some
1661 such. */
1662 set_variable_part (set, dv_as_value (node->dv),
1663 dv_from_value (val), node->offset,
1664 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1665 set_variable_part (set, val, node->dv, node->offset,
1666 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1669 /* If we didn't find any equivalence, we need to remember that
1670 this value is held in the named register. */
1671 if (!found)
1672 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1673 dv_from_value (val), 0, NULL_RTX, INSERT);
1675 else if (MEM_P (loc))
1676 /* ??? Merge equivalent MEMs. */
1677 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1678 dv_from_value (val), 0, NULL_RTX, INSERT);
1679 else
1680 /* ??? Merge equivalent expressions. */
1681 set_variable_part (set, loc, dv_from_value (val), 0,
1682 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1685 /* Initialize dataflow set SET to be empty. */
1688 static void
1689 dataflow_set_init (dataflow_set *set)
1691 init_attrs_list_set (set->regs);
1692 set->vars = shared_hash_copy (empty_shared_hash);
1693 set->stack_adjust = 0;
1694 set->traversed_vars = NULL;
1697 /* Delete the contents of dataflow set SET. */
1699 static void
1700 dataflow_set_clear (dataflow_set *set)
1702 int i;
1704 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1705 attrs_list_clear (&set->regs[i]);
1707 shared_hash_destroy (set->vars);
1708 set->vars = shared_hash_copy (empty_shared_hash);
1711 /* Copy the contents of dataflow set SRC to DST. */
1713 static void
1714 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1716 int i;
1718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1719 attrs_list_copy (&dst->regs[i], src->regs[i]);
1721 shared_hash_destroy (dst->vars);
1722 dst->vars = shared_hash_copy (src->vars);
1723 dst->stack_adjust = src->stack_adjust;
1726 /* Information for merging lists of locations for a given offset of a variable. */
1728 struct variable_union_info
1730 /* Node of the location chain. */
1731 location_chain lc;
1733 /* The sum of positions in the input chains. */
1734 int pos;
1736 /* The position in the chain of DST dataflow set. */
1737 int pos_dst;
1740 /* Buffer for location list sorting and its allocated size. */
1741 static struct variable_union_info *vui_vec;
1742 static int vui_allocated;
1744 /* Compare function for qsort, order the structures by POS element. */
1746 static int
1747 variable_union_info_cmp_pos (const void *n1, const void *n2)
1749 const struct variable_union_info *const i1 =
1750 (const struct variable_union_info *) n1;
1751 const struct variable_union_info *const i2 =
1752 ( const struct variable_union_info *) n2;
1754 if (i1->pos != i2->pos)
1755 return i1->pos - i2->pos;
1757 return (i1->pos_dst - i2->pos_dst);
1760 /* Compute union of location parts of variable *SLOT and the same variable
1761 from hash table DATA. Compute "sorted" union of the location chains
1762 for common offsets, i.e. the locations of a variable part are sorted by
1763 a priority where the priority is the sum of the positions in the 2 chains
1764 (if a location is only in one list the position in the second list is
1765 defined to be larger than the length of the chains).
1766 When we are updating the location parts the newest location is in the
1767 beginning of the chain, so when we do the described "sorted" union
1768 we keep the newest locations in the beginning. */
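/* A worked example of the "sorted" union above (illustrative only, with
   made-up locations): suppose for one offset the DST chain is (reg r1),
   (reg r2) and the SRC chain is (reg r2), (reg r3), so src_l = dst_l = 2.
   Then (reg r2), present in both, gets priority 1 + 0 = 1; (reg r1),
   present only in DST, gets 0 + src_l + dst_l = 4; and (reg r3), present
   only in SRC, gets 1 + src_l + dst_l = 5.  Sorting by priority yields the
   union (reg r2), (reg r1), (reg r3): locations present in both chains
   come first.  */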
1770 static int
1771 variable_union (void **slot, void *data)
1773 variable src, dst;
1774 void **dstp;
1775 dataflow_set *set = (dataflow_set *) data;
1776 int i, j, k;
1778 src = (variable) *slot;
1779 dstp = shared_hash_find_slot (set->vars, src->dv);
1780 if (!dstp || !*dstp)
1782 src->refcount++;
1784 dst_can_be_shared = false;
1785 if (!dstp)
1786 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1788 *dstp = src;
1790 /* If CUR_LOC of some variable part is not the first element of
1791 the location chain we are going to change it so we have to make
1792 a copy of the variable. */
1793 for (k = 0; k < src->n_var_parts; k++)
1795 gcc_assert (!src->var_part[k].loc_chain
1796 == !src->var_part[k].cur_loc);
1797 if (src->var_part[k].loc_chain)
1799 gcc_assert (src->var_part[k].cur_loc);
1800 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1801 break;
1804 if (k < src->n_var_parts)
1805 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1807 /* Continue traversing the hash table. */
1808 return 1;
1810 else
1811 dst = (variable) *dstp;
1813 gcc_assert (src->n_var_parts);
1815 /* We can combine one-part variables very efficiently, because their
1816 entries are in canonical order. */
1817 if (dv_onepart_p (src->dv))
1819 location_chain *nodep, dnode, snode;
1821 gcc_assert (src->n_var_parts == 1);
1822 gcc_assert (dst->n_var_parts == 1);
1824 snode = src->var_part[0].loc_chain;
1825 gcc_assert (snode);
1827 restart_onepart_unshared:
1828 nodep = &dst->var_part[0].loc_chain;
1829 dnode = *nodep;
1830 gcc_assert (dnode);
1832 while (snode)
1834 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1836 if (r > 0)
1838 location_chain nnode;
1840 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1842 dstp = unshare_variable (set, dstp, dst,
1843 VAR_INIT_STATUS_INITIALIZED);
1844 dst = (variable)*dstp;
1845 goto restart_onepart_unshared;
1848 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1849 nnode->loc = snode->loc;
1850 nnode->init = snode->init;
1851 if (!snode->set_src || MEM_P (snode->set_src))
1852 nnode->set_src = NULL;
1853 else
1854 nnode->set_src = snode->set_src;
1855 nnode->next = dnode;
1856 dnode = nnode;
1858 #ifdef ENABLE_CHECKING
1859 else if (r == 0)
1860 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1861 #endif
1863 if (r >= 0)
1864 snode = snode->next;
1866 nodep = &dnode->next;
1867 dnode = *nodep;
1870 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1872 return 1;
1875 /* Count the number of location parts, result is K. */
1876 for (i = 0, j = 0, k = 0;
1877 i < src->n_var_parts && j < dst->n_var_parts; k++)
1879 if (src->var_part[i].offset == dst->var_part[j].offset)
1881 i++;
1882 j++;
1884 else if (src->var_part[i].offset < dst->var_part[j].offset)
1885 i++;
1886 else
1887 j++;
1889 k += src->n_var_parts - i;
1890 k += dst->n_var_parts - j;
1892 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
1893 thus there are at most MAX_VAR_PARTS different offsets. */
1894 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1896 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1897 && dst->n_var_parts != k)
1899 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1900 dst = (variable)*dstp;
1903 i = src->n_var_parts - 1;
1904 j = dst->n_var_parts - 1;
1905 dst->n_var_parts = k;
1907 for (k--; k >= 0; k--)
1909 location_chain node, node2;
1911 if (i >= 0 && j >= 0
1912 && src->var_part[i].offset == dst->var_part[j].offset)
1914 /* Compute the "sorted" union of the chains, i.e. the locations which
1915 are in both chains go first, they are sorted by the sum of
1916 positions in the chains. */
1917 int dst_l, src_l;
1918 int ii, jj, n;
1919 struct variable_union_info *vui;
1921 /* If DST is shared compare the location chains.
1922 If they are different we will modify the chain in DST with
1923 high probability so make a copy of DST. */
1924 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1926 for (node = src->var_part[i].loc_chain,
1927 node2 = dst->var_part[j].loc_chain; node && node2;
1928 node = node->next, node2 = node2->next)
1930 if (!((REG_P (node2->loc)
1931 && REG_P (node->loc)
1932 && REGNO (node2->loc) == REGNO (node->loc))
1933 || rtx_equal_p (node2->loc, node->loc)))
1935 if (node2->init < node->init)
1936 node2->init = node->init;
1937 break;
1940 if (node || node2)
1942 dstp = unshare_variable (set, dstp, dst,
1943 VAR_INIT_STATUS_UNKNOWN);
1944 dst = (variable)*dstp;
1948 src_l = 0;
1949 for (node = src->var_part[i].loc_chain; node; node = node->next)
1950 src_l++;
1951 dst_l = 0;
1952 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1953 dst_l++;
1955 if (dst_l == 1)
1957 /* The most common case, much simpler, no qsort is needed. */
1958 location_chain dstnode = dst->var_part[j].loc_chain;
1959 dst->var_part[k].loc_chain = dstnode;
1960 dst->var_part[k].offset = dst->var_part[j].offset;
1961 node2 = dstnode;
1962 for (node = src->var_part[i].loc_chain; node; node = node->next)
1963 if (!((REG_P (dstnode->loc)
1964 && REG_P (node->loc)
1965 && REGNO (dstnode->loc) == REGNO (node->loc))
1966 || rtx_equal_p (dstnode->loc, node->loc)))
1968 location_chain new_node;
1970 /* Copy the location from SRC. */
1971 new_node = (location_chain) pool_alloc (loc_chain_pool);
1972 new_node->loc = node->loc;
1973 new_node->init = node->init;
1974 if (!node->set_src || MEM_P (node->set_src))
1975 new_node->set_src = NULL;
1976 else
1977 new_node->set_src = node->set_src;
1978 node2->next = new_node;
1979 node2 = new_node;
1981 node2->next = NULL;
1983 else
1985 if (src_l + dst_l > vui_allocated)
1987 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1988 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
1989 vui_allocated);
1991 vui = vui_vec;
1993 /* Fill in the locations from DST. */
1994 for (node = dst->var_part[j].loc_chain, jj = 0; node;
1995 node = node->next, jj++)
1997 vui[jj].lc = node;
1998 vui[jj].pos_dst = jj;
2000 /* The position plus a value larger than the sum of any two valid positions, so these entries sort after all shared ones. */
2001 vui[jj].pos = jj + src_l + dst_l;
2004 /* Fill in the locations from SRC. */
2005 n = dst_l;
2006 for (node = src->var_part[i].loc_chain, ii = 0; node;
2007 node = node->next, ii++)
2009 /* Find location from NODE. */
2010 for (jj = 0; jj < dst_l; jj++)
2012 if ((REG_P (vui[jj].lc->loc)
2013 && REG_P (node->loc)
2014 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2015 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2017 vui[jj].pos = jj + ii;
2018 break;
2021 if (jj >= dst_l) /* The location has not been found. */
2023 location_chain new_node;
2025 /* Copy the location from SRC. */
2026 new_node = (location_chain) pool_alloc (loc_chain_pool);
2027 new_node->loc = node->loc;
2028 new_node->init = node->init;
2029 if (!node->set_src || MEM_P (node->set_src))
2030 new_node->set_src = NULL;
2031 else
2032 new_node->set_src = node->set_src;
2033 vui[n].lc = new_node;
2034 vui[n].pos_dst = src_l + dst_l;
2035 vui[n].pos = ii + src_l + dst_l;
2036 n++;
2040 if (dst_l == 2)
2042 /* Special-case a still very common case. For dst_l == 2
2043 all entries dst_l ... n-1 are already sorted, with
2044 vui[i].pos == i + src_l + dst_l for i >= dst_l. */
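/* E.g. (hypothetical positions): if vui[0].pos == 5 and vui[1].pos == 3,
   the two DST nodes are swapped and the chain starts 1, 0, 2, 3, ...;
   if instead vui[0].pos == 0, vui[1].pos == 5 and vui[2].pos == 4,
   the chain starts 0, 2, 1, 3, ...  */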
2045 if (vui[0].pos > vui[1].pos)
2047 /* Order should be 1, 0, 2... */
2048 dst->var_part[k].loc_chain = vui[1].lc;
2049 vui[1].lc->next = vui[0].lc;
2050 if (n >= 3)
2052 vui[0].lc->next = vui[2].lc;
2053 vui[n - 1].lc->next = NULL;
2055 else
2056 vui[0].lc->next = NULL;
2057 ii = 3;
2059 else
2061 dst->var_part[k].loc_chain = vui[0].lc;
2062 if (n >= 3 && vui[2].pos < vui[1].pos)
2064 /* Order should be 0, 2, 1, 3... */
2065 vui[0].lc->next = vui[2].lc;
2066 vui[2].lc->next = vui[1].lc;
2067 if (n >= 4)
2069 vui[1].lc->next = vui[3].lc;
2070 vui[n - 1].lc->next = NULL;
2072 else
2073 vui[1].lc->next = NULL;
2074 ii = 4;
2076 else
2078 /* Order should be 0, 1, 2... */
2079 ii = 1;
2080 vui[n - 1].lc->next = NULL;
2083 for (; ii < n; ii++)
2084 vui[ii - 1].lc->next = vui[ii].lc;
2086 else
2088 qsort (vui, n, sizeof (struct variable_union_info),
2089 variable_union_info_cmp_pos);
2091 /* Reconnect the nodes in sorted order. */
2092 for (ii = 1; ii < n; ii++)
2093 vui[ii - 1].lc->next = vui[ii].lc;
2094 vui[n - 1].lc->next = NULL;
2095 dst->var_part[k].loc_chain = vui[0].lc;
2098 dst->var_part[k].offset = dst->var_part[j].offset;
2100 i--;
2101 j--;
2103 else if ((i >= 0 && j >= 0
2104 && src->var_part[i].offset < dst->var_part[j].offset)
2105 || i < 0)
2107 dst->var_part[k] = dst->var_part[j];
2108 j--;
2110 else if ((i >= 0 && j >= 0
2111 && src->var_part[i].offset > dst->var_part[j].offset)
2112 || j < 0)
2114 location_chain *nextp;
2116 /* Copy the chain from SRC. */
2117 nextp = &dst->var_part[k].loc_chain;
2118 for (node = src->var_part[i].loc_chain; node; node = node->next)
2120 location_chain new_lc;
2122 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2123 new_lc->next = NULL;
2124 new_lc->init = node->init;
2125 if (!node->set_src || MEM_P (node->set_src))
2126 new_lc->set_src = NULL;
2127 else
2128 new_lc->set_src = node->set_src;
2129 new_lc->loc = node->loc;
2131 *nextp = new_lc;
2132 nextp = &new_lc->next;
2135 dst->var_part[k].offset = src->var_part[i].offset;
2136 i--;
2139 /* We are at the basic block boundary when computing union,
2140 so set the CUR_LOC to be the first element of the chain. */
2141 if (dst->var_part[k].loc_chain)
2142 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2143 else
2144 dst->var_part[k].cur_loc = NULL;
2147 if (flag_var_tracking_uninit)
2148 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2150 location_chain node, node2;
2151 for (node = src->var_part[i].loc_chain; node; node = node->next)
2152 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2153 if (rtx_equal_p (node->loc, node2->loc))
2155 if (node->init > node2->init)
2156 node2->init = node->init;
2160 /* Continue traversing the hash table. */
2161 return 1;
2164 /* Like variable_union, but only used when doing dataflow_set_union
2165 into an empty hashtab. To allow sharing, dst is initially shared
2166 with src (so all variables are "copied" from src to dst hashtab);
2167 unshare_variable thus only needs to be called for variables that
2168 need canonicalization. */
2170 static int
2171 variable_canonicalize (void **slot, void *data)
2173 variable src;
2174 dataflow_set *set = (dataflow_set *) data;
2175 int k;
2177 src = *(variable *) slot;
2179 /* If CUR_LOC of some variable part is not the first element of
2180 the location chain, we are going to change it, so we have to
2181 a copy of the variable. */
2182 for (k = 0; k < src->n_var_parts; k++)
2184 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2185 if (src->var_part[k].loc_chain)
2187 gcc_assert (src->var_part[k].cur_loc);
2188 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2189 break;
2192 if (k < src->n_var_parts)
2193 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2194 return 1;
2197 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2199 static void
2200 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2202 int i;
2204 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2205 attrs_list_union (&dst->regs[i], src->regs[i]);
2207 if (dst->vars == empty_shared_hash)
2209 shared_hash_destroy (dst->vars);
2210 dst->vars = shared_hash_copy (src->vars);
2211 dst->traversed_vars = dst->vars;
2212 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2213 dst->traversed_vars = NULL;
2215 else
2216 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2219 /* Whether the value is currently being expanded. */
2220 #define VALUE_RECURSED_INTO(x) \
2221 (RTL_FLAG_CHECK1 ("VALUE_RECURSED_INTO", (x), VALUE)->used)
2222 /* Whether the value is in changed_variables hash table. */
2223 #define VALUE_CHANGED(x) \
2224 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2225 /* Whether the decl is in changed_variables hash table. */
2226 #define DECL_CHANGED(x) TREE_VISITED (x)
2228 /* Record that DV has been added to, resp. removed from, the
2229 changed_variables hash table. */
2231 static inline void
2232 set_dv_changed (decl_or_value dv, bool newv)
2234 if (dv_is_value_p (dv))
2235 VALUE_CHANGED (dv_as_value (dv)) = newv;
2236 else
2237 DECL_CHANGED (dv_as_decl (dv)) = newv;
2240 /* Return true if DV is present in changed_variables hash table. */
2242 static inline bool
2243 dv_changed_p (decl_or_value dv)
2245 return (dv_is_value_p (dv)
2246 ? VALUE_CHANGED (dv_as_value (dv))
2247 : DECL_CHANGED (dv_as_decl (dv)));
2250 /* Return a location list node whose loc is rtx_equal to LOC, in the
2251 location list of a one-part variable or value VAR, or in that of
2252 any values recursively mentioned in the location lists. */
2254 static location_chain
2255 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2257 location_chain node;
2259 if (!var)
2260 return NULL;
2262 gcc_assert (dv_onepart_p (var->dv));
2264 if (!var->n_var_parts)
2265 return NULL;
2267 gcc_assert (var->var_part[0].offset == 0);
2269 for (node = var->var_part[0].loc_chain; node; node = node->next)
2270 if (rtx_equal_p (loc, node->loc))
2271 return node;
2272 else if (GET_CODE (node->loc) == VALUE
2273 && !VALUE_RECURSED_INTO (node->loc))
2275 decl_or_value dv = dv_from_value (node->loc);
2276 variable var = (variable)
2277 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2279 if (var)
2281 location_chain where;
2282 VALUE_RECURSED_INTO (node->loc) = true;
2283 if ((where = find_loc_in_1pdv (loc, var, vars)))
2285 VALUE_RECURSED_INTO (node->loc) = false;
2286 return where;
2288 VALUE_RECURSED_INTO (node->loc) = false;
2292 return NULL;
2295 /* Hash table iteration argument passed to the variable_merge_over_* routines. */
2296 struct dfset_merge
2298 /* The set in which the merge is to be inserted. */
2299 dataflow_set *dst;
2300 /* The set that we're iterating in. */
2301 dataflow_set *cur;
2302 /* The set that may contain the other dv we are to merge with. */
2303 dataflow_set *src;
2304 /* Number of onepart dvs in src. */
2305 int src_onepart_cnt;
2308 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
2309 loc_cmp order, and it is maintained as such. */
2311 static void
2312 insert_into_intersection (location_chain *nodep, rtx loc,
2313 enum var_init_status status)
2315 location_chain node;
2316 int r;
2318 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2319 if ((r = loc_cmp (node->loc, loc)) == 0)
2321 node->init = MIN (node->init, status);
2322 return;
2324 else if (r > 0)
2325 break;
2327 node = (location_chain) pool_alloc (loc_chain_pool);
2329 node->loc = loc;
2330 node->set_src = NULL;
2331 node->init = status;
2332 node->next = *nodep;
2333 *nodep = node;
2336 /* Insert in DEST the intersection of the locations present in both
2337 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2338 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2339 DSM->dst. */
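/* A hypothetical illustration: if S1NODE's chain is {(reg 1), (mem X)}
   and S2VAR's chain (searched recursively through VALUEs by
   find_loc_in_1pdv) contains (mem X) but not (reg 1), only (mem X) is
   inserted into DEST, with the weaker (minimum) of the two init
   statuses.  */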
2341 static void
2342 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2343 location_chain s1node, variable s2var)
2345 dataflow_set *s1set = dsm->cur;
2346 dataflow_set *s2set = dsm->src;
2347 location_chain found;
2349 for (; s1node; s1node = s1node->next)
2351 if (s1node->loc == val)
2352 continue;
2354 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2355 shared_hash_htab (s2set->vars))))
2357 insert_into_intersection (dest, s1node->loc,
2358 MIN (s1node->init, found->init));
2359 continue;
2362 if (GET_CODE (s1node->loc) == VALUE
2363 && !VALUE_RECURSED_INTO (s1node->loc))
2365 decl_or_value dv = dv_from_value (s1node->loc);
2366 variable svar = shared_hash_find (s1set->vars, dv);
2367 if (svar)
2369 if (svar->n_var_parts == 1)
2371 VALUE_RECURSED_INTO (s1node->loc) = true;
2372 intersect_loc_chains (val, dest, dsm,
2373 svar->var_part[0].loc_chain,
2374 s2var);
2375 VALUE_RECURSED_INTO (s1node->loc) = false;
2380 /* ??? if the location is equivalent to any location in src,
2381 searched recursively
2383 add to dst the values needed to represent the equivalence
2385 telling whether location S is equivalent to another dv's
2386 location list:
2388 for each location D in the list
2390 if S and D satisfy rtx_equal_p, then it is present
2392 else if D is a value, recurse without cycles
2394 else if S and D have the same CODE and MODE
2396 for each operand oS and the corresponding oD
2398 if oS and oD are not equivalent, then S and D are not equivalent
2400 else if they are RTX vectors
2402 if any vector oS element is not equivalent to its respective oD,
2403 then S and D are not equivalent
2411 /* Return -1 if X should be before Y in a location list for a 1-part
2412 variable, 1 if Y should be before X, and 0 if they're equivalent
2413 and should not appear in the list. */
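/* In other words, the resulting order is: REGs first (by REGNO), then
   MEMs (compared recursively by their addresses), then VALUEs (by
   canon_value_cmp), and finally all other RTXes, ordered by code and
   then operand by operand.  */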
2415 static int
2416 loc_cmp (rtx x, rtx y)
2418 int i, j, r;
2419 RTX_CODE code = GET_CODE (x);
2420 const char *fmt;
2422 if (x == y)
2423 return 0;
2425 if (REG_P (x))
2427 if (!REG_P (y))
2428 return -1;
2429 gcc_assert (GET_MODE (x) == GET_MODE (y));
2430 if (REGNO (x) == REGNO (y))
2431 return 0;
2432 else if (REGNO (x) < REGNO (y))
2433 return -1;
2434 else
2435 return 1;
2438 if (REG_P (y))
2439 return 1;
2441 if (MEM_P (x))
2443 if (!MEM_P (y))
2444 return -1;
2445 gcc_assert (GET_MODE (x) == GET_MODE (y));
2446 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2449 if (MEM_P (y))
2450 return 1;
2452 if (GET_CODE (x) == VALUE)
2454 if (GET_CODE (y) != VALUE)
2455 return -1;
2456 gcc_assert (GET_MODE (x) == GET_MODE (y));
2457 if (canon_value_cmp (x, y))
2458 return -1;
2459 else
2460 return 1;
2463 if (GET_CODE (y) == VALUE)
2464 return 1;
2466 if (GET_CODE (x) == GET_CODE (y))
2467 /* Compare operands below. */;
2468 else if (GET_CODE (x) < GET_CODE (y))
2469 return -1;
2470 else
2471 return 1;
2473 gcc_assert (GET_MODE (x) == GET_MODE (y));
2475 fmt = GET_RTX_FORMAT (code);
2476 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2477 switch (fmt[i])
2479 case 'w':
2480 if (XWINT (x, i) == XWINT (y, i))
2481 break;
2482 else if (XWINT (x, i) < XWINT (y, i))
2483 return -1;
2484 else
2485 return 1;
2487 case 'n':
2488 case 'i':
2489 if (XINT (x, i) == XINT (y, i))
2490 break;
2491 else if (XINT (x, i) < XINT (y, i))
2492 return -1;
2493 else
2494 return 1;
2496 case 'V':
2497 case 'E':
2498 /* Compare the vector length first. */
2499 if (XVECLEN (x, i) == XVECLEN (y, i))
2500 /* Compare the vector's elements. */;
2501 else if (XVECLEN (x, i) < XVECLEN (y, i))
2502 return -1;
2503 else
2504 return 1;
2506 for (j = 0; j < XVECLEN (x, i); j++)
2507 if ((r = loc_cmp (XVECEXP (x, i, j),
2508 XVECEXP (y, i, j))))
2509 return r;
2510 break;
2512 case 'e':
2513 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2514 return r;
2515 break;
2517 case 'S':
2518 case 's':
2519 if (XSTR (x, i) == XSTR (y, i))
2520 break;
2521 if (!XSTR (x, i))
2522 return -1;
2523 if (!XSTR (y, i))
2524 return 1;
2525 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2526 break;
2527 else if (r < 0)
2528 return -1;
2529 else
2530 return 1;
2532 case 'u':
2533 /* These are just backpointers, so they don't matter. */
2534 break;
2536 case '0':
2537 case 't':
2538 break;
2540 /* It is believed that rtx's at this level will never
2541 contain anything but integers and other rtx's,
2542 except for within LABEL_REFs and SYMBOL_REFs. */
2543 default:
2544 gcc_unreachable ();
2547 return 0;
2550 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2551 from VALUE to DVP. */
2553 static int
2554 add_value_chain (rtx *loc, void *dvp)
2556 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2558 decl_or_value dv = (decl_or_value) dvp;
2559 decl_or_value ldv = dv_from_value (*loc);
2560 value_chain vc, nvc;
2561 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2562 dv_htab_hash (ldv), INSERT);
2563 if (!*slot)
2565 vc = (value_chain) pool_alloc (value_chain_pool);
2566 vc->dv = ldv;
2567 vc->next = NULL;
2568 vc->refcount = 0;
2569 *slot = (void *) vc;
2571 else
2573 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2574 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2575 break;
2576 if (vc)
2578 vc->refcount++;
2579 return 0;
2582 vc = (value_chain) *slot;
2583 nvc = (value_chain) pool_alloc (value_chain_pool);
2584 nvc->dv = dv;
2585 nvc->next = vc->next;
2586 nvc->refcount = 1;
2587 vc->next = nvc;
2589 return 0;
2592 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2593 from those VALUEs to DVP. */
2595 static void
2596 add_value_chains (decl_or_value dv, rtx loc)
2598 if (GET_CODE (loc) == VALUE)
2600 add_value_chain (&loc, dv_as_opaque (dv));
2601 return;
2603 if (REG_P (loc))
2604 return;
2605 if (MEM_P (loc))
2606 loc = XEXP (loc, 0);
2607 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2610 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2611 VALUEs to DV. */
2613 static void
2614 add_cselib_value_chains (decl_or_value dv)
2616 struct elt_loc_list *l;
2618 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2619 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2622 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2623 from VALUE to DVP. */
2625 static int
2626 remove_value_chain (rtx *loc, void *dvp)
2628 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2630 decl_or_value dv = (decl_or_value) dvp;
2631 decl_or_value ldv = dv_from_value (*loc);
2632 value_chain vc, dvc = NULL;
2633 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2634 dv_htab_hash (ldv), NO_INSERT);
2635 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2636 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2638 dvc = vc->next;
2639 gcc_assert (dvc->refcount > 0);
2640 if (--dvc->refcount == 0)
2642 vc->next = dvc->next;
2643 pool_free (value_chain_pool, dvc);
2644 if (vc->next == NULL && vc == (value_chain) *slot)
2646 pool_free (value_chain_pool, vc);
2647 htab_clear_slot (value_chains, slot);
2650 return 0;
2652 gcc_unreachable ();
2654 return 0;
2657 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2658 from those VALUEs to DVP. */
2660 static void
2661 remove_value_chains (decl_or_value dv, rtx loc)
2663 if (GET_CODE (loc) == VALUE)
2665 remove_value_chain (&loc, dv_as_opaque (dv));
2666 return;
2668 if (REG_P (loc))
2669 return;
2670 if (MEM_P (loc))
2671 loc = XEXP (loc, 0);
2672 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2675 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
2676 VALUEs to DV. */
2678 static void
2679 remove_cselib_value_chains (decl_or_value dv)
2681 struct elt_loc_list *l;
2683 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2684 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2687 #if ENABLE_CHECKING
2688 /* Check the order of entries in one-part variables. */
2690 static int
2691 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2693 variable var = (variable) *slot;
2694 decl_or_value dv = var->dv;
2695 location_chain node, next;
2697 if (!dv_onepart_p (dv))
2698 return 1;
2700 gcc_assert (var->n_var_parts == 1);
2701 node = var->var_part[0].loc_chain;
2702 gcc_assert (node);
2704 while ((next = node->next))
2706 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2707 node = next;
2710 return 1;
2712 #endif
2714 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2715 more likely to be chosen as canonical for an equivalence set.
2716 Ensure less likely values can reach more likely neighbors, making
2717 the connections bidirectional. */
2719 static int
2720 canonicalize_values_mark (void **slot, void *data)
2722 dataflow_set *set = (dataflow_set *)data;
2723 variable var = (variable) *slot;
2724 decl_or_value dv = var->dv;
2725 rtx val;
2726 location_chain node;
2728 if (!dv_is_value_p (dv))
2729 return 1;
2731 gcc_assert (var->n_var_parts == 1);
2733 val = dv_as_value (dv);
2735 for (node = var->var_part[0].loc_chain; node; node = node->next)
2736 if (GET_CODE (node->loc) == VALUE)
2738 if (canon_value_cmp (node->loc, val))
2739 VALUE_RECURSED_INTO (val) = true;
2740 else
2742 decl_or_value odv = dv_from_value (node->loc);
2743 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2745 oslot = set_slot_part (set, val, oslot, odv, 0,
2746 node->init, NULL_RTX);
2748 VALUE_RECURSED_INTO (node->loc) = true;
2752 return 1;
2755 /* Remove redundant entries from equivalence lists in onepart
2756 variables, canonicalizing equivalence sets into star shapes. */
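/* A hypothetical illustration: if value v3 lists v2 among its locations
   and v2 lists v1, with v1 the most canonical per canon_value_cmp, the
   equivalences are rewritten so that v2 and v3 each keep only a link to
   v1, while their remaining locations are pushed onto v1's chain -- a
   star centered on v1.  */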
2758 static int
2759 canonicalize_values_star (void **slot, void *data)
2761 dataflow_set *set = (dataflow_set *)data;
2762 variable var = (variable) *slot;
2763 decl_or_value dv = var->dv;
2764 location_chain node;
2765 decl_or_value cdv;
2766 rtx val, cval;
2767 void **cslot;
2768 bool has_value;
2769 bool has_marks;
2771 if (!dv_onepart_p (dv))
2772 return 1;
2774 gcc_assert (var->n_var_parts == 1);
2776 if (dv_is_value_p (dv))
2778 cval = dv_as_value (dv);
2779 if (!VALUE_RECURSED_INTO (cval))
2780 return 1;
2781 VALUE_RECURSED_INTO (cval) = false;
2783 else
2784 cval = NULL_RTX;
2786 restart:
2787 val = cval;
2788 has_value = false;
2789 has_marks = false;
2791 gcc_assert (var->n_var_parts == 1);
2793 for (node = var->var_part[0].loc_chain; node; node = node->next)
2794 if (GET_CODE (node->loc) == VALUE)
2796 has_value = true;
2797 if (VALUE_RECURSED_INTO (node->loc))
2798 has_marks = true;
2799 if (canon_value_cmp (node->loc, cval))
2800 cval = node->loc;
2803 if (!has_value)
2804 return 1;
2806 if (cval == val)
2808 if (!has_marks || dv_is_decl_p (dv))
2809 return 1;
2811 /* Keep it marked so that we revisit it, either after visiting a
2812 child node, or after visiting a new parent that might be
2813 found out. */
2814 VALUE_RECURSED_INTO (val) = true;
2816 for (node = var->var_part[0].loc_chain; node; node = node->next)
2817 if (GET_CODE (node->loc) == VALUE
2818 && VALUE_RECURSED_INTO (node->loc))
2820 cval = node->loc;
2821 restart_with_cval:
2822 VALUE_RECURSED_INTO (cval) = false;
2823 dv = dv_from_value (cval);
2824 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2825 if (!slot)
2827 gcc_assert (dv_is_decl_p (var->dv));
2828 /* The canonical value was reset and dropped.
2829 Remove it. */
2830 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2831 return 1;
2833 var = (variable)*slot;
2834 gcc_assert (dv_is_value_p (var->dv));
2835 if (var->n_var_parts == 0)
2836 return 1;
2837 gcc_assert (var->n_var_parts == 1);
2838 goto restart;
2841 VALUE_RECURSED_INTO (val) = false;
2843 return 1;
2846 /* Push values to the canonical one. */
2847 cdv = dv_from_value (cval);
2848 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2850 for (node = var->var_part[0].loc_chain; node; node = node->next)
2851 if (node->loc != cval)
2853 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2854 node->init, NULL_RTX);
2855 if (GET_CODE (node->loc) == VALUE)
2857 decl_or_value ndv = dv_from_value (node->loc);
2859 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2860 NO_INSERT);
2862 if (canon_value_cmp (node->loc, val))
2864 /* If it could have been a local minimum, it's not any more,
2865 since it's now neighbor to cval, so it may have to push
2866 to it. Conversely, if it wouldn't have prevailed over
2867 val, then whatever mark it has is fine: if it was to
2868 push, it will now push to a more canonical node, but if
2869 it wasn't, then it has already pushed any values it might
2870 have to. */
2871 VALUE_RECURSED_INTO (node->loc) = true;
2872 /* Make sure we visit node->loc by ensuring that cval is
2873 visited too. */
2874 VALUE_RECURSED_INTO (cval) = true;
2876 else if (!VALUE_RECURSED_INTO (node->loc))
2877 /* If we have no need to "recurse" into this node, it's
2878 already "canonicalized", so drop the link to the old
2879 parent. */
2880 clobber_variable_part (set, cval, ndv, 0, NULL);
2882 else if (GET_CODE (node->loc) == REG)
2884 attrs list = set->regs[REGNO (node->loc)], *listp;
2886 /* Change an existing attribute referring to dv so that it
2887 refers to cdv, removing any duplicate this might
2888 introduce, and checking that no previous duplicates
2889 existed, all in a single pass. */
2891 while (list)
2893 if (list->offset == 0
2894 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2895 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2896 break;
2898 list = list->next;
2901 gcc_assert (list);
2902 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2904 list->dv = cdv;
2905 for (listp = &list->next; (list = *listp); listp = &list->next)
2907 if (list->offset)
2908 continue;
2910 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2912 *listp = list->next;
2913 pool_free (attrs_pool, list);
2914 list = *listp;
2915 break;
2918 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2921 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2923 for (listp = &list->next; (list = *listp); listp = &list->next)
2925 if (list->offset)
2926 continue;
2928 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2930 *listp = list->next;
2931 pool_free (attrs_pool, list);
2932 list = *listp;
2933 break;
2936 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2939 else
2940 gcc_unreachable ();
2942 #if ENABLE_CHECKING
2943 while (list)
2945 if (list->offset == 0
2946 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2947 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2948 gcc_unreachable ();
2950 list = list->next;
2952 #endif
2956 if (val)
2957 cslot = set_slot_part (set, val, cslot, cdv, 0,
2958 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2960 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2962 /* Variable may have been unshared. */
2963 var = (variable)*slot;
2964 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2965 && var->var_part[0].loc_chain->next == NULL);
2967 if (VALUE_RECURSED_INTO (cval))
2968 goto restart_with_cval;
2970 return 1;
2973 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2974 corresponding entry in DSM->src. Multi-part variables are combined
2975 with variable_union, whereas onepart dvs are combined with
2976 intersection. */
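/* A hypothetical illustration: for a onepart decl D whose chain in
   DSM->cur is {(reg 1), (mem X)} and whose chain in DSM->src is
   {(mem X), v1}, the entry created in DSM->dst keeps only the common
   locations (here (mem X), plus anything matched through VALUE
   equivalences); a multi-part variable is instead handed to
   variable_union.  */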
2978 static int
2979 variable_merge_over_cur (void **s1slot, void *data)
2981 struct dfset_merge *dsm = (struct dfset_merge *)data;
2982 dataflow_set *dst = dsm->dst;
2983 void **dstslot;
2984 variable s1var = (variable) *s1slot;
2985 variable s2var, dvar = NULL;
2986 decl_or_value dv = s1var->dv;
2987 bool onepart = dv_onepart_p (dv);
2988 rtx val;
2989 hashval_t dvhash;
2990 location_chain node, *nodep;
2992 /* If the incoming onepart variable has an empty location list, then
2993 the intersection will be just as empty. For other variables,
2994 it's always union. */
2995 gcc_assert (s1var->n_var_parts);
2996 gcc_assert (s1var->var_part[0].loc_chain);
2998 if (!onepart)
2999 return variable_union (s1slot, dst);
3001 gcc_assert (s1var->n_var_parts == 1);
3002 gcc_assert (s1var->var_part[0].offset == 0);
3004 dvhash = dv_htab_hash (dv);
3005 if (dv_is_value_p (dv))
3006 val = dv_as_value (dv);
3007 else
3008 val = NULL;
3010 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3011 if (!s2var)
3013 dst_can_be_shared = false;
3014 return 1;
3017 dsm->src_onepart_cnt--;
3018 gcc_assert (s2var->var_part[0].loc_chain);
3019 gcc_assert (s2var->n_var_parts == 1);
3020 gcc_assert (s2var->var_part[0].offset == 0);
3022 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3023 if (dstslot)
3025 dvar = (variable)*dstslot;
3026 gcc_assert (dvar->refcount == 1);
3027 gcc_assert (dvar->n_var_parts == 1);
3028 gcc_assert (dvar->var_part[0].offset == 0);
3029 nodep = &dvar->var_part[0].loc_chain;
3031 else
3033 nodep = &node;
3034 node = NULL;
3037 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3039 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3040 dvhash, INSERT);
3041 *dstslot = dvar = s2var;
3042 dvar->refcount++;
3044 else
3046 dst_can_be_shared = false;
3048 intersect_loc_chains (val, nodep, dsm,
3049 s1var->var_part[0].loc_chain, s2var);
3051 if (!dstslot)
3053 if (node)
3055 dvar = (variable) pool_alloc (dv_pool (dv));
3056 dvar->dv = dv;
3057 dvar->refcount = 1;
3058 dvar->n_var_parts = 1;
3059 dvar->var_part[0].offset = 0;
3060 dvar->var_part[0].loc_chain = node;
3061 dvar->var_part[0].cur_loc = node->loc;
3063 dstslot
3064 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3065 INSERT);
3066 gcc_assert (!*dstslot);
3067 *dstslot = dvar;
3069 else
3070 return 1;
3074 nodep = &dvar->var_part[0].loc_chain;
3075 while ((node = *nodep))
3077 location_chain *nextp = &node->next;
3079 if (GET_CODE (node->loc) == REG)
3081 attrs list;
3083 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3084 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3085 && dv_is_value_p (list->dv))
3086 break;
3088 if (!list)
3089 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3090 dv, 0, node->loc);
3091 /* If this value became canonical for another value that had
3092 this register, we want to leave it alone. */
3093 else if (dv_as_value (list->dv) != val)
3095 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3096 dstslot, dv, 0,
3097 node->init, NULL_RTX);
3098 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3100 /* Since nextp points into the removed node, we can't
3101 use it. The pointer to the next node moved to nodep.
3102 However, if the variable we're walking is unshared
3103 during our walk, we'll keep walking the location list
3104 of the previously-shared variable, in which case the
3105 node won't have been removed, and we'll want to skip
3106 it. That's why we test *nodep here. */
3107 if (*nodep != node)
3108 nextp = nodep;
3111 else
3112 /* Canonicalization puts registers first, so we don't have to
3113 walk it all. */
3114 break;
3115 nodep = nextp;
3118 if (dvar != (variable)*dstslot)
3119 dvar = (variable)*dstslot;
3120 nodep = &dvar->var_part[0].loc_chain;
3122 if (val)
3124 /* Mark all referenced nodes for canonicalization, and make sure
3125 we have mutual equivalence links. */
3126 VALUE_RECURSED_INTO (val) = true;
3127 for (node = *nodep; node; node = node->next)
3128 if (GET_CODE (node->loc) == VALUE)
3130 VALUE_RECURSED_INTO (node->loc) = true;
3131 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3132 node->init, NULL, INSERT);
3135 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3136 gcc_assert (*dstslot == dvar);
3137 canonicalize_values_star (dstslot, dst);
3138 #ifdef ENABLE_CHECKING
3139 gcc_assert (dstslot
3140 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3141 #endif
3142 dvar = (variable)*dstslot;
3144 else
3146 bool has_value = false, has_other = false;
3148 /* If we have one value and anything else, we're going to
3149 canonicalize this, so make sure all values have an entry in
3150 the table and are marked for canonicalization. */
3151 for (node = *nodep; node; node = node->next)
3153 if (GET_CODE (node->loc) == VALUE)
3155 /* If this was marked during register canonicalization,
3156 we know we have to canonicalize values. */
3157 if (has_value)
3158 has_other = true;
3159 has_value = true;
3160 if (has_other)
3161 break;
3163 else
3165 has_other = true;
3166 if (has_value)
3167 break;
3171 if (has_value && has_other)
3173 for (node = *nodep; node; node = node->next)
3175 if (GET_CODE (node->loc) == VALUE)
3177 decl_or_value dv = dv_from_value (node->loc);
3178 void **slot = NULL;
3180 if (shared_hash_shared (dst->vars))
3181 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3182 if (!slot)
3183 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3184 INSERT);
3185 if (!*slot)
3187 variable var = (variable) pool_alloc (dv_pool (dv));
3188 var->dv = dv;
3189 var->refcount = 1;
3190 var->n_var_parts = 1;
3191 var->var_part[0].offset = 0;
3192 var->var_part[0].loc_chain = NULL;
3193 var->var_part[0].cur_loc = NULL;
3194 *slot = var;
3197 VALUE_RECURSED_INTO (node->loc) = true;
3201 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3202 gcc_assert (*dstslot == dvar);
3203 canonicalize_values_star (dstslot, dst);
3204 #ifdef ENABLE_CHECKING
3205 gcc_assert (dstslot
3206 == shared_hash_find_slot_noinsert_1 (dst->vars,
3207 dv, dvhash));
3208 #endif
3209 dvar = (variable)*dstslot;
3213 if (!onepart_variable_different_p (dvar, s2var))
3215 variable_htab_free (dvar);
3216 *dstslot = dvar = s2var;
3217 dvar->refcount++;
3219 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3221 variable_htab_free (dvar);
3222 *dstslot = dvar = s1var;
3223 dvar->refcount++;
3224 dst_can_be_shared = false;
3226 else
3228 if (dvar->refcount == 1)
3229 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3230 dst_can_be_shared = false;
3233 return 1;
3236 /* Combine variable in *S2SLOT (in DSM->src) with the corresponding
3237 entry in DSM->dst. Only multi-part variables are combined, using
3238 variable_union. onepart dvs were already combined with
3239 intersection in variable_merge_over_cur(). */
3241 static int
3242 variable_merge_over_src (void **s2slot, void *data)
3244 struct dfset_merge *dsm = (struct dfset_merge *)data;
3245 dataflow_set *dst = dsm->dst;
3246 variable s2var = (variable) *s2slot;
3247 decl_or_value dv = s2var->dv;
3248 bool onepart = dv_onepart_p (dv);
3250 if (!onepart)
3252 void **dstp = shared_hash_find_slot (dst->vars, dv);
3253 *dstp = s2var;
3254 s2var->refcount++;
3255 return variable_canonicalize (dstp, dst);
3258 dsm->src_onepart_cnt++;
3259 return 1;
3262 /* Combine dataflow set information from SRC into DST, merging SRC
3263 with a saved copy of DST. */
3265 static void
3266 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
3268 dataflow_set src2 = *dst;
3269 struct dfset_merge dsm;
3270 int i;
3271 size_t src_elems, dst_elems;
3273 src_elems = htab_elements (shared_hash_htab (src->vars));
3274 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3275 dataflow_set_init (dst);
3276 dst->stack_adjust = src2.stack_adjust;
3277 shared_hash_destroy (dst->vars);
3278 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3279 dst->vars->refcount = 1;
3280 dst->vars->htab
3281 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3282 variable_htab_eq, variable_htab_free);
3284 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3285 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3287 dsm.dst = dst;
3288 dsm.src = &src2;
3289 dsm.cur = src;
3290 dsm.src_onepart_cnt = 0;
3292 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3293 &dsm);
3294 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3295 &dsm);
3297 if (dsm.src_onepart_cnt)
3298 dst_can_be_shared = false;
3300 dataflow_set_destroy (&src2);
3303 /* Mark register equivalences. */
3305 static void
3306 dataflow_set_equiv_regs (dataflow_set *set)
3308 int i;
3309 attrs list, *listp;
3311 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3313 rtx canon[NUM_MACHINE_MODES];
3315 memset (canon, 0, sizeof (canon));
3317 for (list = set->regs[i]; list; list = list->next)
3318 if (list->offset == 0 && dv_is_value_p (list->dv))
3320 rtx val = dv_as_value (list->dv);
3321 rtx *cvalp = &canon[(int)GET_MODE (val)];
3322 rtx cval = *cvalp;
3324 if (canon_value_cmp (val, cval))
3325 *cvalp = val;
3328 for (list = set->regs[i]; list; list = list->next)
3329 if (list->offset == 0 && dv_onepart_p (list->dv))
3331 rtx cval = canon[(int)GET_MODE (list->loc)];
3333 if (!cval)
3334 continue;
3336 if (dv_is_value_p (list->dv))
3338 rtx val = dv_as_value (list->dv);
3340 if (val == cval)
3341 continue;
3343 VALUE_RECURSED_INTO (val) = true;
3344 set_variable_part (set, val, dv_from_value (cval), 0,
3345 VAR_INIT_STATUS_INITIALIZED,
3346 NULL, NO_INSERT);
3349 VALUE_RECURSED_INTO (cval) = true;
3350 set_variable_part (set, cval, list->dv, 0,
3351 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3354 for (listp = &set->regs[i]; (list = *listp);
3355 listp = list ? &list->next : listp)
3356 if (list->offset == 0 && dv_onepart_p (list->dv))
3358 rtx cval = canon[(int)GET_MODE (list->loc)];
3359 void **slot;
3361 if (!cval)
3362 continue;
3364 if (dv_is_value_p (list->dv))
3366 rtx val = dv_as_value (list->dv);
3367 if (!VALUE_RECURSED_INTO (val))
3368 continue;
3371 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3372 canonicalize_values_star (slot, set);
3373 if (*listp != list)
3374 list = NULL;
3379 /* Remove any redundant values in the location list of VAR, which must
3380 be unshared and 1-part. */
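/* The VALUE_RECURSED_INTO bit serves as a temporary "seen" flag here:
   the first loop below deletes a VALUE node whose flag is already set
   and sets the flag otherwise; the second loop clears the flags
   again.  */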
3382 static void
3383 remove_duplicate_values (variable var)
3385 location_chain node, *nodep;
3387 gcc_assert (dv_onepart_p (var->dv));
3388 gcc_assert (var->n_var_parts == 1);
3389 gcc_assert (var->refcount == 1);
3391 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3393 if (GET_CODE (node->loc) == VALUE)
3395 if (VALUE_RECURSED_INTO (node->loc))
3397 /* Remove duplicate value node. */
3398 *nodep = node->next;
3399 pool_free (loc_chain_pool, node);
3400 continue;
3402 else
3403 VALUE_RECURSED_INTO (node->loc) = true;
3405 nodep = &node->next;
3408 for (node = var->var_part[0].loc_chain; node; node = node->next)
3409 if (GET_CODE (node->loc) == VALUE)
3411 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3412 VALUE_RECURSED_INTO (node->loc) = false;
3417 /* Hash table iteration argument passed to variable_post_merge. */
3418 struct dfset_post_merge
3420 /* The new input set for the current block. */
3421 dataflow_set *set;
3422 /* Pointer to the permanent input set for the current block, or
3423 NULL. */
3424 dataflow_set **permp;
3427 /* Create values for incoming expressions associated with one-part
3428 variables that don't have value numbers for them. */
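/* A hypothetical illustration: if decl D's chain contains (reg 3) and
   no VALUE is associated with reg 3 in SET or in the permanent set, a
   fresh cselib VALUE is created for reg 3, preserved, recorded in the
   permanent set and substituted for (reg 3) in D's chain, so that
   later rounds find and reuse it.  */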
3430 static int
3431 variable_post_merge_new_vals (void **slot, void *info)
3433 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3434 dataflow_set *set = dfpm->set;
3435 variable var = (variable)*slot;
3436 location_chain node;
3438 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3439 return 1;
3441 gcc_assert (var->n_var_parts == 1);
3443 if (dv_is_decl_p (var->dv))
3445 bool check_dupes = false;
3447 restart:
3448 for (node = var->var_part[0].loc_chain; node; node = node->next)
3450 if (GET_CODE (node->loc) == VALUE)
3451 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3452 else if (GET_CODE (node->loc) == REG)
3454 attrs att, *attp, *curp = NULL;
3456 if (var->refcount != 1)
3458 slot = unshare_variable (set, slot, var,
3459 VAR_INIT_STATUS_INITIALIZED);
3460 var = (variable)*slot;
3461 goto restart;
3464 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3465 attp = &att->next)
3466 if (att->offset == 0
3467 && GET_MODE (att->loc) == GET_MODE (node->loc))
3469 if (dv_is_value_p (att->dv))
3471 rtx cval = dv_as_value (att->dv);
3472 node->loc = cval;
3473 check_dupes = true;
3474 break;
3476 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3477 curp = attp;
3480 if (!curp)
3482 curp = attp;
3483 while (*curp)
3484 if ((*curp)->offset == 0
3485 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3486 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3487 break;
3488 else
3489 curp = &(*curp)->next;
3490 gcc_assert (*curp);
3493 if (!att)
3495 decl_or_value cdv;
3496 rtx cval;
3498 if (!*dfpm->permp)
3500 *dfpm->permp = XNEW (dataflow_set);
3501 dataflow_set_init (*dfpm->permp);
3504 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3505 att; att = att->next)
3506 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3508 gcc_assert (att->offset == 0);
3509 gcc_assert (dv_is_value_p (att->dv));
3510 val_reset (set, att->dv);
3511 break;
3514 if (att)
3516 cdv = att->dv;
3517 cval = dv_as_value (cdv);
3519 else
3521 /* Create a unique value to hold this register, one
3522 that ought to be found and reused in
3523 subsequent rounds. */
3524 cselib_val *v;
3525 gcc_assert (!cselib_lookup (node->loc,
3526 GET_MODE (node->loc), 0));
3527 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3528 cselib_preserve_value (v);
3529 cselib_invalidate_rtx (node->loc);
3530 cval = v->val_rtx;
3531 cdv = dv_from_value (cval);
3532 if (dump_file)
3533 fprintf (dump_file,
3534 "Created new value %i for reg %i\n",
3535 v->value, REGNO (node->loc));
3538 var_reg_decl_set (*dfpm->permp, node->loc,
3539 VAR_INIT_STATUS_INITIALIZED,
3540 cdv, 0, NULL, INSERT);
3542 node->loc = cval;
3543 check_dupes = true;
3546 /* Remove the attribute referring to the decl, which now
3547 uses the value for the register; the value either already
3548 exists or will be added when we bring the permanent set in. */
3549 att = *curp;
3550 *curp = att->next;
3551 pool_free (attrs_pool, att);
3555 if (check_dupes)
3556 remove_duplicate_values (var);
3559 return 1;
3562 /* Reset values in the permanent set that are not associated with the
3563 chosen expression. */
3565 static int
3566 variable_post_merge_perm_vals (void **pslot, void *info)
3568 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3569 dataflow_set *set = dfpm->set;
3570 variable pvar = (variable)*pslot, var;
3571 location_chain pnode;
3572 decl_or_value dv;
3573 attrs att;
3575 gcc_assert (dv_is_value_p (pvar->dv));
3576 gcc_assert (pvar->n_var_parts == 1);
3577 pnode = pvar->var_part[0].loc_chain;
3578 gcc_assert (pnode);
3579 gcc_assert (!pnode->next);
3580 gcc_assert (REG_P (pnode->loc));
3582 dv = pvar->dv;
3584 var = shared_hash_find (set->vars, dv);
3585 if (var)
3587 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3588 return 1;
3589 val_reset (set, dv);
3592 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3593 if (att->offset == 0
3594 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3595 && dv_is_value_p (att->dv))
3596 break;
3598 /* If there is a value associated with this register already, create
3599 an equivalence. */
3600 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3602 rtx cval = dv_as_value (att->dv);
3603 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3604 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3605 NULL, INSERT);
3607 else if (!att)
3609 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3610 dv, 0, pnode->loc);
3611 variable_union (pslot, set);
3614 return 1;
3617 /* Just checking stuff and registering register attributes for
3618 now. */
3620 static void
3621 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3623 struct dfset_post_merge dfpm;
3625 dfpm.set = set;
3626 dfpm.permp = permp;
3628 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3629 &dfpm);
3630 if (*permp)
3631 htab_traverse (shared_hash_htab ((*permp)->vars),
3632 variable_post_merge_perm_vals, &dfpm);
3633 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3636 /* Return a node whose loc is a MEM that refers to EXPR in the
3637 location list of a one-part variable or value VAR, or in that of
3638 any values recursively mentioned in the location lists. */
3640 static location_chain
3641 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3643 location_chain node;
3644 decl_or_value dv;
3645 variable var;
3646 location_chain where = NULL;
3648 if (!val)
3649 return NULL;
3651 gcc_assert (GET_CODE (val) == VALUE);
3653 gcc_assert (!VALUE_RECURSED_INTO (val));
3655 dv = dv_from_value (val);
3656 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3658 if (!var)
3659 return NULL;
3661 gcc_assert (dv_onepart_p (var->dv));
3663 if (!var->n_var_parts)
3664 return NULL;
3666 gcc_assert (var->var_part[0].offset == 0);
3668 VALUE_RECURSED_INTO (val) = true;
3670 for (node = var->var_part[0].loc_chain; node; node = node->next)
3671 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3672 && MEM_OFFSET (node->loc) == 0)
3674 where = node;
3675 break;
3677 else if (GET_CODE (node->loc) == VALUE
3678 && !VALUE_RECURSED_INTO (node->loc)
3679 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3680 break;
3682 VALUE_RECURSED_INTO (val) = false;
3684 return where;
3687 /* Remove all MEMs from the location list of a hash table entry for a
3688 one-part variable, except those whose MEM attributes map back to
3689 the variable itself, directly or within a VALUE.
3691 ??? We could also preserve MEMs that reference stack slots that are
3692 annotated as not addressable. This is arguably even more reliable
3693 than the current heuristic. */
3695 static int
3696 dataflow_set_preserve_mem_locs (void **slot, void *data)
3698 dataflow_set *set = (dataflow_set *) data;
3699 variable var = (variable) *slot;
3701 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3703 tree decl = dv_as_decl (var->dv);
3704 location_chain loc, *locp;
3706 if (!var->n_var_parts)
3707 return 1;
3709 gcc_assert (var->n_var_parts == 1);
3711 if (var->refcount > 1 || shared_hash_shared (set->vars))
3713 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3715 /* We want to remove a MEM that doesn't refer to DECL. */
3716 if (GET_CODE (loc->loc) == MEM
3717 && (MEM_EXPR (loc->loc) != decl
3718 || MEM_OFFSET (loc->loc)))
3719 break;
3720 /* We want to move here a MEM that does refer to DECL. */
3721 else if (GET_CODE (loc->loc) == VALUE
3722 && find_mem_expr_in_1pdv (decl, loc->loc,
3723 shared_hash_htab (set->vars)))
3724 break;
3727 if (!loc)
3728 return 1;
3730 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3731 var = (variable)*slot;
3732 gcc_assert (var->n_var_parts == 1);
3735 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3736 loc; loc = *locp)
3738 rtx old_loc = loc->loc;
3739 if (GET_CODE (old_loc) == VALUE)
3741 location_chain mem_node
3742 = find_mem_expr_in_1pdv (decl, loc->loc,
3743 shared_hash_htab (set->vars));
3745 /* ??? This picks up only one out of multiple MEMs that
3746 refer to the same variable. Do we ever need to be
3747 concerned about dealing with more than one, or, given
3748 that they should all map to the same variable
3749 location, their addresses will have been merged and
3750 they will be regarded as equivalent? */
3751 if (mem_node)
3753 loc->loc = mem_node->loc;
3754 loc->set_src = mem_node->set_src;
3755 loc->init = MIN (loc->init, mem_node->init);
3759 if (GET_CODE (loc->loc) != MEM
3760 || (MEM_EXPR (loc->loc) == decl
3761 && MEM_OFFSET (loc->loc) == 0))
3763 if (old_loc != loc->loc && emit_notes)
3765 add_value_chains (var->dv, loc->loc);
3766 remove_value_chains (var->dv, old_loc);
3768 locp = &loc->next;
3769 continue;
3772 if (emit_notes)
3773 remove_value_chains (var->dv, old_loc);
3774 *locp = loc->next;
3775 pool_free (loc_chain_pool, loc);
3778 if (!var->var_part[0].loc_chain)
3780 var->n_var_parts--;
3781 if (emit_notes && dv_is_value_p (var->dv))
3782 remove_cselib_value_chains (var->dv);
3783 variable_was_changed (var, set);
3787 return 1;
3790 /* Remove all MEMs from the location list of a hash table entry for a
3791 value. */
3793 static int
3794 dataflow_set_remove_mem_locs (void **slot, void *data)
3796 dataflow_set *set = (dataflow_set *) data;
3797 variable var = (variable) *slot;
3799 if (dv_is_value_p (var->dv))
3801 location_chain loc, *locp;
3802 bool changed = false;
3804 gcc_assert (var->n_var_parts == 1);
3806 if (var->refcount > 1 || shared_hash_shared (set->vars))
3808 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3809 if (GET_CODE (loc->loc) == MEM)
3810 break;
3812 if (!loc)
3813 return 1;
3815 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3816 var = (variable)*slot;
3817 gcc_assert (var->n_var_parts == 1);
3820 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3821 loc; loc = *locp)
3823 if (GET_CODE (loc->loc) != MEM)
3825 locp = &loc->next;
3826 continue;
3829 if (emit_notes)
3830 remove_value_chains (var->dv, loc->loc);
3831 *locp = loc->next;
3832 /* If we have deleted the location which was last emitted,
3833 we have to emit a new location, so add the variable to the set
3834 of changed variables. */
3835 if (var->var_part[0].cur_loc
3836 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3837 changed = true;
3838 pool_free (loc_chain_pool, loc);
3841 if (!var->var_part[0].loc_chain)
3843 var->n_var_parts--;
3844 if (emit_notes && dv_is_value_p (var->dv))
3845 remove_cselib_value_chains (var->dv);
3846 gcc_assert (changed);
3848 if (changed)
3850 if (var->n_var_parts && var->var_part[0].loc_chain)
3851 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3852 variable_was_changed (var, set);
3856 return 1;
3859 /* Remove all variable-location information about call-clobbered
3860 registers, as well as associations between MEMs and VALUEs. */
3862 static void
3863 dataflow_set_clear_at_call (dataflow_set *set)
3865 int r;
3867 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3868 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3869 var_regno_delete (set, r);
3871 if (MAY_HAVE_DEBUG_INSNS)
3873 set->traversed_vars = set->vars;
3874 htab_traverse (shared_hash_htab (set->vars),
3875 dataflow_set_preserve_mem_locs, set);
3876 set->traversed_vars = set->vars;
3877 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3878 set);
3879 set->traversed_vars = NULL;
3883 /* Flag whether two dataflow sets being compared contain different data. */
3884 static bool
3885 dataflow_set_different_value;
3887 static bool
3888 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3890 location_chain lc1, lc2;
3892 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3894 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3896 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3898 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3899 break;
3901 if (rtx_equal_p (lc1->loc, lc2->loc))
3902 break;
3904 if (!lc2)
3905 return true;
3907 return false;
3910 /* Return true if one-part variables VAR1 and VAR2 are different.
3911 They must be in canonical order. */
3913 static bool
3914 onepart_variable_different_p (variable var1, variable var2)
3916 location_chain lc1, lc2;
3918 if (var1 == var2)
3919 return false;
3921 gcc_assert (var1->n_var_parts == 1);
3922 gcc_assert (var2->n_var_parts == 1);
3924 lc1 = var1->var_part[0].loc_chain;
3925 lc2 = var2->var_part[0].loc_chain;
3927 gcc_assert (lc1);
3928 gcc_assert (lc2);
3930 while (lc1 && lc2)
3932 if (loc_cmp (lc1->loc, lc2->loc))
3933 return true;
3934 lc1 = lc1->next;
3935 lc2 = lc2->next;
3938 return lc1 != lc2;
3941 /* Return true if variables VAR1 and VAR2 are different.
3942 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
3943 variable part. */
3945 static bool
3946 variable_different_p (variable var1, variable var2,
3947 bool compare_current_location)
3949 int i;
3951 if (var1 == var2)
3952 return false;
3954 if (var1->n_var_parts != var2->n_var_parts)
3955 return true;
3957 for (i = 0; i < var1->n_var_parts; i++)
3959 if (var1->var_part[i].offset != var2->var_part[i].offset)
3960 return true;
3961 if (compare_current_location)
3963 if (!((REG_P (var1->var_part[i].cur_loc)
3964 && REG_P (var2->var_part[i].cur_loc)
3965 && (REGNO (var1->var_part[i].cur_loc)
3966 == REGNO (var2->var_part[i].cur_loc)))
3967 || rtx_equal_p (var1->var_part[i].cur_loc,
3968 var2->var_part[i].cur_loc)))
3969 return true;
3971 /* One-part values have locations in a canonical order. */
3972 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
3974 gcc_assert (var1->n_var_parts == 1);
3975 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
3976 return onepart_variable_different_p (var1, var2);
3978 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
3979 return true;
3980 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
3981 return true;
3983 return false;
3986 /* Compare variable *SLOT with the same variable in hash table DATA
3987 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
3989 static int
3990 dataflow_set_different_1 (void **slot, void *data)
3992 htab_t htab = (htab_t) data;
3993 variable var1, var2;
3995 var1 = (variable) *slot;
3996 var2 = (variable) htab_find_with_hash (htab, var1->dv,
3997 dv_htab_hash (var1->dv));
3998 if (!var2)
4000 dataflow_set_different_value = true;
4002 if (dump_file && (dump_flags & TDF_DETAILS))
4004 fprintf (dump_file, "dataflow difference found: removal of:\n");
4005 dump_variable (var1);
4008 /* Stop traversing the hash table. */
4009 return 0;
4012 if (variable_different_p (var1, var2, false))
4014 dataflow_set_different_value = true;
4016 if (dump_file && (dump_flags & TDF_DETAILS))
4018 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4019 dump_variable (var1);
4020 dump_variable (var2);
4023 /* Stop traversing the hash table. */
4024 return 0;
4027 /* Continue traversing the hash table. */
4028 return 1;
4031 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4033 static bool
4034 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4036 if (old_set->vars == new_set->vars)
4037 return false;
4039 if (htab_elements (shared_hash_htab (old_set->vars))
4040 != htab_elements (shared_hash_htab (new_set->vars)))
4041 return true;
4043 dataflow_set_different_value = false;
4045 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4046 shared_hash_htab (new_set->vars));
4047 /* No need to traverse the second hashtab: if both have the same number
4048 of elements and every entry of the first one was found in the second,
4049 then the second can't have any extra entries. */
4050 return dataflow_set_different_value;
4053 /* Free the contents of dataflow set SET. */
4055 static void
4056 dataflow_set_destroy (dataflow_set *set)
4058 int i;
4060 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4061 attrs_list_clear (&set->regs[i]);
4063 shared_hash_destroy (set->vars);
4064 set->vars = NULL;
4067 /* Return true if RTL X contains a SYMBOL_REF. */
4069 static bool
4070 contains_symbol_ref (rtx x)
4072 const char *fmt;
4073 RTX_CODE code;
4074 int i;
4076 if (!x)
4077 return false;
4079 code = GET_CODE (x);
4080 if (code == SYMBOL_REF)
4081 return true;
4083 fmt = GET_RTX_FORMAT (code);
4084 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4086 if (fmt[i] == 'e')
4088 if (contains_symbol_ref (XEXP (x, i)))
4089 return true;
4091 else if (fmt[i] == 'E')
4093 int j;
4094 for (j = 0; j < XVECLEN (x, i); j++)
4095 if (contains_symbol_ref (XVECEXP (x, i, j)))
4096 return true;
4100 return false;
4103 /* Shall EXPR be tracked? */
4105 static bool
4106 track_expr_p (tree expr, bool need_rtl)
4108 rtx decl_rtl;
4109 tree realdecl;
4111 /* If EXPR is not a parameter or a variable do not track it. */
4112 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4113 return 0;
4115 /* It also must have a name... */
4116 if (!DECL_NAME (expr))
4117 return 0;
4119 /* ... and a RTL assigned to it. */
4120 decl_rtl = DECL_RTL_IF_SET (expr);
4121 if (!decl_rtl && need_rtl)
4122 return 0;
4124 /* If this expression is really a debug alias of some other declaration, we
4125 don't need to track this expression if the ultimate declaration is
4126 ignored. */
4127 realdecl = expr;
4128 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4130 realdecl = DECL_DEBUG_EXPR (realdecl);
4131 /* ??? We don't yet know how to emit DW_OP_piece for a variable
4132 that has been SRA'ed. */
4133 if (!DECL_P (realdecl))
4134 return 0;
4137 /* Do not track EXPR if REALDECL should be ignored for debugging
4138 purposes. */
4139 if (DECL_IGNORED_P (realdecl))
4140 return 0;
4142 /* Do not track global variables until we are able to emit correct location
4143 list for them. */
4144 if (TREE_STATIC (realdecl))
4145 return 0;
4147 /* When the EXPR is a DECL for an alias of some variable (see example)
4148 the TREE_STATIC flag is not used. Disable tracking of all DECLs whose
4149 DECL_RTL contains SYMBOL_REF.
4151 Example:
4152 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4153 char **_dl_argv;
4155 if (decl_rtl && MEM_P (decl_rtl)
4156 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4157 return 0;
4159 /* If RTX is a memory it should not be very large (because it would be
4160 an array or struct). */
4161 if (decl_rtl && MEM_P (decl_rtl))
4163 /* Do not track structures and arrays. */
4164 if (GET_MODE (decl_rtl) == BLKmode
4165 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4166 return 0;
4167 if (MEM_SIZE (decl_rtl)
4168 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4169 return 0;
4172 DECL_CHANGED (expr) = 0;
4173 DECL_CHANGED (realdecl) = 0;
4174 return 1;
4177 /* Determine whether a given LOC refers to the same variable part as
4178 EXPR+OFFSET. */
4180 static bool
4181 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4183 tree expr2;
4184 HOST_WIDE_INT offset2;
4186 if (! DECL_P (expr))
4187 return false;
4189 if (REG_P (loc))
4191 expr2 = REG_EXPR (loc);
4192 offset2 = REG_OFFSET (loc);
4194 else if (MEM_P (loc))
4196 expr2 = MEM_EXPR (loc);
4197 offset2 = INT_MEM_OFFSET (loc);
4199 else
4200 return false;
4202 if (! expr2 || ! DECL_P (expr2))
4203 return false;
4205 expr = var_debug_decl (expr);
4206 expr2 = var_debug_decl (expr2);
4208 return (expr == expr2 && offset == offset2);
4211 /* LOC is a REG or MEM that we would like to track if possible.
4212 If EXPR is null, we don't know what expression LOC refers to,
4213 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4214 LOC is an lvalue register.
4216 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4217 is something we can track. When returning true, store the mode of
4218 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4219 from EXPR in *OFFSET_OUT (if nonnull). */
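/* E.g. (hypothetical): for an SImode REG holding the whole SImode
   decl EXPR, the tracked lowpart is SImode at offset 0; any offset
   below zero or at or beyond MAX_VAR_PARTS makes this function
   return false.  */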
4221 static bool
4222 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4223 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4225 enum machine_mode mode;
4227 if (expr == NULL || !track_expr_p (expr, true))
4228 return false;
4230 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4231 whole subreg, but only the old inner part is really relevant. */
4232 mode = GET_MODE (loc);
4233 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4235 enum machine_mode pseudo_mode;
4237 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4238 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4240 offset += byte_lowpart_offset (pseudo_mode, mode);
4241 mode = pseudo_mode;
4245 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4246 Do the same if we are storing to a register and EXPR occupies
4247 the whole of register LOC; in that case, the whole of EXPR is
4248 being changed. We exclude complex modes from the second case
4249 because the real and imaginary parts are represented as separate
4250 pseudo registers, even if the whole complex value fits into one
4251 hard register. */
4252 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4253 || (store_reg_p
4254 && !COMPLEX_MODE_P (DECL_MODE (expr))
4255 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4256 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4258 mode = DECL_MODE (expr);
4259 offset = 0;
4262 if (offset < 0 || offset >= MAX_VAR_PARTS)
4263 return false;
4265 if (mode_out)
4266 *mode_out = mode;
4267 if (offset_out)
4268 *offset_out = offset;
4269 return true;
4272 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4273 want to track. When returning nonnull, make sure that the attributes
4274 on the returned value are updated. */
4276 static rtx
4277 var_lowpart (enum machine_mode mode, rtx loc)
4279 unsigned int offset, reg_offset, regno;
4281 if (!REG_P (loc) && !MEM_P (loc))
4282 return NULL;
4284 if (GET_MODE (loc) == mode)
4285 return loc;
4287 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4289 if (MEM_P (loc))
4290 return adjust_address_nv (loc, mode, offset);
4292 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4293 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4294 reg_offset, mode);
4295 return gen_rtx_REG_offset (loc, mode, regno, offset);
4298 /* Carry information about uses and stores while walking rtx. */
4300 struct count_use_info
4302 /* The insn where the RTX is. */
4303 rtx insn;
4305 /* The basic block where insn is. */
4306 basic_block bb;
4308 /* The array of n_sets sets in the insn, as determined by cselib. */
4309 struct cselib_set *sets;
4310 int n_sets;
4312 /* True if we're counting stores, false otherwise. */
4313 bool store_p;
4316 /* Find a VALUE corresponding to X. */
4318 static inline cselib_val *
4319 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4321 int i;
4323 if (cui->sets)
4325 /* This is called after uses are set up and before stores are
4326 processed by cselib, so it's safe to look up srcs, but not
4327 dsts.  We therefore look up expressions that appear in srcs or
4328 in dest expressions, but we search the sets array for the dests
4329 of stores. */
4330 if (cui->store_p)
4332 for (i = 0; i < cui->n_sets; i++)
4333 if (cui->sets[i].dest == x)
4334 return cui->sets[i].src_elt;
4336 else
4337 return cselib_lookup (x, mode, 0);
4340 return NULL;
4343 /* Replace all registers and addresses in an expression with VALUE
4344 expressions that map back to them, unless the expression is a
4345 register. If no mapping is or can be performed, returns NULL. */
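/* For example (illustrative only): a MEM such as (mem:SI (reg:SI sp)) is
   rewritten so that its address uses the VALUE cselib recorded for it,
   giving something like (mem:SI (value:SI ...)); a bare REG is not
   replaced, so NULL is returned for it. */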
4347 static rtx
4348 replace_expr_with_values (rtx loc)
4350 if (REG_P (loc))
4351 return NULL;
4352 else if (MEM_P (loc))
4354 cselib_val *addr = cselib_lookup (XEXP (loc, 0), Pmode, 0);
4355 if (addr)
4356 return replace_equiv_address_nv (loc, addr->val_rtx);
4357 else
4358 return NULL;
4360 else
4361 return cselib_subst_to_values (loc);
4364 /* Determine what kind of micro operation to choose for a USE. Return
4365 MO_CLOBBER if no micro operation is to be generated. */
4367 static enum micro_operation_type
4368 use_type (rtx *loc, struct count_use_info *cui, enum machine_mode *modep)
4370 tree expr;
4371 cselib_val *val;
4373 if (cui && cui->sets)
4375 if (GET_CODE (*loc) == VAR_LOCATION)
4377 if (track_expr_p (PAT_VAR_LOCATION_DECL (*loc), false))
4379 rtx ploc = PAT_VAR_LOCATION_LOC (*loc);
4380 cselib_val *val = cselib_lookup (ploc, GET_MODE (*loc), 1);
4382 /* ??? flag_float_store and volatile mems are never
4383 given values, but we could in theory use them for
4384 locations. */
4385 gcc_assert (val || 1);
4386 return MO_VAL_LOC;
4388 else
4389 return MO_CLOBBER;
4392 if ((REG_P (*loc) || MEM_P (*loc))
4393 && (val = find_use_val (*loc, GET_MODE (*loc), cui)))
4395 if (modep)
4396 *modep = GET_MODE (*loc);
4397 if (cui->store_p)
4399 if (REG_P (*loc)
4400 || cselib_lookup (XEXP (*loc, 0), GET_MODE (*loc), 0))
4401 return MO_VAL_SET;
4403 else if (!cselib_preserved_value_p (val))
4404 return MO_VAL_USE;
4408 if (REG_P (*loc))
4410 gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER);
4412 expr = REG_EXPR (*loc);
4414 if (!expr)
4415 return MO_USE_NO_VAR;
4416 else if (target_for_debug_bind (var_debug_decl (expr)))
4417 return MO_CLOBBER;
4418 else if (track_loc_p (*loc, expr, REG_OFFSET (*loc),
4419 false, modep, NULL))
4420 return MO_USE;
4421 else
4422 return MO_USE_NO_VAR;
4424 else if (MEM_P (*loc))
4426 expr = MEM_EXPR (*loc);
4428 if (!expr)
4429 return MO_CLOBBER;
4430 else if (target_for_debug_bind (var_debug_decl (expr)))
4431 return MO_CLOBBER;
4432 else if (track_loc_p (*loc, expr, INT_MEM_OFFSET (*loc),
4433 false, modep, NULL))
4434 return MO_USE;
4435 else
4436 return MO_CLOBBER;
4439 return MO_CLOBBER;
4442 /* Log to OUT information about micro-operation MOPT involving X in
4443 INSN of BB. */
4445 static inline void
4446 log_op_type (rtx x, basic_block bb, rtx insn,
4447 enum micro_operation_type mopt, FILE *out)
4449 fprintf (out, "bb %i op %i insn %i %s ",
4450 bb->index, VTI (bb)->n_mos - 1,
4451 INSN_UID (insn), micro_operation_type_name[mopt]);
4452 print_inline_rtx (out, x, 2);
4453 fputc ('\n', out);
4456 /* Count uses (register and memory references) in LOC which will be tracked.
4457 CUIP points to the count_use_info for the instruction that LOC is part of. */
4459 static int
4460 count_uses (rtx *loc, void *cuip)
4462 struct count_use_info *cui = (struct count_use_info *) cuip;
4463 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4465 if (mopt != MO_CLOBBER)
4467 cselib_val *val;
4468 enum machine_mode mode = GET_MODE (*loc);
4470 VTI (cui->bb)->n_mos++;
4472 if (dump_file && (dump_flags & TDF_DETAILS))
4473 log_op_type (*loc, cui->bb, cui->insn, mopt, dump_file);
4475 switch (mopt)
4477 case MO_VAL_LOC:
4478 loc = &PAT_VAR_LOCATION_LOC (*loc);
4479 if (VAR_LOC_UNKNOWN_P (*loc))
4480 break;
4481 /* Fall through. */
4483 case MO_VAL_USE:
4484 case MO_VAL_SET:
4485 if (MEM_P (*loc)
4486 && !REG_P (XEXP (*loc, 0)) && !MEM_P (XEXP (*loc, 0)))
4488 val = cselib_lookup (XEXP (*loc, 0), Pmode, false);
4490 if (val && !cselib_preserved_value_p (val))
4492 VTI (cui->bb)->n_mos++;
4493 cselib_preserve_value (val);
4497 val = find_use_val (*loc, mode, cui);
4498 if (val)
4499 cselib_preserve_value (val);
4500 else
4501 gcc_assert (mopt == MO_VAL_LOC);
4503 break;
4505 default:
4506 break;
4510 return 0;
4513 /* Helper function for finding all uses of REG/MEM in X in CUI's
4514 insn. */
4516 static void
4517 count_uses_1 (rtx *x, void *cui)
4519 for_each_rtx (x, count_uses, cui);
4522 /* Count stores (register and memory references) in LOC which will be
4523 tracked.  CUI is a count_use_info object containing the instruction
4524 which LOC is part of. */
4526 static void
4527 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4529 count_uses (&loc, cui);
4532 /* Callback for cselib_record_sets_hook.  It counts how many micro
4533 operations it takes for the uses and stores in an insn, after
4534 cselib_record_sets has analyzed the sets in the insn but before it
4535 modifies the stored values in its internal tables.  It may also be
4536 called directly, bypassing cselib_record_sets (perhaps because we're
4537 not doing cselib in the first place), in which case SETS and N_SETS
4538 will be 0. */
4540 static void
4541 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4543 basic_block bb = BLOCK_FOR_INSN (insn);
4544 struct count_use_info cui;
4546 cselib_hook_called = true;
4548 cui.insn = insn;
4549 cui.bb = bb;
4550 cui.sets = sets;
4551 cui.n_sets = n_sets;
4553 cui.store_p = false;
4554 note_uses (&PATTERN (insn), count_uses_1, &cui);
4555 cui.store_p = true;
4556 note_stores (PATTERN (insn), count_stores, &cui);
4559 /* Tell whether the CONCAT used to hold a VALUE and its location
4560 needs value resolution, i.e., an attempt to map the location
4561 back to other incoming values. */
4562 #define VAL_NEEDS_RESOLUTION(x) \
4563 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4564 /* Whether the location in the CONCAT is a tracked expression, that
4565 should also be handled like a MO_USE. */
4566 #define VAL_HOLDS_TRACK_EXPR(x) \
4567 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4568 /* Whether the location in the CONCAT should be handled like a MO_COPY
4569 as well. */
4570 #define VAL_EXPR_IS_COPIED(x) \
4571 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4572 /* Whether the location in the CONCAT should be handled like a
4573 MO_CLOBBER as well. */
4574 #define VAL_EXPR_IS_CLOBBERED(x) \
4575 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
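/* These flags are set on the CONCAT rtxes built by add_uses and add_stores
   for MO_VAL_LOC, MO_VAL_USE and MO_VAL_SET micro operations, and they are
   tested again when those operations are replayed in compute_bb_dataflow. */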
4577 /* Add uses (register and memory references) in LOC which will be tracked to
4578 VTI (bb)->mos.  DATA is the count_use_info for the instruction LOC is part of. */
4580 static int
4581 add_uses (rtx *loc, void *data)
4583 enum machine_mode mode = VOIDmode;
4584 struct count_use_info *cui = (struct count_use_info *)data;
4585 enum micro_operation_type type = use_type (loc, cui, &mode);
4587 if (type != MO_CLOBBER)
4589 basic_block bb = cui->bb;
4590 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4592 mo->type = type;
4593 mo->u.loc = type == MO_USE ? var_lowpart (mode, *loc) : *loc;
4594 mo->insn = cui->insn;
4596 if (type == MO_VAL_LOC)
4598 rtx oloc = *loc;
4599 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4600 cselib_val *val;
4602 gcc_assert (cui->sets);
4604 if (MEM_P (vloc)
4605 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4607 rtx mloc = vloc;
4608 cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
4610 if (val && !cselib_preserved_value_p (val))
4612 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4613 mon->type = mo->type;
4614 mon->u.loc = mo->u.loc;
4615 mon->insn = mo->insn;
4616 cselib_preserve_value (val);
4617 mo->type = MO_VAL_USE;
4618 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4619 mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
4620 if (dump_file && (dump_flags & TDF_DETAILS))
4621 log_op_type (mo->u.loc, cui->bb, cui->insn,
4622 mo->type, dump_file);
4623 mo = mon;
4627 if (!VAR_LOC_UNKNOWN_P (vloc)
4628 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4630 enum machine_mode mode2;
4631 enum micro_operation_type type2;
4632 rtx nloc = replace_expr_with_values (vloc);
4634 if (nloc)
4636 oloc = shallow_copy_rtx (oloc);
4637 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4640 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4642 type2 = use_type (&vloc, 0, &mode2);
4644 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4645 || type2 == MO_CLOBBER);
4647 if (type2 == MO_CLOBBER
4648 && !cselib_preserved_value_p (val))
4650 VAL_NEEDS_RESOLUTION (oloc) = 1;
4651 cselib_preserve_value (val);
4654 else if (!VAR_LOC_UNKNOWN_P (vloc))
4656 oloc = shallow_copy_rtx (oloc);
4657 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4660 mo->u.loc = oloc;
4662 else if (type == MO_VAL_USE)
4664 enum machine_mode mode2 = VOIDmode;
4665 enum micro_operation_type type2;
4666 cselib_val *val = find_use_val (*loc, GET_MODE (*loc), cui);
4667 rtx vloc, oloc = *loc, nloc;
4669 gcc_assert (cui->sets);
4671 if (MEM_P (oloc)
4672 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4674 rtx mloc = oloc;
4675 cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
4677 if (val && !cselib_preserved_value_p (val))
4679 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4680 mon->type = mo->type;
4681 mon->u.loc = mo->u.loc;
4682 mon->insn = mo->insn;
4683 cselib_preserve_value (val);
4684 mo->type = MO_VAL_USE;
4685 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4686 mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
4687 mo->insn = cui->insn;
4688 if (dump_file && (dump_flags & TDF_DETAILS))
4689 log_op_type (mo->u.loc, cui->bb, cui->insn,
4690 mo->type, dump_file);
4691 mo = mon;
4695 type2 = use_type (loc, 0, &mode2);
4697 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4698 || type2 == MO_CLOBBER);
4700 if (type2 == MO_USE)
4701 vloc = var_lowpart (mode2, *loc);
4702 else
4703 vloc = oloc;
4705 /* The loc of a MO_VAL_USE may have two forms:
4707 (concat val src): val is at src, a value-based
4708 representation.
4710 (concat (concat val use) src): same as above, with use as
4711 the MO_USE tracked value, if it differs from src.
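/* For instance (illustrative): a use of a register whose contents cselib
   maps to VAL gets the simple form (concat VAL (reg ...)); a MEM whose
   address is replaced by a VALUE gets the nested form, with the original
   MEM as the tracked use and the value-based MEM as src. */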
4715 nloc = replace_expr_with_values (*loc);
4716 if (!nloc)
4717 nloc = oloc;
4719 if (vloc != nloc)
4720 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4721 else
4722 oloc = val->val_rtx;
4724 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4726 if (type2 == MO_USE)
4727 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4728 if (!cselib_preserved_value_p (val))
4730 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4731 cselib_preserve_value (val);
4734 else
4735 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4737 if (dump_file && (dump_flags & TDF_DETAILS))
4738 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4741 return 0;
4744 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4746 static void
4747 add_uses_1 (rtx *x, void *cui)
4749 for_each_rtx (x, add_uses, cui);
4752 /* Add stores (register and memory references) in LOC which will be tracked
4753 to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
4754 CUIP->insn is the instruction which LOC is part of. */
4756 static void
4757 add_stores (rtx loc, const_rtx expr, void *cuip)
4759 enum machine_mode mode = VOIDmode, mode2;
4760 struct count_use_info *cui = (struct count_use_info *)cuip;
4761 basic_block bb = cui->bb;
4762 micro_operation *mo;
4763 rtx oloc = loc, nloc, src = NULL;
4764 enum micro_operation_type type = use_type (&loc, cui, &mode);
4765 bool track_p = false;
4766 cselib_val *v;
4767 bool resolve, preserve;
4769 if (type == MO_CLOBBER)
4770 return;
4772 mode2 = mode;
4774 if (REG_P (loc))
4776 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4778 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4779 || !(track_p = use_type (&loc, NULL, &mode2) == MO_USE)
4780 || GET_CODE (expr) == CLOBBER)
4782 mo->type = MO_CLOBBER;
4783 mo->u.loc = loc;
4785 else
4787 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4788 src = var_lowpart (mode2, SET_SRC (expr));
4789 loc = var_lowpart (mode2, loc);
4791 if (src == NULL)
4793 mo->type = MO_SET;
4794 mo->u.loc = loc;
4796 else
4798 if (SET_SRC (expr) != src)
4799 expr = gen_rtx_SET (VOIDmode, loc, src);
4800 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4801 mo->type = MO_COPY;
4802 else
4803 mo->type = MO_SET;
4804 mo->u.loc = CONST_CAST_RTX (expr);
4807 mo->insn = cui->insn;
4809 else if (MEM_P (loc)
4810 && ((track_p = use_type (&loc, NULL, &mode2) == MO_USE)
4811 || cui->sets))
4813 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4815 if (MEM_P (loc) && type == MO_VAL_SET
4816 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4818 rtx mloc = loc;
4819 cselib_val *val = cselib_lookup (XEXP (mloc, 0), Pmode, 0);
4821 if (val && !cselib_preserved_value_p (val))
4823 cselib_preserve_value (val);
4824 mo->type = MO_VAL_USE;
4825 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4826 mo->u.loc = gen_rtx_CONCAT (Pmode, val->val_rtx, mloc);
4827 mo->insn = cui->insn;
4828 if (dump_file && (dump_flags & TDF_DETAILS))
4829 log_op_type (mo->u.loc, cui->bb, cui->insn,
4830 mo->type, dump_file);
4831 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4835 if (GET_CODE (expr) == CLOBBER || !track_p)
4837 mo->type = MO_CLOBBER;
4838 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4840 else
4842 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4843 src = var_lowpart (mode2, SET_SRC (expr));
4844 loc = var_lowpart (mode2, loc);
4846 if (src == NULL)
4848 mo->type = MO_SET;
4849 mo->u.loc = loc;
4851 else
4853 if (SET_SRC (expr) != src)
4854 expr = gen_rtx_SET (VOIDmode, loc, src);
4855 if (same_variable_part_p (SET_SRC (expr),
4856 MEM_EXPR (loc),
4857 INT_MEM_OFFSET (loc)))
4858 mo->type = MO_COPY;
4859 else
4860 mo->type = MO_SET;
4861 mo->u.loc = CONST_CAST_RTX (expr);
4864 mo->insn = cui->insn;
4866 else
4867 return;
4869 if (type != MO_VAL_SET)
4870 goto log_and_return;
4872 v = find_use_val (oloc, mode, cui);
4874 resolve = preserve = !cselib_preserved_value_p (v);
4876 nloc = replace_expr_with_values (oloc);
4877 if (nloc)
4878 oloc = nloc;
4880 if (resolve && GET_CODE (mo->u.loc) == SET)
4882 nloc = replace_expr_with_values (SET_SRC (mo->u.loc));
4884 if (nloc)
4885 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
4886 else
4888 if (oloc == SET_DEST (mo->u.loc))
4889 /* No point in duplicating. */
4890 oloc = mo->u.loc;
4891 if (!REG_P (SET_SRC (mo->u.loc)))
4892 resolve = false;
4895 else if (!resolve)
4897 if (GET_CODE (mo->u.loc) == SET
4898 && oloc == SET_DEST (mo->u.loc))
4899 /* No point in duplicating. */
4900 oloc = mo->u.loc;
4902 else
4903 resolve = false;
4905 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
4907 if (mo->u.loc != oloc)
4908 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
4910 /* The loc of a MO_VAL_SET may have various forms:
4912 (concat val dst): dst now holds val
4914 (concat val (set dst src)): dst now holds val, copied from src
4916 (concat (concat val dstv) dst): dst now holds val; dstv is dst
4917 after replacing mems and non-top-level regs with values.
4919 (concat (concat val dstv) (set dst src)): dst now holds val,
4920 copied from src. dstv is a value-based representation of dst, if
4921 it differs from dst. If resolution is needed, src is a REG.
4923 (concat (concat val (set dstv srcv)) (set dst src)): src
4924 copied to dst, holding val. dstv and srcv are value-based
4925 representations of dst and src, respectively.
4929 mo->u.loc = loc;
4931 if (track_p)
4932 VAL_HOLDS_TRACK_EXPR (loc) = 1;
4933 if (preserve)
4935 VAL_NEEDS_RESOLUTION (loc) = resolve;
4936 cselib_preserve_value (v);
4938 if (mo->type == MO_CLOBBER)
4939 VAL_EXPR_IS_CLOBBERED (loc) = 1;
4940 if (mo->type == MO_COPY)
4941 VAL_EXPR_IS_COPIED (loc) = 1;
4943 mo->type = MO_VAL_SET;
4945 log_and_return:
4946 if (dump_file && (dump_flags & TDF_DETAILS))
4947 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4950 /* Callback for cselib_record_sets_hook.  It records as micro
4951 operations the uses and stores in an insn, after cselib_record_sets
4952 has analyzed the sets in the insn but before it modifies the stored
4953 values in its internal tables.  It may also be called directly,
4954 bypassing cselib_record_sets (perhaps because we're not doing cselib
4955 in the first place), in which case SETS and N_SETS will be 0. */
4957 static void
4958 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4960 basic_block bb = BLOCK_FOR_INSN (insn);
4961 int n1, n2;
4962 struct count_use_info cui;
4964 cselib_hook_called = true;
4966 cui.insn = insn;
4967 cui.bb = bb;
4968 cui.sets = sets;
4969 cui.n_sets = n_sets;
4971 n1 = VTI (bb)->n_mos;
4972 cui.store_p = false;
4973 note_uses (&PATTERN (insn), add_uses_1, &cui);
4974 n2 = VTI (bb)->n_mos - 1;
4976 /* Order the MO_USEs to be before MO_USE_NO_VARs,
4977 MO_VAL_LOC and MO_VAL_USE. */
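/* This is a simple in-place partition: N1 scans forward over MO_USEs,
   N2 scans backward over everything else, and the two elements are
   swapped whenever both scans stop. */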
4978 while (n1 < n2)
4980 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
4981 n1++;
4982 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
4983 n2--;
4984 if (n1 < n2)
4986 micro_operation sw;
4988 sw = VTI (bb)->mos[n1];
4989 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
4990 VTI (bb)->mos[n2] = sw;
4994 if (CALL_P (insn))
4996 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4998 mo->type = MO_CALL;
4999 mo->insn = insn;
5001 if (dump_file && (dump_flags & TDF_DETAILS))
5002 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5005 n1 = VTI (bb)->n_mos;
5006 /* This will record NEXT_INSN (insn), such that we can
5007 insert notes before it without worrying about any
5008 notes that MO_USEs might emit after the insn. */
5009 cui.store_p = true;
5010 note_stores (PATTERN (insn), add_stores, &cui);
5011 n2 = VTI (bb)->n_mos - 1;
5013 /* Order the MO_CLOBBERs to be before MO_SETs. */
5014 while (n1 < n2)
5016 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5017 n1++;
5018 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5019 n2--;
5020 if (n1 < n2)
5022 micro_operation sw;
5024 sw = VTI (bb)->mos[n1];
5025 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5026 VTI (bb)->mos[n2] = sw;
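/* Return the initialization status recorded in dataflow set IN for SRC,
   a REG or MEM that is being copied from, by looking up the decl behind
   SRC's REG_EXPR or MEM_EXPR. */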
5031 static enum var_init_status
5032 find_src_status (dataflow_set *in, rtx src)
5034 tree decl = NULL_TREE;
5035 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5037 if (! flag_var_tracking_uninit)
5038 status = VAR_INIT_STATUS_INITIALIZED;
5040 if (src && REG_P (src))
5041 decl = var_debug_decl (REG_EXPR (src));
5042 else if (src && MEM_P (src))
5043 decl = var_debug_decl (MEM_EXPR (src));
5045 if (src && decl)
5046 status = get_init_value (in, src, dv_from_decl (decl));
5048 return status;
5051 /* SRC is the source of an assignment.  Use SET to try to find what
5052 was ultimately assigned to SRC.  Return that value if known,
5053 otherwise return NULL_RTX. */
5055 static rtx
5056 find_src_set_src (dataflow_set *set, rtx src)
5058 tree decl = NULL_TREE; /* The variable being copied around. */
5059 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5060 variable var;
5061 location_chain nextp;
5062 int i;
5063 bool found;
5065 if (src && REG_P (src))
5066 decl = var_debug_decl (REG_EXPR (src));
5067 else if (src && MEM_P (src))
5068 decl = var_debug_decl (MEM_EXPR (src));
5070 if (src && decl)
5072 decl_or_value dv = dv_from_decl (decl);
5074 var = shared_hash_find (set->vars, dv);
5075 if (var)
5077 found = false;
5078 for (i = 0; i < var->n_var_parts && !found; i++)
5079 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5080 nextp = nextp->next)
5081 if (rtx_equal_p (nextp->loc, src))
5083 set_src = nextp->set_src;
5084 found = true;
5090 return set_src;
5093 /* Compute the changes of variable locations in the basic block BB. */
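/* The OUT set is recomputed by copying the IN set and replaying BB's
   micro operations in order; the return value tells the caller whether
   OUT changed, i.e. whether BB's successors need to be revisited. */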
5095 static bool
5096 compute_bb_dataflow (basic_block bb)
5098 int i, n;
5099 bool changed;
5100 dataflow_set old_out;
5101 dataflow_set *in = &VTI (bb)->in;
5102 dataflow_set *out = &VTI (bb)->out;
5104 dataflow_set_init (&old_out);
5105 dataflow_set_copy (&old_out, out);
5106 dataflow_set_copy (out, in);
5108 n = VTI (bb)->n_mos;
5109 for (i = 0; i < n; i++)
5111 rtx insn = VTI (bb)->mos[i].insn;
5113 switch (VTI (bb)->mos[i].type)
5115 case MO_CALL:
5116 dataflow_set_clear_at_call (out);
5117 break;
5119 case MO_USE:
5121 rtx loc = VTI (bb)->mos[i].u.loc;
5123 if (REG_P (loc))
5124 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5125 else if (MEM_P (loc))
5126 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5128 break;
5130 case MO_VAL_LOC:
5132 rtx loc = VTI (bb)->mos[i].u.loc;
5133 rtx val, vloc;
5134 tree var;
5136 if (GET_CODE (loc) == CONCAT)
5138 val = XEXP (loc, 0);
5139 vloc = XEXP (loc, 1);
5141 else
5143 val = NULL_RTX;
5144 vloc = loc;
5147 var = PAT_VAR_LOCATION_DECL (vloc);
5149 clobber_variable_part (out, NULL_RTX,
5150 dv_from_decl (var), 0, NULL_RTX);
5151 if (val)
5153 if (VAL_NEEDS_RESOLUTION (loc))
5154 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5155 set_variable_part (out, val, dv_from_decl (var), 0,
5156 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5157 INSERT);
5160 break;
5162 case MO_VAL_USE:
5164 rtx loc = VTI (bb)->mos[i].u.loc;
5165 rtx val, vloc, uloc;
5167 vloc = uloc = XEXP (loc, 1);
5168 val = XEXP (loc, 0);
5170 if (GET_CODE (val) == CONCAT)
5172 uloc = XEXP (val, 1);
5173 val = XEXP (val, 0);
5176 if (VAL_NEEDS_RESOLUTION (loc))
5177 val_resolve (out, val, vloc, insn);
5179 if (VAL_HOLDS_TRACK_EXPR (loc))
5181 if (GET_CODE (uloc) == REG)
5182 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5183 NULL);
5184 else if (GET_CODE (uloc) == MEM)
5185 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5186 NULL);
5189 break;
5191 case MO_VAL_SET:
5193 rtx loc = VTI (bb)->mos[i].u.loc;
5194 rtx val, vloc, uloc;
5196 vloc = uloc = XEXP (loc, 1);
5197 val = XEXP (loc, 0);
5199 if (GET_CODE (val) == CONCAT)
5201 vloc = XEXP (val, 1);
5202 val = XEXP (val, 0);
5205 if (GET_CODE (vloc) == SET)
5207 rtx vsrc = SET_SRC (vloc);
5209 gcc_assert (val != vsrc);
5210 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5212 vloc = SET_DEST (vloc);
5214 if (VAL_NEEDS_RESOLUTION (loc))
5215 val_resolve (out, val, vsrc, insn);
5217 else if (VAL_NEEDS_RESOLUTION (loc))
5219 gcc_assert (GET_CODE (uloc) == SET
5220 && GET_CODE (SET_SRC (uloc)) == REG);
5221 val_resolve (out, val, SET_SRC (uloc), insn);
5224 if (VAL_HOLDS_TRACK_EXPR (loc))
5226 if (VAL_EXPR_IS_CLOBBERED (loc))
5228 if (REG_P (uloc))
5229 var_reg_delete (out, uloc, true);
5230 else if (MEM_P (uloc))
5231 var_mem_delete (out, uloc, true);
5233 else
5235 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5236 rtx set_src = NULL;
5237 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5239 if (GET_CODE (uloc) == SET)
5241 set_src = SET_SRC (uloc);
5242 uloc = SET_DEST (uloc);
5245 if (copied_p)
5247 if (flag_var_tracking_uninit)
5249 status = find_src_status (in, set_src);
5251 if (status == VAR_INIT_STATUS_UNKNOWN)
5252 status = find_src_status (out, set_src);
5255 set_src = find_src_set_src (in, set_src);
5258 if (REG_P (uloc))
5259 var_reg_delete_and_set (out, uloc, !copied_p,
5260 status, set_src);
5261 else if (MEM_P (uloc))
5262 var_mem_delete_and_set (out, uloc, !copied_p,
5263 status, set_src);
5266 else if (REG_P (uloc))
5267 var_regno_delete (out, REGNO (uloc));
5269 val_store (out, val, vloc, insn);
5271 break;
5273 case MO_SET:
5275 rtx loc = VTI (bb)->mos[i].u.loc;
5276 rtx set_src = NULL;
5278 if (GET_CODE (loc) == SET)
5280 set_src = SET_SRC (loc);
5281 loc = SET_DEST (loc);
5284 if (REG_P (loc))
5285 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5286 set_src);
5287 else if (MEM_P (loc))
5288 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5289 set_src);
5291 break;
5293 case MO_COPY:
5295 rtx loc = VTI (bb)->mos[i].u.loc;
5296 enum var_init_status src_status;
5297 rtx set_src = NULL;
5299 if (GET_CODE (loc) == SET)
5301 set_src = SET_SRC (loc);
5302 loc = SET_DEST (loc);
5305 if (! flag_var_tracking_uninit)
5306 src_status = VAR_INIT_STATUS_INITIALIZED;
5307 else
5309 src_status = find_src_status (in, set_src);
5311 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5312 src_status = find_src_status (out, set_src);
5315 set_src = find_src_set_src (in, set_src);
5317 if (REG_P (loc))
5318 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5319 else if (MEM_P (loc))
5320 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5322 break;
5324 case MO_USE_NO_VAR:
5326 rtx loc = VTI (bb)->mos[i].u.loc;
5328 if (REG_P (loc))
5329 var_reg_delete (out, loc, false);
5330 else if (MEM_P (loc))
5331 var_mem_delete (out, loc, false);
5333 break;
5335 case MO_CLOBBER:
5337 rtx loc = VTI (bb)->mos[i].u.loc;
5339 if (REG_P (loc))
5340 var_reg_delete (out, loc, true);
5341 else if (MEM_P (loc))
5342 var_mem_delete (out, loc, true);
5344 break;
5346 case MO_ADJUST:
5347 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5348 break;
5352 if (MAY_HAVE_DEBUG_INSNS)
5354 dataflow_set_equiv_regs (out);
5355 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5356 out);
5357 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5358 out);
5359 #if ENABLE_CHECKING
5360 htab_traverse (shared_hash_htab (out->vars),
5361 canonicalize_loc_order_check, out);
5362 #endif
5364 changed = dataflow_set_different (&old_out, out);
5365 dataflow_set_destroy (&old_out);
5366 return changed;
5369 /* Find the locations of variables in the whole function. */
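/* This is a worklist-driven iterative dataflow solver: basic blocks are
   kept in two fibonacci heaps ordered by reverse completion order, the
   current round's WORKLIST and the next round's PENDING, and each block
   is reprocessed until its OUT set stops changing. */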
5371 static void
5372 vt_find_locations (void)
5374 fibheap_t worklist, pending, fibheap_swap;
5375 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5376 basic_block bb;
5377 edge e;
5378 int *bb_order;
5379 int *rc_order;
5380 int i;
5381 int htabsz = 0;
5383 /* Compute reverse completion order of depth first search of the CFG
5384 so that the data-flow runs faster. */
5385 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5386 bb_order = XNEWVEC (int, last_basic_block);
5387 pre_and_rev_post_order_compute (NULL, rc_order, false);
5388 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5389 bb_order[rc_order[i]] = i;
5390 free (rc_order);
5392 worklist = fibheap_new ();
5393 pending = fibheap_new ();
5394 visited = sbitmap_alloc (last_basic_block);
5395 in_worklist = sbitmap_alloc (last_basic_block);
5396 in_pending = sbitmap_alloc (last_basic_block);
5397 sbitmap_zero (in_worklist);
5399 FOR_EACH_BB (bb)
5400 fibheap_insert (pending, bb_order[bb->index], bb);
5401 sbitmap_ones (in_pending);
5403 while (!fibheap_empty (pending))
5405 fibheap_swap = pending;
5406 pending = worklist;
5407 worklist = fibheap_swap;
5408 sbitmap_swap = in_pending;
5409 in_pending = in_worklist;
5410 in_worklist = sbitmap_swap;
5412 sbitmap_zero (visited);
5414 while (!fibheap_empty (worklist))
5416 bb = (basic_block) fibheap_extract_min (worklist);
5417 RESET_BIT (in_worklist, bb->index);
5418 if (!TEST_BIT (visited, bb->index))
5420 bool changed;
5421 edge_iterator ei;
5422 int oldinsz, oldoutsz;
5424 SET_BIT (visited, bb->index);
5426 if (dump_file && VTI (bb)->in.vars)
5428 htabsz
5429 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5430 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5431 oldinsz
5432 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5433 oldoutsz
5434 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5436 else
5437 oldinsz = oldoutsz = 0;
5439 if (MAY_HAVE_DEBUG_INSNS)
5441 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5442 bool first = true, adjust = false;
5444 /* Calculate the IN set as the intersection of
5445 predecessor OUT sets. */
5447 dataflow_set_clear (in);
5448 dst_can_be_shared = true;
5450 FOR_EACH_EDGE (e, ei, bb->preds)
5451 if (!VTI (e->src)->flooded)
5452 gcc_assert (bb_order[bb->index]
5453 <= bb_order[e->src->index]);
5454 else if (first)
5456 dataflow_set_copy (in, &VTI (e->src)->out);
5457 first_out = &VTI (e->src)->out;
5458 first = false;
5460 else
5462 dataflow_set_merge (in, &VTI (e->src)->out);
5463 adjust = true;
5466 if (adjust)
5468 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5469 #if ENABLE_CHECKING
5470 /* Merge and merge_adjust should keep entries in
5471 canonical order. */
5472 htab_traverse (shared_hash_htab (in->vars),
5473 canonicalize_loc_order_check,
5474 in);
5475 #endif
5476 if (dst_can_be_shared)
5478 shared_hash_destroy (in->vars);
5479 in->vars = shared_hash_copy (first_out->vars);
5483 VTI (bb)->flooded = true;
5485 else
5487 /* Calculate the IN set as union of predecessor OUT sets. */
5488 dataflow_set_clear (&VTI (bb)->in);
5489 FOR_EACH_EDGE (e, ei, bb->preds)
5490 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5493 changed = compute_bb_dataflow (bb);
5494 if (dump_file)
5495 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5496 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5498 if (changed)
5500 FOR_EACH_EDGE (e, ei, bb->succs)
5502 if (e->dest == EXIT_BLOCK_PTR)
5503 continue;
5505 if (TEST_BIT (visited, e->dest->index))
5507 if (!TEST_BIT (in_pending, e->dest->index))
5509 /* Send E->DEST to next round. */
5510 SET_BIT (in_pending, e->dest->index);
5511 fibheap_insert (pending,
5512 bb_order[e->dest->index],
5513 e->dest);
5516 else if (!TEST_BIT (in_worklist, e->dest->index))
5518 /* Add E->DEST to current round. */
5519 SET_BIT (in_worklist, e->dest->index);
5520 fibheap_insert (worklist, bb_order[e->dest->index],
5521 e->dest);
5526 if (dump_file)
5527 fprintf (dump_file,
5528 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5529 bb->index,
5530 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5531 oldinsz,
5532 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5533 oldoutsz,
5534 (int)worklist->nodes, (int)pending->nodes, htabsz);
5536 if (dump_file && (dump_flags & TDF_DETAILS))
5538 fprintf (dump_file, "BB %i IN:\n", bb->index);
5539 dump_dataflow_set (&VTI (bb)->in);
5540 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5541 dump_dataflow_set (&VTI (bb)->out);
5547 if (MAY_HAVE_DEBUG_INSNS)
5548 FOR_EACH_BB (bb)
5549 gcc_assert (VTI (bb)->flooded);
5551 free (bb_order);
5552 fibheap_delete (worklist);
5553 fibheap_delete (pending);
5554 sbitmap_free (visited);
5555 sbitmap_free (in_worklist);
5556 sbitmap_free (in_pending);
5559 /* Print the content of the LIST to dump file. */
5561 static void
5562 dump_attrs_list (attrs list)
5564 for (; list; list = list->next)
5566 if (dv_is_decl_p (list->dv))
5567 print_mem_expr (dump_file, dv_as_decl (list->dv));
5568 else
5569 print_rtl_single (dump_file, dv_as_value (list->dv));
5570 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5572 fprintf (dump_file, "\n");
5575 /* Print the information about variable *SLOT to dump file. */
5577 static int
5578 dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5580 variable var = (variable) *slot;
5582 dump_variable (var);
5584 /* Continue traversing the hash table. */
5585 return 1;
5588 /* Print the information about variable VAR to dump file. */
5590 static void
5591 dump_variable (variable var)
5593 int i;
5594 location_chain node;
5596 if (dv_is_decl_p (var->dv))
5598 const_tree decl = dv_as_decl (var->dv);
5600 if (DECL_NAME (decl))
5601 fprintf (dump_file, " name: %s",
5602 IDENTIFIER_POINTER (DECL_NAME (decl)));
5603 else
5604 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5605 if (dump_flags & TDF_UID)
5606 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5607 else
5608 fprintf (dump_file, "\n");
5610 else
5612 fputc (' ', dump_file);
5613 print_rtl_single (dump_file, dv_as_value (var->dv));
5616 for (i = 0; i < var->n_var_parts; i++)
5618 fprintf (dump_file, " offset %ld\n",
5619 (long) var->var_part[i].offset);
5620 for (node = var->var_part[i].loc_chain; node; node = node->next)
5622 fprintf (dump_file, " ");
5623 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5624 fprintf (dump_file, "[uninit]");
5625 print_rtl_single (dump_file, node->loc);
5630 /* Print the information about variables from hash table VARS to dump file. */
5632 static void
5633 dump_vars (htab_t vars)
5635 if (htab_elements (vars) > 0)
5637 fprintf (dump_file, "Variables:\n");
5638 htab_traverse (vars, dump_variable_slot, NULL);
5642 /* Print the dataflow set SET to dump file. */
5644 static void
5645 dump_dataflow_set (dataflow_set *set)
5647 int i;
5649 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
5650 set->stack_adjust);
5651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5653 if (set->regs[i])
5655 fprintf (dump_file, "Reg %d:", i);
5656 dump_attrs_list (set->regs[i]);
5659 dump_vars (shared_hash_htab (set->vars));
5660 fprintf (dump_file, "\n");
5663 /* Print the IN and OUT sets for each basic block to dump file. */
5665 static void
5666 dump_dataflow_sets (void)
5668 basic_block bb;
5670 FOR_EACH_BB (bb)
5672 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5673 fprintf (dump_file, "IN:\n");
5674 dump_dataflow_set (&VTI (bb)->in);
5675 fprintf (dump_file, "OUT:\n");
5676 dump_dataflow_set (&VTI (bb)->out);
5680 /* Add variable VAR to the hash table of changed variables and,
5681 if it has no locations, delete it from SET's hash table. */
5683 static void
5684 variable_was_changed (variable var, dataflow_set *set)
5686 hashval_t hash = dv_htab_hash (var->dv);
5688 if (emit_notes)
5690 void **slot;
5692 /* Remember this decl or VALUE has been added to changed_variables. */
5693 set_dv_changed (var->dv, true);
5695 slot = htab_find_slot_with_hash (changed_variables,
5696 var->dv,
5697 hash, INSERT);
5699 if (set && var->n_var_parts == 0)
5701 variable empty_var;
5703 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5704 empty_var->dv = var->dv;
5705 empty_var->refcount = 1;
5706 empty_var->n_var_parts = 0;
5707 *slot = empty_var;
5708 goto drop_var;
5710 else
5712 var->refcount++;
5713 *slot = var;
5716 else
5718 gcc_assert (set);
5719 if (var->n_var_parts == 0)
5721 void **slot;
5723 drop_var:
5724 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5725 if (slot)
5727 if (shared_hash_shared (set->vars))
5728 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5729 NO_INSERT);
5730 htab_clear_slot (shared_hash_htab (set->vars), slot);
5736 /* Look for the index in VAR->var_part corresponding to OFFSET.
5737 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5738 referenced int will be set to the index that the part has or should
5739 have, if it should be inserted. */
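/* VAR->var_part is kept sorted by offset, which is what makes the binary
   search below valid. */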
5741 static inline int
5742 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5743 int *insertion_point)
5745 int pos, low, high;
5747 /* Find the location part. */
5748 low = 0;
5749 high = var->n_var_parts;
5750 while (low != high)
5752 pos = (low + high) / 2;
5753 if (var->var_part[pos].offset < offset)
5754 low = pos + 1;
5755 else
5756 high = pos;
5758 pos = low;
5760 if (insertion_point)
5761 *insertion_point = pos;
5763 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
5764 return pos;
5766 return -1;
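/* Worker for set_variable_part: add location LOC for the variable part
   given by DV and OFFSET to the variable whose SET->vars slot is SLOT,
   with initialization status INITIALIZED and source SET_SRC, and return
   the (possibly updated) slot. */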
5769 static void **
5770 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5771 decl_or_value dv, HOST_WIDE_INT offset,
5772 enum var_init_status initialized, rtx set_src)
5774 int pos;
5775 location_chain node, next;
5776 location_chain *nextp;
5777 variable var;
5778 bool onepart = dv_onepart_p (dv);
5780 gcc_assert (offset == 0 || !onepart);
5781 gcc_assert (loc != dv_as_opaque (dv));
5783 var = (variable) *slot;
5785 if (! flag_var_tracking_uninit)
5786 initialized = VAR_INIT_STATUS_INITIALIZED;
5788 if (!var)
5790 /* Create new variable information. */
5791 var = (variable) pool_alloc (dv_pool (dv));
5792 var->dv = dv;
5793 var->refcount = 1;
5794 var->n_var_parts = 1;
5795 var->var_part[0].offset = offset;
5796 var->var_part[0].loc_chain = NULL;
5797 var->var_part[0].cur_loc = NULL;
5798 *slot = var;
5799 pos = 0;
5800 nextp = &var->var_part[0].loc_chain;
5801 if (emit_notes && dv_is_value_p (dv))
5802 add_cselib_value_chains (dv);
5804 else if (onepart)
5806 int r = -1, c = 0;
5808 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5810 pos = 0;
5812 if (GET_CODE (loc) == VALUE)
5814 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5815 nextp = &node->next)
5816 if (GET_CODE (node->loc) == VALUE)
5818 if (node->loc == loc)
5820 r = 0;
5821 break;
5823 if (canon_value_cmp (node->loc, loc))
5824 c++;
5825 else
5827 r = 1;
5828 break;
5831 else if (REG_P (node->loc) || MEM_P (node->loc))
5832 c++;
5833 else
5835 r = 1;
5836 break;
5839 else if (REG_P (loc))
5841 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5842 nextp = &node->next)
5843 if (REG_P (node->loc))
5845 if (REGNO (node->loc) < REGNO (loc))
5846 c++;
5847 else
5849 if (REGNO (node->loc) == REGNO (loc))
5850 r = 0;
5851 else
5852 r = 1;
5853 break;
5856 else
5858 r = 1;
5859 break;
5862 else if (MEM_P (loc))
5864 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5865 nextp = &node->next)
5866 if (REG_P (node->loc))
5867 c++;
5868 else if (MEM_P (node->loc))
5870 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
5871 break;
5872 else
5873 c++;
5875 else
5877 r = 1;
5878 break;
5881 else
5882 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5883 nextp = &node->next)
5884 if ((r = loc_cmp (node->loc, loc)) >= 0)
5885 break;
5886 else
5887 c++;
5889 if (r == 0)
5890 return slot;
5892 if (var->refcount > 1 || shared_hash_shared (set->vars))
5894 slot = unshare_variable (set, slot, var, initialized);
5895 var = (variable)*slot;
5896 for (nextp = &var->var_part[0].loc_chain; c;
5897 nextp = &(*nextp)->next)
5898 c--;
5899 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
5902 else
5904 int inspos = 0;
5906 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
5908 pos = find_variable_location_part (var, offset, &inspos);
5910 if (pos >= 0)
5912 node = var->var_part[pos].loc_chain;
5914 if (node
5915 && ((REG_P (node->loc) && REG_P (loc)
5916 && REGNO (node->loc) == REGNO (loc))
5917 || rtx_equal_p (node->loc, loc)))
5919 /* LOC is at the beginning of the chain, so we have nothing
5920 to do. */
5921 if (node->init < initialized)
5922 node->init = initialized;
5923 if (set_src != NULL)
5924 node->set_src = set_src;
5926 return slot;
5928 else
5930 /* We have to make a copy of a shared variable. */
5931 if (var->refcount > 1 || shared_hash_shared (set->vars))
5933 slot = unshare_variable (set, slot, var, initialized);
5934 var = (variable)*slot;
5938 else
5940 /* We have not found the location part, so a new one will be created. */
5942 /* We have to make a copy of the shared variable. */
5943 if (var->refcount > 1 || shared_hash_shared (set->vars))
5945 slot = unshare_variable (set, slot, var, initialized);
5946 var = (variable)*slot;
5949 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
5950 thus there are at most MAX_VAR_PARTS different offsets. */
5951 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
5952 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
5954 /* We have to move the elements of the array starting at index
5955 INSPOS to the next position. */
5956 for (pos = var->n_var_parts; pos > inspos; pos--)
5957 var->var_part[pos] = var->var_part[pos - 1];
5959 var->n_var_parts++;
5960 var->var_part[pos].offset = offset;
5961 var->var_part[pos].loc_chain = NULL;
5962 var->var_part[pos].cur_loc = NULL;
5965 /* Delete the location from the list. */
5966 nextp = &var->var_part[pos].loc_chain;
5967 for (node = var->var_part[pos].loc_chain; node; node = next)
5969 next = node->next;
5970 if ((REG_P (node->loc) && REG_P (loc)
5971 && REGNO (node->loc) == REGNO (loc))
5972 || rtx_equal_p (node->loc, loc))
5974 /* Save these values, to assign to the new node, before
5975 deleting this one. */
5976 if (node->init > initialized)
5977 initialized = node->init;
5978 if (node->set_src != NULL && set_src == NULL)
5979 set_src = node->set_src;
5980 pool_free (loc_chain_pool, node);
5981 *nextp = next;
5982 break;
5984 else
5985 nextp = &node->next;
5988 nextp = &var->var_part[pos].loc_chain;
5991 /* Add the location to the beginning. */
5992 node = (location_chain) pool_alloc (loc_chain_pool);
5993 node->loc = loc;
5994 node->init = initialized;
5995 node->set_src = set_src;
5996 node->next = *nextp;
5997 *nextp = node;
5999 if (onepart && emit_notes)
6000 add_value_chains (var->dv, loc);
6002 /* If no location has been emitted for this part yet, record LOC and mark the variable as changed so a note gets emitted. */
6003 if (var->var_part[pos].cur_loc == NULL)
6005 var->var_part[pos].cur_loc = loc;
6006 variable_was_changed (var, set);
6009 return slot;
6012 /* Set the part of variable's location in the dataflow set SET. The
6013 variable part is specified by variable's declaration in DV and
6014 offset OFFSET and the part's location by LOC. IOPT should be
6015 NO_INSERT if the variable is known to be in SET already and the
6016 variable hash table must not be resized, and INSERT otherwise. */
6018 static void
6019 set_variable_part (dataflow_set *set, rtx loc,
6020 decl_or_value dv, HOST_WIDE_INT offset,
6021 enum var_init_status initialized, rtx set_src,
6022 enum insert_option iopt)
6024 void **slot;
6026 if (iopt == NO_INSERT)
6027 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6028 else
6030 slot = shared_hash_find_slot (set->vars, dv);
6031 if (!slot)
6032 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6034 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6037 /* Remove all recorded register locations for the given variable part
6038 from dataflow set SET, except for those that are identical to LOC.
6039 The variable part is specified by the SET->vars slot SLOT and
6040 offset OFFSET. */
6042 static void **
6043 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6044 HOST_WIDE_INT offset, rtx set_src)
6046 variable var = (variable) *slot;
6047 int pos = find_variable_location_part (var, offset, NULL);
6049 if (pos >= 0)
6051 location_chain node, next;
6053 /* Remove the register locations from the dataflow set. */
6054 next = var->var_part[pos].loc_chain;
6055 for (node = next; node; node = next)
6057 next = node->next;
6058 if (node->loc != loc
6059 && (!flag_var_tracking_uninit
6060 || !set_src
6061 || MEM_P (set_src)
6062 || !rtx_equal_p (set_src, node->set_src)))
6064 if (REG_P (node->loc))
6066 attrs anode, anext;
6067 attrs *anextp;
6069 /* Remove the variable part from the register's
6070 list, but preserve any other variable parts
6071 that might be regarded as live in that same
6072 register. */
6073 anextp = &set->regs[REGNO (node->loc)];
6074 for (anode = *anextp; anode; anode = anext)
6076 anext = anode->next;
6077 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6078 && anode->offset == offset)
6080 pool_free (attrs_pool, anode);
6081 *anextp = anext;
6083 else
6084 anextp = &anode->next;
6088 slot = delete_slot_part (set, node->loc, slot, offset);
6093 return slot;
6096 /* Remove all recorded register locations for the given variable part
6097 from dataflow set SET, except for those that are identical to LOC.
6098 The variable part is specified by the variable's declaration or value
6099 DV and offset OFFSET. */
6101 static void
6102 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6103 HOST_WIDE_INT offset, rtx set_src)
6105 void **slot;
6107 if (!dv_as_opaque (dv)
6108 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6109 return;
6111 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6112 if (!slot)
6113 return;
6115 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6118 /* Delete the part of variable's location from dataflow set SET. The
6119 variable part is specified by its SET->vars slot SLOT and offset
6120 OFFSET and the part's location by LOC. */
6122 static void **
6123 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6124 HOST_WIDE_INT offset)
6126 variable var = (variable) *slot;
6127 int pos = find_variable_location_part (var, offset, NULL);
6129 if (pos >= 0)
6131 location_chain node, next;
6132 location_chain *nextp;
6133 bool changed;
6135 if (var->refcount > 1 || shared_hash_shared (set->vars))
6137 /* If the location chain contains the location we are about to
6138 delete, we have to make a copy of the variable first. */
6139 for (node = var->var_part[pos].loc_chain; node;
6140 node = node->next)
6142 if ((REG_P (node->loc) && REG_P (loc)
6143 && REGNO (node->loc) == REGNO (loc))
6144 || rtx_equal_p (node->loc, loc))
6146 slot = unshare_variable (set, slot, var,
6147 VAR_INIT_STATUS_UNKNOWN);
6148 var = (variable)*slot;
6149 break;
6154 /* Delete the location part. */
6155 nextp = &var->var_part[pos].loc_chain;
6156 for (node = *nextp; node; node = next)
6158 next = node->next;
6159 if ((REG_P (node->loc) && REG_P (loc)
6160 && REGNO (node->loc) == REGNO (loc))
6161 || rtx_equal_p (node->loc, loc))
6163 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6164 remove_value_chains (var->dv, node->loc);
6165 pool_free (loc_chain_pool, node);
6166 *nextp = next;
6167 break;
6169 else
6170 nextp = &node->next;
6173 /* If we have deleted the location which was last emitted
6174 we have to emit a new location, so add the variable to the
6175 set of changed variables. */
6176 if (var->var_part[pos].cur_loc
6177 && ((REG_P (loc)
6178 && REG_P (var->var_part[pos].cur_loc)
6179 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6180 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6182 changed = true;
6183 if (var->var_part[pos].loc_chain)
6184 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
6186 else
6187 changed = false;
6189 if (var->var_part[pos].loc_chain == NULL)
6191 gcc_assert (changed);
6192 var->n_var_parts--;
6193 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6194 remove_cselib_value_chains (var->dv);
6195 while (pos < var->n_var_parts)
6197 var->var_part[pos] = var->var_part[pos + 1];
6198 pos++;
6201 if (changed)
6202 variable_was_changed (var, set);
6205 return slot;
6208 /* Delete the part of variable's location from dataflow set SET. The
6209 variable part is specified by variable's declaration or value DV
6210 and offset OFFSET and the part's location by LOC. */
6212 static void
6213 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6214 HOST_WIDE_INT offset)
6216 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6217 if (!slot)
6218 return;
6220 slot = delete_slot_part (set, loc, slot, offset);
6223 /* Wrap result in CONST:MODE if needed to preserve the mode. */
6225 static rtx
6226 check_wrap_constant (enum machine_mode mode, rtx result)
6228 if (!result || GET_MODE (result) == mode)
6229 return result;
6231 if (dump_file && (dump_flags & TDF_DETAILS))
6232 fprintf (dump_file, " wrapping result in const to preserve mode %s\n",
6233 GET_MODE_NAME (mode));
6235 result = wrap_constant (mode, result);
6236 gcc_assert (GET_MODE (result) == mode);
6238 return result;
6241 /* Callback for cselib_expand_value, that looks for expressions
6242 holding the value in the var-tracking hash tables. */
6244 static rtx
6245 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6247 htab_t vars = (htab_t)data;
6248 decl_or_value dv;
6249 variable var;
6250 location_chain loc;
6251 rtx result;
6253 gcc_assert (GET_CODE (x) == VALUE);
6255 if (VALUE_RECURSED_INTO (x))
6256 return NULL;
6258 dv = dv_from_value (x);
6259 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6261 if (!var)
6262 return NULL;
6264 if (var->n_var_parts == 0)
6265 return NULL;
6267 gcc_assert (var->n_var_parts == 1);
6269 VALUE_RECURSED_INTO (x) = true;
6270 result = NULL;
6272 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6274 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6275 vt_expand_loc_callback, vars);
6276 result = check_wrap_constant (GET_MODE (loc->loc), result);
6277 if (result)
6278 break;
6281 VALUE_RECURSED_INTO (x) = false;
6282 return result;
6285 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6286 tables. */
6288 static rtx
6289 vt_expand_loc (rtx loc, htab_t vars)
6291 rtx newloc;
6293 if (!MAY_HAVE_DEBUG_INSNS)
6294 return loc;
6296 newloc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6297 vt_expand_loc_callback, vars);
6298 loc = check_wrap_constant (GET_MODE (loc), newloc);
6300 if (loc && MEM_P (loc))
6301 loc = targetm.delegitimize_address (loc);
6303 return loc;
6306 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6307 additional parameters: WHERE specifies whether the note shall be emitted
6308 before or after instruction INSN. */
6310 static int
6311 emit_note_insn_var_location (void **varp, void *data)
6313 variable var = (variable) *varp;
6314 rtx insn = ((emit_note_data *)data)->insn;
6315 enum emit_note_where where = ((emit_note_data *)data)->where;
6316 htab_t vars = ((emit_note_data *)data)->vars;
6317 rtx note;
6318 int i, j, n_var_parts;
6319 bool complete;
6320 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6321 HOST_WIDE_INT last_limit;
6322 tree type_size_unit;
6323 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6324 rtx loc[MAX_VAR_PARTS];
6325 tree decl;
6327 if (dv_is_value_p (var->dv))
6328 goto clear;
6330 decl = dv_as_decl (var->dv);
6332 gcc_assert (decl);
6334 complete = true;
6335 last_limit = 0;
6336 n_var_parts = 0;
6337 for (i = 0; i < var->n_var_parts; i++)
6339 enum machine_mode mode, wider_mode;
6340 rtx loc2;
6342 if (last_limit < var->var_part[i].offset)
6344 complete = false;
6345 break;
6347 else if (last_limit > var->var_part[i].offset)
6348 continue;
6349 offsets[n_var_parts] = var->var_part[i].offset;
6350 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6351 if (!loc2)
6353 complete = false;
6354 continue;
6356 loc[n_var_parts] = loc2;
6357 mode = GET_MODE (loc[n_var_parts]);
6358 initialized = var->var_part[i].loc_chain->init;
6359 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6361 /* Attempt to merge adjacent registers or memory. */
6362 wider_mode = GET_MODE_WIDER_MODE (mode);
6363 for (j = i + 1; j < var->n_var_parts; j++)
6364 if (last_limit <= var->var_part[j].offset)
6365 break;
6366 if (j < var->n_var_parts
6367 && wider_mode != VOIDmode
6368 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6369 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6370 && mode == GET_MODE (loc2)
6371 && last_limit == var->var_part[j].offset)
6373 rtx new_loc = NULL;
6375 if (REG_P (loc[n_var_parts])
6376 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6377 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6378 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6379 == REGNO (loc2))
6381 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6382 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6383 mode, 0);
6384 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6385 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6386 if (new_loc)
6388 if (!REG_P (new_loc)
6389 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6390 new_loc = NULL;
6391 else
6392 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
6395 else if (MEM_P (loc[n_var_parts])
6396 && GET_CODE (XEXP (loc2, 0)) == PLUS
6397 && REG_P (XEXP (XEXP (loc2, 0), 0))
6398 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6400 if ((REG_P (XEXP (loc[n_var_parts], 0))
6401 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6402 XEXP (XEXP (loc2, 0), 0))
6403 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6404 == GET_MODE_SIZE (mode))
6405 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6406 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6407 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6408 XEXP (XEXP (loc2, 0), 0))
6409 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6410 + GET_MODE_SIZE (mode)
6411 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6412 new_loc = adjust_address_nv (loc[n_var_parts],
6413 wider_mode, 0);
6416 if (new_loc)
6418 loc[n_var_parts] = new_loc;
6419 mode = wider_mode;
6420 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6421 i = j;
6424 ++n_var_parts;
6426 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6427 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6428 complete = false;
6430 if (where != EMIT_NOTE_BEFORE_INSN)
6432 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6433 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6434 NOTE_DURING_CALL_P (note) = true;
6436 else
6437 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6439 if (! flag_var_tracking_uninit)
6440 initialized = VAR_INIT_STATUS_INITIALIZED;
6442 if (!complete)
6444 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6445 NULL_RTX, (int) initialized);
6447 else if (n_var_parts == 1)
6449 rtx expr_list
6450 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6452 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6453 expr_list,
6454 (int) initialized);
6456 else if (n_var_parts)
6458 rtx parallel;
6460 for (i = 0; i < n_var_parts; i++)
6461 loc[i]
6462 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6464 parallel = gen_rtx_PARALLEL (VOIDmode,
6465 gen_rtvec_v (n_var_parts, loc));
6466 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6467 parallel,
6468 (int) initialized);
6471 clear:
6472 set_dv_changed (var->dv, false);
6473 htab_clear_slot (changed_variables, varp);
6475 /* Continue traversing the hash table. */
6476 return 1;
6479 DEF_VEC_P (variable);
6480 DEF_VEC_ALLOC_P (variable, heap);
6482 /* Stack of variable_def pointers that need processing with
6483 check_changed_vars_2. */
6485 static VEC (variable, heap) *changed_variables_stack;
6487 /* Populate changed_variables_stack with variable_def pointers
6488 that need variable_was_changed called on them. */
6490 static int
6491 check_changed_vars_1 (void **slot, void *data)
6493 variable var = (variable) *slot;
6494 htab_t htab = (htab_t) data;
6496 if (dv_is_value_p (var->dv))
6498 value_chain vc
6499 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6500 dv_htab_hash (var->dv));
6502 if (vc == NULL)
6503 return 1;
6504 for (vc = vc->next; vc; vc = vc->next)
6505 if (!dv_changed_p (vc->dv))
6507 variable vcvar
6508 = (variable) htab_find_with_hash (htab, vc->dv,
6509 dv_htab_hash (vc->dv));
6510 if (vcvar)
6511 VEC_safe_push (variable, heap, changed_variables_stack,
6512 vcvar);
6515 return 1;
6518 /* Add VAR to changed_variables and, for VALUEs, also recursively add
6519 all DVs that aren't in changed_variables yet but reference the
6520 VALUE from their loc_chains. */
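/* Note that, unlike check_changed_vars_1 above which merely collects
   candidates on changed_variables_stack, this function calls
   variable_was_changed and may therefore insert into changed_variables;
   it is only run after the hash table traversal in emit_notes_for_changes
   has finished, which is what makes the two-step scheme safe. */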
6522 static void
6523 check_changed_vars_2 (variable var, htab_t htab)
6525 variable_was_changed (var, NULL);
6526 if (dv_is_value_p (var->dv))
6528 value_chain vc
6529 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6530 dv_htab_hash (var->dv));
6532 if (vc == NULL)
6533 return;
6534 for (vc = vc->next; vc; vc = vc->next)
6535 if (!dv_changed_p (vc->dv))
6537 variable vcvar
6538 = (variable) htab_find_with_hash (htab, vc->dv,
6539 dv_htab_hash (vc->dv));
6540 if (vcvar)
6541 check_changed_vars_2 (vcvar, htab);
6546 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
6547 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6548 shall be emitted before or after instruction INSN. */
6550 static void
6551 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6552 shared_hash vars)
6554 emit_note_data data;
6555 htab_t htab = shared_hash_htab (vars);
6557 if (!htab_elements (changed_variables))
6558 return;
6560 if (MAY_HAVE_DEBUG_INSNS)
6562 /* Unfortunately this has to be done in two steps, because
6563 we can't traverse a hashtab into which we are inserting
6564 through variable_was_changed. */
6565 htab_traverse (changed_variables, check_changed_vars_1, htab);
6566 while (VEC_length (variable, changed_variables_stack) > 0)
6567 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6568 htab);
6571 data.insn = insn;
6572 data.where = where;
6573 data.vars = htab;
6575 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6578 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6579 same variable in hash table DATA or is not there at all. */
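/* This is one half of a set difference: it reports variables that have
   disappeared or changed between the two hash tables, while
   emit_notes_for_differences_2 below reports the ones that have newly
   appeared. */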
6581 static int
6582 emit_notes_for_differences_1 (void **slot, void *data)
6584 htab_t new_vars = (htab_t) data;
6585 variable old_var, new_var;
6587 old_var = (variable) *slot;
6588 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6589 dv_htab_hash (old_var->dv));
6591 if (!new_var)
6593 /* Variable has disappeared. */
6594 variable empty_var;
6596 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6597 empty_var->dv = old_var->dv;
6598 empty_var->refcount = 0;
6599 empty_var->n_var_parts = 0;
6600 if (dv_onepart_p (old_var->dv))
6602 location_chain lc;
6604 gcc_assert (old_var->n_var_parts == 1);
6605 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6606 remove_value_chains (old_var->dv, lc->loc);
6607 if (dv_is_value_p (old_var->dv))
6608 remove_cselib_value_chains (old_var->dv);
6610 variable_was_changed (empty_var, NULL);
6612 else if (variable_different_p (old_var, new_var, true))
6614 if (dv_onepart_p (old_var->dv))
6616 location_chain lc1, lc2;
6618 gcc_assert (old_var->n_var_parts == 1);
6619 gcc_assert (new_var->n_var_parts == 1);
6620 lc1 = old_var->var_part[0].loc_chain;
6621 lc2 = new_var->var_part[0].loc_chain;
6622 while (lc1
6623 && lc2
6624 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6625 || rtx_equal_p (lc1->loc, lc2->loc)))
6627 lc1 = lc1->next;
6628 lc2 = lc2->next;
6630 for (; lc2; lc2 = lc2->next)
6631 add_value_chains (old_var->dv, lc2->loc);
6632 for (; lc1; lc1 = lc1->next)
6633 remove_value_chains (old_var->dv, lc1->loc);
6635 variable_was_changed (new_var, NULL);
6638 /* Continue traversing the hash table. */
6639 return 1;
6642 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6643 table DATA. */
6645 static int
6646 emit_notes_for_differences_2 (void **slot, void *data)
6648 htab_t old_vars = (htab_t) data;
6649 variable old_var, new_var;
6651 new_var = (variable) *slot;
6652 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6653 dv_htab_hash (new_var->dv));
6654 if (!old_var)
6656 /* Variable has appeared. */
6657 if (dv_onepart_p (new_var->dv))
6659 location_chain lc;
6661 gcc_assert (new_var->n_var_parts == 1);
6662 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6663 add_value_chains (new_var->dv, lc->loc);
6664 if (dv_is_value_p (new_var->dv))
6665 add_cselib_value_chains (new_var->dv);
6667 variable_was_changed (new_var, NULL);
6670 /* Continue traversing the hash table. */
6671 return 1;
6674 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6675 NEW_SET. */
6677 static void
6678 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6679 dataflow_set *new_set)
6681 htab_traverse (shared_hash_htab (old_set->vars),
6682 emit_notes_for_differences_1,
6683 shared_hash_htab (new_set->vars));
6684 htab_traverse (shared_hash_htab (new_set->vars),
6685 emit_notes_for_differences_2,
6686 shared_hash_htab (old_set->vars));
6687 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
6690 /* Emit the notes for changes of location parts in the basic block BB. */
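/* Roughly speaking, each case below first updates SET the same way the
   dataflow phase did and then calls emit_notes_for_changes: notes for
   plain uses are emitted right after the instruction, notes for sets,
   copies and clobbers before the following instruction (NEXT_INSN), and
   notes emitted for a call insn are marked with NOTE_DURING_CALL_P. */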
6692 static void
6693 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6695 int i;
6697 dataflow_set_clear (set);
6698 dataflow_set_copy (set, &VTI (bb)->in);
6700 for (i = 0; i < VTI (bb)->n_mos; i++)
6702 rtx insn = VTI (bb)->mos[i].insn;
6704 switch (VTI (bb)->mos[i].type)
6706 case MO_CALL:
6707 dataflow_set_clear_at_call (set);
6708 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
6709 break;
6711 case MO_USE:
6713 rtx loc = VTI (bb)->mos[i].u.loc;
6715 if (REG_P (loc))
6716 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6717 else
6718 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6720 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6722 break;
6724 case MO_VAL_LOC:
6726 rtx loc = VTI (bb)->mos[i].u.loc;
6727 rtx val, vloc;
6728 tree var;
6730 if (GET_CODE (loc) == CONCAT)
6732 val = XEXP (loc, 0);
6733 vloc = XEXP (loc, 1);
6735 else
6737 val = NULL_RTX;
6738 vloc = loc;
6741 var = PAT_VAR_LOCATION_DECL (vloc);
6743 clobber_variable_part (set, NULL_RTX,
6744 dv_from_decl (var), 0, NULL_RTX);
6745 if (val)
6747 if (VAL_NEEDS_RESOLUTION (loc))
6748 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6749 set_variable_part (set, val, dv_from_decl (var), 0,
6750 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6751 INSERT);
6754 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6756 break;
6758 case MO_VAL_USE:
6760 rtx loc = VTI (bb)->mos[i].u.loc;
6761 rtx val, vloc, uloc;
6763 vloc = uloc = XEXP (loc, 1);
6764 val = XEXP (loc, 0);
6766 if (GET_CODE (val) == CONCAT)
6768 uloc = XEXP (val, 1);
6769 val = XEXP (val, 0);
6772 if (VAL_NEEDS_RESOLUTION (loc))
6773 val_resolve (set, val, vloc, insn);
6775 if (VAL_HOLDS_TRACK_EXPR (loc))
6777 if (GET_CODE (uloc) == REG)
6778 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6779 NULL);
6780 else if (GET_CODE (uloc) == MEM)
6781 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6782 NULL);
6785 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
6787 break;
6789 case MO_VAL_SET:
6791 rtx loc = VTI (bb)->mos[i].u.loc;
6792 rtx val, vloc, uloc;
6794 vloc = uloc = XEXP (loc, 1);
6795 val = XEXP (loc, 0);
6797 if (GET_CODE (val) == CONCAT)
6799 vloc = XEXP (val, 1);
6800 val = XEXP (val, 0);
6803 if (GET_CODE (vloc) == SET)
6805 rtx vsrc = SET_SRC (vloc);
6807 gcc_assert (val != vsrc);
6808 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6810 vloc = SET_DEST (vloc);
6812 if (VAL_NEEDS_RESOLUTION (loc))
6813 val_resolve (set, val, vsrc, insn);
6815 else if (VAL_NEEDS_RESOLUTION (loc))
6817 gcc_assert (GET_CODE (uloc) == SET
6818 && GET_CODE (SET_SRC (uloc)) == REG);
6819 val_resolve (set, val, SET_SRC (uloc), insn);
6822 if (VAL_HOLDS_TRACK_EXPR (loc))
6824 if (VAL_EXPR_IS_CLOBBERED (loc))
6826 if (REG_P (uloc))
6827 var_reg_delete (set, uloc, true);
6828 else if (MEM_P (uloc))
6829 var_mem_delete (set, uloc, true);
6831 else
6833 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6834 rtx set_src = NULL;
6835 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6837 if (GET_CODE (uloc) == SET)
6839 set_src = SET_SRC (uloc);
6840 uloc = SET_DEST (uloc);
6843 if (copied_p)
6845 status = find_src_status (set, set_src);
6847 set_src = find_src_set_src (set, set_src);
6850 if (REG_P (uloc))
6851 var_reg_delete_and_set (set, uloc, !copied_p,
6852 status, set_src);
6853 else if (MEM_P (uloc))
6854 var_mem_delete_and_set (set, uloc, !copied_p,
6855 status, set_src);
6858 else if (REG_P (uloc))
6859 var_regno_delete (set, REGNO (uloc));
6861 val_store (set, val, vloc, insn);
6863 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6864 set->vars);
6866 break;
6868 case MO_SET:
6870 rtx loc = VTI (bb)->mos[i].u.loc;
6871 rtx set_src = NULL;
6873 if (GET_CODE (loc) == SET)
6875 set_src = SET_SRC (loc);
6876 loc = SET_DEST (loc);
6879 if (REG_P (loc))
6880 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
6881 set_src);
6882 else
6883 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
6884 set_src);
6886 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6887 set->vars);
6889 break;
6891 case MO_COPY:
6893 rtx loc = VTI (bb)->mos[i].u.loc;
6894 enum var_init_status src_status;
6895 rtx set_src = NULL;
6897 if (GET_CODE (loc) == SET)
6899 set_src = SET_SRC (loc);
6900 loc = SET_DEST (loc);
6903 src_status = find_src_status (set, set_src);
6904 set_src = find_src_set_src (set, set_src);
6906 if (REG_P (loc))
6907 var_reg_delete_and_set (set, loc, false, src_status, set_src);
6908 else
6909 var_mem_delete_and_set (set, loc, false, src_status, set_src);
6911 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6912 set->vars);
6914 break;
6916 case MO_USE_NO_VAR:
6918 rtx loc = VTI (bb)->mos[i].u.loc;
6920 if (REG_P (loc))
6921 var_reg_delete (set, loc, false);
6922 else
6923 var_mem_delete (set, loc, false);
6925 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6927 break;
6929 case MO_CLOBBER:
6931 rtx loc = VTI (bb)->mos[i].u.loc;
6933 if (REG_P (loc))
6934 var_reg_delete (set, loc, true);
6935 else
6936 var_mem_delete (set, loc, true);
6938 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6939 set->vars);
6941 break;
6943 case MO_ADJUST:
6944 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
6945 break;
6950 /* Emit notes for the whole function. */
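/* A single scratch set CUR is carried from block to block: for each block
   we first emit notes for the differences between CUR and the block's IN
   set, and emit_notes_in_bb then replays the block's micro operations on
   CUR, emitting notes for the changes within the block. */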
6952 static void
6953 vt_emit_notes (void)
6955 basic_block bb;
6956 dataflow_set cur;
6958 gcc_assert (!htab_elements (changed_variables));
6960 /* Free memory occupied by the out hash tables, as they aren't used
6961 anymore. */
6962 FOR_EACH_BB (bb)
6963 dataflow_set_clear (&VTI (bb)->out);
6965 /* Enable the emitting of notes by helper functions (mainly set_variable_part and
6966 delete_variable_part). */
6967 emit_notes = true;
6969 if (MAY_HAVE_DEBUG_INSNS)
6970 changed_variables_stack = VEC_alloc (variable, heap, 40);
6972 dataflow_set_init (&cur);
6974 FOR_EACH_BB (bb)
6976 /* Emit the notes for changes of variable locations between two
6977 consecutive basic blocks. */
6978 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
6980 /* Emit the notes for the changes in the basic block itself. */
6981 emit_notes_in_bb (bb, &cur);
6983 /* Free memory occupied by the in hash table; we won't need it
6984 again. */
6985 dataflow_set_clear (&VTI (bb)->in);
6987 #ifdef ENABLE_CHECKING
6988 htab_traverse (shared_hash_htab (cur.vars),
6989 emit_notes_for_differences_1,
6990 shared_hash_htab (empty_shared_hash));
6991 if (MAY_HAVE_DEBUG_INSNS)
6992 gcc_assert (htab_elements (value_chains) == 0);
6993 #endif
6994 dataflow_set_destroy (&cur);
6996 if (MAY_HAVE_DEBUG_INSNS)
6997 VEC_free (variable, heap, changed_variables_stack);
6999 emit_notes = false;
7002 /* If there is a declaration and offset associated with register/memory RTL,
7003 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
7005 static bool
7006 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7008 if (REG_P (rtl))
7010 if (REG_ATTRS (rtl))
7012 *declp = REG_EXPR (rtl);
7013 *offsetp = REG_OFFSET (rtl);
7014 return true;
7017 else if (MEM_P (rtl))
7019 if (MEM_ATTRS (rtl))
7021 *declp = MEM_EXPR (rtl);
7022 *offsetp = INT_MEM_OFFSET (rtl);
7023 return true;
7026 return false;
7029 /* Insert the function parameters into the OUT set of ENTRY_BLOCK. */
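/* DECL_INCOMING_RTL is the location in which an argument arrives (for
   example an argument register or an incoming stack slot), while DECL_RTL
   is where the function body keeps the parameter; the code below records
   the incoming location in the OUT set of ENTRY_BLOCK so that the
   parameter is known to be live there from the first instruction on. */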
7031 static void
7032 vt_add_function_parameters (void)
7034 tree parm;
7036 for (parm = DECL_ARGUMENTS (current_function_decl);
7037 parm; parm = TREE_CHAIN (parm))
7039 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7040 rtx incoming = DECL_INCOMING_RTL (parm);
7041 tree decl;
7042 enum machine_mode mode;
7043 HOST_WIDE_INT offset;
7044 dataflow_set *out;
7045 decl_or_value dv;
7047 if (TREE_CODE (parm) != PARM_DECL)
7048 continue;
7050 if (!DECL_NAME (parm))
7051 continue;
7053 if (!decl_rtl || !incoming)
7054 continue;
7056 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7057 continue;
7059 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7061 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7062 continue;
7063 offset += byte_lowpart_offset (GET_MODE (incoming),
7064 GET_MODE (decl_rtl));
7067 if (!decl)
7068 continue;
7070 if (parm != decl)
7072 /* Assume that DECL_RTL was a pseudo that got spilled to
7073 memory. The spill slot sharing code will force the
7074 memory to reference spill_slot_decl (%sfp), so we don't
7075 match above. That's ok, the pseudo must have referenced
7076 the entire parameter, so just reset OFFSET. */
7077 gcc_assert (decl == get_spill_slot_decl (false));
7078 offset = 0;
7081 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7082 continue;
7084 out = &VTI (ENTRY_BLOCK_PTR)->out;
7086 dv = dv_from_decl (parm);
7088 if (target_for_debug_bind (parm)
7089 /* We can't deal with these right now, because this kind of
7090 variable is single-part. ??? We could handle parallels
7091 that describe multiple locations for the same single
7092 value, but ATM we don't. */
7093 && GET_CODE (incoming) != PARALLEL)
7095 cselib_val *val;
7097 /* ??? We shouldn't ever hit this, but it may happen because
7098 arguments passed by invisible reference aren't dealt with
7099 above: incoming-rtl will have Pmode rather than the
7100 expected mode for the type. */
7101 if (offset)
7102 continue;
7104 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7106 /* ??? Float-typed values in memory are not handled by
7107 cselib. */
7108 if (val)
7110 cselib_preserve_value (val);
7111 set_variable_part (out, val->val_rtx, dv, offset,
7112 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7113 dv = dv_from_value (val->val_rtx);
7117 if (REG_P (incoming))
7119 incoming = var_lowpart (mode, incoming);
7120 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7121 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7122 incoming);
7123 set_variable_part (out, incoming, dv, offset,
7124 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7126 else if (MEM_P (incoming))
7128 incoming = var_lowpart (mode, incoming);
7129 set_variable_part (out, incoming, dv, offset,
7130 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7134 if (MAY_HAVE_DEBUG_INSNS)
7136 cselib_preserve_only_values (true);
7137 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
7142 /* Allocate and initialize the data structures for variable tracking
7143 and parse the RTL to get the micro operations. */
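/* Each basic block is scanned twice: a first pass only counts the micro
   operations so that the mos array can be allocated with the right size,
   and a second pass actually records them. When debug insns may be
   present, cselib is run over the block in both passes and reset to the
   same starting value number in between, so that both passes see
   identical VALUEs. */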
7145 static void
7146 vt_initialize (void)
7148 basic_block bb;
7150 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7152 if (MAY_HAVE_DEBUG_INSNS)
7154 cselib_init (true);
7155 scratch_regs = BITMAP_ALLOC (NULL);
7156 valvar_pool = create_alloc_pool ("small variable_def pool",
7157 sizeof (struct variable_def), 256);
7159 else
7161 scratch_regs = NULL;
7162 valvar_pool = NULL;
7165 FOR_EACH_BB (bb)
7167 rtx insn;
7168 HOST_WIDE_INT pre, post = 0;
7169 int count;
7170 unsigned int next_value_before = cselib_get_next_unknown_value ();
7171 unsigned int next_value_after = next_value_before;
7173 if (MAY_HAVE_DEBUG_INSNS)
7175 cselib_record_sets_hook = count_with_sets;
7176 if (dump_file && (dump_flags & TDF_DETAILS))
7177 fprintf (dump_file, "first value: %i\n",
7178 cselib_get_next_unknown_value ());
7181 /* Count the number of micro operations. */
7182 VTI (bb)->n_mos = 0;
7183 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7184 insn = NEXT_INSN (insn))
7186 if (INSN_P (insn))
7188 if (!frame_pointer_needed)
7190 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7191 if (pre)
7193 VTI (bb)->n_mos++;
7194 if (dump_file && (dump_flags & TDF_DETAILS))
7195 log_op_type (GEN_INT (pre), bb, insn,
7196 MO_ADJUST, dump_file);
7198 if (post)
7200 VTI (bb)->n_mos++;
7201 if (dump_file && (dump_flags & TDF_DETAILS))
7202 log_op_type (GEN_INT (post), bb, insn,
7203 MO_ADJUST, dump_file);
7206 cselib_hook_called = false;
7207 if (MAY_HAVE_DEBUG_INSNS)
7209 cselib_process_insn (insn);
7210 if (dump_file && (dump_flags & TDF_DETAILS))
7212 print_rtl_single (dump_file, insn);
7213 dump_cselib_table (dump_file);
7216 if (!cselib_hook_called)
7217 count_with_sets (insn, 0, 0);
7218 if (CALL_P (insn))
7220 VTI (bb)->n_mos++;
7221 if (dump_file && (dump_flags & TDF_DETAILS))
7222 log_op_type (PATTERN (insn), bb, insn,
7223 MO_CALL, dump_file);
7228 count = VTI (bb)->n_mos;
7230 if (MAY_HAVE_DEBUG_INSNS)
7232 cselib_preserve_only_values (false);
7233 next_value_after = cselib_get_next_unknown_value ();
7234 cselib_reset_table_with_next_value (next_value_before);
7235 cselib_record_sets_hook = add_with_sets;
7236 if (dump_file && (dump_flags & TDF_DETAILS))
7237 fprintf (dump_file, "first value: %i\n",
7238 cselib_get_next_unknown_value ());
7241 /* Add the micro-operations to the array. */
7242 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7243 VTI (bb)->n_mos = 0;
7244 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7245 insn = NEXT_INSN (insn))
7247 if (INSN_P (insn))
7249 if (!frame_pointer_needed)
7251 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7252 if (pre)
7254 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7256 mo->type = MO_ADJUST;
7257 mo->u.adjust = pre;
7258 mo->insn = insn;
7260 if (dump_file && (dump_flags & TDF_DETAILS))
7261 log_op_type (PATTERN (insn), bb, insn,
7262 MO_ADJUST, dump_file);
7266 cselib_hook_called = false;
7267 if (MAY_HAVE_DEBUG_INSNS)
7269 cselib_process_insn (insn);
7270 if (dump_file && (dump_flags & TDF_DETAILS))
7272 print_rtl_single (dump_file, insn);
7273 dump_cselib_table (dump_file);
7276 if (!cselib_hook_called)
7277 add_with_sets (insn, 0, 0);
7279 if (!frame_pointer_needed && post)
7281 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7283 mo->type = MO_ADJUST;
7284 mo->u.adjust = post;
7285 mo->insn = insn;
7287 if (dump_file && (dump_flags & TDF_DETAILS))
7288 log_op_type (PATTERN (insn), bb, insn,
7289 MO_ADJUST, dump_file);
7293 gcc_assert (count == VTI (bb)->n_mos);
7294 if (MAY_HAVE_DEBUG_INSNS)
7296 cselib_preserve_only_values (true);
7297 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7298 cselib_reset_table_with_next_value (next_value_after);
7299 cselib_record_sets_hook = NULL;
7303 attrs_pool = create_alloc_pool ("attrs_def pool",
7304 sizeof (struct attrs_def), 1024);
7305 var_pool = create_alloc_pool ("variable_def pool",
7306 sizeof (struct variable_def)
7307 + (MAX_VAR_PARTS - 1)
7308 * sizeof (((variable)NULL)->var_part[0]), 64);
7309 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7310 sizeof (struct location_chain_def),
7311 1024);
7312 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7313 sizeof (struct shared_hash_def), 256);
7314 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7315 empty_shared_hash->refcount = 1;
7316 empty_shared_hash->htab
7317 = htab_create (1, variable_htab_hash, variable_htab_eq,
7318 variable_htab_free);
7319 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7320 variable_htab_free);
7321 if (MAY_HAVE_DEBUG_INSNS)
7323 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7324 sizeof (struct value_chain_def),
7325 1024);
7326 value_chains = htab_create (32, value_chain_htab_hash,
7327 value_chain_htab_eq, NULL);
7330 /* Init the IN and OUT sets. */
7331 FOR_ALL_BB (bb)
7333 VTI (bb)->visited = false;
7334 VTI (bb)->flooded = false;
7335 dataflow_set_init (&VTI (bb)->in);
7336 dataflow_set_init (&VTI (bb)->out);
7337 VTI (bb)->permp = NULL;
7340 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7341 vt_add_function_parameters ();
7344 /* Get rid of all debug insns from the insn stream. */
7346 static void
7347 delete_debug_insns (void)
7349 basic_block bb;
7350 rtx insn, next;
7352 if (!MAY_HAVE_DEBUG_INSNS)
7353 return;
7355 FOR_EACH_BB (bb)
7357 FOR_BB_INSNS_SAFE (bb, insn, next)
7358 if (DEBUG_INSN_P (insn))
7359 delete_insn (insn);
7363 /* Run a fast, BB-local-only version of var tracking, to take care of
7364 information that we don't do global analysis on, so that not all of
7365 that information is lost. If SKIPPED holds, we're skipping the global
7366 pass entirely, so we should try to use the information it would have
7367 handled as well. */
7369 static void
7370 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7372 /* ??? Just skip it all for now. */
7373 delete_debug_insns ();
7376 /* Free the data structures needed for variable tracking. */
7378 static void
7379 vt_finalize (void)
7381 basic_block bb;
7383 FOR_EACH_BB (bb)
7385 free (VTI (bb)->mos);
7388 FOR_ALL_BB (bb)
7390 dataflow_set_destroy (&VTI (bb)->in);
7391 dataflow_set_destroy (&VTI (bb)->out);
7392 if (VTI (bb)->permp)
7394 dataflow_set_destroy (VTI (bb)->permp);
7395 XDELETE (VTI (bb)->permp);
7398 free_aux_for_blocks ();
7399 htab_delete (empty_shared_hash->htab);
7400 htab_delete (changed_variables);
7401 free_alloc_pool (attrs_pool);
7402 free_alloc_pool (var_pool);
7403 free_alloc_pool (loc_chain_pool);
7404 free_alloc_pool (shared_hash_pool);
7406 if (MAY_HAVE_DEBUG_INSNS)
7408 htab_delete (value_chains);
7409 free_alloc_pool (value_chain_pool);
7410 free_alloc_pool (valvar_pool);
7411 cselib_finish ();
7412 BITMAP_FREE (scratch_regs);
7413 scratch_regs = NULL;
7416 if (vui_vec)
7417 XDELETEVEC (vui_vec);
7418 vui_vec = NULL;
7419 vui_allocated = 0;
7422 /* The entry point to the variable tracking pass. */
7424 unsigned int
7425 variable_tracking_main (void)
7427 if (flag_var_tracking_assignments < 0)
7429 delete_debug_insns ();
7430 return 0;
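/* Heuristic bailout: for very large and densely connected CFGs the global
   dataflow analysis below would presumably be too expensive, so only the
   cheap BB-local handling of debug insns is attempted. */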
7433 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7435 vt_debug_insns_local (true);
7436 return 0;
7439 mark_dfs_back_edges ();
7440 vt_initialize ();
7441 if (!frame_pointer_needed)
7443 if (!vt_stack_adjustments ())
7445 vt_finalize ();
7446 vt_debug_insns_local (true);
7447 return 0;
7451 vt_find_locations ();
7453 if (dump_file && (dump_flags & TDF_DETAILS))
7455 dump_dataflow_sets ();
7456 dump_flow_info (dump_file, dump_flags);
7459 vt_emit_notes ();
7461 vt_finalize ();
7462 vt_debug_insns_local (false);
7463 return 0;
7466 static bool
7467 gate_handle_var_tracking (void)
7469 return (flag_var_tracking);
7474 struct rtl_opt_pass pass_variable_tracking =
7477 RTL_PASS,
7478 "vartrack", /* name */
7479 gate_handle_var_tracking, /* gate */
7480 variable_tracking_main, /* execute */
7481 NULL, /* sub */
7482 NULL, /* next */
7483 0, /* static_pass_number */
7484 TV_VAR_TRACKING, /* tv_id */
7485 0, /* properties_required */
7486 0, /* properties_provided */
7487 0, /* properties_destroyed */
7488 0, /* todo_flags_start */
7489 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */