1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along the control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
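   As a rough sketch (illustrative pseudocode only, not the actual
   implementation, which processes the blocks with a worklist), the
   analysis computes a fixed point of

     IN (BB)  = union of OUT (P) over all predecessors P of BB
     OUT (BB) = IN (BB) transformed by the micro operations of BB

   re-queuing a block whenever the OUT set of one of its predecessors
   changes.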
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and, for each physical register, a linked list of the variable parts
50 stored in that register. The linked list is a list of triplets
51 (reg, decl, offset) where decl is REG_EXPR (reg) and offset is
52 REG_OFFSET (reg). The linked list is used to efficiently delete the
53 appropriate variable parts when the register is set or
54 clobbered.
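   For instance (a made-up example), if register r3 holds bytes 4..7 of a
   variable A, the list for r3 contains the triplet (r3, A, 4).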
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a linked list is a good data structure here.
58 For example, in the following code the register allocator may assign the same
59 register to variables A and B, and both of them are stored in the same
60 register in CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
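   Roughly (an illustrative sketch; the exact dump format may differ), a
   note for a simple variable looks like

     (note NOTE_INSN_VAR_LOCATION (var_location A (reg:SI 3)))

   while a variable split across several locations carries a
   (parallel ...) of such references as its location.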
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
97 #include "flags.h"
98 #include "output.h"
99 #include "insn-config.h"
100 #include "reload.h"
101 #include "sbitmap.h"
102 #include "alloc-pool.h"
103 #include "fibheap.h"
104 #include "hashtab.h"
105 #include "regs.h"
106 #include "expr.h"
107 #include "timevar.h"
108 #include "tree-pass.h"
109 #include "cselib.h"
110 #include "target.h"
112 /* Type of micro operation. */
113 enum micro_operation_type
115 MO_USE, /* Use location (REG or MEM). */
116 MO_USE_NO_VAR,/* Use location which is not associated with a variable
117 or the variable is not trackable. */
118 MO_VAL_USE, /* Use location which is associated with a value. */
119 MO_VAL_LOC, /* Use location which appears in a debug insn. */
120 MO_VAL_SET, /* Set location associated with a value. */
121 MO_SET, /* Set location. */
122 MO_COPY, /* Copy the same portion of a variable from one
123 location to another. */
124 MO_CLOBBER, /* Clobber location. */
125 MO_CALL, /* Call insn. */
126 MO_ADJUST /* Adjust stack pointer. */
130 static const char * const ATTRIBUTE_UNUSED
131 micro_operation_type_name[] = {
132 "MO_USE",
133 "MO_USE_NO_VAR",
134 "MO_VAL_USE",
135 "MO_VAL_LOC",
136 "MO_VAL_SET",
137 "MO_SET",
138 "MO_COPY",
139 "MO_CLOBBER",
140 "MO_CALL",
141 "MO_ADJUST"
144 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
145 Notes emitted as AFTER_CALL are to take effect during the call,
146 rather than after the call. */
147 enum emit_note_where
149 EMIT_NOTE_BEFORE_INSN,
150 EMIT_NOTE_AFTER_INSN,
151 EMIT_NOTE_AFTER_CALL_INSN
154 /* Structure holding information about micro operation. */
155 typedef struct micro_operation_def
157 /* Type of micro operation. */
158 enum micro_operation_type type;
160 union {
161 /* Location. For MO_SET and MO_COPY, this is the SET that
162 performs the assignment, if known, otherwise it is the target
163 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
164 CONCAT of the VALUE and the LOC associated with it. For
165 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
166 associated with it. */
167 rtx loc;
169 /* Stack adjustment. */
170 HOST_WIDE_INT adjust;
171 } u;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
177 and MO_CLOBBER. */
178 rtx insn;
179 } micro_operation;
181 /* A declaration of a variable, or an RTL value being handled like a
182 declaration. */
183 typedef void *decl_or_value;
185 /* Structure for passing some other parameters to function
186 emit_note_insn_var_location. */
187 typedef struct emit_note_data_def
189 /* The instruction which the note will be emitted before/after. */
190 rtx insn;
192 /* Where the note will be emitted (before/after insn)? */
193 enum emit_note_where where;
195 /* The variables and values active at this point. */
196 htab_t vars;
197 } emit_note_data;
199 /* Description of location of a part of a variable. The content of a physical
200 register is described by a chain of these structures.
201 The chains are pretty short (usually 1 or 2 elements) and thus
202 a chain is a good data structure here. */
203 typedef struct attrs_def
205 /* Pointer to next member of the list. */
206 struct attrs_def *next;
208 /* The rtx of register. */
209 rtx loc;
211 /* The declaration corresponding to LOC. */
212 decl_or_value dv;
214 /* Offset from start of DECL. */
215 HOST_WIDE_INT offset;
216 } *attrs;
218 /* Structure holding a refcounted hash table. If refcount > 1,
219 it must first be unshared before being modified. */
220 typedef struct shared_hash_def
222 /* Reference count. */
223 int refcount;
225 /* Actual hash table. */
226 htab_t htab;
227 } *shared_hash;
229 /* Structure holding the IN or OUT set for a basic block. */
230 typedef struct dataflow_set_def
232 /* Adjustment of stack offset. */
233 HOST_WIDE_INT stack_adjust;
235 /* Attributes for registers (lists of attrs). */
236 attrs regs[FIRST_PSEUDO_REGISTER];
238 /* Variable locations. */
239 shared_hash vars;
241 /* Vars that are currently being traversed. */
242 shared_hash traversed_vars;
243 } dataflow_set;
245 /* The structure (one for each basic block) containing the information
246 needed for variable tracking. */
247 typedef struct variable_tracking_info_def
249 /* Number of micro operations stored in the MOS array. */
250 int n_mos;
252 /* The array of micro operations. */
253 micro_operation *mos;
255 /* The IN and OUT set for dataflow analysis. */
256 dataflow_set in;
257 dataflow_set out;
259 /* The permanent-in dataflow set for this block. This is used to
260 hold values for which we had to compute entry values. ??? This
261 should probably be dynamically allocated, to avoid using more
262 memory in non-debug builds. */
263 dataflow_set *permp;
265 /* Has the block been visited in DFS? */
266 bool visited;
268 /* Has the block been flooded in VTA? */
269 bool flooded;
271 } *variable_tracking_info;
273 /* Structure for chaining the locations. */
274 typedef struct location_chain_def
276 /* Next element in the chain. */
277 struct location_chain_def *next;
279 /* The location (REG, MEM or VALUE). */
280 rtx loc;
282 /* The "value" stored in this location. */
283 rtx set_src;
285 /* Initialized? */
286 enum var_init_status init;
287 } *location_chain;
289 /* Structure describing one part of variable. */
290 typedef struct variable_part_def
292 /* Chain of locations of the part. */
293 location_chain loc_chain;
295 /* Location which was last emitted to location list. */
296 rtx cur_loc;
298 /* The offset in the variable. */
299 HOST_WIDE_INT offset;
300 } variable_part;
302 /* Maximum number of location parts. */
303 #define MAX_VAR_PARTS 16
305 /* Structure describing where the variable is located. */
306 typedef struct variable_def
308 /* The declaration of the variable, or an RTL value being handled
309 like a declaration. */
310 decl_or_value dv;
312 /* Reference count. */
313 int refcount;
315 /* Number of variable parts. */
316 int n_var_parts;
318 /* The variable parts. */
319 variable_part var_part[1];
320 } *variable;
321 typedef const struct variable_def *const_variable;
323 /* Structure for chaining backlinks from referenced VALUEs to
324 DVs that are referencing them. */
325 typedef struct value_chain_def
327 /* Next value_chain entry. */
328 struct value_chain_def *next;
330 /* The declaration of the variable, or an RTL value
331 being handled like a declaration, whose var_parts[0].loc_chain
332 references the VALUE owning this value_chain. */
333 decl_or_value dv;
335 /* Reference count. */
336 int refcount;
337 } *value_chain;
338 typedef const struct value_chain_def *const_value_chain;
340 /* Hash function for DECL for VARIABLE_HTAB. */
341 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
343 /* Pointer to the BB's information specific to variable tracking pass. */
344 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
346 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
347 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
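/* Because INT_MEM_OFFSET evaluates its argument twice, callers must not
   pass an expression with side effects: e.g. (hypothetically)
   INT_MEM_OFFSET (*memp++) would increment MEMP twice.  */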
349 /* Alloc pool for struct attrs_def. */
350 static alloc_pool attrs_pool;
352 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
353 static alloc_pool var_pool;
355 /* Alloc pool for struct variable_def with a single var_part entry. */
356 static alloc_pool valvar_pool;
358 /* Alloc pool for struct location_chain_def. */
359 static alloc_pool loc_chain_pool;
361 /* Alloc pool for struct shared_hash_def. */
362 static alloc_pool shared_hash_pool;
364 /* Alloc pool for struct value_chain_def. */
365 static alloc_pool value_chain_pool;
367 /* Changed variables, notes will be emitted for them. */
368 static htab_t changed_variables;
370 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
371 static htab_t value_chains;
373 /* Shall notes be emitted? */
374 static bool emit_notes;
376 /* Empty shared hashtable. */
377 static shared_hash empty_shared_hash;
379 /* Scratch register bitmap used by cselib_expand_value_rtx. */
380 static bitmap scratch_regs = NULL;
382 /* Variable used to tell whether cselib_process_insn called our hook. */
383 static bool cselib_hook_called;
385 /* Local function prototypes. */
386 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
387 HOST_WIDE_INT *);
388 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
389 HOST_WIDE_INT *);
390 static void bb_stack_adjust_offset (basic_block);
391 static bool vt_stack_adjustments (void);
392 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
393 static hashval_t variable_htab_hash (const void *);
394 static int variable_htab_eq (const void *, const void *);
395 static void variable_htab_free (void *);
397 static void init_attrs_list_set (attrs *);
398 static void attrs_list_clear (attrs *);
399 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
400 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
401 static void attrs_list_copy (attrs *, attrs);
402 static void attrs_list_union (attrs *, attrs);
404 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
405 enum var_init_status);
406 static int vars_copy_1 (void **, void *);
407 static void vars_copy (htab_t, htab_t);
408 static tree var_debug_decl (tree);
409 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
410 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
411 enum var_init_status, rtx);
412 static void var_reg_delete (dataflow_set *, rtx, bool);
413 static void var_regno_delete (dataflow_set *, int);
414 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
415 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
416 enum var_init_status, rtx);
417 static void var_mem_delete (dataflow_set *, rtx, bool);
419 static void dataflow_set_init (dataflow_set *);
420 static void dataflow_set_clear (dataflow_set *);
421 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
422 static int variable_union_info_cmp_pos (const void *, const void *);
423 static int variable_union (void **, void *);
424 static int variable_canonicalize (void **, void *);
425 static void dataflow_set_union (dataflow_set *, dataflow_set *);
426 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
427 static bool canon_value_cmp (rtx, rtx);
428 static int loc_cmp (rtx, rtx);
429 static bool variable_part_different_p (variable_part *, variable_part *);
430 static bool onepart_variable_different_p (variable, variable);
431 static bool variable_different_p (variable, variable, bool);
432 static int dataflow_set_different_1 (void **, void *);
433 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
434 static void dataflow_set_destroy (dataflow_set *);
436 static bool contains_symbol_ref (rtx);
437 static bool track_expr_p (tree, bool);
438 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
439 static int count_uses (rtx *, void *);
440 static void count_uses_1 (rtx *, void *);
441 static void count_stores (rtx, const_rtx, void *);
442 static int add_uses (rtx *, void *);
443 static void add_uses_1 (rtx *, void *);
444 static void add_stores (rtx, const_rtx, void *);
445 static bool compute_bb_dataflow (basic_block);
446 static void vt_find_locations (void);
448 static void dump_attrs_list (attrs);
449 static int dump_variable_slot (void **, void *);
450 static void dump_variable (variable);
451 static void dump_vars (htab_t);
452 static void dump_dataflow_set (dataflow_set *);
453 static void dump_dataflow_sets (void);
455 static void variable_was_changed (variable, dataflow_set *);
456 static void **set_slot_part (dataflow_set *, rtx, void **,
457 decl_or_value, HOST_WIDE_INT,
458 enum var_init_status, rtx);
459 static void set_variable_part (dataflow_set *, rtx,
460 decl_or_value, HOST_WIDE_INT,
461 enum var_init_status, rtx, enum insert_option);
462 static void **clobber_slot_part (dataflow_set *, rtx,
463 void **, HOST_WIDE_INT, rtx);
464 static void clobber_variable_part (dataflow_set *, rtx,
465 decl_or_value, HOST_WIDE_INT, rtx);
466 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
467 static void delete_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT);
469 static int emit_note_insn_var_location (void **, void *);
470 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
471 static int emit_notes_for_differences_1 (void **, void *);
472 static int emit_notes_for_differences_2 (void **, void *);
473 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
474 static void emit_notes_in_bb (basic_block, dataflow_set *);
475 static void vt_emit_notes (void);
477 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
478 static void vt_add_function_parameters (void);
479 static void vt_initialize (void);
480 static void vt_finalize (void);
482 /* Given a SET, calculate the amount of stack adjustment it contains,
483 separately for PRE- and POST-modifications of the stack pointer.
484 This function is similar to stack_adjust_offset. */
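/* For instance, following the sign conventions of the code below,
   (set (reg sp) (plus (reg sp) (const_int -16))) yields *post += 16, and
   (set (mem:SI (pre_dec (reg sp))) (reg r)) yields *pre += 4 on a target
   where SImode is 4 bytes wide.  */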
486 static void
487 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
488 HOST_WIDE_INT *post)
490 rtx src = SET_SRC (pattern);
491 rtx dest = SET_DEST (pattern);
492 enum rtx_code code;
494 if (dest == stack_pointer_rtx)
496 /* (set (reg sp) (plus (reg sp) (const_int))) */
497 code = GET_CODE (src);
498 if (! (code == PLUS || code == MINUS)
499 || XEXP (src, 0) != stack_pointer_rtx
500 || !CONST_INT_P (XEXP (src, 1)))
501 return;
503 if (code == MINUS)
504 *post += INTVAL (XEXP (src, 1));
505 else
506 *post -= INTVAL (XEXP (src, 1));
508 else if (MEM_P (dest))
510 /* (set (mem (pre_dec (reg sp))) (foo)) */
511 src = XEXP (dest, 0);
512 code = GET_CODE (src);
514 switch (code)
516 case PRE_MODIFY:
517 case POST_MODIFY:
518 if (XEXP (src, 0) == stack_pointer_rtx)
520 rtx val = XEXP (XEXP (src, 1), 1);
521 /* We handle only adjustments by constant amount. */
522 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
523 CONST_INT_P (val));
525 if (code == PRE_MODIFY)
526 *pre -= INTVAL (val);
527 else
528 *post -= INTVAL (val);
529 break;
531 return;
533 case PRE_DEC:
534 if (XEXP (src, 0) == stack_pointer_rtx)
536 *pre += GET_MODE_SIZE (GET_MODE (dest));
537 break;
539 return;
541 case POST_DEC:
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *post += GET_MODE_SIZE (GET_MODE (dest));
545 break;
547 return;
549 case PRE_INC:
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *pre -= GET_MODE_SIZE (GET_MODE (dest));
553 break;
555 return;
557 case POST_INC:
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *post -= GET_MODE_SIZE (GET_MODE (dest));
561 break;
563 return;
565 default:
566 return;
571 /* Given an INSN, calculate the amount of stack adjustment it contains,
572 separately for PRE- and POST-modifications of the stack pointer. */
574 static void
575 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
576 HOST_WIDE_INT *post)
578 rtx pattern;
580 *pre = 0;
581 *post = 0;
583 pattern = PATTERN (insn);
584 if (RTX_FRAME_RELATED_P (insn))
586 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
587 if (expr)
588 pattern = XEXP (expr, 0);
591 if (GET_CODE (pattern) == SET)
592 stack_adjust_offset_pre_post (pattern, pre, post);
593 else if (GET_CODE (pattern) == PARALLEL
594 || GET_CODE (pattern) == SEQUENCE)
596 int i;
598 /* There may be stack adjustments inside compound insns. Search
599 for them. */
600 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
601 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
602 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
606 /* Compute stack adjustment in basic block BB. */
608 static void
609 bb_stack_adjust_offset (basic_block bb)
611 HOST_WIDE_INT offset;
612 int i;
614 offset = VTI (bb)->in.stack_adjust;
615 for (i = 0; i < VTI (bb)->n_mos; i++)
617 if (VTI (bb)->mos[i].type == MO_ADJUST)
618 offset += VTI (bb)->mos[i].u.adjust;
619 else if (VTI (bb)->mos[i].type != MO_CALL)
621 if (MEM_P (VTI (bb)->mos[i].u.loc))
623 VTI (bb)->mos[i].u.loc
624 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
628 VTI (bb)->out.stack_adjust = offset;
631 /* Compute stack adjustments for all blocks by traversing the DFS tree.
632 Return true when the adjustments on all incoming edges are consistent.
633 Heavily borrowed from pre_and_rev_post_order_compute. */
635 static bool
636 vt_stack_adjustments (void)
638 edge_iterator *stack;
639 int sp;
641 /* Initialize entry block. */
642 VTI (ENTRY_BLOCK_PTR)->visited = true;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
647 sp = 0;
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
652 while (sp)
654 edge_iterator ei;
655 basic_block src;
656 basic_block dest;
658 /* Look at the edge on the top of the stack. */
659 ei = stack[sp - 1];
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
666 VTI (dest)->visited = true;
667 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
668 bb_stack_adjust_offset (dest);
670 if (EDGE_COUNT (dest->succs) > 0)
671 /* Since the DEST node has been visited for the first
672 time, check its successors. */
673 stack[sp++] = ei_start (dest->succs);
675 else
677 /* Check whether the adjustments on the edges are the same. */
678 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
680 free (stack);
681 return false;
684 if (! ei_one_before_end_p (ei))
685 /* Go to the next edge. */
686 ei_next (&stack[sp - 1]);
687 else
688 /* Return to previous level if there are no more edges. */
689 sp--;
693 free (stack);
694 return true;
697 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
698 to the argument pointer. Return the new rtx. */
700 static rtx
701 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
703 rtx addr, cfa, tmp;
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
707 cfa = plus_constant (frame_pointer_rtx, adjustment);
708 #else
709 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
710 cfa = plus_constant (arg_pointer_rtx, adjustment);
711 #endif
713 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
714 tmp = simplify_rtx (addr);
715 if (tmp)
716 addr = tmp;
718 return replace_equiv_address_nv (mem, addr);
721 /* Return true if a decl_or_value DV is a DECL or NULL. */
722 static inline bool
723 dv_is_decl_p (decl_or_value dv)
725 if (!dv)
726 return true;
728 /* Make sure relevant codes don't overlap. */
729 switch ((int)TREE_CODE ((tree)dv))
731 case (int)VAR_DECL:
732 case (int)PARM_DECL:
733 case (int)RESULT_DECL:
734 case (int)FUNCTION_DECL:
735 case (int)DEBUG_EXPR_DECL:
736 case (int)COMPONENT_REF:
737 return true;
739 case (int)VALUE:
740 return false;
742 default:
743 gcc_unreachable ();
747 /* Return true if a decl_or_value is a VALUE rtl. */
748 static inline bool
749 dv_is_value_p (decl_or_value dv)
751 return dv && !dv_is_decl_p (dv);
754 /* Return the decl in the decl_or_value. */
755 static inline tree
756 dv_as_decl (decl_or_value dv)
758 gcc_assert (dv_is_decl_p (dv));
759 return (tree) dv;
762 /* Return the value in the decl_or_value. */
763 static inline rtx
764 dv_as_value (decl_or_value dv)
766 gcc_assert (dv_is_value_p (dv));
767 return (rtx)dv;
770 /* Return the opaque pointer in the decl_or_value. */
771 static inline void *
772 dv_as_opaque (decl_or_value dv)
774 return dv;
777 /* Return true if a decl_or_value must not have more than one variable
778 part. */
779 static inline bool
780 dv_onepart_p (decl_or_value dv)
782 tree decl;
784 if (!MAY_HAVE_DEBUG_INSNS)
785 return false;
787 if (dv_is_value_p (dv))
788 return true;
790 decl = dv_as_decl (dv);
792 if (!decl)
793 return true;
795 return (target_for_debug_bind (decl) != NULL_TREE);
798 /* Return the variable pool to be used for dv, depending on whether it
799 can have multiple parts or not. */
800 static inline alloc_pool
801 dv_pool (decl_or_value dv)
803 return dv_onepart_p (dv) ? valvar_pool : var_pool;
806 /* Build a decl_or_value out of a decl. */
807 static inline decl_or_value
808 dv_from_decl (tree decl)
810 decl_or_value dv;
811 dv = decl;
812 gcc_assert (dv_is_decl_p (dv));
813 return dv;
816 /* Build a decl_or_value out of a value. */
817 static inline decl_or_value
818 dv_from_value (rtx value)
820 decl_or_value dv;
821 dv = value;
822 gcc_assert (dv_is_value_p (dv));
823 return dv;
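/* An illustrative (hypothetical) use of the accessors above:

     decl_or_value dv = dv_from_decl (some_decl);
     gcc_assert (!dv_is_value_p (dv));
     tree d = dv_as_decl (dv);

     decl_or_value dw = dv_from_value (some_value);
     gcc_assert (dv_is_value_p (dw));
     rtx v = dv_as_value (dw);

   The same void pointer thus carries either a tree declaration or a
   VALUE rtx, and the predicates distinguish the two cases.  */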
826 static inline hashval_t
827 dv_htab_hash (decl_or_value dv)
829 if (dv_is_value_p (dv))
830 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
831 else
832 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
835 /* The hash function for variable_htab, computes the hash value
836 from the declaration of variable X. */
838 static hashval_t
839 variable_htab_hash (const void *x)
841 const_variable const v = (const_variable) x;
843 return dv_htab_hash (v->dv);
846 /* Compare the declaration of variable X with declaration Y. */
848 static int
849 variable_htab_eq (const void *x, const void *y)
851 const_variable const v = (const_variable) x;
852 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
854 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
855 return true;
857 #if ENABLE_CHECKING
859 bool visv, dvisv;
861 visv = dv_is_value_p (v->dv);
862 dvisv = dv_is_value_p (dv);
864 if (visv != dvisv)
865 return false;
867 if (visv)
868 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
869 != CSELIB_VAL_PTR (dv_as_value (dv)));
870 else
871 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
872 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
874 #endif
876 return false;
879 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
881 static void
882 variable_htab_free (void *elem)
884 int i;
885 variable var = (variable) elem;
886 location_chain node, next;
888 gcc_assert (var->refcount > 0);
890 var->refcount--;
891 if (var->refcount > 0)
892 return;
894 for (i = 0; i < var->n_var_parts; i++)
896 for (node = var->var_part[i].loc_chain; node; node = next)
898 next = node->next;
899 pool_free (loc_chain_pool, node);
901 var->var_part[i].loc_chain = NULL;
903 pool_free (dv_pool (var->dv), var);
906 /* The hash function for value_chains htab, computes the hash value
907 from the VALUE. */
909 static hashval_t
910 value_chain_htab_hash (const void *x)
912 const_value_chain const v = (const_value_chain) x;
914 return dv_htab_hash (v->dv);
917 /* Compare the VALUE X with VALUE Y. */
919 static int
920 value_chain_htab_eq (const void *x, const void *y)
922 const_value_chain const v = (const_value_chain) x;
923 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
925 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
928 /* Initialize the set (array) SET of attrs to empty lists. */
930 static void
931 init_attrs_list_set (attrs *set)
933 int i;
935 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
936 set[i] = NULL;
939 /* Make the list *LISTP empty. */
941 static void
942 attrs_list_clear (attrs *listp)
944 attrs list, next;
946 for (list = *listp; list; list = next)
948 next = list->next;
949 pool_free (attrs_pool, list);
951 *listp = NULL;
954 /* Return the element of LIST whose DV and OFFSET match, or NULL if there is none. */
956 static attrs
957 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
959 for (; list; list = list->next)
960 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
961 return list;
962 return NULL;
965 /* Insert the triplet DV, OFFSET, LOC into the list *LISTP. */
967 static void
968 attrs_list_insert (attrs *listp, decl_or_value dv,
969 HOST_WIDE_INT offset, rtx loc)
971 attrs list;
973 list = (attrs) pool_alloc (attrs_pool);
974 list->loc = loc;
975 list->dv = dv;
976 list->offset = offset;
977 list->next = *listp;
978 *listp = list;
981 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
983 static void
984 attrs_list_copy (attrs *dstp, attrs src)
986 attrs n;
988 attrs_list_clear (dstp);
989 for (; src; src = src->next)
991 n = (attrs) pool_alloc (attrs_pool);
992 n->loc = src->loc;
993 n->dv = src->dv;
994 n->offset = src->offset;
995 n->next = *dstp;
996 *dstp = n;
1000 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1002 static void
1003 attrs_list_union (attrs *dstp, attrs src)
1005 for (; src; src = src->next)
1007 if (!attrs_list_member (*dstp, src->dv, src->offset))
1008 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1012 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1013 *DSTP. */
1015 static void
1016 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1018 gcc_assert (!*dstp);
1019 for (; src; src = src->next)
1021 if (!dv_onepart_p (src->dv))
1022 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1024 for (src = src2; src; src = src->next)
1026 if (!dv_onepart_p (src->dv)
1027 && !attrs_list_member (*dstp, src->dv, src->offset))
1028 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1032 /* Shared hashtable support. */
1034 /* Return true if VARS is shared. */
1036 static inline bool
1037 shared_hash_shared (shared_hash vars)
1039 return vars->refcount > 1;
1042 /* Return the hash table for VARS. */
1044 static inline htab_t
1045 shared_hash_htab (shared_hash vars)
1047 return vars->htab;
1050 /* Copy variables into a new hash table. */
1052 static shared_hash
1053 shared_hash_unshare (shared_hash vars)
1055 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1056 gcc_assert (vars->refcount > 1);
1057 new_vars->refcount = 1;
1058 new_vars->htab
1059 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1060 variable_htab_eq, variable_htab_free);
1061 vars_copy (new_vars->htab, vars->htab);
1062 vars->refcount--;
1063 return new_vars;
1066 /* Increment reference counter on VARS and return it. */
1068 static inline shared_hash
1069 shared_hash_copy (shared_hash vars)
1071 vars->refcount++;
1072 return vars;
1075 /* Decrement reference counter and destroy hash table if not shared
1076 anymore. */
1078 static void
1079 shared_hash_destroy (shared_hash vars)
1081 gcc_assert (vars->refcount > 0);
1082 if (--vars->refcount == 0)
1084 htab_delete (vars->htab);
1085 pool_free (shared_hash_pool, vars);
1089 /* Unshare *PVARS if shared and return slot for DV. If INS is
1090 INSERT, insert it if not already present. */
1092 static inline void **
1093 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1094 hashval_t dvhash, enum insert_option ins)
1096 if (shared_hash_shared (*pvars))
1097 *pvars = shared_hash_unshare (*pvars);
1098 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1101 static inline void **
1102 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1103 enum insert_option ins)
1105 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1108 /* Return slot for DV, if it is already present in the hash table.
1109 If it is not present, insert it only if VARS is not shared, otherwise
1110 return NULL. */
1112 static inline void **
1113 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1115 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1116 shared_hash_shared (vars)
1117 ? NO_INSERT : INSERT);
1120 static inline void **
1121 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1123 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1126 /* Return slot for DV only if it is already present in the hash table. */
1128 static inline void **
1129 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1130 hashval_t dvhash)
1132 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1133 NO_INSERT);
1136 static inline void **
1137 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1139 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1142 /* Return variable for DV or NULL if not already present in the hash
1143 table. */
1145 static inline variable
1146 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1148 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1151 static inline variable
1152 shared_hash_find (shared_hash vars, decl_or_value dv)
1154 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
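/* A rough usage sketch (hypothetical, for illustration only) of the
   copy-on-write discipline implemented above:

     shared_hash b = shared_hash_copy (a);
       ... A and B now share one htab, refcount == 2 ...
     void **slot = shared_hash_find_slot_unshare (&b, dv, INSERT);
       ... B was given its own private copy before the slot was returned ...
     shared_hash_destroy (a);
     shared_hash_destroy (b);

   Read-only lookups such as shared_hash_find go straight to the shared
   table and never force a copy.  */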
1157 /* Determine a total order between two distinct pointers. Compare the
1158 pointers as integral types if size_t is wide enough, otherwise
1159 resort to bitwise memory compare. The actual order does not
1160 matter, we just need to be consistent, so endianness is
1161 irrelevant. */
1163 static int
1164 tie_break_pointers (const void *p1, const void *p2)
1166 gcc_assert (p1 != p2);
1168 if (sizeof (size_t) >= sizeof (void*))
1169 return (size_t)p1 < (size_t)p2 ? -1 : 1;
1170 else
1171 return memcmp (&p1, &p2, sizeof (p1));
1174 /* Return true if TVAL is better than CVAL as a canonical value. We
1175 choose lowest-numbered VALUEs, using the RTX address as a
1176 tie-breaker. The idea is to arrange them into a star topology,
1177 such that all of them are at most one step away from the canonical
1178 value, and the canonical value has backlinks to all of them, in
1179 addition to all the actual locations. We don't enforce this
1180 topology throughout the entire dataflow analysis, though.
1183 static inline bool
1184 canon_value_cmp (rtx tval, rtx cval)
1186 return !cval
1187 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1188 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1189 && tie_break_pointers (tval, cval) < 0);
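/* For example (illustrative numbers only), if TVAL has cselib value
   number 3 and CVAL has value number 7, TVAL is the better canonical
   value; when the value numbers are equal, the RTX addresses decide.  */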
1192 static bool dst_can_be_shared;
1194 /* Return a copy of a variable VAR and insert it into dataflow set SET. */
1196 static void **
1197 unshare_variable (dataflow_set *set, void **slot, variable var,
1198 enum var_init_status initialized)
1200 variable new_var;
1201 int i;
1203 new_var = (variable) pool_alloc (dv_pool (var->dv));
1204 new_var->dv = var->dv;
1205 new_var->refcount = 1;
1206 var->refcount--;
1207 new_var->n_var_parts = var->n_var_parts;
1209 if (! flag_var_tracking_uninit)
1210 initialized = VAR_INIT_STATUS_INITIALIZED;
1212 for (i = 0; i < var->n_var_parts; i++)
1214 location_chain node;
1215 location_chain *nextp;
1217 new_var->var_part[i].offset = var->var_part[i].offset;
1218 nextp = &new_var->var_part[i].loc_chain;
1219 for (node = var->var_part[i].loc_chain; node; node = node->next)
1221 location_chain new_lc;
1223 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1224 new_lc->next = NULL;
1225 if (node->init > initialized)
1226 new_lc->init = node->init;
1227 else
1228 new_lc->init = initialized;
1229 if (node->set_src && !(MEM_P (node->set_src)))
1230 new_lc->set_src = node->set_src;
1231 else
1232 new_lc->set_src = NULL;
1233 new_lc->loc = node->loc;
1235 *nextp = new_lc;
1236 nextp = &new_lc->next;
1239 /* We are at the basic block boundary when copying variable description
1240 so set the CUR_LOC to be the first element of the chain. */
1241 if (new_var->var_part[i].loc_chain)
1242 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1243 else
1244 new_var->var_part[i].cur_loc = NULL;
1247 dst_can_be_shared = false;
1248 if (shared_hash_shared (set->vars))
1249 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1250 else if (set->traversed_vars && set->vars != set->traversed_vars)
1251 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1252 *slot = new_var;
1253 return slot;
1256 /* Add a variable from *SLOT to hash table DATA and increase its reference
1257 count. */
1259 static int
1260 vars_copy_1 (void **slot, void *data)
1262 htab_t dst = (htab_t) data;
1263 variable src;
1264 void **dstp;
1266 src = (variable) *slot;
1267 src->refcount++;
1269 dstp = htab_find_slot_with_hash (dst, src->dv,
1270 dv_htab_hash (src->dv),
1271 INSERT);
1272 *dstp = src;
1274 /* Continue traversing the hash table. */
1275 return 1;
1278 /* Copy all variables from hash table SRC to hash table DST. */
1280 static void
1281 vars_copy (htab_t dst, htab_t src)
1283 htab_traverse_noresize (src, vars_copy_1, dst);
1286 /* Map a decl to its main debug decl. */
1288 static inline tree
1289 var_debug_decl (tree decl)
1291 if (decl && DECL_P (decl)
1292 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1293 && DECL_P (DECL_DEBUG_EXPR (decl)))
1294 decl = DECL_DEBUG_EXPR (decl);
1296 return decl;
1299 /* Set the register LOC to contain DV, OFFSET. */
1301 static void
1302 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1303 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1304 enum insert_option iopt)
1306 attrs node;
1307 bool decl_p = dv_is_decl_p (dv);
1309 if (decl_p)
1310 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1312 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1313 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1314 && node->offset == offset)
1315 break;
1316 if (!node)
1317 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1318 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1321 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1323 static void
1324 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1325 rtx set_src)
1327 tree decl = REG_EXPR (loc);
1328 HOST_WIDE_INT offset = REG_OFFSET (loc);
1330 var_reg_decl_set (set, loc, initialized,
1331 dv_from_decl (decl), offset, set_src, INSERT);
1334 static enum var_init_status
1335 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1337 variable var;
1338 int i;
1339 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1341 if (! flag_var_tracking_uninit)
1342 return VAR_INIT_STATUS_INITIALIZED;
1344 var = shared_hash_find (set->vars, dv);
1345 if (var)
1347 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1349 location_chain nextp;
1350 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1351 if (rtx_equal_p (nextp->loc, loc))
1353 ret_val = nextp->init;
1354 break;
1359 return ret_val;
1362 /* Delete current content of register LOC in dataflow set SET and set
1363 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1364 MODIFY is true, any other live copies of the same variable part are
1365 also deleted from the dataflow set, otherwise the variable part is
1366 assumed to be copied from another location holding the same
1367 part. */
1369 static void
1370 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1371 enum var_init_status initialized, rtx set_src)
1373 tree decl = REG_EXPR (loc);
1374 HOST_WIDE_INT offset = REG_OFFSET (loc);
1375 attrs node, next;
1376 attrs *nextp;
1378 decl = var_debug_decl (decl);
1380 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1381 initialized = get_init_value (set, loc, dv_from_decl (decl));
1383 nextp = &set->regs[REGNO (loc)];
1384 for (node = *nextp; node; node = next)
1386 next = node->next;
1387 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1389 delete_variable_part (set, node->loc, node->dv, node->offset);
1390 pool_free (attrs_pool, node);
1391 *nextp = next;
1393 else
1395 node->loc = loc;
1396 nextp = &node->next;
1399 if (modify)
1400 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1401 var_reg_set (set, loc, initialized, set_src);
1404 /* Delete current content of register LOC in dataflow set SET. If
1405 CLOBBER is true, also delete any other live copies of the same
1406 variable part. */
1408 static void
1409 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1411 attrs *reg = &set->regs[REGNO (loc)];
1412 attrs node, next;
1414 if (clobber)
1416 tree decl = REG_EXPR (loc);
1417 HOST_WIDE_INT offset = REG_OFFSET (loc);
1419 decl = var_debug_decl (decl);
1421 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1424 for (node = *reg; node; node = next)
1426 next = node->next;
1427 delete_variable_part (set, node->loc, node->dv, node->offset);
1428 pool_free (attrs_pool, node);
1430 *reg = NULL;
1433 /* Delete content of register with number REGNO in dataflow set SET. */
1435 static void
1436 var_regno_delete (dataflow_set *set, int regno)
1438 attrs *reg = &set->regs[regno];
1439 attrs node, next;
1441 for (node = *reg; node; node = next)
1443 next = node->next;
1444 delete_variable_part (set, node->loc, node->dv, node->offset);
1445 pool_free (attrs_pool, node);
1447 *reg = NULL;
1450 /* Set the location of DV, OFFSET as the MEM LOC. */
1452 static void
1453 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1454 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1455 enum insert_option iopt)
1457 if (dv_is_decl_p (dv))
1458 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1460 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1463 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1464 SET to LOC.
1465 Adjust the address first if it is stack pointer based. */
1467 static void
1468 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1469 rtx set_src)
1471 tree decl = MEM_EXPR (loc);
1472 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1474 var_mem_decl_set (set, loc, initialized,
1475 dv_from_decl (decl), offset, set_src, INSERT);
1478 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1479 dataflow set SET to LOC. If MODIFY is true, any other live copies
1480 of the same variable part are also deleted from the dataflow set,
1481 otherwise the variable part is assumed to be copied from another
1482 location holding the same part.
1483 Adjust the address first if it is stack pointer based. */
1485 static void
1486 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1487 enum var_init_status initialized, rtx set_src)
1489 tree decl = MEM_EXPR (loc);
1490 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1492 decl = var_debug_decl (decl);
1494 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1495 initialized = get_init_value (set, loc, dv_from_decl (decl));
1497 if (modify)
1498 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1499 var_mem_set (set, loc, initialized, set_src);
1502 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1503 true, also delete any other live copies of the same variable part.
1504 Adjust the address first if it is stack pointer based. */
1506 static void
1507 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1509 tree decl = MEM_EXPR (loc);
1510 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1512 decl = var_debug_decl (decl);
1513 if (clobber)
1514 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1515 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1518 /* Map a value to a location it was just stored in. */
1520 static void
1521 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
1523 cselib_val *v = CSELIB_VAL_PTR (val);
1525 gcc_assert (cselib_preserved_value_p (v));
1527 if (dump_file)
1529 fprintf (dump_file, "%i: ", INSN_UID (insn));
1530 print_inline_rtx (dump_file, val, 0);
1531 fprintf (dump_file, " stored in ");
1532 print_inline_rtx (dump_file, loc, 0);
1533 if (v->locs)
1535 struct elt_loc_list *l;
1536 for (l = v->locs; l; l = l->next)
1538 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1539 print_inline_rtx (dump_file, l->loc, 0);
1542 fprintf (dump_file, "\n");
1545 if (REG_P (loc))
1547 var_regno_delete (set, REGNO (loc));
1548 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1549 dv_from_value (val), 0, NULL_RTX, INSERT);
1551 else if (MEM_P (loc))
1552 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1553 dv_from_value (val), 0, NULL_RTX, INSERT);
1554 else
1555 set_variable_part (set, loc, dv_from_value (val), 0,
1556 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1559 /* Reset this node, detaching all its equivalences. */
1562 static void
1563 val_reset (dataflow_set *set, decl_or_value dv)
1565 variable var = shared_hash_find (set->vars, dv) ;
1566 location_chain node;
1567 rtx cval;
1569 if (!var || !var->n_var_parts)
1570 return;
1572 gcc_assert (var->n_var_parts == 1);
1574 cval = NULL;
1575 for (node = var->var_part[0].loc_chain; node; node = node->next)
1576 if (GET_CODE (node->loc) == VALUE
1577 && canon_value_cmp (node->loc, cval))
1578 cval = node->loc;
1580 for (node = var->var_part[0].loc_chain; node; node = node->next)
1581 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1583 /* Redirect the equivalence link to the new canonical
1584 value, or simply remove it if it would point at
1585 itself. */
1586 if (cval)
1587 set_variable_part (set, cval, dv_from_value (node->loc),
1588 0, node->init, node->set_src, NO_INSERT);
1589 delete_variable_part (set, dv_as_value (dv),
1590 dv_from_value (node->loc), 0);
1593 if (cval)
1595 decl_or_value cdv = dv_from_value (cval);
1597 /* Keep the remaining values connected, accumulating links
1598 in the canonical value. */
1599 for (node = var->var_part[0].loc_chain; node; node = node->next)
1601 if (node->loc == cval)
1602 continue;
1603 else if (GET_CODE (node->loc) == REG)
1604 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1605 node->set_src, NO_INSERT);
1606 else if (GET_CODE (node->loc) == MEM)
1607 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1608 node->set_src, NO_INSERT);
1609 else
1610 set_variable_part (set, node->loc, cdv, 0,
1611 node->init, node->set_src, NO_INSERT);
1615 /* We remove this last, to make sure that the canonical value is not
1616 removed to the point of requiring reinsertion. */
1617 if (cval)
1618 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1620 clobber_variable_part (set, NULL, dv, 0, NULL);
1622 /* ??? Should we make sure there aren't other available values or
1623 variables whose values involve this one other than by
1624 equivalence? E.g., at the very least we should reset MEMs, those
1625 shouldn't be too hard to find cselib-looking up the value as an
1626 address, then locating the resulting value in our own hash
1627 table. */
1630 /* Find the values in a given location and map the val to another
1631 value, if it is unique, or add the location as one holding the
1632 value. */
1634 static void
1635 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1637 decl_or_value dv = dv_from_value (val);
1639 if (dump_file && (dump_flags & TDF_DETAILS))
1641 if (insn)
1642 fprintf (dump_file, "%i: ", INSN_UID (insn));
1643 else
1644 fprintf (dump_file, "head: ");
1645 print_inline_rtx (dump_file, val, 0);
1646 fputs (" is at ", dump_file);
1647 print_inline_rtx (dump_file, loc, 0);
1648 fputc ('\n', dump_file);
1651 val_reset (set, dv);
1653 if (REG_P (loc))
1655 attrs node, found = NULL;
1657 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1658 if (dv_is_value_p (node->dv)
1659 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1661 found = node;
1663 /* Map incoming equivalences. ??? Wouldn't it be nice if
1664 we just started sharing the location lists? Maybe a
1665 circular list ending at the value itself or some
1666 such. */
1667 set_variable_part (set, dv_as_value (node->dv),
1668 dv_from_value (val), node->offset,
1669 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1670 set_variable_part (set, val, node->dv, node->offset,
1671 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1674 /* If we didn't find any equivalence, we need to remember that
1675 this value is held in the named register. */
1676 if (!found)
1677 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1678 dv_from_value (val), 0, NULL_RTX, INSERT);
1680 else if (MEM_P (loc))
1681 /* ??? Merge equivalent MEMs. */
1682 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1683 dv_from_value (val), 0, NULL_RTX, INSERT);
1684 else
1685 /* ??? Merge equivalent expressions. */
1686 set_variable_part (set, loc, dv_from_value (val), 0,
1687 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1690 /* Initialize dataflow set SET to be empty. */
1693 static void
1694 dataflow_set_init (dataflow_set *set)
1696 init_attrs_list_set (set->regs);
1697 set->vars = shared_hash_copy (empty_shared_hash);
1698 set->stack_adjust = 0;
1699 set->traversed_vars = NULL;
1702 /* Delete the contents of dataflow set SET. */
1704 static void
1705 dataflow_set_clear (dataflow_set *set)
1707 int i;
1709 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1710 attrs_list_clear (&set->regs[i]);
1712 shared_hash_destroy (set->vars);
1713 set->vars = shared_hash_copy (empty_shared_hash);
1716 /* Copy the contents of dataflow set SRC to DST. */
1718 static void
1719 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1721 int i;
1723 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1724 attrs_list_copy (&dst->regs[i], src->regs[i]);
1726 shared_hash_destroy (dst->vars);
1727 dst->vars = shared_hash_copy (src->vars);
1728 dst->stack_adjust = src->stack_adjust;
1731 /* Information for merging lists of locations for a given offset of a variable. */
1733 struct variable_union_info
1735 /* Node of the location chain. */
1736 location_chain lc;
1738 /* The sum of positions in the input chains. */
1739 int pos;
1741 /* The position in the chain of DST dataflow set. */
1742 int pos_dst;
1745 /* Buffer for location list sorting and its allocated size. */
1746 static struct variable_union_info *vui_vec;
1747 static int vui_allocated;
1749 /* Compare function for qsort, order the structures by POS element. */
1751 static int
1752 variable_union_info_cmp_pos (const void *n1, const void *n2)
1754 const struct variable_union_info *const i1 =
1755 (const struct variable_union_info *) n1;
1756 const struct variable_union_info *const i2 =
1757 ( const struct variable_union_info *) n2;
1759 if (i1->pos != i2->pos)
1760 return i1->pos - i2->pos;
1762 return (i1->pos_dst - i2->pos_dst);
1765 /* Compute union of location parts of variable *SLOT and the same variable
1766 from dataflow set DATA. Compute "sorted" union of the location chains
1767 for common offsets, i.e. the locations of a variable part are sorted by
1768 a priority where the priority is the sum of the positions in the 2 chains
1769 (if a location is only in one list the position in the second list is
1770 defined to be larger than the length of the chains).
1771 When we are updating the location parts the newest location is in the
1772 beginning of the chain, so when we do the described "sorted" union
1773 we keep the newest locations in the beginning. */
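/* A small worked example of these priorities (illustrative only): with
   DST = [A, B] and SRC = [B, C], B gets priority 1 + 0 = 1, A gets
   0 + (src_l + dst_l) = 4 and C gets 1 + (src_l + dst_l) = 5, so the
   merged chain is [B, A, C].  */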
1775 static int
1776 variable_union (void **slot, void *data)
1778 variable src, dst;
1779 void **dstp;
1780 dataflow_set *set = (dataflow_set *) data;
1781 int i, j, k;
1783 src = (variable) *slot;
1784 dstp = shared_hash_find_slot (set->vars, src->dv);
1785 if (!dstp || !*dstp)
1787 src->refcount++;
1789 dst_can_be_shared = false;
1790 if (!dstp)
1791 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1793 *dstp = src;
1795 /* If CUR_LOC of some variable part is not the first element of
1796 the location chain, we are going to change it, so we have to make
1797 a copy of the variable. */
1798 for (k = 0; k < src->n_var_parts; k++)
1800 gcc_assert (!src->var_part[k].loc_chain
1801 == !src->var_part[k].cur_loc);
1802 if (src->var_part[k].loc_chain)
1804 gcc_assert (src->var_part[k].cur_loc);
1805 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1806 break;
1809 if (k < src->n_var_parts)
1810 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1812 /* Continue traversing the hash table. */
1813 return 1;
1815 else
1816 dst = (variable) *dstp;
1818 gcc_assert (src->n_var_parts);
1820 /* We can combine one-part variables very efficiently, because their
1821 entries are in canonical order. */
1822 if (dv_onepart_p (src->dv))
1824 location_chain *nodep, dnode, snode;
1826 gcc_assert (src->n_var_parts == 1);
1827 gcc_assert (dst->n_var_parts == 1);
1829 snode = src->var_part[0].loc_chain;
1830 gcc_assert (snode);
1832 restart_onepart_unshared:
1833 nodep = &dst->var_part[0].loc_chain;
1834 dnode = *nodep;
1835 gcc_assert (dnode);
1837 while (snode)
1839 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1841 if (r > 0)
1843 location_chain nnode;
1845 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1847 dstp = unshare_variable (set, dstp, dst,
1848 VAR_INIT_STATUS_INITIALIZED);
1849 dst = (variable)*dstp;
1850 goto restart_onepart_unshared;
1853 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1854 nnode->loc = snode->loc;
1855 nnode->init = snode->init;
1856 if (!snode->set_src || MEM_P (snode->set_src))
1857 nnode->set_src = NULL;
1858 else
1859 nnode->set_src = snode->set_src;
1860 nnode->next = dnode;
1861 dnode = nnode;
1863 #ifdef ENABLE_CHECKING
1864 else if (r == 0)
1865 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1866 #endif
1868 if (r >= 0)
1869 snode = snode->next;
1871 nodep = &dnode->next;
1872 dnode = *nodep;
1875 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1877 return 1;
1880 /* Count the number of location parts, result is K. */
1881 for (i = 0, j = 0, k = 0;
1882 i < src->n_var_parts && j < dst->n_var_parts; k++)
1884 if (src->var_part[i].offset == dst->var_part[j].offset)
1886 i++;
1887 j++;
1889 else if (src->var_part[i].offset < dst->var_part[j].offset)
1890 i++;
1891 else
1892 j++;
1894 k += src->n_var_parts - i;
1895 k += dst->n_var_parts - j;
1897 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
1898 thus there are at most MAX_VAR_PARTS different offsets. */
1899 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1901 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1902 && dst->n_var_parts != k)
1904 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1905 dst = (variable)*dstp;
1908 i = src->n_var_parts - 1;
1909 j = dst->n_var_parts - 1;
1910 dst->n_var_parts = k;
1912 for (k--; k >= 0; k--)
1914 location_chain node, node2;
1916 if (i >= 0 && j >= 0
1917 && src->var_part[i].offset == dst->var_part[j].offset)
1919 /* Compute the "sorted" union of the chains, i.e. the locations which
1920 are in both chains go first, they are sorted by the sum of
1921 positions in the chains. */
1922 int dst_l, src_l;
1923 int ii, jj, n;
1924 struct variable_union_info *vui;
1926 /* If DST is shared compare the location chains.
1927 If they are different we will modify the chain in DST with
1928 high probability so make a copy of DST. */
1929 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1931 for (node = src->var_part[i].loc_chain,
1932 node2 = dst->var_part[j].loc_chain; node && node2;
1933 node = node->next, node2 = node2->next)
1935 if (!((REG_P (node2->loc)
1936 && REG_P (node->loc)
1937 && REGNO (node2->loc) == REGNO (node->loc))
1938 || rtx_equal_p (node2->loc, node->loc)))
1940 if (node2->init < node->init)
1941 node2->init = node->init;
1942 break;
1945 if (node || node2)
1947 dstp = unshare_variable (set, dstp, dst,
1948 VAR_INIT_STATUS_UNKNOWN);
1949 dst = (variable)*dstp;
1953 src_l = 0;
1954 for (node = src->var_part[i].loc_chain; node; node = node->next)
1955 src_l++;
1956 dst_l = 0;
1957 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1958 dst_l++;
1960 if (dst_l == 1)
1962 /* The most common case, much simpler, no qsort is needed. */
1963 location_chain dstnode = dst->var_part[j].loc_chain;
1964 dst->var_part[k].loc_chain = dstnode;
1965 dst->var_part[k].offset = dst->var_part[j].offset;
1966 node2 = dstnode;
1967 for (node = src->var_part[i].loc_chain; node; node = node->next)
1968 if (!((REG_P (dstnode->loc)
1969 && REG_P (node->loc)
1970 && REGNO (dstnode->loc) == REGNO (node->loc))
1971 || rtx_equal_p (dstnode->loc, node->loc)))
1973 location_chain new_node;
1975 /* Copy the location from SRC. */
1976 new_node = (location_chain) pool_alloc (loc_chain_pool);
1977 new_node->loc = node->loc;
1978 new_node->init = node->init;
1979 if (!node->set_src || MEM_P (node->set_src))
1980 new_node->set_src = NULL;
1981 else
1982 new_node->set_src = node->set_src;
1983 node2->next = new_node;
1984 node2 = new_node;
1986 node2->next = NULL;
1988 else
1990 if (src_l + dst_l > vui_allocated)
1992 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1993 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
1994 vui_allocated);
1996 vui = vui_vec;
1998 /* Fill in the locations from DST. */
1999 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2000 node = node->next, jj++)
2002 vui[jj].lc = node;
2003 vui[jj].pos_dst = jj;
2005 /* Pos plus a value larger than any sum of two valid positions, so nodes found only in DST sort after nodes present in both chains. */
2006 vui[jj].pos = jj + src_l + dst_l;
2009 /* Fill in the locations from SRC. */
2010 n = dst_l;
2011 for (node = src->var_part[i].loc_chain, ii = 0; node;
2012 node = node->next, ii++)
2014 /* Find location from NODE. */
2015 for (jj = 0; jj < dst_l; jj++)
2017 if ((REG_P (vui[jj].lc->loc)
2018 && REG_P (node->loc)
2019 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2020 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2022 vui[jj].pos = jj + ii;
2023 break;
2026 if (jj >= dst_l) /* The location has not been found. */
2028 location_chain new_node;
2030 /* Copy the location from SRC. */
2031 new_node = (location_chain) pool_alloc (loc_chain_pool);
2032 new_node->loc = node->loc;
2033 new_node->init = node->init;
2034 if (!node->set_src || MEM_P (node->set_src))
2035 new_node->set_src = NULL;
2036 else
2037 new_node->set_src = node->set_src;
2038 vui[n].lc = new_node;
2039 vui[n].pos_dst = src_l + dst_l;
2040 vui[n].pos = ii + src_l + dst_l;
2041 n++;
2045 if (dst_l == 2)
2047 /* Special case of a still very common case. For dst_l == 2,
2048 all entries dst_l ... n-1 are sorted, with vui[i].pos == i + src_l + dst_l
2049 for i >= dst_l. */
2050 if (vui[0].pos > vui[1].pos)
2052 /* Order should be 1, 0, 2... */
2053 dst->var_part[k].loc_chain = vui[1].lc;
2054 vui[1].lc->next = vui[0].lc;
2055 if (n >= 3)
2057 vui[0].lc->next = vui[2].lc;
2058 vui[n - 1].lc->next = NULL;
2060 else
2061 vui[0].lc->next = NULL;
2062 ii = 3;
2064 else
2066 dst->var_part[k].loc_chain = vui[0].lc;
2067 if (n >= 3 && vui[2].pos < vui[1].pos)
2069 /* Order should be 0, 2, 1, 3... */
2070 vui[0].lc->next = vui[2].lc;
2071 vui[2].lc->next = vui[1].lc;
2072 if (n >= 4)
2074 vui[1].lc->next = vui[3].lc;
2075 vui[n - 1].lc->next = NULL;
2077 else
2078 vui[1].lc->next = NULL;
2079 ii = 4;
2081 else
2083 /* Order should be 0, 1, 2... */
2084 ii = 1;
2085 vui[n - 1].lc->next = NULL;
2088 for (; ii < n; ii++)
2089 vui[ii - 1].lc->next = vui[ii].lc;
2091 else
2093 qsort (vui, n, sizeof (struct variable_union_info),
2094 variable_union_info_cmp_pos);
2096 /* Reconnect the nodes in sorted order. */
2097 for (ii = 1; ii < n; ii++)
2098 vui[ii - 1].lc->next = vui[ii].lc;
2099 vui[n - 1].lc->next = NULL;
2100 dst->var_part[k].loc_chain = vui[0].lc;
2103 dst->var_part[k].offset = dst->var_part[j].offset;
2105 i--;
2106 j--;
2108 else if ((i >= 0 && j >= 0
2109 && src->var_part[i].offset < dst->var_part[j].offset)
2110 || i < 0)
2112 dst->var_part[k] = dst->var_part[j];
2113 j--;
2115 else if ((i >= 0 && j >= 0
2116 && src->var_part[i].offset > dst->var_part[j].offset)
2117 || j < 0)
2119 location_chain *nextp;
2121 /* Copy the chain from SRC. */
2122 nextp = &dst->var_part[k].loc_chain;
2123 for (node = src->var_part[i].loc_chain; node; node = node->next)
2125 location_chain new_lc;
2127 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2128 new_lc->next = NULL;
2129 new_lc->init = node->init;
2130 if (!node->set_src || MEM_P (node->set_src))
2131 new_lc->set_src = NULL;
2132 else
2133 new_lc->set_src = node->set_src;
2134 new_lc->loc = node->loc;
2136 *nextp = new_lc;
2137 nextp = &new_lc->next;
2140 dst->var_part[k].offset = src->var_part[i].offset;
2141 i--;
2144 /* We are at the basic block boundary when computing union
2145 so set the CUR_LOC to be the first element of the chain. */
2146 if (dst->var_part[k].loc_chain)
2147 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2148 else
2149 dst->var_part[k].cur_loc = NULL;
2152 if (flag_var_tracking_uninit)
2153 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2155 location_chain node, node2;
2156 for (node = src->var_part[i].loc_chain; node; node = node->next)
2157 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2158 if (rtx_equal_p (node->loc, node2->loc))
2160 if (node->init > node2->init)
2161 node2->init = node->init;
2165 /* Continue traversing the hash table. */
2166 return 1;
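/* A worked example of the chain merge in variable_union above; the
   location names are hypothetical.  Suppose the DST chain is [A, B] and
   the SRC chain is [B, C], so dst_l == 2 and src_l == 2.  The DST entries
   start with pos == pos_dst + src_l + dst_l, i.e. A -> 4 and B -> 5; while
   scanning SRC, B is found at DST position 1 from SRC position 0, so its
   pos drops to 1 + 0 == 1, and C, absent from DST, is appended with
   pos == 1 + src_l + dst_l == 5.  Ordering by pos gives [B, A, C]:
   locations present in both chains come first, sorted by the sum of their
   positions, followed by the remaining DST and SRC locations.  Since
   dst_l == 2 here, the special-cased path is taken and no qsort runs.  */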
2169 /* Like variable_union, but only used when doing dataflow_set_union
2170 into an empty hashtab. To allow sharing, dst is initially shared
2171 with src (so all variables are "copied" from src to dst hashtab),
2172 so unshare_variable is only needed for variables that require
2173 canonicalization. */
2175 static int
2176 variable_canonicalize (void **slot, void *data)
2178 variable src;
2179 dataflow_set *set = (dataflow_set *) data;
2180 int k;
2182 src = *(variable *) slot;
2184 /* If CUR_LOC of some variable part is not the first element of
2185 the location chain we are going to change it so we have to make
2186 a copy of the variable. */
2187 for (k = 0; k < src->n_var_parts; k++)
2189 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2190 if (src->var_part[k].loc_chain)
2192 gcc_assert (src->var_part[k].cur_loc);
2193 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2194 break;
2197 if (k < src->n_var_parts)
2198 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2199 return 1;
2202 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2204 static void
2205 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2207 int i;
2209 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2210 attrs_list_union (&dst->regs[i], src->regs[i]);
2212 if (dst->vars == empty_shared_hash)
2214 shared_hash_destroy (dst->vars);
2215 dst->vars = shared_hash_copy (src->vars);
2216 dst->traversed_vars = dst->vars;
2217 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2218 dst->traversed_vars = NULL;
2220 else
2221 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
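/* A minimal usage sketch of dataflow_set_union; the set names below are
   hypothetical.  The first union into a freshly initialized set merely
   copies and shares SRC's hash table (canonicalizing CUR_LOCs where
   needed), while later unions merge variable by variable:

       dataflow_set in;
       dataflow_set_init (&in);
       dataflow_set_union (&in, &pred1_out);   -- shares pred1_out's vars
       dataflow_set_union (&in, &pred2_out);   -- merges per variable  */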
2224 /* Whether the value is currently being expanded. */
2225 #define VALUE_RECURSED_INTO(x) \
2226 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2227 /* Whether the value is in changed_variables hash table. */
2228 #define VALUE_CHANGED(x) \
2229 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2230 /* Whether the decl is in changed_variables hash table. */
2231 #define DECL_CHANGED(x) TREE_VISITED (x)
2233 /* Record that DV has been added into resp. removed from changed_variables
2234 hashtable. */
2236 static inline void
2237 set_dv_changed (decl_or_value dv, bool newv)
2239 if (dv_is_value_p (dv))
2240 VALUE_CHANGED (dv_as_value (dv)) = newv;
2241 else
2242 DECL_CHANGED (dv_as_decl (dv)) = newv;
2245 /* Return true if DV is present in changed_variables hash table. */
2247 static inline bool
2248 dv_changed_p (decl_or_value dv)
2250 return (dv_is_value_p (dv)
2251 ? VALUE_CHANGED (dv_as_value (dv))
2252 : DECL_CHANGED (dv_as_decl (dv)));
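/* Illustrative only; VAL below stands for a hypothetical VALUE rtx.  The
   "changed" flag lives in a spare RTL flag bit for values and in
   TREE_VISITED for decls, so marking and testing cost O(1):

       decl_or_value dv = dv_from_value (val);
       set_dv_changed (dv, true);
       ...
       if (dv_changed_p (dv))
         ...   -- DV is already queued in changed_variables  */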
2255 /* Return a location list node whose loc is rtx_equal to LOC, in the
2256 location list of a one-part variable or value VAR, or in that of
2257 any values recursively mentioned in the location lists. */
2259 static location_chain
2260 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2262 location_chain node;
2264 if (!var)
2265 return NULL;
2267 gcc_assert (dv_onepart_p (var->dv));
2269 if (!var->n_var_parts)
2270 return NULL;
2272 gcc_assert (var->var_part[0].offset == 0);
2274 for (node = var->var_part[0].loc_chain; node; node = node->next)
2275 if (rtx_equal_p (loc, node->loc))
2276 return node;
2277 else if (GET_CODE (node->loc) == VALUE
2278 && !VALUE_RECURSED_INTO (node->loc))
2280 decl_or_value dv = dv_from_value (node->loc);
2281 variable var = (variable)
2282 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2284 if (var)
2286 location_chain where;
2287 VALUE_RECURSED_INTO (node->loc) = true;
2288 if ((where = find_loc_in_1pdv (loc, var, vars)))
2290 VALUE_RECURSED_INTO (node->loc) = false;
2291 return where;
2293 VALUE_RECURSED_INTO (node->loc) = false;
2297 return NULL;
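/* An illustrative lookup; V1 and V2 are hypothetical VALUEs.  If V1's
   chain is [(reg 3), (value V2)] and V2's chain is [(mem (reg 6))], then
   find_loc_in_1pdv ((mem (reg 6)), V1's variable, vars) recurses through
   the (value V2) node and returns the MEM node from V2's chain.
   VALUE_RECURSED_INTO is set on V2 while it is being searched, so
   mutually referencing values cannot cause infinite recursion.  */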
2300 /* Hash table iteration argument passed to variable_merge. */
2301 struct dfset_merge
2303 /* The set in which the merge is to be inserted. */
2304 dataflow_set *dst;
2305 /* The set that we're iterating in. */
2306 dataflow_set *cur;
2307 /* The set that may contain the other dv we are to merge with. */
2308 dataflow_set *src;
2309 /* Number of onepart dvs in src. */
2310 int src_onepart_cnt;
2313 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2314 loc_cmp order, and it is maintained as such. */
2316 static void
2317 insert_into_intersection (location_chain *nodep, rtx loc,
2318 enum var_init_status status)
2320 location_chain node;
2321 int r;
2323 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2324 if ((r = loc_cmp (node->loc, loc)) == 0)
2326 node->init = MIN (node->init, status);
2327 return;
2329 else if (r > 0)
2330 break;
2332 node = (location_chain) pool_alloc (loc_chain_pool);
2334 node->loc = loc;
2335 node->set_src = NULL;
2336 node->init = status;
2337 node->next = *nodep;
2338 *nodep = node;
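/* A small sketch of the sorted insert above, with hypothetical locations.
   If *DEST currently holds [(reg 1), (value V)] in loc_cmp order, then
   inserting (reg 1) again only lowers its init status via MIN, whereas
   inserting (mem (reg 6)) links a new node between the REG and the VALUE,
   since loc_cmp orders REG < MEM < VALUE.  */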
2341 /* Insert in DEST the intersection the locations present in both
2342 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2343 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2344 DSM->dst. */
2346 static void
2347 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2348 location_chain s1node, variable s2var)
2350 dataflow_set *s1set = dsm->cur;
2351 dataflow_set *s2set = dsm->src;
2352 location_chain found;
2354 for (; s1node; s1node = s1node->next)
2356 if (s1node->loc == val)
2357 continue;
2359 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2360 shared_hash_htab (s2set->vars))))
2362 insert_into_intersection (dest, s1node->loc,
2363 MIN (s1node->init, found->init));
2364 continue;
2367 if (GET_CODE (s1node->loc) == VALUE
2368 && !VALUE_RECURSED_INTO (s1node->loc))
2370 decl_or_value dv = dv_from_value (s1node->loc);
2371 variable svar = shared_hash_find (s1set->vars, dv);
2372 if (svar)
2374 if (svar->n_var_parts == 1)
2376 VALUE_RECURSED_INTO (s1node->loc) = true;
2377 intersect_loc_chains (val, dest, dsm,
2378 svar->var_part[0].loc_chain,
2379 s2var);
2380 VALUE_RECURSED_INTO (s1node->loc) = false;
2385 /* ??? if the location is equivalent to any location in src,
2386 searched recursively
2388 add to dst the values needed to represent the equivalence
2390 telling whether locations S is equivalent to another dv's
2391 location list:
2393 for each location D in the list
2395 if S and D satisfy rtx_equal_p, then it is present
2397 else if D is a value, recurse without cycles
2399 else if S and D have the same CODE and MODE
2401 for each operand oS and the corresponding oD
2403 if oS and oD are not equivalent, then S and D are not equivalent
2405 else if they are RTX vectors
2407 if any vector oS element is not equivalent to its respective oD,
2408 then S and D are not equivalent
2416 /* Return -1 if X should be before Y in a location list for a 1-part
2417 variable, 1 if Y should be before X, and 0 if they're equivalent
2418 and should not appear in the list. */
2420 static int
2421 loc_cmp (rtx x, rtx y)
2423 int i, j, r;
2424 RTX_CODE code = GET_CODE (x);
2425 const char *fmt;
2427 if (x == y)
2428 return 0;
2430 if (REG_P (x))
2432 if (!REG_P (y))
2433 return -1;
2434 gcc_assert (GET_MODE (x) == GET_MODE (y));
2435 if (REGNO (x) == REGNO (y))
2436 return 0;
2437 else if (REGNO (x) < REGNO (y))
2438 return -1;
2439 else
2440 return 1;
2443 if (REG_P (y))
2444 return 1;
2446 if (MEM_P (x))
2448 if (!MEM_P (y))
2449 return -1;
2450 gcc_assert (GET_MODE (x) == GET_MODE (y));
2451 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2454 if (MEM_P (y))
2455 return 1;
2457 if (GET_CODE (x) == VALUE)
2459 if (GET_CODE (y) != VALUE)
2460 return -1;
2461 gcc_assert (GET_MODE (x) == GET_MODE (y));
2462 if (canon_value_cmp (x, y))
2463 return -1;
2464 else
2465 return 1;
2468 if (GET_CODE (y) == VALUE)
2469 return 1;
2471 if (GET_CODE (x) == GET_CODE (y))
2472 /* Compare operands below. */;
2473 else if (GET_CODE (x) < GET_CODE (y))
2474 return -1;
2475 else
2476 return 1;
2478 gcc_assert (GET_MODE (x) == GET_MODE (y));
2480 fmt = GET_RTX_FORMAT (code);
2481 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2482 switch (fmt[i])
2484 case 'w':
2485 if (XWINT (x, i) == XWINT (y, i))
2486 break;
2487 else if (XWINT (x, i) < XWINT (y, i))
2488 return -1;
2489 else
2490 return 1;
2492 case 'n':
2493 case 'i':
2494 if (XINT (x, i) == XINT (y, i))
2495 break;
2496 else if (XINT (x, i) < XINT (y, i))
2497 return -1;
2498 else
2499 return 1;
2501 case 'V':
2502 case 'E':
2503 /* Compare the vector length first. */
2504 if (XVECLEN (x, i) == XVECLEN (y, i))
2505 /* Compare the vectors elements. */;
2506 else if (XVECLEN (x, i) < XVECLEN (y, i))
2507 return -1;
2508 else
2509 return 1;
2511 for (j = 0; j < XVECLEN (x, i); j++)
2512 if ((r = loc_cmp (XVECEXP (x, i, j),
2513 XVECEXP (y, i, j))))
2514 return r;
2515 break;
2517 case 'e':
2518 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2519 return r;
2520 break;
2522 case 'S':
2523 case 's':
2524 if (XSTR (x, i) == XSTR (y, i))
2525 break;
2526 if (!XSTR (x, i))
2527 return -1;
2528 if (!XSTR (y, i))
2529 return 1;
2530 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2531 break;
2532 else if (r < 0)
2533 return -1;
2534 else
2535 return 1;
2537 case 'u':
2538 /* These are just backpointers, so they don't matter. */
2539 break;
2541 case '0':
2542 case 't':
2543 break;
2545 /* It is believed that rtx's at this level will never
2546 contain anything but integers and other rtx's,
2547 except for within LABEL_REFs and SYMBOL_REFs. */
2548 default:
2549 gcc_unreachable ();
2552 return 0;
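/* A few concrete orderings implied by loc_cmp; all RTL is hypothetical:

     loc_cmp ((reg:SI 1), (reg:SI 3))             == -1  lower REGNO first
     loc_cmp ((reg:SI 1), (mem:SI (reg:SI 3)))    == -1  REGs before MEMs
     loc_cmp ((mem:SI (reg:SI 1)), (value:SI V))  == -1  MEMs before VALUEs

   Two VALUEs compare with canon_value_cmp, i.e. the more canonical value
   sorts first; anything else falls back to code, mode and operands.  */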
2555 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2556 from VALUE to DVP. */
2558 static int
2559 add_value_chain (rtx *loc, void *dvp)
2561 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2563 decl_or_value dv = (decl_or_value) dvp;
2564 decl_or_value ldv = dv_from_value (*loc);
2565 value_chain vc, nvc;
2566 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2567 dv_htab_hash (ldv), INSERT);
2568 if (!*slot)
2570 vc = (value_chain) pool_alloc (value_chain_pool);
2571 vc->dv = ldv;
2572 vc->next = NULL;
2573 vc->refcount = 0;
2574 *slot = (void *) vc;
2576 else
2578 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2579 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2580 break;
2581 if (vc)
2583 vc->refcount++;
2584 return 0;
2587 vc = (value_chain) *slot;
2588 nvc = (value_chain) pool_alloc (value_chain_pool);
2589 nvc->dv = dv;
2590 nvc->next = vc->next;
2591 nvc->refcount = 1;
2592 vc->next = nvc;
2594 return 0;
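/* A bookkeeping sketch for the value_chains table; DV1 and V below are
   hypothetical.  After add_value_chains (DV1, (plus:SI (value:SI V)
   (const_int 8))), the slot keyed by V holds a list whose head names V
   itself and whose entries record which dvs mention V, here DV1 with
   refcount 1.  Adding the same pair again only bumps that refcount, and
   remove_value_chain undoes it, freeing the slot once the last backlink
   is gone.  */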
2597 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2598 from those VALUEs to DVP. */
2600 static void
2601 add_value_chains (decl_or_value dv, rtx loc)
2603 if (GET_CODE (loc) == VALUE)
2605 add_value_chain (&loc, dv_as_opaque (dv));
2606 return;
2608 if (REG_P (loc))
2609 return;
2610 if (MEM_P (loc))
2611 loc = XEXP (loc, 0);
2612 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2615 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2616 VALUEs to DV. */
2618 static void
2619 add_cselib_value_chains (decl_or_value dv)
2621 struct elt_loc_list *l;
2623 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2624 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2627 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2628 from VALUE to DVP. */
2630 static int
2631 remove_value_chain (rtx *loc, void *dvp)
2633 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2635 decl_or_value dv = (decl_or_value) dvp;
2636 decl_or_value ldv = dv_from_value (*loc);
2637 value_chain vc, dvc = NULL;
2638 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2639 dv_htab_hash (ldv), NO_INSERT);
2640 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2641 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2643 dvc = vc->next;
2644 gcc_assert (dvc->refcount > 0);
2645 if (--dvc->refcount == 0)
2647 vc->next = dvc->next;
2648 pool_free (value_chain_pool, dvc);
2649 if (vc->next == NULL && vc == (value_chain) *slot)
2651 pool_free (value_chain_pool, vc);
2652 htab_clear_slot (value_chains, slot);
2655 return 0;
2657 gcc_unreachable ();
2659 return 0;
2662 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2663 from those VALUEs to DVP. */
2665 static void
2666 remove_value_chains (decl_or_value dv, rtx loc)
2668 if (GET_CODE (loc) == VALUE)
2670 remove_value_chain (&loc, dv_as_opaque (dv));
2671 return;
2673 if (REG_P (loc))
2674 return;
2675 if (MEM_P (loc))
2676 loc = XEXP (loc, 0);
2677 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2680 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
2681 VALUEs to DV. */
2683 static void
2684 remove_cselib_value_chains (decl_or_value dv)
2686 struct elt_loc_list *l;
2688 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2689 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2692 #if ENABLE_CHECKING
2693 /* Check the order of entries in one-part variables. */
2695 static int
2696 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2698 variable var = (variable) *slot;
2699 decl_or_value dv = var->dv;
2700 location_chain node, next;
2702 if (!dv_onepart_p (dv))
2703 return 1;
2705 gcc_assert (var->n_var_parts == 1);
2706 node = var->var_part[0].loc_chain;
2707 gcc_assert (node);
2709 while ((next = node->next))
2711 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2712 node = next;
2715 return 1;
2717 #endif
2719 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2720 more likely to be chosen as canonical for an equivalence set.
2721 Ensure less likely values can reach more likely neighbors, making
2722 the connections bidirectional. */
2724 static int
2725 canonicalize_values_mark (void **slot, void *data)
2727 dataflow_set *set = (dataflow_set *)data;
2728 variable var = (variable) *slot;
2729 decl_or_value dv = var->dv;
2730 rtx val;
2731 location_chain node;
2733 if (!dv_is_value_p (dv))
2734 return 1;
2736 gcc_assert (var->n_var_parts == 1);
2738 val = dv_as_value (dv);
2740 for (node = var->var_part[0].loc_chain; node; node = node->next)
2741 if (GET_CODE (node->loc) == VALUE)
2743 if (canon_value_cmp (node->loc, val))
2744 VALUE_RECURSED_INTO (val) = true;
2745 else
2747 decl_or_value odv = dv_from_value (node->loc);
2748 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2750 oslot = set_slot_part (set, val, oslot, odv, 0,
2751 node->init, NULL_RTX);
2753 VALUE_RECURSED_INTO (node->loc) = true;
2757 return 1;
2760 /* Remove redundant entries from equivalence lists in onepart
2761 variables, canonicalizing equivalence sets into star shapes. */
2763 static int
2764 canonicalize_values_star (void **slot, void *data)
2766 dataflow_set *set = (dataflow_set *)data;
2767 variable var = (variable) *slot;
2768 decl_or_value dv = var->dv;
2769 location_chain node;
2770 decl_or_value cdv;
2771 rtx val, cval;
2772 void **cslot;
2773 bool has_value;
2774 bool has_marks;
2776 if (!dv_onepart_p (dv))
2777 return 1;
2779 gcc_assert (var->n_var_parts == 1);
2781 if (dv_is_value_p (dv))
2783 cval = dv_as_value (dv);
2784 if (!VALUE_RECURSED_INTO (cval))
2785 return 1;
2786 VALUE_RECURSED_INTO (cval) = false;
2788 else
2789 cval = NULL_RTX;
2791 restart:
2792 val = cval;
2793 has_value = false;
2794 has_marks = false;
2796 gcc_assert (var->n_var_parts == 1);
2798 for (node = var->var_part[0].loc_chain; node; node = node->next)
2799 if (GET_CODE (node->loc) == VALUE)
2801 has_value = true;
2802 if (VALUE_RECURSED_INTO (node->loc))
2803 has_marks = true;
2804 if (canon_value_cmp (node->loc, cval))
2805 cval = node->loc;
2808 if (!has_value)
2809 return 1;
2811 if (cval == val)
2813 if (!has_marks || dv_is_decl_p (dv))
2814 return 1;
2816 /* Keep it marked so that we revisit it, either after visiting a
2817 child node, or after visiting a new parent that might be
2818 found out. */
2819 VALUE_RECURSED_INTO (val) = true;
2821 for (node = var->var_part[0].loc_chain; node; node = node->next)
2822 if (GET_CODE (node->loc) == VALUE
2823 && VALUE_RECURSED_INTO (node->loc))
2825 cval = node->loc;
2826 restart_with_cval:
2827 VALUE_RECURSED_INTO (cval) = false;
2828 dv = dv_from_value (cval);
2829 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2830 if (!slot)
2832 gcc_assert (dv_is_decl_p (var->dv));
2833 /* The canonical value was reset and dropped.
2834 Remove it. */
2835 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2836 return 1;
2838 var = (variable)*slot;
2839 gcc_assert (dv_is_value_p (var->dv));
2840 if (var->n_var_parts == 0)
2841 return 1;
2842 gcc_assert (var->n_var_parts == 1);
2843 goto restart;
2846 VALUE_RECURSED_INTO (val) = false;
2848 return 1;
2851 /* Push values to the canonical one. */
2852 cdv = dv_from_value (cval);
2853 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2855 for (node = var->var_part[0].loc_chain; node; node = node->next)
2856 if (node->loc != cval)
2858 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2859 node->init, NULL_RTX);
2860 if (GET_CODE (node->loc) == VALUE)
2862 decl_or_value ndv = dv_from_value (node->loc);
2864 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2865 NO_INSERT);
2867 if (canon_value_cmp (node->loc, val))
2869 /* If it could have been a local minimum, it's not any more,
2870 since it's now neighbor to cval, so it may have to push
2871 to it. Conversely, if it wouldn't have prevailed over
2872 val, then whatever mark it has is fine: if it was to
2873 push, it will now push to a more canonical node, but if
2874 it wasn't, then it has already pushed any values it might
2875 have to. */
2876 VALUE_RECURSED_INTO (node->loc) = true;
2877 /* Make sure we visit node->loc by ensuring that cval is
2878 visited too. */
2879 VALUE_RECURSED_INTO (cval) = true;
2881 else if (!VALUE_RECURSED_INTO (node->loc))
2882 /* If we have no need to "recurse" into this node, it's
2883 already "canonicalized", so drop the link to the old
2884 parent. */
2885 clobber_variable_part (set, cval, ndv, 0, NULL);
2887 else if (GET_CODE (node->loc) == REG)
2889 attrs list = set->regs[REGNO (node->loc)], *listp;
2891 /* Change an existing attribute referring to dv so that it
2892 refers to cdv, removing any duplicate this might
2893 introduce, and checking that no previous duplicates
2894 existed, all in a single pass. */
2896 while (list)
2898 if (list->offset == 0
2899 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2900 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2901 break;
2903 list = list->next;
2906 gcc_assert (list);
2907 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2909 list->dv = cdv;
2910 for (listp = &list->next; (list = *listp); listp = &list->next)
2912 if (list->offset)
2913 continue;
2915 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2917 *listp = list->next;
2918 pool_free (attrs_pool, list);
2919 list = *listp;
2920 break;
2923 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2926 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2928 for (listp = &list->next; (list = *listp); listp = &list->next)
2930 if (list->offset)
2931 continue;
2933 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2935 *listp = list->next;
2936 pool_free (attrs_pool, list);
2937 list = *listp;
2938 break;
2941 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2944 else
2945 gcc_unreachable ();
2947 #if ENABLE_CHECKING
2948 while (list)
2950 if (list->offset == 0
2951 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2952 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2953 gcc_unreachable ();
2955 list = list->next;
2957 #endif
2961 if (val)
2962 cslot = set_slot_part (set, val, cslot, cdv, 0,
2963 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2965 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2967 /* Variable may have been unshared. */
2968 var = (variable)*slot;
2969 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2970 && var->var_part[0].loc_chain->next == NULL);
2972 if (VALUE_RECURSED_INTO (cval))
2973 goto restart_with_cval;
2975 return 1;
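/* A before/after sketch of the star canonicalization; V1, V2 and the
   registers are hypothetical, and canon_value_cmp is assumed to prefer
   V1.  Before:  V1: [(reg 1)]  and  V2: [(reg 2), (value V1)].
   After processing V2's slot:  V1: [(reg 1), (reg 2), (value V2)]  and
   V2: [(value V1)].  All locations and equivalences are pushed to the
   most canonical value, and every other value in the set keeps a single
   link to it, giving the star shape.  */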
2978 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2979 corresponding entry in DSM->src. Multi-part variables are combined
2980 with variable_union, whereas onepart dvs are combined with
2981 intersection. */
2983 static int
2984 variable_merge_over_cur (void **s1slot, void *data)
2986 struct dfset_merge *dsm = (struct dfset_merge *)data;
2987 dataflow_set *dst = dsm->dst;
2988 void **dstslot;
2989 variable s1var = (variable) *s1slot;
2990 variable s2var, dvar = NULL;
2991 decl_or_value dv = s1var->dv;
2992 bool onepart = dv_onepart_p (dv);
2993 rtx val;
2994 hashval_t dvhash;
2995 location_chain node, *nodep;
2997 /* If the incoming onepart variable has an empty location list, then
2998 the intersection will be just as empty. For other variables,
2999 it's always union. */
3000 gcc_assert (s1var->n_var_parts);
3001 gcc_assert (s1var->var_part[0].loc_chain);
3003 if (!onepart)
3004 return variable_union (s1slot, dst);
3006 gcc_assert (s1var->n_var_parts == 1);
3007 gcc_assert (s1var->var_part[0].offset == 0);
3009 dvhash = dv_htab_hash (dv);
3010 if (dv_is_value_p (dv))
3011 val = dv_as_value (dv);
3012 else
3013 val = NULL;
3015 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3016 if (!s2var)
3018 dst_can_be_shared = false;
3019 return 1;
3022 dsm->src_onepart_cnt--;
3023 gcc_assert (s2var->var_part[0].loc_chain);
3024 gcc_assert (s2var->n_var_parts == 1);
3025 gcc_assert (s2var->var_part[0].offset == 0);
3027 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3028 if (dstslot)
3030 dvar = (variable)*dstslot;
3031 gcc_assert (dvar->refcount == 1);
3032 gcc_assert (dvar->n_var_parts == 1);
3033 gcc_assert (dvar->var_part[0].offset == 0);
3034 nodep = &dvar->var_part[0].loc_chain;
3036 else
3038 nodep = &node;
3039 node = NULL;
3042 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3044 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3045 dvhash, INSERT);
3046 *dstslot = dvar = s2var;
3047 dvar->refcount++;
3049 else
3051 dst_can_be_shared = false;
3053 intersect_loc_chains (val, nodep, dsm,
3054 s1var->var_part[0].loc_chain, s2var);
3056 if (!dstslot)
3058 if (node)
3060 dvar = (variable) pool_alloc (dv_pool (dv));
3061 dvar->dv = dv;
3062 dvar->refcount = 1;
3063 dvar->n_var_parts = 1;
3064 dvar->var_part[0].offset = 0;
3065 dvar->var_part[0].loc_chain = node;
3066 dvar->var_part[0].cur_loc = node->loc;
3068 dstslot
3069 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3070 INSERT);
3071 gcc_assert (!*dstslot);
3072 *dstslot = dvar;
3074 else
3075 return 1;
3079 nodep = &dvar->var_part[0].loc_chain;
3080 while ((node = *nodep))
3082 location_chain *nextp = &node->next;
3084 if (GET_CODE (node->loc) == REG)
3086 attrs list;
3088 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3089 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3090 && dv_is_value_p (list->dv))
3091 break;
3093 if (!list)
3094 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3095 dv, 0, node->loc);
3096 /* If this value became canonical for another value that had
3097 this register, we want to leave it alone. */
3098 else if (dv_as_value (list->dv) != val)
3100 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3101 dstslot, dv, 0,
3102 node->init, NULL_RTX);
3103 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3105 /* Since nextp points into the removed node, we can't
3106 use it. The pointer to the next node moved to nodep.
3107 However, if the variable we're walking is unshared
3108 during our walk, we'll keep walking the location list
3109 of the previously-shared variable, in which case the
3110 node won't have been removed, and we'll want to skip
3111 it. That's why we test *nodep here. */
3112 if (*nodep != node)
3113 nextp = nodep;
3116 else
3117 /* Canonicalization puts registers first, so we don't have to
3118 walk it all. */
3119 break;
3120 nodep = nextp;
3123 if (dvar != (variable)*dstslot)
3124 dvar = (variable)*dstslot;
3125 nodep = &dvar->var_part[0].loc_chain;
3127 if (val)
3129 /* Mark all referenced nodes for canonicalization, and make sure
3130 we have mutual equivalence links. */
3131 VALUE_RECURSED_INTO (val) = true;
3132 for (node = *nodep; node; node = node->next)
3133 if (GET_CODE (node->loc) == VALUE)
3135 VALUE_RECURSED_INTO (node->loc) = true;
3136 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3137 node->init, NULL, INSERT);
3140 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3141 gcc_assert (*dstslot == dvar);
3142 canonicalize_values_star (dstslot, dst);
3143 #ifdef ENABLE_CHECKING
3144 gcc_assert (dstslot
3145 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3146 #endif
3147 dvar = (variable)*dstslot;
3149 else
3151 bool has_value = false, has_other = false;
3153 /* If we have one value and anything else, we're going to
3154 canonicalize this, so make sure all values have an entry in
3155 the table and are marked for canonicalization. */
3156 for (node = *nodep; node; node = node->next)
3158 if (GET_CODE (node->loc) == VALUE)
3160 /* If this was marked during register canonicalization,
3161 we know we have to canonicalize values. */
3162 if (has_value)
3163 has_other = true;
3164 has_value = true;
3165 if (has_other)
3166 break;
3168 else
3170 has_other = true;
3171 if (has_value)
3172 break;
3176 if (has_value && has_other)
3178 for (node = *nodep; node; node = node->next)
3180 if (GET_CODE (node->loc) == VALUE)
3182 decl_or_value dv = dv_from_value (node->loc);
3183 void **slot = NULL;
3185 if (shared_hash_shared (dst->vars))
3186 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3187 if (!slot)
3188 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3189 INSERT);
3190 if (!*slot)
3192 variable var = (variable) pool_alloc (dv_pool (dv));
3193 var->dv = dv;
3194 var->refcount = 1;
3195 var->n_var_parts = 1;
3196 var->var_part[0].offset = 0;
3197 var->var_part[0].loc_chain = NULL;
3198 var->var_part[0].cur_loc = NULL;
3199 *slot = var;
3202 VALUE_RECURSED_INTO (node->loc) = true;
3206 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3207 gcc_assert (*dstslot == dvar);
3208 canonicalize_values_star (dstslot, dst);
3209 #ifdef ENABLE_CHECKING
3210 gcc_assert (dstslot
3211 == shared_hash_find_slot_noinsert_1 (dst->vars,
3212 dv, dvhash));
3213 #endif
3214 dvar = (variable)*dstslot;
3218 if (!onepart_variable_different_p (dvar, s2var))
3220 variable_htab_free (dvar);
3221 *dstslot = dvar = s2var;
3222 dvar->refcount++;
3224 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3226 variable_htab_free (dvar);
3227 *dstslot = dvar = s1var;
3228 dvar->refcount++;
3229 dst_can_be_shared = false;
3231 else
3233 if (dvar->refcount == 1)
3234 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3235 dst_can_be_shared = false;
3238 return 1;
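/* A concrete effect of the merge rule above; decl X and the locations
   are hypothetical.  If X is a one-part decl whose chain is
   [(reg 1), (mem (reg 6))] in DSM->cur but [(reg 1), (mem (reg 7))] in
   DSM->src, the merged DSM->dst keeps only the intersection [(reg 1)].
   A multi-part variable in the same situation would instead keep the
   union of both chains via variable_union.  */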
3241 /* Combine variable in *S2SLOT (in DSM->src) with the corresponding
3242 entry in DSM->dst. Only multi-part variables are combined, using
3243 variable_union. onepart dvs were already combined with
3244 intersection in variable_merge_over_cur(). */
3246 static int
3247 variable_merge_over_src (void **s2slot, void *data)
3249 struct dfset_merge *dsm = (struct dfset_merge *)data;
3250 dataflow_set *dst = dsm->dst;
3251 variable s2var = (variable) *s2slot;
3252 decl_or_value dv = s2var->dv;
3253 bool onepart = dv_onepart_p (dv);
3255 if (!onepart)
3257 void **dstp = shared_hash_find_slot (dst->vars, dv);
3258 *dstp = s2var;
3259 s2var->refcount++;
3260 return variable_canonicalize (dstp, dst);
3263 dsm->src_onepart_cnt++;
3264 return 1;
3267 /* Combine dataflow set information from SRC into DST. */
3270 static void
3271 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
3273 dataflow_set src2 = *dst;
3274 struct dfset_merge dsm;
3275 int i;
3276 size_t src_elems, dst_elems;
3278 src_elems = htab_elements (shared_hash_htab (src->vars));
3279 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3280 dataflow_set_init (dst);
3281 dst->stack_adjust = src2.stack_adjust;
3282 shared_hash_destroy (dst->vars);
3283 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3284 dst->vars->refcount = 1;
3285 dst->vars->htab
3286 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3287 variable_htab_eq, variable_htab_free);
3289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3290 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3292 dsm.dst = dst;
3293 dsm.src = &src2;
3294 dsm.cur = src;
3295 dsm.src_onepart_cnt = 0;
3297 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3298 &dsm);
3299 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3300 &dsm);
3302 if (dsm.src_onepart_cnt)
3303 dst_can_be_shared = false;
3305 dataflow_set_destroy (&src2);
3308 /* Mark register equivalences. */
3310 static void
3311 dataflow_set_equiv_regs (dataflow_set *set)
3313 int i;
3314 attrs list, *listp;
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3318 rtx canon[NUM_MACHINE_MODES];
3320 memset (canon, 0, sizeof (canon));
3322 for (list = set->regs[i]; list; list = list->next)
3323 if (list->offset == 0 && dv_is_value_p (list->dv))
3325 rtx val = dv_as_value (list->dv);
3326 rtx *cvalp = &canon[(int)GET_MODE (val)];
3327 rtx cval = *cvalp;
3329 if (canon_value_cmp (val, cval))
3330 *cvalp = val;
3333 for (list = set->regs[i]; list; list = list->next)
3334 if (list->offset == 0 && dv_onepart_p (list->dv))
3336 rtx cval = canon[(int)GET_MODE (list->loc)];
3338 if (!cval)
3339 continue;
3341 if (dv_is_value_p (list->dv))
3343 rtx val = dv_as_value (list->dv);
3345 if (val == cval)
3346 continue;
3348 VALUE_RECURSED_INTO (val) = true;
3349 set_variable_part (set, val, dv_from_value (cval), 0,
3350 VAR_INIT_STATUS_INITIALIZED,
3351 NULL, NO_INSERT);
3354 VALUE_RECURSED_INTO (cval) = true;
3355 set_variable_part (set, cval, list->dv, 0,
3356 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3359 for (listp = &set->regs[i]; (list = *listp);
3360 listp = list ? &list->next : listp)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3364 void **slot;
3366 if (!cval)
3367 continue;
3369 if (dv_is_value_p (list->dv))
3371 rtx val = dv_as_value (list->dv);
3372 if (!VALUE_RECURSED_INTO (val))
3373 continue;
3376 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3377 canonicalize_values_star (slot, set);
3378 if (*listp != list)
3379 list = NULL;
3384 /* Remove any redundant values in the location list of VAR, which must
3385 be unshared and 1-part. */
3387 static void
3388 remove_duplicate_values (variable var)
3390 location_chain node, *nodep;
3392 gcc_assert (dv_onepart_p (var->dv));
3393 gcc_assert (var->n_var_parts == 1);
3394 gcc_assert (var->refcount == 1);
3396 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3398 if (GET_CODE (node->loc) == VALUE)
3400 if (VALUE_RECURSED_INTO (node->loc))
3402 /* Remove duplicate value node. */
3403 *nodep = node->next;
3404 pool_free (loc_chain_pool, node);
3405 continue;
3407 else
3408 VALUE_RECURSED_INTO (node->loc) = true;
3410 nodep = &node->next;
3413 for (node = var->var_part[0].loc_chain; node; node = node->next)
3414 if (GET_CODE (node->loc) == VALUE)
3416 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3417 VALUE_RECURSED_INTO (node->loc) = false;
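/* A sketch of the two passes above on a hypothetical chain
   [(value V), (reg 1), (value V)]: the first pass marks V when the first
   node is seen and drops the second (value V) node because the mark is
   already set; the second pass walks the surviving chain and clears the
   marks so VALUE_RECURSED_INTO stays free for other uses.  */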
3422 /* Hash table iteration argument passed to variable_post_merge. */
3423 struct dfset_post_merge
3425 /* The new input set for the current block. */
3426 dataflow_set *set;
3427 /* Pointer to the permanent input set for the current block, or
3428 NULL. */
3429 dataflow_set **permp;
3432 /* Create values for incoming expressions associated with one-part
3433 variables that don't have value numbers for them. */
3435 static int
3436 variable_post_merge_new_vals (void **slot, void *info)
3438 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3439 dataflow_set *set = dfpm->set;
3440 variable var = (variable)*slot;
3441 location_chain node;
3443 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3444 return 1;
3446 gcc_assert (var->n_var_parts == 1);
3448 if (dv_is_decl_p (var->dv))
3450 bool check_dupes = false;
3452 restart:
3453 for (node = var->var_part[0].loc_chain; node; node = node->next)
3455 if (GET_CODE (node->loc) == VALUE)
3456 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3457 else if (GET_CODE (node->loc) == REG)
3459 attrs att, *attp, *curp = NULL;
3461 if (var->refcount != 1)
3463 slot = unshare_variable (set, slot, var,
3464 VAR_INIT_STATUS_INITIALIZED);
3465 var = (variable)*slot;
3466 goto restart;
3469 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3470 attp = &att->next)
3471 if (att->offset == 0
3472 && GET_MODE (att->loc) == GET_MODE (node->loc))
3474 if (dv_is_value_p (att->dv))
3476 rtx cval = dv_as_value (att->dv);
3477 node->loc = cval;
3478 check_dupes = true;
3479 break;
3481 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3482 curp = attp;
3485 if (!curp)
3487 curp = attp;
3488 while (*curp)
3489 if ((*curp)->offset == 0
3490 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3491 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3492 break;
3493 else
3494 curp = &(*curp)->next;
3495 gcc_assert (*curp);
3498 if (!att)
3500 decl_or_value cdv;
3501 rtx cval;
3503 if (!*dfpm->permp)
3505 *dfpm->permp = XNEW (dataflow_set);
3506 dataflow_set_init (*dfpm->permp);
3509 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3510 att; att = att->next)
3511 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3513 gcc_assert (att->offset == 0);
3514 gcc_assert (dv_is_value_p (att->dv));
3515 val_reset (set, att->dv);
3516 break;
3519 if (att)
3521 cdv = att->dv;
3522 cval = dv_as_value (cdv);
3524 else
3526 /* Create a unique value to hold this register,
3527 that ought to be found and reused in
3528 subsequent rounds. */
3529 cselib_val *v;
3530 gcc_assert (!cselib_lookup (node->loc,
3531 GET_MODE (node->loc), 0));
3532 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3533 cselib_preserve_value (v);
3534 cselib_invalidate_rtx (node->loc);
3535 cval = v->val_rtx;
3536 cdv = dv_from_value (cval);
3537 if (dump_file)
3538 fprintf (dump_file,
3539 "Created new value %i for reg %i\n",
3540 v->value, REGNO (node->loc));
3543 var_reg_decl_set (*dfpm->permp, node->loc,
3544 VAR_INIT_STATUS_INITIALIZED,
3545 cdv, 0, NULL, INSERT);
3547 node->loc = cval;
3548 check_dupes = true;
3551 /* Remove attribute referring to the decl, which now
3552 uses the value for the register, already existing or
3553 to be added when we bring perm in. */
3554 att = *curp;
3555 *curp = att->next;
3556 pool_free (attrs_pool, att);
3560 if (check_dupes)
3561 remove_duplicate_values (var);
3564 return 1;
3567 /* Reset values in the permanent set that are not associated with the
3568 chosen expression. */
3570 static int
3571 variable_post_merge_perm_vals (void **pslot, void *info)
3573 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3574 dataflow_set *set = dfpm->set;
3575 variable pvar = (variable)*pslot, var;
3576 location_chain pnode;
3577 decl_or_value dv;
3578 attrs att;
3580 gcc_assert (dv_is_value_p (pvar->dv));
3581 gcc_assert (pvar->n_var_parts == 1);
3582 pnode = pvar->var_part[0].loc_chain;
3583 gcc_assert (pnode);
3584 gcc_assert (!pnode->next);
3585 gcc_assert (REG_P (pnode->loc));
3587 dv = pvar->dv;
3589 var = shared_hash_find (set->vars, dv);
3590 if (var)
3592 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3593 return 1;
3594 val_reset (set, dv);
3597 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3598 if (att->offset == 0
3599 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3600 && dv_is_value_p (att->dv))
3601 break;
3603 /* If there is a value associated with this register already, create
3604 an equivalence. */
3605 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3607 rtx cval = dv_as_value (att->dv);
3608 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3609 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3610 NULL, INSERT);
3612 else if (!att)
3614 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3615 dv, 0, pnode->loc);
3616 variable_union (pslot, set);
3619 return 1;
3622 /* Just checking stuff and registering register attributes for
3623 now. */
3625 static void
3626 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3628 struct dfset_post_merge dfpm;
3630 dfpm.set = set;
3631 dfpm.permp = permp;
3633 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3634 &dfpm);
3635 if (*permp)
3636 htab_traverse (shared_hash_htab ((*permp)->vars),
3637 variable_post_merge_perm_vals, &dfpm);
3638 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3641 /* Return a node whose loc is a MEM that refers to EXPR in the
3642 location list of a one-part variable or value VAR, or in that of
3643 any values recursively mentioned in the location lists. */
3645 static location_chain
3646 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3648 location_chain node;
3649 decl_or_value dv;
3650 variable var;
3651 location_chain where = NULL;
3653 if (!val)
3654 return NULL;
3656 gcc_assert (GET_CODE (val) == VALUE);
3658 gcc_assert (!VALUE_RECURSED_INTO (val));
3660 dv = dv_from_value (val);
3661 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3663 if (!var)
3664 return NULL;
3666 gcc_assert (dv_onepart_p (var->dv));
3668 if (!var->n_var_parts)
3669 return NULL;
3671 gcc_assert (var->var_part[0].offset == 0);
3673 VALUE_RECURSED_INTO (val) = true;
3675 for (node = var->var_part[0].loc_chain; node; node = node->next)
3676 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3677 && MEM_OFFSET (node->loc) == 0)
3679 where = node;
3680 break;
3682 else if (GET_CODE (node->loc) == VALUE
3683 && !VALUE_RECURSED_INTO (node->loc)
3684 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3685 break;
3687 VALUE_RECURSED_INTO (val) = false;
3689 return where;
3692 /* Remove all MEMs from the location list of a hash table entry for a
3693 one-part variable, except those whose MEM attributes map back to
3694 the variable itself, directly or within a VALUE.
3696 ??? We could also preserve MEMs that reference stack slots that are
3697 annotated as not addressable. This is arguably even more reliable
3698 than the current heuristic. */
3700 static int
3701 dataflow_set_preserve_mem_locs (void **slot, void *data)
3703 dataflow_set *set = (dataflow_set *) data;
3704 variable var = (variable) *slot;
3706 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3708 tree decl = dv_as_decl (var->dv);
3709 location_chain loc, *locp;
3711 if (!var->n_var_parts)
3712 return 1;
3714 gcc_assert (var->n_var_parts == 1);
3716 if (var->refcount > 1 || shared_hash_shared (set->vars))
3718 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3720 /* We want to remove a MEM that doesn't refer to DECL. */
3721 if (GET_CODE (loc->loc) == MEM
3722 && (MEM_EXPR (loc->loc) != decl
3723 || MEM_OFFSET (loc->loc)))
3724 break;
3725 /* We want to move here a MEM that does refer to DECL. */
3726 else if (GET_CODE (loc->loc) == VALUE
3727 && find_mem_expr_in_1pdv (decl, loc->loc,
3728 shared_hash_htab (set->vars)))
3729 break;
3732 if (!loc)
3733 return 1;
3735 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3736 var = (variable)*slot;
3737 gcc_assert (var->n_var_parts == 1);
3740 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3741 loc; loc = *locp)
3743 rtx old_loc = loc->loc;
3744 if (GET_CODE (old_loc) == VALUE)
3746 location_chain mem_node
3747 = find_mem_expr_in_1pdv (decl, loc->loc,
3748 shared_hash_htab (set->vars));
3750 /* ??? This picks up only one out of multiple MEMs that
3751 refer to the same variable. Do we ever need to be
3752 concerned about dealing with more than one, or, given
3753 that they should all map to the same variable
3754 location, their addresses will have been merged and
3755 they will be regarded as equivalent? */
3756 if (mem_node)
3758 loc->loc = mem_node->loc;
3759 loc->set_src = mem_node->set_src;
3760 loc->init = MIN (loc->init, mem_node->init);
3764 if (GET_CODE (loc->loc) != MEM
3765 || (MEM_EXPR (loc->loc) == decl
3766 && MEM_OFFSET (loc->loc) == 0))
3768 if (old_loc != loc->loc && emit_notes)
3770 add_value_chains (var->dv, loc->loc);
3771 remove_value_chains (var->dv, old_loc);
3773 locp = &loc->next;
3774 continue;
3777 if (emit_notes)
3778 remove_value_chains (var->dv, old_loc);
3779 *locp = loc->next;
3780 pool_free (loc_chain_pool, loc);
3783 if (!var->var_part[0].loc_chain)
3785 var->n_var_parts--;
3786 if (emit_notes && dv_is_value_p (var->dv))
3787 remove_cselib_value_chains (var->dv);
3788 variable_was_changed (var, set);
3792 return 1;
3795 /* Remove all MEMs from the location list of a hash table entry for a
3796 value. */
3798 static int
3799 dataflow_set_remove_mem_locs (void **slot, void *data)
3801 dataflow_set *set = (dataflow_set *) data;
3802 variable var = (variable) *slot;
3804 if (dv_is_value_p (var->dv))
3806 location_chain loc, *locp;
3807 bool changed = false;
3809 gcc_assert (var->n_var_parts == 1);
3811 if (var->refcount > 1 || shared_hash_shared (set->vars))
3813 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3814 if (GET_CODE (loc->loc) == MEM)
3815 break;
3817 if (!loc)
3818 return 1;
3820 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3821 var = (variable)*slot;
3822 gcc_assert (var->n_var_parts == 1);
3825 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3826 loc; loc = *locp)
3828 if (GET_CODE (loc->loc) != MEM)
3830 locp = &loc->next;
3831 continue;
3834 if (emit_notes)
3835 remove_value_chains (var->dv, loc->loc);
3836 *locp = loc->next;
3837 /* If we have deleted the location which was last emitted,
3838 we have to emit a new location, so add the variable to the set
3839 of changed variables. */
3840 if (var->var_part[0].cur_loc
3841 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3842 changed = true;
3843 pool_free (loc_chain_pool, loc);
3846 if (!var->var_part[0].loc_chain)
3848 var->n_var_parts--;
3849 if (emit_notes && dv_is_value_p (var->dv))
3850 remove_cselib_value_chains (var->dv);
3851 gcc_assert (changed);
3853 if (changed)
3855 if (var->n_var_parts && var->var_part[0].loc_chain)
3856 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3857 variable_was_changed (var, set);
3861 return 1;
3864 /* Remove all variable-location information about call-clobbered
3865 registers, as well as associations between MEMs and VALUEs. */
3867 static void
3868 dataflow_set_clear_at_call (dataflow_set *set)
3870 int r;
3872 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3873 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3874 var_regno_delete (set, r);
3876 if (MAY_HAVE_DEBUG_INSNS)
3878 set->traversed_vars = set->vars;
3879 htab_traverse (shared_hash_htab (set->vars),
3880 dataflow_set_preserve_mem_locs, set);
3881 set->traversed_vars = set->vars;
3882 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3883 set);
3884 set->traversed_vars = NULL;
3888 /* Flag whether two dataflow sets being compared contain different data. */
3889 static bool
3890 dataflow_set_different_value;
3892 static bool
3893 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3895 location_chain lc1, lc2;
3897 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3899 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3901 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3903 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3904 break;
3906 if (rtx_equal_p (lc1->loc, lc2->loc))
3907 break;
3909 if (!lc2)
3910 return true;
3912 return false;
3915 /* Return true if one-part variables VAR1 and VAR2 are different.
3916 They must be in canonical order. */
3918 static bool
3919 onepart_variable_different_p (variable var1, variable var2)
3921 location_chain lc1, lc2;
3923 if (var1 == var2)
3924 return false;
3926 gcc_assert (var1->n_var_parts == 1);
3927 gcc_assert (var2->n_var_parts == 1);
3929 lc1 = var1->var_part[0].loc_chain;
3930 lc2 = var2->var_part[0].loc_chain;
3932 gcc_assert (lc1);
3933 gcc_assert (lc2);
3935 while (lc1 && lc2)
3937 if (loc_cmp (lc1->loc, lc2->loc))
3938 return true;
3939 lc1 = lc1->next;
3940 lc2 = lc2->next;
3943 return lc1 != lc2;
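/* Because one-part chains are kept in loc_cmp order, a single parallel
   walk decides the comparison.  For hypothetical chains:

     [(reg 1), (value V)] vs. [(reg 1), (value V)]  -> not different
     [(reg 1), (value V)] vs. [(reg 1)]             -> different (length)
     [(reg 1), (value V)] vs. [(reg 2), (value V)]  -> different (loc_cmp)  */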
3946 /* Return true if variables VAR1 and VAR2 are different.
3947 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
3948 variable part. */
3950 static bool
3951 variable_different_p (variable var1, variable var2,
3952 bool compare_current_location)
3954 int i;
3956 if (var1 == var2)
3957 return false;
3959 if (var1->n_var_parts != var2->n_var_parts)
3960 return true;
3962 for (i = 0; i < var1->n_var_parts; i++)
3964 if (var1->var_part[i].offset != var2->var_part[i].offset)
3965 return true;
3966 if (compare_current_location)
3968 if (!((REG_P (var1->var_part[i].cur_loc)
3969 && REG_P (var2->var_part[i].cur_loc)
3970 && (REGNO (var1->var_part[i].cur_loc)
3971 == REGNO (var2->var_part[i].cur_loc)))
3972 || rtx_equal_p (var1->var_part[i].cur_loc,
3973 var2->var_part[i].cur_loc)))
3974 return true;
3976 /* One-part values have locations in a canonical order. */
3977 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
3979 gcc_assert (var1->n_var_parts == 1);
3980 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
3981 return onepart_variable_different_p (var1, var2);
3983 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
3984 return true;
3985 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
3986 return true;
3988 return false;
3991 /* Compare variable *SLOT with the same variable in hash table DATA
3992 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
3994 static int
3995 dataflow_set_different_1 (void **slot, void *data)
3997 htab_t htab = (htab_t) data;
3998 variable var1, var2;
4000 var1 = (variable) *slot;
4001 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4002 dv_htab_hash (var1->dv));
4003 if (!var2)
4005 dataflow_set_different_value = true;
4007 if (dump_file && (dump_flags & TDF_DETAILS))
4009 fprintf (dump_file, "dataflow difference found: removal of:\n");
4010 dump_variable (var1);
4013 /* Stop traversing the hash table. */
4014 return 0;
4017 if (variable_different_p (var1, var2, false))
4019 dataflow_set_different_value = true;
4021 if (dump_file && (dump_flags & TDF_DETAILS))
4023 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4024 dump_variable (var1);
4025 dump_variable (var2);
4028 /* Stop traversing the hash table. */
4029 return 0;
4032 /* Continue traversing the hash table. */
4033 return 1;
4036 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4038 static bool
4039 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4041 if (old_set->vars == new_set->vars)
4042 return false;
4044 if (htab_elements (shared_hash_htab (old_set->vars))
4045 != htab_elements (shared_hash_htab (new_set->vars)))
4046 return true;
4048 dataflow_set_different_value = false;
4050 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4051 shared_hash_htab (new_set->vars));
4052 /* No need to traverse the second hashtab: if both have the same number
4053 of elements and every entry of the first one was found in the second,
4054 then the second can't have any extra entries. */
4055 return dataflow_set_different_value;
4058 /* Free the contents of dataflow set SET. */
4060 static void
4061 dataflow_set_destroy (dataflow_set *set)
4063 int i;
4065 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4066 attrs_list_clear (&set->regs[i]);
4068 shared_hash_destroy (set->vars);
4069 set->vars = NULL;
4072 /* Return true if RTL X contains a SYMBOL_REF. */
4074 static bool
4075 contains_symbol_ref (rtx x)
4077 const char *fmt;
4078 RTX_CODE code;
4079 int i;
4081 if (!x)
4082 return false;
4084 code = GET_CODE (x);
4085 if (code == SYMBOL_REF)
4086 return true;
4088 fmt = GET_RTX_FORMAT (code);
4089 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4091 if (fmt[i] == 'e')
4093 if (contains_symbol_ref (XEXP (x, i)))
4094 return true;
4096 else if (fmt[i] == 'E')
4098 int j;
4099 for (j = 0; j < XVECLEN (x, i); j++)
4100 if (contains_symbol_ref (XVECEXP (x, i, j)))
4101 return true;
4105 return false;
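/* For instance (hypothetical RTL), contains_symbol_ref returns true for
   (symbol_ref:SI ("some_global")) and for
   (plus:SI (symbol_ref:SI ("some_global")) (const_int 4)), but false for
   (reg:SI 1) or (mem:SI (reg:SI 6)); the alias check in track_expr_p
   below relies on this.  */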
4108 /* Shall EXPR be tracked? */
4110 static bool
4111 track_expr_p (tree expr, bool need_rtl)
4113 rtx decl_rtl;
4114 tree realdecl;
4116 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4117 return DECL_RTL_SET_P (expr);
4119 /* If EXPR is not a parameter or a variable do not track it. */
4120 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4121 return 0;
4123 /* It also must have a name... */
4124 if (!DECL_NAME (expr))
4125 return 0;
4127 /* ... and a RTL assigned to it. */
4128 decl_rtl = DECL_RTL_IF_SET (expr);
4129 if (!decl_rtl && need_rtl)
4130 return 0;
4132 /* If this expression is really a debug alias of some other declaration, we
4133 don't need to track this expression if the ultimate declaration is
4134 ignored. */
4135 realdecl = expr;
4136 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4138 realdecl = DECL_DEBUG_EXPR (realdecl);
4139 /* ??? We don't yet know how to emit DW_OP_piece for a variable
4140 that has been SRA'ed. */
4141 if (!DECL_P (realdecl))
4142 return 0;
4145 /* Do not track EXPR if REALDECL should be ignored for debugging
4146 purposes. */
4147 if (DECL_IGNORED_P (realdecl))
4148 return 0;
4150 /* Do not track global variables until we are able to emit correct location
4151 list for them. */
4152 if (TREE_STATIC (realdecl))
4153 return 0;
4155 /* When the EXPR is a DECL for alias of some variable (see example)
4156 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4157 DECL_RTL contains SYMBOL_REF.
4159 Example:
4160 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4161 char **_dl_argv;
4163 if (decl_rtl && MEM_P (decl_rtl)
4164 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4165 return 0;
4167 /* If RTX is a memory it should not be very large (because it would be
4168 an array or struct). */
4169 if (decl_rtl && MEM_P (decl_rtl))
4171 /* Do not track structures and arrays. */
4172 if (GET_MODE (decl_rtl) == BLKmode
4173 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4174 return 0;
4175 if (MEM_SIZE (decl_rtl)
4176 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4177 return 0;
4180 DECL_CHANGED (expr) = 0;
4181 DECL_CHANGED (realdecl) = 0;
4182 return 1;
4185 /* Determine whether a given LOC refers to the same variable part as
4186 EXPR+OFFSET. */
4188 static bool
4189 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4191 tree expr2;
4192 HOST_WIDE_INT offset2;
4194 if (! DECL_P (expr))
4195 return false;
4197 if (REG_P (loc))
4199 expr2 = REG_EXPR (loc);
4200 offset2 = REG_OFFSET (loc);
4202 else if (MEM_P (loc))
4204 expr2 = MEM_EXPR (loc);
4205 offset2 = INT_MEM_OFFSET (loc);
4207 else
4208 return false;
4210 if (! expr2 || ! DECL_P (expr2))
4211 return false;
4213 expr = var_debug_decl (expr);
4214 expr2 = var_debug_decl (expr2);
4216 return (expr == expr2 && offset == offset2);
4219 /* LOC is a REG or MEM that we would like to track if possible.
4220 If EXPR is null, we don't know what expression LOC refers to,
4221 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4222 LOC is an lvalue register.
4224 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4225 is something we can track. When returning true, store the mode of
4226 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4227 from EXPR in *OFFSET_OUT (if nonnull). */
4229 static bool
4230 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4231 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4233 enum machine_mode mode;
4235 if (expr == NULL || !track_expr_p (expr, true))
4236 return false;
4238 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4239 whole subreg, but only the old inner part is really relevant. */
4240 mode = GET_MODE (loc);
4241 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4243 enum machine_mode pseudo_mode;
4245 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4246 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4248 offset += byte_lowpart_offset (pseudo_mode, mode);
4249 mode = pseudo_mode;
4253 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4254 Do the same if we are storing to a register and EXPR occupies
4255 the whole of register LOC; in that case, the whole of EXPR is
4256 being changed. We exclude complex modes from the second case
4257 because the real and imaginary parts are represented as separate
4258 pseudo registers, even if the whole complex value fits into one
4259 hard register. */
4260 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4261 || (store_reg_p
4262 && !COMPLEX_MODE_P (DECL_MODE (expr))
4263 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4264 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4266 mode = DECL_MODE (expr);
4267 offset = 0;
4270 if (offset < 0 || offset >= MAX_VAR_PARTS)
4271 return false;
4273 if (mode_out)
4274 *mode_out = mode;
4275 if (offset_out)
4276 *offset_out = offset;
4277 return true;
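/* A rough sketch of the paradoxical-subreg adjustment above (modes and
   register numbers are invented): if a HImode pseudo for "short s" was
   widened to a paradoxical (subreg:SI ...) and LOC is now (reg:SI 0)
   whose REG_ATTRS describe the whole SImode subreg, PSEUDO_REGNO_MODE
   of ORIGINAL_REGNO (LOC) is still HImode, so MODE is narrowed back to
   HImode and OFFSET is adjusted by byte_lowpart_offset (HImode, SImode)
   before the MAX_VAR_PARTS check. */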
4280 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4281 want to track. When returning nonnull, make sure that the attributes
4282 on the returned value are updated. */
4284 static rtx
4285 var_lowpart (enum machine_mode mode, rtx loc)
4287 unsigned int offset, reg_offset, regno;
4289 if (!REG_P (loc) && !MEM_P (loc))
4290 return NULL;
4292 if (GET_MODE (loc) == mode)
4293 return loc;
4295 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4297 if (MEM_P (loc))
4298 return adjust_address_nv (loc, mode, offset);
4300 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4301 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4302 reg_offset, mode);
4303 return gen_rtx_REG_offset (loc, mode, regno, offset);
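/* For example (schematic only): var_lowpart (QImode, (reg:SI 0 ax))
   returns (reg:QI 0 ax) with its REG_ATTRS adjusted for the lowpart
   byte offset, while var_lowpart (QImode, (mem:SI (reg:SI 1))) goes
   through adjust_address_nv and returns the MEM narrowed to QImode at
   the appropriate lowpart offset. */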
4306 /* Carry information about uses and stores while walking rtx. */
4308 struct count_use_info
4310 /* The insn where the RTX is. */
4311 rtx insn;
4313 /* The basic block where insn is. */
4314 basic_block bb;
4316 /* The array of n_sets sets in the insn, as determined by cselib. */
4317 struct cselib_set *sets;
4318 int n_sets;
4320 /* True if we're counting stores, false otherwise. */
4321 bool store_p;
4324 /* Find a VALUE corresponding to X. */
4326 static inline cselib_val *
4327 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4329 int i;
4331 if (cui->sets)
4333 /* This is called after uses are set up and before stores are
4334 processed by cselib, so it's safe to look up srcs, but not
4335 dsts. So we look up expressions that appear in srcs or in
4336 dest expressions, but we search the sets array for dests of
4337 stores. */
4338 if (cui->store_p)
4340 for (i = 0; i < cui->n_sets; i++)
4341 if (cui->sets[i].dest == x)
4342 return cui->sets[i].src_elt;
4344 else
4345 return cselib_lookup (x, mode, 0);
4348 return NULL;
4351 /* Replace all registers and addresses in an expression with VALUE
4352 expressions that map back to them, unless the expression is a
4353 register. If no mapping is or can be performed, returns NULL. */
4355 static rtx
4356 replace_expr_with_values (rtx loc)
4358 if (REG_P (loc))
4359 return NULL;
4360 else if (MEM_P (loc))
4362 enum machine_mode address_mode
4363 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4364 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4365 if (addr)
4366 return replace_equiv_address_nv (loc, addr->val_rtx);
4367 else
4368 return NULL;
4370 else
4371 return cselib_subst_to_values (loc);
4374 /* Determine what kind of micro operation to choose for a USE. Return
4375 MO_CLOBBER if no micro operation is to be generated. */
4377 static enum micro_operation_type
4378 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4380 tree expr;
4381 cselib_val *val;
4383 if (cui && cui->sets)
4385 if (GET_CODE (loc) == VAR_LOCATION)
4387 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4389 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4390 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4392 /* ??? flag_float_store and volatile mems are never
4393 given values, but we could in theory use them for
4394 locations. */
4395 gcc_assert (val || 1);
4396 return MO_VAL_LOC;
4398 else
4399 return MO_CLOBBER;
4402 if ((REG_P (loc) || MEM_P (loc))
4403 && (val = find_use_val (loc, GET_MODE (loc), cui)))
4405 if (modep)
4406 *modep = GET_MODE (loc);
4407 if (cui->store_p)
4409 if (REG_P (loc)
4410 || cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0))
4411 return MO_VAL_SET;
4413 else if (!cselib_preserved_value_p (val))
4414 return MO_VAL_USE;
4418 if (REG_P (loc))
4420 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4422 expr = REG_EXPR (loc);
4424 if (!expr)
4425 return MO_USE_NO_VAR;
4426 else if (target_for_debug_bind (var_debug_decl (expr)))
4427 return MO_CLOBBER;
4428 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4429 false, modep, NULL))
4430 return MO_USE;
4431 else
4432 return MO_USE_NO_VAR;
4434 else if (MEM_P (loc))
4436 expr = MEM_EXPR (loc);
4438 if (!expr)
4439 return MO_CLOBBER;
4440 else if (target_for_debug_bind (var_debug_decl (expr)))
4441 return MO_CLOBBER;
4442 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4443 false, modep, NULL))
4444 return MO_USE;
4445 else
4446 return MO_CLOBBER;
4449 return MO_CLOBBER;
4452 /* Log to OUT information about micro-operation MOPT involving X in
4453 INSN of BB. */
4455 static inline void
4456 log_op_type (rtx x, basic_block bb, rtx insn,
4457 enum micro_operation_type mopt, FILE *out)
4459 fprintf (out, "bb %i op %i insn %i %s ",
4460 bb->index, VTI (bb)->n_mos - 1,
4461 INSN_UID (insn), micro_operation_type_name[mopt]);
4462 print_inline_rtx (out, x, 2);
4463 fputc ('\n', out);
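/* In a detailed RTL dump the line printed above looks roughly like
   this (all numbers invented):
   bb 4 op 2 insn 23 MO_USE (reg:SI 0 ax [ i ])  */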
4466 /* Count the use LOC (a register or memory reference) if it will be
4467 tracked. INSN is the instruction which LOC is part of. */
4469 static int
4470 count_uses (rtx *ploc, void *cuip)
4472 rtx loc = *ploc;
4473 struct count_use_info *cui = (struct count_use_info *) cuip;
4474 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4476 if (mopt != MO_CLOBBER)
4478 cselib_val *val;
4479 enum machine_mode mode = GET_MODE (loc);
4481 VTI (cui->bb)->n_mos++;
4483 if (dump_file && (dump_flags & TDF_DETAILS))
4484 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
4486 switch (mopt)
4488 case MO_VAL_LOC:
4489 loc = PAT_VAR_LOCATION_LOC (loc);
4490 if (VAR_LOC_UNKNOWN_P (loc))
4491 break;
4492 /* Fall through. */
4494 case MO_VAL_USE:
4495 case MO_VAL_SET:
4496 if (MEM_P (loc)
4497 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4499 enum machine_mode address_mode
4500 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4501 val = cselib_lookup (XEXP (loc, 0), address_mode, false);
4503 if (val && !cselib_preserved_value_p (val))
4505 VTI (cui->bb)->n_mos++;
4506 cselib_preserve_value (val);
4510 val = find_use_val (loc, mode, cui);
4511 if (val)
4513 if (mopt == MO_VAL_SET
4514 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4515 && (REG_P (loc)
4516 || (MEM_P (loc)
4517 && (use_type (loc, NULL, NULL) == MO_USE
4518 || cui->sets))))
4520 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4522 gcc_assert (oval != val);
4523 gcc_assert (REG_P (loc) || MEM_P (loc));
4525 if (!cselib_preserved_value_p (oval))
4527 VTI (cui->bb)->n_mos++;
4528 cselib_preserve_value (oval);
4532 cselib_preserve_value (val);
4534 else
4535 gcc_assert (mopt == MO_VAL_LOC);
4537 break;
4539 default:
4540 break;
4544 return 0;
4547 /* Helper function for finding all uses of REG/MEM in X in CUI's
4548 insn. */
4550 static void
4551 count_uses_1 (rtx *x, void *cui)
4553 for_each_rtx (x, count_uses, cui);
4556 /* Count the store to LOC (a register or memory reference) if it will
4557 be tracked. CUI is a count_use_info object containing the instruction
4558 which LOC is part of. */
4560 static void
4561 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4563 count_uses (&loc, cui);
4566 /* Callback for cselib_record_sets_hook that counts how many micro
4567 operations are needed for the uses and stores in an insn. It is
4568 normally called after cselib_record_sets has analyzed the sets in
4569 the insn, but before it modifies the stored values in its internal
4570 tables. When we are not doing cselib in the first place, it is
4571 called directly instead, in which case SETS and N_SETS
4572 will be 0. */
4574 static void
4575 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4577 basic_block bb = BLOCK_FOR_INSN (insn);
4578 struct count_use_info cui;
4580 cselib_hook_called = true;
4582 cui.insn = insn;
4583 cui.bb = bb;
4584 cui.sets = sets;
4585 cui.n_sets = n_sets;
4587 cui.store_p = false;
4588 note_uses (&PATTERN (insn), count_uses_1, &cui);
4589 cui.store_p = true;
4590 note_stores (PATTERN (insn), count_stores, &cui);
4593 /* Tell whether the CONCAT used to hold a VALUE and its location
4594 needs value resolution, i.e., an attempt at mapping the location
4595 back to other incoming values. */
4596 #define VAL_NEEDS_RESOLUTION(x) \
4597 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4598 /* Whether the location in the CONCAT is a tracked expression that
4599 should also be handled like a MO_USE. */
4600 #define VAL_HOLDS_TRACK_EXPR(x) \
4601 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4602 /* Whether the location in the CONCAT should be handled like a MO_COPY
4603 as well. */
4604 #define VAL_EXPR_IS_COPIED(x) \
4605 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4606 /* Whether the location in the CONCAT should be handled like a
4607 MO_CLOBBER as well. */
4608 #define VAL_EXPR_IS_CLOBBERED(x) \
4609 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
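/* These flags piggy-back on rtx flag bits (volatil, used, jump and
   unchanging) that are not otherwise used on CONCATs; they annotate
   the CONCATs built for MO_VAL_* micro operations below, e.g.
   VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;  */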
4611 /* Add the use LOC (a register or memory reference) to VTI (bb)->mos
4612 if it will be tracked. INSN is the instruction which LOC is part of. */
4614 static int
4615 add_uses (rtx *ploc, void *data)
4617 rtx loc = *ploc;
4618 enum machine_mode mode = VOIDmode;
4619 struct count_use_info *cui = (struct count_use_info *)data;
4620 enum micro_operation_type type = use_type (loc, cui, &mode);
4622 if (type != MO_CLOBBER)
4624 basic_block bb = cui->bb;
4625 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4627 mo->type = type;
4628 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4629 mo->insn = cui->insn;
4631 if (type == MO_VAL_LOC)
4633 rtx oloc = loc;
4634 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4635 cselib_val *val;
4637 gcc_assert (cui->sets);
4639 if (MEM_P (vloc)
4640 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4642 rtx mloc = vloc;
4643 enum machine_mode address_mode
4644 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4645 cselib_val *val
4646 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4648 if (val && !cselib_preserved_value_p (val))
4650 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4651 mon->type = mo->type;
4652 mon->u.loc = mo->u.loc;
4653 mon->insn = mo->insn;
4654 cselib_preserve_value (val);
4655 mo->type = MO_VAL_USE;
4656 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4657 mo->u.loc = gen_rtx_CONCAT (address_mode,
4658 val->val_rtx, mloc);
4659 if (dump_file && (dump_flags & TDF_DETAILS))
4660 log_op_type (mo->u.loc, cui->bb, cui->insn,
4661 mo->type, dump_file);
4662 mo = mon;
4666 if (!VAR_LOC_UNKNOWN_P (vloc)
4667 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4669 enum machine_mode mode2;
4670 enum micro_operation_type type2;
4671 rtx nloc = replace_expr_with_values (vloc);
4673 if (nloc)
4675 oloc = shallow_copy_rtx (oloc);
4676 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4679 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4681 type2 = use_type (vloc, 0, &mode2);
4683 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4684 || type2 == MO_CLOBBER);
4686 if (type2 == MO_CLOBBER
4687 && !cselib_preserved_value_p (val))
4689 VAL_NEEDS_RESOLUTION (oloc) = 1;
4690 cselib_preserve_value (val);
4693 else if (!VAR_LOC_UNKNOWN_P (vloc))
4695 oloc = shallow_copy_rtx (oloc);
4696 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4699 mo->u.loc = oloc;
4701 else if (type == MO_VAL_USE)
4703 enum machine_mode mode2 = VOIDmode;
4704 enum micro_operation_type type2;
4705 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4706 rtx vloc, oloc = loc, nloc;
4708 gcc_assert (cui->sets);
4710 if (MEM_P (oloc)
4711 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4713 rtx mloc = oloc;
4714 enum machine_mode address_mode
4715 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4716 cselib_val *val
4717 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4719 if (val && !cselib_preserved_value_p (val))
4721 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4722 mon->type = mo->type;
4723 mon->u.loc = mo->u.loc;
4724 mon->insn = mo->insn;
4725 cselib_preserve_value (val);
4726 mo->type = MO_VAL_USE;
4727 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4728 mo->u.loc = gen_rtx_CONCAT (address_mode,
4729 val->val_rtx, mloc);
4730 mo->insn = cui->insn;
4731 if (dump_file && (dump_flags & TDF_DETAILS))
4732 log_op_type (mo->u.loc, cui->bb, cui->insn,
4733 mo->type, dump_file);
4734 mo = mon;
4738 type2 = use_type (loc, 0, &mode2);
4740 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4741 || type2 == MO_CLOBBER);
4743 if (type2 == MO_USE)
4744 vloc = var_lowpart (mode2, loc);
4745 else
4746 vloc = oloc;
4748 /* The loc of a MO_VAL_USE may have two forms:
4750 (concat val src): val is at src, a value-based
4751 representation.
4753 (concat (concat val use) src): same as above, with use as
4754 the MO_USE tracked value, if it differs from src.
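/* A made-up instance of the second form: for a use of
   (mem:SI (reg:SI 1)) whose address and contents are both known to
   cselib, mo->u.loc could end up as
     (concat (concat (value:SI 7) (mem:SI (reg:SI 1)))
             (mem:SI (value:SI 3)))
   i.e. value 7 is the contents, the original MEM is the MO_USE tracked
   expression, and the src operand has its address replaced by
   value 3. */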
4758 nloc = replace_expr_with_values (loc);
4759 if (!nloc)
4760 nloc = oloc;
4762 if (vloc != nloc)
4763 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4764 else
4765 oloc = val->val_rtx;
4767 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4769 if (type2 == MO_USE)
4770 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4771 if (!cselib_preserved_value_p (val))
4773 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4774 cselib_preserve_value (val);
4777 else
4778 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4780 if (dump_file && (dump_flags & TDF_DETAILS))
4781 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4784 return 0;
4787 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4789 static void
4790 add_uses_1 (rtx *x, void *cui)
4792 for_each_rtx (x, add_uses, cui);
4795 /* Add the store to LOC (a register or memory reference) to
4796 VTI (bb)->mos if it will be tracked. EXPR is the RTL expression
4797 containing the store. CUIP->insn is the instruction which LOC is part of. */
4799 static void
4800 add_stores (rtx loc, const_rtx expr, void *cuip)
4802 enum machine_mode mode = VOIDmode, mode2;
4803 struct count_use_info *cui = (struct count_use_info *)cuip;
4804 basic_block bb = cui->bb;
4805 micro_operation *mo;
4806 rtx oloc = loc, nloc, src = NULL;
4807 enum micro_operation_type type = use_type (loc, cui, &mode);
4808 bool track_p = false;
4809 cselib_val *v;
4810 bool resolve, preserve;
4812 if (type == MO_CLOBBER)
4813 return;
4815 mode2 = mode;
4817 if (REG_P (loc))
4819 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4821 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4822 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4823 || GET_CODE (expr) == CLOBBER)
4825 mo->type = MO_CLOBBER;
4826 mo->u.loc = loc;
4828 else
4830 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4831 src = var_lowpart (mode2, SET_SRC (expr));
4832 loc = var_lowpart (mode2, loc);
4834 if (src == NULL)
4836 mo->type = MO_SET;
4837 mo->u.loc = loc;
4839 else
4841 rtx xexpr = CONST_CAST_RTX (expr);
4843 if (SET_SRC (expr) != src)
4844 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4845 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4846 mo->type = MO_COPY;
4847 else
4848 mo->type = MO_SET;
4849 mo->u.loc = xexpr;
4852 mo->insn = cui->insn;
4854 else if (MEM_P (loc)
4855 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4856 || cui->sets))
4858 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4860 if (MEM_P (loc) && type == MO_VAL_SET
4861 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4863 rtx mloc = loc;
4864 enum machine_mode address_mode
4865 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4866 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4868 if (val && !cselib_preserved_value_p (val))
4870 cselib_preserve_value (val);
4871 mo->type = MO_VAL_USE;
4872 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4873 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4874 mo->insn = cui->insn;
4875 if (dump_file && (dump_flags & TDF_DETAILS))
4876 log_op_type (mo->u.loc, cui->bb, cui->insn,
4877 mo->type, dump_file);
4878 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4882 if (GET_CODE (expr) == CLOBBER || !track_p)
4884 mo->type = MO_CLOBBER;
4885 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4887 else
4889 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4890 src = var_lowpart (mode2, SET_SRC (expr));
4891 loc = var_lowpart (mode2, loc);
4893 if (src == NULL)
4895 mo->type = MO_SET;
4896 mo->u.loc = loc;
4898 else
4900 rtx xexpr = CONST_CAST_RTX (expr);
4902 if (SET_SRC (expr) != src)
4903 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4904 if (same_variable_part_p (SET_SRC (xexpr),
4905 MEM_EXPR (loc),
4906 INT_MEM_OFFSET (loc)))
4907 mo->type = MO_COPY;
4908 else
4909 mo->type = MO_SET;
4910 mo->u.loc = xexpr;
4913 mo->insn = cui->insn;
4915 else
4916 return;
4918 if (type != MO_VAL_SET)
4919 goto log_and_return;
4921 v = find_use_val (oloc, mode, cui);
4923 resolve = preserve = !cselib_preserved_value_p (v);
4925 nloc = replace_expr_with_values (oloc);
4926 if (nloc)
4927 oloc = nloc;
4929 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4931 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4933 gcc_assert (oval != v);
4934 gcc_assert (REG_P (oloc) || MEM_P (oloc));
4936 if (!cselib_preserved_value_p (oval))
4938 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
4940 cselib_preserve_value (oval);
4942 nmo->type = MO_VAL_USE;
4943 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
4944 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
4945 nmo->insn = mo->insn;
4947 if (dump_file && (dump_flags & TDF_DETAILS))
4948 log_op_type (nmo->u.loc, cui->bb, cui->insn,
4949 nmo->type, dump_file);
4952 resolve = false;
4954 else if (resolve && GET_CODE (mo->u.loc) == SET)
4956 nloc = replace_expr_with_values (SET_SRC (expr));
4958 /* Avoid the mode mismatch between oexpr and expr. */
4959 if (!nloc && mode != mode2)
4961 nloc = SET_SRC (expr);
4962 gcc_assert (oloc == SET_DEST (expr));
4965 if (nloc)
4966 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
4967 else
4969 if (oloc == SET_DEST (mo->u.loc))
4970 /* No point in duplicating. */
4971 oloc = mo->u.loc;
4972 if (!REG_P (SET_SRC (mo->u.loc)))
4973 resolve = false;
4976 else if (!resolve)
4978 if (GET_CODE (mo->u.loc) == SET
4979 && oloc == SET_DEST (mo->u.loc))
4980 /* No point in duplicating. */
4981 oloc = mo->u.loc;
4983 else
4984 resolve = false;
4986 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
4988 if (mo->u.loc != oloc)
4989 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
4991 /* The loc of a MO_VAL_SET may have various forms:
4993 (concat val dst): dst now holds val
4995 (concat val (set dst src)): dst now holds val, copied from src
4997 (concat (concat val dstv) dst): dst now holds val; dstv is dst
4998 after replacing mems and non-top-level regs with values.
5000 (concat (concat val dstv) (set dst src)): dst now holds val,
5001 copied from src. dstv is a value-based representation of dst, if
5002 it differs from dst. If resolution is needed, src is a REG, and
5003 its mode is the same as that of val.
5005 (concat (concat val (set dstv srcv)) (set dst src)): src
5006 copied to dst, holding val. dstv and srcv are value-based
5007 representations of dst and src, respectively.
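/* A concrete but invented instance of the second form: for a plain
   register copy (set (reg:SI 0 [ x ]) (reg:SI 1 [ y ])) the loc built
   below is typically just
     (concat (value:SI 9) (set (reg:SI 0 [ x ]) (reg:SI 1 [ y ])))
   because replace_expr_with_values returns NULL for bare registers,
   so no separate value-based dst/src representations are needed. */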
5011 mo->u.loc = loc;
5013 if (track_p)
5014 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5015 if (preserve)
5017 VAL_NEEDS_RESOLUTION (loc) = resolve;
5018 cselib_preserve_value (v);
5020 if (mo->type == MO_CLOBBER)
5021 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5022 if (mo->type == MO_COPY)
5023 VAL_EXPR_IS_COPIED (loc) = 1;
5025 mo->type = MO_VAL_SET;
5027 log_and_return:
5028 if (dump_file && (dump_flags & TDF_DETAILS))
5029 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5032 /* Callback for cselib_record_sets_hook that records the uses and
5033 stores in an insn as micro operations. It is normally called after
5034 cselib_record_sets has analyzed the sets in an insn, but before it
5035 modifies the stored values in its internal tables. When we are not
5036 doing cselib in the first place, it is called directly instead, in
5037 which case SETS and N_SETS will be 0. */
5039 static void
5040 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5042 basic_block bb = BLOCK_FOR_INSN (insn);
5043 int n1, n2;
5044 struct count_use_info cui;
5046 cselib_hook_called = true;
5048 cui.insn = insn;
5049 cui.bb = bb;
5050 cui.sets = sets;
5051 cui.n_sets = n_sets;
5053 n1 = VTI (bb)->n_mos;
5054 cui.store_p = false;
5055 note_uses (&PATTERN (insn), add_uses_1, &cui);
5056 n2 = VTI (bb)->n_mos - 1;
5058 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5059 MO_VAL_LOC last. */
5060 while (n1 < n2)
5062 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5063 n1++;
5064 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5065 n2--;
5066 if (n1 < n2)
5068 micro_operation sw;
5070 sw = VTI (bb)->mos[n1];
5071 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5072 VTI (bb)->mos[n2] = sw;
5076 n2 = VTI (bb)->n_mos - 1;
5078 while (n1 < n2)
5080 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5081 n1++;
5082 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5083 n2--;
5084 if (n1 < n2)
5086 micro_operation sw;
5088 sw = VTI (bb)->mos[n1];
5089 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5090 VTI (bb)->mos[n2] = sw;
5094 if (CALL_P (insn))
5096 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5098 mo->type = MO_CALL;
5099 mo->insn = insn;
5101 if (dump_file && (dump_flags & TDF_DETAILS))
5102 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5105 n1 = VTI (bb)->n_mos;
5106 /* This will record NEXT_INSN (insn), such that we can
5107 insert notes before it without worrying about any
5108 notes that MO_USEs might emit after the insn. */
5109 cui.store_p = true;
5110 note_stores (PATTERN (insn), add_stores, &cui);
5111 n2 = VTI (bb)->n_mos - 1;
5113 /* Order the MO_CLOBBERs to be before MO_SETs. */
5114 while (n1 < n2)
5116 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5117 n1++;
5118 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5119 n2--;
5120 if (n1 < n2)
5122 micro_operation sw;
5124 sw = VTI (bb)->mos[n1];
5125 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5126 VTI (bb)->mos[n2] = sw;
5131 static enum var_init_status
5132 find_src_status (dataflow_set *in, rtx src)
5134 tree decl = NULL_TREE;
5135 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5137 if (! flag_var_tracking_uninit)
5138 status = VAR_INIT_STATUS_INITIALIZED;
5140 if (src && REG_P (src))
5141 decl = var_debug_decl (REG_EXPR (src));
5142 else if (src && MEM_P (src))
5143 decl = var_debug_decl (MEM_EXPR (src));
5145 if (src && decl)
5146 status = get_init_value (in, src, dv_from_decl (decl));
5148 return status;
5151 /* SRC is the source of an assignment. Use SET to try to find what
5152 was ultimately assigned to SRC. Return that value if known,
5153 otherwise return SRC itself. */
5155 static rtx
5156 find_src_set_src (dataflow_set *set, rtx src)
5158 tree decl = NULL_TREE; /* The variable being copied around. */
5159 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5160 variable var;
5161 location_chain nextp;
5162 int i;
5163 bool found;
5165 if (src && REG_P (src))
5166 decl = var_debug_decl (REG_EXPR (src));
5167 else if (src && MEM_P (src))
5168 decl = var_debug_decl (MEM_EXPR (src));
5170 if (src && decl)
5172 decl_or_value dv = dv_from_decl (decl);
5174 var = shared_hash_find (set->vars, dv);
5175 if (var)
5177 found = false;
5178 for (i = 0; i < var->n_var_parts && !found; i++)
5179 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5180 nextp = nextp->next)
5181 if (rtx_equal_p (nextp->loc, src))
5183 set_src = nextp->set_src;
5184 found = true;
5190 return set_src;
5193 /* Compute the changes of variable locations in the basic block BB. */
5195 static bool
5196 compute_bb_dataflow (basic_block bb)
5198 int i, n;
5199 bool changed;
5200 dataflow_set old_out;
5201 dataflow_set *in = &VTI (bb)->in;
5202 dataflow_set *out = &VTI (bb)->out;
5204 dataflow_set_init (&old_out);
5205 dataflow_set_copy (&old_out, out);
5206 dataflow_set_copy (out, in);
5208 n = VTI (bb)->n_mos;
5209 for (i = 0; i < n; i++)
5211 rtx insn = VTI (bb)->mos[i].insn;
5213 switch (VTI (bb)->mos[i].type)
5215 case MO_CALL:
5216 dataflow_set_clear_at_call (out);
5217 break;
5219 case MO_USE:
5221 rtx loc = VTI (bb)->mos[i].u.loc;
5223 if (REG_P (loc))
5224 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5225 else if (MEM_P (loc))
5226 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5228 break;
5230 case MO_VAL_LOC:
5232 rtx loc = VTI (bb)->mos[i].u.loc;
5233 rtx val, vloc;
5234 tree var;
5236 if (GET_CODE (loc) == CONCAT)
5238 val = XEXP (loc, 0);
5239 vloc = XEXP (loc, 1);
5241 else
5243 val = NULL_RTX;
5244 vloc = loc;
5247 var = PAT_VAR_LOCATION_DECL (vloc);
5249 clobber_variable_part (out, NULL_RTX,
5250 dv_from_decl (var), 0, NULL_RTX);
5251 if (val)
5253 if (VAL_NEEDS_RESOLUTION (loc))
5254 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5255 set_variable_part (out, val, dv_from_decl (var), 0,
5256 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5257 INSERT);
5260 break;
5262 case MO_VAL_USE:
5264 rtx loc = VTI (bb)->mos[i].u.loc;
5265 rtx val, vloc, uloc;
5267 vloc = uloc = XEXP (loc, 1);
5268 val = XEXP (loc, 0);
5270 if (GET_CODE (val) == CONCAT)
5272 uloc = XEXP (val, 1);
5273 val = XEXP (val, 0);
5276 if (VAL_NEEDS_RESOLUTION (loc))
5277 val_resolve (out, val, vloc, insn);
5279 if (VAL_HOLDS_TRACK_EXPR (loc))
5281 if (GET_CODE (uloc) == REG)
5282 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5283 NULL);
5284 else if (GET_CODE (uloc) == MEM)
5285 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5286 NULL);
5289 break;
5291 case MO_VAL_SET:
5293 rtx loc = VTI (bb)->mos[i].u.loc;
5294 rtx val, vloc, uloc;
5296 vloc = uloc = XEXP (loc, 1);
5297 val = XEXP (loc, 0);
5299 if (GET_CODE (val) == CONCAT)
5301 vloc = XEXP (val, 1);
5302 val = XEXP (val, 0);
5305 if (GET_CODE (vloc) == SET)
5307 rtx vsrc = SET_SRC (vloc);
5309 gcc_assert (val != vsrc);
5310 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5312 vloc = SET_DEST (vloc);
5314 if (VAL_NEEDS_RESOLUTION (loc))
5315 val_resolve (out, val, vsrc, insn);
5317 else if (VAL_NEEDS_RESOLUTION (loc))
5319 gcc_assert (GET_CODE (uloc) == SET
5320 && GET_CODE (SET_SRC (uloc)) == REG);
5321 val_resolve (out, val, SET_SRC (uloc), insn);
5324 if (VAL_HOLDS_TRACK_EXPR (loc))
5326 if (VAL_EXPR_IS_CLOBBERED (loc))
5328 if (REG_P (uloc))
5329 var_reg_delete (out, uloc, true);
5330 else if (MEM_P (uloc))
5331 var_mem_delete (out, uloc, true);
5333 else
5335 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5336 rtx set_src = NULL;
5337 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5339 if (GET_CODE (uloc) == SET)
5341 set_src = SET_SRC (uloc);
5342 uloc = SET_DEST (uloc);
5345 if (copied_p)
5347 if (flag_var_tracking_uninit)
5349 status = find_src_status (in, set_src);
5351 if (status == VAR_INIT_STATUS_UNKNOWN)
5352 status = find_src_status (out, set_src);
5355 set_src = find_src_set_src (in, set_src);
5358 if (REG_P (uloc))
5359 var_reg_delete_and_set (out, uloc, !copied_p,
5360 status, set_src);
5361 else if (MEM_P (uloc))
5362 var_mem_delete_and_set (out, uloc, !copied_p,
5363 status, set_src);
5366 else if (REG_P (uloc))
5367 var_regno_delete (out, REGNO (uloc));
5369 val_store (out, val, vloc, insn);
5371 break;
5373 case MO_SET:
5375 rtx loc = VTI (bb)->mos[i].u.loc;
5376 rtx set_src = NULL;
5378 if (GET_CODE (loc) == SET)
5380 set_src = SET_SRC (loc);
5381 loc = SET_DEST (loc);
5384 if (REG_P (loc))
5385 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5386 set_src);
5387 else if (MEM_P (loc))
5388 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5389 set_src);
5391 break;
5393 case MO_COPY:
5395 rtx loc = VTI (bb)->mos[i].u.loc;
5396 enum var_init_status src_status;
5397 rtx set_src = NULL;
5399 if (GET_CODE (loc) == SET)
5401 set_src = SET_SRC (loc);
5402 loc = SET_DEST (loc);
5405 if (! flag_var_tracking_uninit)
5406 src_status = VAR_INIT_STATUS_INITIALIZED;
5407 else
5409 src_status = find_src_status (in, set_src);
5411 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5412 src_status = find_src_status (out, set_src);
5415 set_src = find_src_set_src (in, set_src);
5417 if (REG_P (loc))
5418 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5419 else if (MEM_P (loc))
5420 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5422 break;
5424 case MO_USE_NO_VAR:
5426 rtx loc = VTI (bb)->mos[i].u.loc;
5428 if (REG_P (loc))
5429 var_reg_delete (out, loc, false);
5430 else if (MEM_P (loc))
5431 var_mem_delete (out, loc, false);
5433 break;
5435 case MO_CLOBBER:
5437 rtx loc = VTI (bb)->mos[i].u.loc;
5439 if (REG_P (loc))
5440 var_reg_delete (out, loc, true);
5441 else if (MEM_P (loc))
5442 var_mem_delete (out, loc, true);
5444 break;
5446 case MO_ADJUST:
5447 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5448 break;
5452 if (MAY_HAVE_DEBUG_INSNS)
5454 dataflow_set_equiv_regs (out);
5455 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5456 out);
5457 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5458 out);
5459 #if ENABLE_CHECKING
5460 htab_traverse (shared_hash_htab (out->vars),
5461 canonicalize_loc_order_check, out);
5462 #endif
5464 changed = dataflow_set_different (&old_out, out);
5465 dataflow_set_destroy (&old_out);
5466 return changed;
5469 /* Find the locations of variables in the whole function. */
5471 static void
5472 vt_find_locations (void)
5474 fibheap_t worklist, pending, fibheap_swap;
5475 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5476 basic_block bb;
5477 edge e;
5478 int *bb_order;
5479 int *rc_order;
5480 int i;
5481 int htabsz = 0;
5483 /* Compute reverse completion order of depth first search of the CFG
5484 so that the data-flow runs faster. */
5485 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5486 bb_order = XNEWVEC (int, last_basic_block);
5487 pre_and_rev_post_order_compute (NULL, rc_order, false);
5488 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5489 bb_order[rc_order[i]] = i;
5490 free (rc_order);
5492 worklist = fibheap_new ();
5493 pending = fibheap_new ();
5494 visited = sbitmap_alloc (last_basic_block);
5495 in_worklist = sbitmap_alloc (last_basic_block);
5496 in_pending = sbitmap_alloc (last_basic_block);
5497 sbitmap_zero (in_worklist);
5499 FOR_EACH_BB (bb)
5500 fibheap_insert (pending, bb_order[bb->index], bb);
5501 sbitmap_ones (in_pending);
5503 while (!fibheap_empty (pending))
5505 fibheap_swap = pending;
5506 pending = worklist;
5507 worklist = fibheap_swap;
5508 sbitmap_swap = in_pending;
5509 in_pending = in_worklist;
5510 in_worklist = sbitmap_swap;
5512 sbitmap_zero (visited);
5514 while (!fibheap_empty (worklist))
5516 bb = (basic_block) fibheap_extract_min (worklist);
5517 RESET_BIT (in_worklist, bb->index);
5518 if (!TEST_BIT (visited, bb->index))
5520 bool changed;
5521 edge_iterator ei;
5522 int oldinsz, oldoutsz;
5524 SET_BIT (visited, bb->index);
5526 if (dump_file && VTI (bb)->in.vars)
5528 htabsz
5529 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5530 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5531 oldinsz
5532 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5533 oldoutsz
5534 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5536 else
5537 oldinsz = oldoutsz = 0;
5539 if (MAY_HAVE_DEBUG_INSNS)
5541 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5542 bool first = true, adjust = false;
5544 /* Calculate the IN set as the intersection of
5545 predecessor OUT sets. */
5547 dataflow_set_clear (in);
5548 dst_can_be_shared = true;
5550 FOR_EACH_EDGE (e, ei, bb->preds)
5551 if (!VTI (e->src)->flooded)
5552 gcc_assert (bb_order[bb->index]
5553 <= bb_order[e->src->index]);
5554 else if (first)
5556 dataflow_set_copy (in, &VTI (e->src)->out);
5557 first_out = &VTI (e->src)->out;
5558 first = false;
5560 else
5562 dataflow_set_merge (in, &VTI (e->src)->out);
5563 adjust = true;
5566 if (adjust)
5568 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5569 #if ENABLE_CHECKING
5570 /* Merge and merge_adjust should keep entries in
5571 canonical order. */
5572 htab_traverse (shared_hash_htab (in->vars),
5573 canonicalize_loc_order_check,
5574 in);
5575 #endif
5576 if (dst_can_be_shared)
5578 shared_hash_destroy (in->vars);
5579 in->vars = shared_hash_copy (first_out->vars);
5583 VTI (bb)->flooded = true;
5585 else
5587 /* Calculate the IN set as union of predecessor OUT sets. */
5588 dataflow_set_clear (&VTI (bb)->in);
5589 FOR_EACH_EDGE (e, ei, bb->preds)
5590 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5593 changed = compute_bb_dataflow (bb);
5594 if (dump_file)
5595 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5596 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5598 if (changed)
5600 FOR_EACH_EDGE (e, ei, bb->succs)
5602 if (e->dest == EXIT_BLOCK_PTR)
5603 continue;
5605 if (TEST_BIT (visited, e->dest->index))
5607 if (!TEST_BIT (in_pending, e->dest->index))
5609 /* Send E->DEST to next round. */
5610 SET_BIT (in_pending, e->dest->index);
5611 fibheap_insert (pending,
5612 bb_order[e->dest->index],
5613 e->dest);
5616 else if (!TEST_BIT (in_worklist, e->dest->index))
5618 /* Add E->DEST to current round. */
5619 SET_BIT (in_worklist, e->dest->index);
5620 fibheap_insert (worklist, bb_order[e->dest->index],
5621 e->dest);
5626 if (dump_file)
5627 fprintf (dump_file,
5628 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5629 bb->index,
5630 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5631 oldinsz,
5632 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5633 oldoutsz,
5634 (int)worklist->nodes, (int)pending->nodes, htabsz);
5636 if (dump_file && (dump_flags & TDF_DETAILS))
5638 fprintf (dump_file, "BB %i IN:\n", bb->index);
5639 dump_dataflow_set (&VTI (bb)->in);
5640 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5641 dump_dataflow_set (&VTI (bb)->out);
5647 if (MAY_HAVE_DEBUG_INSNS)
5648 FOR_EACH_BB (bb)
5649 gcc_assert (VTI (bb)->flooded);
5651 free (bb_order);
5652 fibheap_delete (worklist);
5653 fibheap_delete (pending);
5654 sbitmap_free (visited);
5655 sbitmap_free (in_worklist);
5656 sbitmap_free (in_pending);
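/* Worked example of the two-heap iteration above (the CFG is
   invented): with blocks visited in bb_order 0, 1, 2 and a back edge
   2 -> 1, the first pass drains WORKLIST in order 0, 1, 2; if
   compute_bb_dataflow reports that the OUT set of 2 changed, block 1
   has already been visited in this round and is therefore queued on
   PENDING, and the next outer iteration swaps the heaps, clears
   VISITED and reprocesses it. */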
5659 /* Print the content of the LIST to dump file. */
5661 static void
5662 dump_attrs_list (attrs list)
5664 for (; list; list = list->next)
5666 if (dv_is_decl_p (list->dv))
5667 print_mem_expr (dump_file, dv_as_decl (list->dv));
5668 else
5669 print_rtl_single (dump_file, dv_as_value (list->dv));
5670 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5672 fprintf (dump_file, "\n");
5675 /* Print the information about variable *SLOT to dump file. */
5677 static int
5678 dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5680 variable var = (variable) *slot;
5682 dump_variable (var);
5684 /* Continue traversing the hash table. */
5685 return 1;
5688 /* Print the information about variable VAR to dump file. */
5690 static void
5691 dump_variable (variable var)
5693 int i;
5694 location_chain node;
5696 if (dv_is_decl_p (var->dv))
5698 const_tree decl = dv_as_decl (var->dv);
5700 if (DECL_NAME (decl))
5701 fprintf (dump_file, " name: %s",
5702 IDENTIFIER_POINTER (DECL_NAME (decl)));
5703 else
5704 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5705 if (dump_flags & TDF_UID)
5706 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5707 else
5708 fprintf (dump_file, "\n");
5710 else
5712 fputc (' ', dump_file);
5713 print_rtl_single (dump_file, dv_as_value (var->dv));
5716 for (i = 0; i < var->n_var_parts; i++)
5718 fprintf (dump_file, " offset %ld\n",
5719 (long) var->var_part[i].offset);
5720 for (node = var->var_part[i].loc_chain; node; node = node->next)
5722 fprintf (dump_file, " ");
5723 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5724 fprintf (dump_file, "[uninit]");
5725 print_rtl_single (dump_file, node->loc);
5730 /* Print the information about variables from hash table VARS to dump file. */
5732 static void
5733 dump_vars (htab_t vars)
5735 if (htab_elements (vars) > 0)
5737 fprintf (dump_file, "Variables:\n");
5738 htab_traverse (vars, dump_variable_slot, NULL);
5742 /* Print the dataflow set SET to dump file. */
5744 static void
5745 dump_dataflow_set (dataflow_set *set)
5747 int i;
5749 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
5750 set->stack_adjust);
5751 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5753 if (set->regs[i])
5755 fprintf (dump_file, "Reg %d:", i);
5756 dump_attrs_list (set->regs[i]);
5759 dump_vars (shared_hash_htab (set->vars));
5760 fprintf (dump_file, "\n");
5763 /* Print the IN and OUT sets for each basic block to dump file. */
5765 static void
5766 dump_dataflow_sets (void)
5768 basic_block bb;
5770 FOR_EACH_BB (bb)
5772 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5773 fprintf (dump_file, "IN:\n");
5774 dump_dataflow_set (&VTI (bb)->in);
5775 fprintf (dump_file, "OUT:\n");
5776 dump_dataflow_set (&VTI (bb)->out);
5780 /* Add variable VAR to the hash table of changed variables and,
5781 if it has no locations, delete it from SET's hash table. */
5783 static void
5784 variable_was_changed (variable var, dataflow_set *set)
5786 hashval_t hash = dv_htab_hash (var->dv);
5788 if (emit_notes)
5790 void **slot;
5792 /* Remember this decl or VALUE has been added to changed_variables. */
5793 set_dv_changed (var->dv, true);
5795 slot = htab_find_slot_with_hash (changed_variables,
5796 var->dv,
5797 hash, INSERT);
5799 if (set && var->n_var_parts == 0)
5801 variable empty_var;
5803 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5804 empty_var->dv = var->dv;
5805 empty_var->refcount = 1;
5806 empty_var->n_var_parts = 0;
5807 *slot = empty_var;
5808 goto drop_var;
5810 else
5812 var->refcount++;
5813 *slot = var;
5816 else
5818 gcc_assert (set);
5819 if (var->n_var_parts == 0)
5821 void **slot;
5823 drop_var:
5824 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5825 if (slot)
5827 if (shared_hash_shared (set->vars))
5828 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5829 NO_INSERT);
5830 htab_clear_slot (shared_hash_htab (set->vars), slot);
5836 /* Look for the index in VAR->var_part corresponding to OFFSET.
5837 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5838 referenced int will be set to the index that the part has or should
5839 have, if it should be inserted. */
5841 static inline int
5842 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5843 int *insertion_point)
5845 int pos, low, high;
5847 /* Find the location part. */
5848 low = 0;
5849 high = var->n_var_parts;
5850 while (low != high)
5852 pos = (low + high) / 2;
5853 if (var->var_part[pos].offset < offset)
5854 low = pos + 1;
5855 else
5856 high = pos;
5858 pos = low;
5860 if (insertion_point)
5861 *insertion_point = pos;
5863 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
5864 return pos;
5866 return -1;
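/* Worked example (offsets invented): with var->n_var_parts == 3 and
   var_part offsets {0, 4, 8}, looking up offset 4 returns index 1,
   while looking up offset 6 returns -1 and sets *INSERTION_POINT to 2,
   the index at which a part with offset 6 would have to be inserted to
   keep the array sorted. */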
5869 static void **
5870 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5871 decl_or_value dv, HOST_WIDE_INT offset,
5872 enum var_init_status initialized, rtx set_src)
5874 int pos;
5875 location_chain node, next;
5876 location_chain *nextp;
5877 variable var;
5878 bool onepart = dv_onepart_p (dv);
5880 gcc_assert (offset == 0 || !onepart);
5881 gcc_assert (loc != dv_as_opaque (dv));
5883 var = (variable) *slot;
5885 if (! flag_var_tracking_uninit)
5886 initialized = VAR_INIT_STATUS_INITIALIZED;
5888 if (!var)
5890 /* Create new variable information. */
5891 var = (variable) pool_alloc (dv_pool (dv));
5892 var->dv = dv;
5893 var->refcount = 1;
5894 var->n_var_parts = 1;
5895 var->var_part[0].offset = offset;
5896 var->var_part[0].loc_chain = NULL;
5897 var->var_part[0].cur_loc = NULL;
5898 *slot = var;
5899 pos = 0;
5900 nextp = &var->var_part[0].loc_chain;
5901 if (emit_notes && dv_is_value_p (dv))
5902 add_cselib_value_chains (dv);
5904 else if (onepart)
5906 int r = -1, c = 0;
5908 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5910 pos = 0;
5912 if (GET_CODE (loc) == VALUE)
5914 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5915 nextp = &node->next)
5916 if (GET_CODE (node->loc) == VALUE)
5918 if (node->loc == loc)
5920 r = 0;
5921 break;
5923 if (canon_value_cmp (node->loc, loc))
5924 c++;
5925 else
5927 r = 1;
5928 break;
5931 else if (REG_P (node->loc) || MEM_P (node->loc))
5932 c++;
5933 else
5935 r = 1;
5936 break;
5939 else if (REG_P (loc))
5941 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5942 nextp = &node->next)
5943 if (REG_P (node->loc))
5945 if (REGNO (node->loc) < REGNO (loc))
5946 c++;
5947 else
5949 if (REGNO (node->loc) == REGNO (loc))
5950 r = 0;
5951 else
5952 r = 1;
5953 break;
5956 else
5958 r = 1;
5959 break;
5962 else if (MEM_P (loc))
5964 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5965 nextp = &node->next)
5966 if (REG_P (node->loc))
5967 c++;
5968 else if (MEM_P (node->loc))
5970 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
5971 break;
5972 else
5973 c++;
5975 else
5977 r = 1;
5978 break;
5981 else
5982 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5983 nextp = &node->next)
5984 if ((r = loc_cmp (node->loc, loc)) >= 0)
5985 break;
5986 else
5987 c++;
5989 if (r == 0)
5990 return slot;
5992 if (var->refcount > 1 || shared_hash_shared (set->vars))
5994 slot = unshare_variable (set, slot, var, initialized);
5995 var = (variable)*slot;
5996 for (nextp = &var->var_part[0].loc_chain; c;
5997 nextp = &(*nextp)->next)
5998 c--;
5999 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6002 else
6004 int inspos = 0;
6006 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6008 pos = find_variable_location_part (var, offset, &inspos);
6010 if (pos >= 0)
6012 node = var->var_part[pos].loc_chain;
6014 if (node
6015 && ((REG_P (node->loc) && REG_P (loc)
6016 && REGNO (node->loc) == REGNO (loc))
6017 || rtx_equal_p (node->loc, loc)))
6019 /* LOC is at the beginning of the chain, so we have nothing
6020 to do. */
6021 if (node->init < initialized)
6022 node->init = initialized;
6023 if (set_src != NULL)
6024 node->set_src = set_src;
6026 return slot;
6028 else
6030 /* We have to make a copy of a shared variable. */
6031 if (var->refcount > 1 || shared_hash_shared (set->vars))
6033 slot = unshare_variable (set, slot, var, initialized);
6034 var = (variable)*slot;
6038 else
6040 /* We have not found the location part, so a new one will be created. */
6042 /* We have to make a copy of the shared variable. */
6043 if (var->refcount > 1 || shared_hash_shared (set->vars))
6045 slot = unshare_variable (set, slot, var, initialized);
6046 var = (variable)*slot;
6049 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6050 thus there are at most MAX_VAR_PARTS different offsets. */
6051 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6052 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6054 /* We have to move the elements of array starting at index
6055 inspos to the next position. */
6056 for (pos = var->n_var_parts; pos > inspos; pos--)
6057 var->var_part[pos] = var->var_part[pos - 1];
6059 var->n_var_parts++;
6060 var->var_part[pos].offset = offset;
6061 var->var_part[pos].loc_chain = NULL;
6062 var->var_part[pos].cur_loc = NULL;
6065 /* Delete the location from the list. */
6066 nextp = &var->var_part[pos].loc_chain;
6067 for (node = var->var_part[pos].loc_chain; node; node = next)
6069 next = node->next;
6070 if ((REG_P (node->loc) && REG_P (loc)
6071 && REGNO (node->loc) == REGNO (loc))
6072 || rtx_equal_p (node->loc, loc))
6074 /* Save these values, to assign to the new node, before
6075 deleting this one. */
6076 if (node->init > initialized)
6077 initialized = node->init;
6078 if (node->set_src != NULL && set_src == NULL)
6079 set_src = node->set_src;
6080 pool_free (loc_chain_pool, node);
6081 *nextp = next;
6082 break;
6084 else
6085 nextp = &node->next;
6088 nextp = &var->var_part[pos].loc_chain;
6091 /* Add the location to the beginning. */
6092 node = (location_chain) pool_alloc (loc_chain_pool);
6093 node->loc = loc;
6094 node->init = initialized;
6095 node->set_src = set_src;
6096 node->next = *nextp;
6097 *nextp = node;
6099 if (onepart && emit_notes)
6100 add_value_chains (var->dv, loc);
6102 /* If no location was emitted yet, do so now. */
6103 if (var->var_part[pos].cur_loc == NULL)
6105 var->var_part[pos].cur_loc = loc;
6106 variable_was_changed (var, set);
6109 return slot;
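/* Rough sketch of the ordering the insertion loops above maintain for
   the loc chain of a one-part variable (details are in loc_cmp and
   canon_value_cmp): registers first, ordered by register number, then
   MEMs ordered by address, then VALUEs and other rtl; this is the
   canonical order that canonicalize_loc_order_check verifies in
   compute_bb_dataflow. */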
6112 /* Set the part of variable's location in the dataflow set SET. The
6113 variable part is specified by variable's declaration in DV and
6114 offset OFFSET and the part's location by LOC. IOPT should be
6115 NO_INSERT if the variable is known to be in SET already and the
6116 variable hash table must not be resized, and INSERT otherwise. */
6118 static void
6119 set_variable_part (dataflow_set *set, rtx loc,
6120 decl_or_value dv, HOST_WIDE_INT offset,
6121 enum var_init_status initialized, rtx set_src,
6122 enum insert_option iopt)
6124 void **slot;
6126 if (iopt == NO_INSERT)
6127 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6128 else
6130 slot = shared_hash_find_slot (set->vars, dv);
6131 if (!slot)
6132 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6134 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6137 /* Remove all recorded register locations for the given variable part
6138 from dataflow set SET, except for those that are identical to loc.
6139 The variable part is specified by variable's declaration or value
6140 DV and offset OFFSET. */
6142 static void **
6143 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6144 HOST_WIDE_INT offset, rtx set_src)
6146 variable var = (variable) *slot;
6147 int pos = find_variable_location_part (var, offset, NULL);
6149 if (pos >= 0)
6151 location_chain node, next;
6153 /* Remove the register locations from the dataflow set. */
6154 next = var->var_part[pos].loc_chain;
6155 for (node = next; node; node = next)
6157 next = node->next;
6158 if (node->loc != loc
6159 && (!flag_var_tracking_uninit
6160 || !set_src
6161 || MEM_P (set_src)
6162 || !rtx_equal_p (set_src, node->set_src)))
6164 if (REG_P (node->loc))
6166 attrs anode, anext;
6167 attrs *anextp;
6169 /* Remove the variable part from the register's
6170 list, but preserve any other variable parts
6171 that might be regarded as live in that same
6172 register. */
6173 anextp = &set->regs[REGNO (node->loc)];
6174 for (anode = *anextp; anode; anode = anext)
6176 anext = anode->next;
6177 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6178 && anode->offset == offset)
6180 pool_free (attrs_pool, anode);
6181 *anextp = anext;
6183 else
6184 anextp = &anode->next;
6188 slot = delete_slot_part (set, node->loc, slot, offset);
6193 return slot;
6196 /* Remove all recorded register locations for the given variable part
6197 from dataflow set SET, except for those that are identical to loc.
6198 The variable part is specified by variable's declaration or value
6199 DV and offset OFFSET. */
6201 static void
6202 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6203 HOST_WIDE_INT offset, rtx set_src)
6205 void **slot;
6207 if (!dv_as_opaque (dv)
6208 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6209 return;
6211 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6212 if (!slot)
6213 return;
6215 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6218 /* Delete the part of variable's location from dataflow set SET. The
6219 variable part is specified by its SET->vars slot SLOT and offset
6220 OFFSET and the part's location by LOC. */
6222 static void **
6223 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6224 HOST_WIDE_INT offset)
6226 variable var = (variable) *slot;
6227 int pos = find_variable_location_part (var, offset, NULL);
6229 if (pos >= 0)
6231 location_chain node, next;
6232 location_chain *nextp;
6233 bool changed;
6235 if (var->refcount > 1 || shared_hash_shared (set->vars))
6237 /* If the variable contains the location part we have to
6238 make a copy of the variable. */
6239 for (node = var->var_part[pos].loc_chain; node;
6240 node = node->next)
6242 if ((REG_P (node->loc) && REG_P (loc)
6243 && REGNO (node->loc) == REGNO (loc))
6244 || rtx_equal_p (node->loc, loc))
6246 slot = unshare_variable (set, slot, var,
6247 VAR_INIT_STATUS_UNKNOWN);
6248 var = (variable)*slot;
6249 break;
6254 /* Delete the location part. */
6255 nextp = &var->var_part[pos].loc_chain;
6256 for (node = *nextp; node; node = next)
6258 next = node->next;
6259 if ((REG_P (node->loc) && REG_P (loc)
6260 && REGNO (node->loc) == REGNO (loc))
6261 || rtx_equal_p (node->loc, loc))
6263 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6264 remove_value_chains (var->dv, node->loc);
6265 pool_free (loc_chain_pool, node);
6266 *nextp = next;
6267 break;
6269 else
6270 nextp = &node->next;
6273 /* If we have deleted the location which was last emitted
6274 we have to emit a new location, so add the variable to the set
6275 of changed variables. */
6276 if (var->var_part[pos].cur_loc
6277 && ((REG_P (loc)
6278 && REG_P (var->var_part[pos].cur_loc)
6279 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6280 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6282 changed = true;
6283 if (var->var_part[pos].loc_chain)
6284 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
6286 else
6287 changed = false;
6289 if (var->var_part[pos].loc_chain == NULL)
6291 gcc_assert (changed);
6292 var->n_var_parts--;
6293 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6294 remove_cselib_value_chains (var->dv);
6295 while (pos < var->n_var_parts)
6297 var->var_part[pos] = var->var_part[pos + 1];
6298 pos++;
6301 if (changed)
6302 variable_was_changed (var, set);
6305 return slot;
6308 /* Delete the part of variable's location from dataflow set SET. The
6309 variable part is specified by variable's declaration or value DV
6310 and offset OFFSET and the part's location by LOC. */
6312 static void
6313 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6314 HOST_WIDE_INT offset)
6316 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6317 if (!slot)
6318 return;
6320 slot = delete_slot_part (set, loc, slot, offset);
6323 /* Callback for cselib_expand_value, that looks for expressions
6324 holding the value in the var-tracking hash tables. Return X for
6325 standard processing, anything else is to be used as-is. */
6327 static rtx
6328 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6330 htab_t vars = (htab_t)data;
6331 decl_or_value dv;
6332 variable var;
6333 location_chain loc;
6334 rtx result, subreg, xret;
6336 switch (GET_CODE (x))
6338 case SUBREG:
6339 subreg = SUBREG_REG (x);
6341 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6342 return x;
6344 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6345 max_depth - 1,
6346 vt_expand_loc_callback, data);
6348 if (!subreg)
6349 return NULL;
6351 result = simplify_gen_subreg (GET_MODE (x), subreg,
6352 GET_MODE (SUBREG_REG (x)),
6353 SUBREG_BYTE (x));
6355 /* Invalid SUBREGs are ok in debug info. ??? We could try
6356 alternate expansions for the VALUE as well. */
6357 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6358 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6360 return result;
6362 case DEBUG_EXPR:
6363 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6364 xret = NULL;
6365 break;
6367 case VALUE:
6368 dv = dv_from_value (x);
6369 xret = x;
6370 break;
6372 default:
6373 return x;
6376 if (VALUE_RECURSED_INTO (x))
6377 return NULL;
6379 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6381 if (!var)
6382 return xret;
6384 if (var->n_var_parts == 0)
6385 return xret;
6387 gcc_assert (var->n_var_parts == 1);
6389 VALUE_RECURSED_INTO (x) = true;
6390 result = NULL;
6392 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6394 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6395 vt_expand_loc_callback, vars);
6396 if (result)
6397 break;
6400 VALUE_RECURSED_INTO (x) = false;
6401 if (result)
6402 return result;
6403 else
6404 return xret;
6407 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6408 tables. */
6410 static rtx
6411 vt_expand_loc (rtx loc, htab_t vars)
6413 if (!MAY_HAVE_DEBUG_INSNS)
6414 return loc;
6416 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6417 vt_expand_loc_callback, vars);
6419 if (loc && MEM_P (loc))
6420 loc = targetm.delegitimize_address (loc);
6422 return loc;
6425 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6426 additional parameters: WHERE specifies whether the note shall be emitted
6427 before or after instruction INSN. */
6429 static int
6430 emit_note_insn_var_location (void **varp, void *data)
6432 variable var = (variable) *varp;
6433 rtx insn = ((emit_note_data *)data)->insn;
6434 enum emit_note_where where = ((emit_note_data *)data)->where;
6435 htab_t vars = ((emit_note_data *)data)->vars;
6436 rtx note;
6437 int i, j, n_var_parts;
6438 bool complete;
6439 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6440 HOST_WIDE_INT last_limit;
6441 tree type_size_unit;
6442 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6443 rtx loc[MAX_VAR_PARTS];
6444 tree decl;
6446 if (dv_is_value_p (var->dv))
6447 goto clear;
6449 decl = dv_as_decl (var->dv);
6451 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6452 goto clear;
6454 gcc_assert (decl);
6456 complete = true;
6457 last_limit = 0;
6458 n_var_parts = 0;
6459 for (i = 0; i < var->n_var_parts; i++)
6461 enum machine_mode mode, wider_mode;
6462 rtx loc2;
6464 if (last_limit < var->var_part[i].offset)
6466 complete = false;
6467 break;
6469 else if (last_limit > var->var_part[i].offset)
6470 continue;
6471 offsets[n_var_parts] = var->var_part[i].offset;
6472 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6473 if (!loc2)
6475 complete = false;
6476 continue;
6478 loc[n_var_parts] = loc2;
6479 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6480 initialized = var->var_part[i].loc_chain->init;
6481 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6483 /* Attempt to merge adjacent registers or memory. */
6484 wider_mode = GET_MODE_WIDER_MODE (mode);
6485 for (j = i + 1; j < var->n_var_parts; j++)
6486 if (last_limit <= var->var_part[j].offset)
6487 break;
6488 if (j < var->n_var_parts
6489 && wider_mode != VOIDmode
6490 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6491 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6492 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6493 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6494 && last_limit == var->var_part[j].offset)
6496 rtx new_loc = NULL;
6498 if (REG_P (loc[n_var_parts])
6499 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6500 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6501 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6502 == REGNO (loc2))
6504 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6505 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6506 mode, 0);
6507 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6508 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6509 if (new_loc)
6511 if (!REG_P (new_loc)
6512 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6513 new_loc = NULL;
6514 else
6515 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
6518 else if (MEM_P (loc[n_var_parts])
6519 && GET_CODE (XEXP (loc2, 0)) == PLUS
6520 && REG_P (XEXP (XEXP (loc2, 0), 0))
6521 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6523 if ((REG_P (XEXP (loc[n_var_parts], 0))
6524 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6525 XEXP (XEXP (loc2, 0), 0))
6526 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6527 == GET_MODE_SIZE (mode))
6528 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6529 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6530 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6531 XEXP (XEXP (loc2, 0), 0))
6532 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6533 + GET_MODE_SIZE (mode)
6534 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6535 new_loc = adjust_address_nv (loc[n_var_parts],
6536 wider_mode, 0);
6539 if (new_loc)
6541 loc[n_var_parts] = new_loc;
6542 mode = wider_mode;
6543 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6544 i = j;
6547 ++n_var_parts;
6549 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6550 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6551 complete = false;
6553 if (where != EMIT_NOTE_BEFORE_INSN)
6555 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6556 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6557 NOTE_DURING_CALL_P (note) = true;
6559 else
6560 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6562 if (! flag_var_tracking_uninit)
6563 initialized = VAR_INIT_STATUS_INITIALIZED;
6565 if (!complete)
6567 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6568 NULL_RTX, (int) initialized);
6570 else if (n_var_parts == 1)
6572 rtx expr_list
6573 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6575 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6576 expr_list,
6577 (int) initialized);
6579 else if (n_var_parts)
6581 rtx parallel;
6583 for (i = 0; i < n_var_parts; i++)
6584 loc[i]
6585 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6587 parallel = gen_rtx_PARALLEL (VOIDmode,
6588 gen_rtvec_v (n_var_parts, loc));
6589 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6590 parallel,
6591 (int) initialized);
6594 clear:
6595 set_dv_changed (var->dv, false);
6596 htab_clear_slot (changed_variables, varp);
6598 /* Continue traversing the hash table. */
6599 return 1;
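/* Illustration (a simplified sketch, not exact RTL dump output): for a
   variable X whose only part sits in a register, the note built above carries
   roughly

     (var_location X (expr_list (reg:SI ax) (const_int 0)) ...)

   i.e. an EXPR_LIST pairing the location with its offset within X.  When
   several parts were collected they are wrapped in a PARALLEL of such
   EXPR_LISTs, and when the location could not be determined completely the
   location operand is NULL_RTX.  */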
6602 DEF_VEC_P (variable);
6603 DEF_VEC_ALLOC_P (variable, heap);
6605 /* Stack of variable_def pointers that need processing with
6606 check_changed_vars_2. */
6608 static VEC (variable, heap) *changed_variables_stack;
6610 /* Populate changed_variables_stack with variable_def pointers
6611 that need variable_was_changed called on them. */
6613 static int
6614 check_changed_vars_1 (void **slot, void *data)
6616 variable var = (variable) *slot;
6617 htab_t htab = (htab_t) data;
6619 if (dv_is_value_p (var->dv))
6621 value_chain vc
6622 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6623 dv_htab_hash (var->dv));
6625 if (vc == NULL)
6626 return 1;
6627 for (vc = vc->next; vc; vc = vc->next)
6628 if (!dv_changed_p (vc->dv))
6630 variable vcvar
6631 = (variable) htab_find_with_hash (htab, vc->dv,
6632 dv_htab_hash (vc->dv));
6633 if (vcvar)
6634 VEC_safe_push (variable, heap, changed_variables_stack,
6635 vcvar);
6638 return 1;
6641 /* Add VAR to changed_variables and also for VALUEs add recursively
6642 all DVs that aren't in changed_variables yet but reference the
6643 VALUE from its loc_chain. */
6645 static void
6646 check_changed_vars_2 (variable var, htab_t htab)
6648 variable_was_changed (var, NULL);
6649 if (dv_is_value_p (var->dv))
6651 value_chain vc
6652 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6653 dv_htab_hash (var->dv));
6655 if (vc == NULL)
6656 return;
6657 for (vc = vc->next; vc; vc = vc->next)
6658 if (!dv_changed_p (vc->dv))
6660 variable vcvar
6661 = (variable) htab_find_with_hash (htab, vc->dv,
6662 dv_htab_hash (vc->dv));
6663 if (vcvar)
6664 check_changed_vars_2 (vcvar, htab);
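/* Example of the recursion above (hypothetical): if the location of VALUE V1
   changes, its value_chains entry lists every decl or VALUE whose location
   chain refers to V1; check_changed_vars_2 marks each of those as changed and
   recurses into the VALUEs among them, so a user variable whose location is
   only expressed indirectly through V1 still gets a fresh note.  */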
6669 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
6670 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6671 shall be emitted before or after instruction INSN. */
6673 static void
6674 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6675 shared_hash vars)
6677 emit_note_data data;
6678 htab_t htab = shared_hash_htab (vars);
6680 if (!htab_elements (changed_variables))
6681 return;
6683 if (MAY_HAVE_DEBUG_INSNS)
6685 /* Unfortunately this has to be done in two steps, because
6686 we cannot keep traversing a hashtab while variable_was_changed
6687 is inserting new entries into it. */
6688 htab_traverse (changed_variables, check_changed_vars_1, htab);
6689 while (VEC_length (variable, changed_variables_stack) > 0)
6690 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6691 htab);
6694 data.insn = insn;
6695 data.where = where;
6696 data.vars = htab;
6698 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6701 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6702 same variable in hash table DATA or is not there at all. */
6704 static int
6705 emit_notes_for_differences_1 (void **slot, void *data)
6707 htab_t new_vars = (htab_t) data;
6708 variable old_var, new_var;
6710 old_var = (variable) *slot;
6711 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6712 dv_htab_hash (old_var->dv));
6714 if (!new_var)
6716 /* Variable has disappeared. */
6717 variable empty_var;
6719 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6720 empty_var->dv = old_var->dv;
6721 empty_var->refcount = 0;
6722 empty_var->n_var_parts = 0;
6723 if (dv_onepart_p (old_var->dv))
6725 location_chain lc;
6727 gcc_assert (old_var->n_var_parts == 1);
6728 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6729 remove_value_chains (old_var->dv, lc->loc);
6730 if (dv_is_value_p (old_var->dv))
6731 remove_cselib_value_chains (old_var->dv);
6733 variable_was_changed (empty_var, NULL);
6735 else if (variable_different_p (old_var, new_var, true))
6737 if (dv_onepart_p (old_var->dv))
6739 location_chain lc1, lc2;
6741 gcc_assert (old_var->n_var_parts == 1);
6742 gcc_assert (new_var->n_var_parts == 1);
6743 lc1 = old_var->var_part[0].loc_chain;
6744 lc2 = new_var->var_part[0].loc_chain;
6745 while (lc1
6746 && lc2
6747 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6748 || rtx_equal_p (lc1->loc, lc2->loc)))
6750 lc1 = lc1->next;
6751 lc2 = lc2->next;
6753 for (; lc2; lc2 = lc2->next)
6754 add_value_chains (old_var->dv, lc2->loc);
6755 for (; lc1; lc1 = lc1->next)
6756 remove_value_chains (old_var->dv, lc1->loc);
6758 variable_was_changed (new_var, NULL);
6761 /* Continue traversing the hash table. */
6762 return 1;
6765 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6766 table DATA. */
6768 static int
6769 emit_notes_for_differences_2 (void **slot, void *data)
6771 htab_t old_vars = (htab_t) data;
6772 variable old_var, new_var;
6774 new_var = (variable) *slot;
6775 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6776 dv_htab_hash (new_var->dv));
6777 if (!old_var)
6779 /* Variable has appeared. */
6780 if (dv_onepart_p (new_var->dv))
6782 location_chain lc;
6784 gcc_assert (new_var->n_var_parts == 1);
6785 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6786 add_value_chains (new_var->dv, lc->loc);
6787 if (dv_is_value_p (new_var->dv))
6788 add_cselib_value_chains (new_var->dv);
6790 variable_was_changed (new_var, NULL);
6793 /* Continue traversing the hash table. */
6794 return 1;
6797 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6798 NEW_SET. */
6800 static void
6801 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6802 dataflow_set *new_set)
6804 htab_traverse (shared_hash_htab (old_set->vars),
6805 emit_notes_for_differences_1,
6806 shared_hash_htab (new_set->vars));
6807 htab_traverse (shared_hash_htab (new_set->vars),
6808 emit_notes_for_differences_2,
6809 shared_hash_htab (old_set->vars));
6810 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
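/* For instance (a made-up scenario): if variable A is in a register in
   OLD_SET but in a stack slot in NEW_SET, the first traversal notices the
   difference and queues A; if variable B exists only in NEW_SET, the second
   traversal queues it; emit_notes_for_changes then emits the notes for both
   immediately before INSN.  */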
6813 /* Emit the notes for changes of location parts in the basic block BB. */
6815 static void
6816 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6818 int i;
6820 dataflow_set_clear (set);
6821 dataflow_set_copy (set, &VTI (bb)->in);
6823 for (i = 0; i < VTI (bb)->n_mos; i++)
6825 rtx insn = VTI (bb)->mos[i].insn;
6827 switch (VTI (bb)->mos[i].type)
6829 case MO_CALL:
6830 dataflow_set_clear_at_call (set);
6831 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
6832 break;
6834 case MO_USE:
6836 rtx loc = VTI (bb)->mos[i].u.loc;
6838 if (REG_P (loc))
6839 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6840 else
6841 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6843 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6845 break;
6847 case MO_VAL_LOC:
6849 rtx loc = VTI (bb)->mos[i].u.loc;
6850 rtx val, vloc;
6851 tree var;
6853 if (GET_CODE (loc) == CONCAT)
6855 val = XEXP (loc, 0);
6856 vloc = XEXP (loc, 1);
6858 else
6860 val = NULL_RTX;
6861 vloc = loc;
6864 var = PAT_VAR_LOCATION_DECL (vloc);
6866 clobber_variable_part (set, NULL_RTX,
6867 dv_from_decl (var), 0, NULL_RTX);
6868 if (val)
6870 if (VAL_NEEDS_RESOLUTION (loc))
6871 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6872 set_variable_part (set, val, dv_from_decl (var), 0,
6873 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6874 INSERT);
6877 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6879 break;
6881 case MO_VAL_USE:
6883 rtx loc = VTI (bb)->mos[i].u.loc;
6884 rtx val, vloc, uloc;
6886 vloc = uloc = XEXP (loc, 1);
6887 val = XEXP (loc, 0);
6889 if (GET_CODE (val) == CONCAT)
6891 uloc = XEXP (val, 1);
6892 val = XEXP (val, 0);
6895 if (VAL_NEEDS_RESOLUTION (loc))
6896 val_resolve (set, val, vloc, insn);
6898 if (VAL_HOLDS_TRACK_EXPR (loc))
6900 if (GET_CODE (uloc) == REG)
6901 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6902 NULL);
6903 else if (GET_CODE (uloc) == MEM)
6904 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6905 NULL);
6908 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
6910 break;
6912 case MO_VAL_SET:
6914 rtx loc = VTI (bb)->mos[i].u.loc;
6915 rtx val, vloc, uloc;
6917 vloc = uloc = XEXP (loc, 1);
6918 val = XEXP (loc, 0);
6920 if (GET_CODE (val) == CONCAT)
6922 vloc = XEXP (val, 1);
6923 val = XEXP (val, 0);
6926 if (GET_CODE (vloc) == SET)
6928 rtx vsrc = SET_SRC (vloc);
6930 gcc_assert (val != vsrc);
6931 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6933 vloc = SET_DEST (vloc);
6935 if (VAL_NEEDS_RESOLUTION (loc))
6936 val_resolve (set, val, vsrc, insn);
6938 else if (VAL_NEEDS_RESOLUTION (loc))
6940 gcc_assert (GET_CODE (uloc) == SET
6941 && GET_CODE (SET_SRC (uloc)) == REG);
6942 val_resolve (set, val, SET_SRC (uloc), insn);
6945 if (VAL_HOLDS_TRACK_EXPR (loc))
6947 if (VAL_EXPR_IS_CLOBBERED (loc))
6949 if (REG_P (uloc))
6950 var_reg_delete (set, uloc, true);
6951 else if (MEM_P (uloc))
6952 var_mem_delete (set, uloc, true);
6954 else
6956 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6957 rtx set_src = NULL;
6958 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6960 if (GET_CODE (uloc) == SET)
6962 set_src = SET_SRC (uloc);
6963 uloc = SET_DEST (uloc);
6966 if (copied_p)
6968 status = find_src_status (set, set_src);
6970 set_src = find_src_set_src (set, set_src);
6973 if (REG_P (uloc))
6974 var_reg_delete_and_set (set, uloc, !copied_p,
6975 status, set_src);
6976 else if (MEM_P (uloc))
6977 var_mem_delete_and_set (set, uloc, !copied_p,
6978 status, set_src);
6981 else if (REG_P (uloc))
6982 var_regno_delete (set, REGNO (uloc));
6984 val_store (set, val, vloc, insn);
6986 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6987 set->vars);
6989 break;
6991 case MO_SET:
6993 rtx loc = VTI (bb)->mos[i].u.loc;
6994 rtx set_src = NULL;
6996 if (GET_CODE (loc) == SET)
6998 set_src = SET_SRC (loc);
6999 loc = SET_DEST (loc);
7002 if (REG_P (loc))
7003 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7004 set_src);
7005 else
7006 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7007 set_src);
7009 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7010 set->vars);
7012 break;
7014 case MO_COPY:
7016 rtx loc = VTI (bb)->mos[i].u.loc;
7017 enum var_init_status src_status;
7018 rtx set_src = NULL;
7020 if (GET_CODE (loc) == SET)
7022 set_src = SET_SRC (loc);
7023 loc = SET_DEST (loc);
7026 src_status = find_src_status (set, set_src);
7027 set_src = find_src_set_src (set, set_src);
7029 if (REG_P (loc))
7030 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7031 else
7032 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7034 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7035 set->vars);
7037 break;
7039 case MO_USE_NO_VAR:
7041 rtx loc = VTI (bb)->mos[i].u.loc;
7043 if (REG_P (loc))
7044 var_reg_delete (set, loc, false);
7045 else
7046 var_mem_delete (set, loc, false);
7048 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7050 break;
7052 case MO_CLOBBER:
7054 rtx loc = VTI (bb)->mos[i].u.loc;
7056 if (REG_P (loc))
7057 var_reg_delete (set, loc, true);
7058 else
7059 var_mem_delete (set, loc, true);
7061 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7062 set->vars);
7064 break;
7066 case MO_ADJUST:
7067 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7068 break;
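/* A concrete (invented) example of the dispatch above: an insn doing
   (set (mem:SI (reg:SI sp)) (reg:SI dx)) recorded as MO_SET is split into
   set_src = (reg:SI dx) and loc = (mem:SI (reg:SI sp));
   var_mem_delete_and_set then rebinds the variable part stored in that stack
   slot, and the resulting notes are emitted before NEXT_INSN (insn), i.e. at
   the point where the store has already taken effect.  */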
7073 /* Emit notes for the whole function. */
7075 static void
7076 vt_emit_notes (void)
7078 basic_block bb;
7079 dataflow_set cur;
7081 gcc_assert (!htab_elements (changed_variables));
7083 /* Free memory occupied by the out hash tables, as they aren't used
7084 anymore. */
7085 FOR_EACH_BB (bb)
7086 dataflow_set_clear (&VTI (bb)->out);
7088 /* Enable emitting notes by functions (mainly by set_variable_part and
7089 delete_variable_part). */
7090 emit_notes = true;
7092 if (MAY_HAVE_DEBUG_INSNS)
7093 changed_variables_stack = VEC_alloc (variable, heap, 40);
7095 dataflow_set_init (&cur);
7097 FOR_EACH_BB (bb)
7099 /* Emit the notes for changes of variable locations between two
7100 consecutive basic blocks. */
7101 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7103 /* Emit the notes for the changes in the basic block itself. */
7104 emit_notes_in_bb (bb, &cur);
7106 /* Free memory occupied by the in hash table; we won't need it
7107 again. */
7108 dataflow_set_clear (&VTI (bb)->in);
7110 #ifdef ENABLE_CHECKING
7111 htab_traverse (shared_hash_htab (cur.vars),
7112 emit_notes_for_differences_1,
7113 shared_hash_htab (empty_shared_hash));
7114 if (MAY_HAVE_DEBUG_INSNS)
7115 gcc_assert (htab_elements (value_chains) == 0);
7116 #endif
7117 dataflow_set_destroy (&cur);
7119 if (MAY_HAVE_DEBUG_INSNS)
7120 VEC_free (variable, heap, changed_variables_stack);
7122 emit_notes = false;
7125 /* If there is a declaration and offset associated with register/memory RTL,
7126 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
7128 static bool
7129 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7131 if (REG_P (rtl))
7133 if (REG_ATTRS (rtl))
7135 *declp = REG_EXPR (rtl);
7136 *offsetp = REG_OFFSET (rtl);
7137 return true;
7140 else if (MEM_P (rtl))
7142 if (MEM_ATTRS (rtl))
7144 *declp = MEM_EXPR (rtl);
7145 *offsetp = INT_MEM_OFFSET (rtl);
7146 return true;
7149 return false;
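/* E.g. (hypothetical RTL): for a stack slot such as
   (mem/c:SI (plus:SI (reg/f:SI fp) (const_int 8)) [ x ]) the MEM attributes
   carry MEM_EXPR = x together with its offset, so this returns true with
   *DECLP = x and *OFFSETP taken from INT_MEM_OFFSET; a REG or MEM without
   attributes yields false.  */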
7152 /* Insert function parameters into the IN and OUT sets of ENTRY_BLOCK. */
7154 static void
7155 vt_add_function_parameters (void)
7157 tree parm;
7159 for (parm = DECL_ARGUMENTS (current_function_decl);
7160 parm; parm = TREE_CHAIN (parm))
7162 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7163 rtx incoming = DECL_INCOMING_RTL (parm);
7164 tree decl;
7165 enum machine_mode mode;
7166 HOST_WIDE_INT offset;
7167 dataflow_set *out;
7168 decl_or_value dv;
7170 if (TREE_CODE (parm) != PARM_DECL)
7171 continue;
7173 if (!DECL_NAME (parm))
7174 continue;
7176 if (!decl_rtl || !incoming)
7177 continue;
7179 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7180 continue;
7182 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7184 if (REG_P (incoming) || MEM_P (incoming))
7186 /* This means the argument is passed by invisible reference. */
7187 offset = 0;
7188 decl = parm;
7189 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7191 else
7193 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7194 continue;
7195 offset += byte_lowpart_offset (GET_MODE (incoming),
7196 GET_MODE (decl_rtl));
7200 if (!decl)
7201 continue;
7203 if (parm != decl)
7205 /* Assume that DECL_RTL was a pseudo that got spilled to
7206 memory. The spill slot sharing code will force the
7207 memory to reference spill_slot_decl (%sfp), so we don't
7208 match above. That's ok, the pseudo must have referenced
7209 the entire parameter, so just reset OFFSET. */
7210 gcc_assert (decl == get_spill_slot_decl (false));
7211 offset = 0;
7214 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7215 continue;
7217 out = &VTI (ENTRY_BLOCK_PTR)->out;
7219 dv = dv_from_decl (parm);
7221 if (target_for_debug_bind (parm)
7222 /* We can't deal with these right now, because this kind of
7223 variable is single-part. ??? We could handle parallels
7224 that describe multiple locations for the same single
7225 value, but ATM we don't. */
7226 && GET_CODE (incoming) != PARALLEL)
7228 cselib_val *val;
7230 /* ??? We shouldn't ever hit this, but it may happen because
7231 arguments passed by invisible reference aren't dealt with
7232 above: incoming-rtl will have Pmode rather than the
7233 expected mode for the type. */
7234 if (offset)
7235 continue;
7237 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7239 /* ??? Float-typed values in memory are not handled by
7240 cselib. */
7241 if (val)
7243 cselib_preserve_value (val);
7244 set_variable_part (out, val->val_rtx, dv, offset,
7245 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7246 dv = dv_from_value (val->val_rtx);
7250 if (REG_P (incoming))
7252 incoming = var_lowpart (mode, incoming);
7253 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7254 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7255 incoming);
7256 set_variable_part (out, incoming, dv, offset,
7257 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7259 else if (MEM_P (incoming))
7261 incoming = var_lowpart (mode, incoming);
7262 set_variable_part (out, incoming, dv, offset,
7263 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7267 if (MAY_HAVE_DEBUG_INSNS)
7269 cselib_preserve_only_values (true);
7270 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
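/* Parameter example (hypothetical RTL): for "int f (int x)" on a target that
   passes the first argument in a register, DECL_INCOMING_RTL (x) might be
   (reg:SI 5 di [ x ]), so vt_get_decl_and_offset recovers decl = x and
   offset = 0, and the binding is recorded both in out->regs[5] and as a
   variable part in the OUT set of ENTRY_BLOCK; an argument passed by
   invisible reference is instead tracked through a MEM wrapped around the
   incoming pointer.  */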
7275 /* Allocate and initialize the data structures for variable tracking
7276 and parse the RTL to get the micro operations. */
7278 static void
7279 vt_initialize (void)
7281 basic_block bb;
7283 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7285 if (MAY_HAVE_DEBUG_INSNS)
7287 cselib_init (true);
7288 scratch_regs = BITMAP_ALLOC (NULL);
7289 valvar_pool = create_alloc_pool ("small variable_def pool",
7290 sizeof (struct variable_def), 256);
7292 else
7294 scratch_regs = NULL;
7295 valvar_pool = NULL;
7298 FOR_EACH_BB (bb)
7300 rtx insn;
7301 HOST_WIDE_INT pre, post = 0;
7302 int count;
7303 unsigned int next_value_before = cselib_get_next_unknown_value ();
7304 unsigned int next_value_after = next_value_before;
7306 if (MAY_HAVE_DEBUG_INSNS)
7308 cselib_record_sets_hook = count_with_sets;
7309 if (dump_file && (dump_flags & TDF_DETAILS))
7310 fprintf (dump_file, "first value: %i\n",
7311 cselib_get_next_unknown_value ());
7314 /* Count the number of micro operations. */
7315 VTI (bb)->n_mos = 0;
7316 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7317 insn = NEXT_INSN (insn))
7319 if (INSN_P (insn))
7321 if (!frame_pointer_needed)
7323 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7324 if (pre)
7326 VTI (bb)->n_mos++;
7327 if (dump_file && (dump_flags & TDF_DETAILS))
7328 log_op_type (GEN_INT (pre), bb, insn,
7329 MO_ADJUST, dump_file);
7331 if (post)
7333 VTI (bb)->n_mos++;
7334 if (dump_file && (dump_flags & TDF_DETAILS))
7335 log_op_type (GEN_INT (post), bb, insn,
7336 MO_ADJUST, dump_file);
7339 cselib_hook_called = false;
7340 if (MAY_HAVE_DEBUG_INSNS)
7342 cselib_process_insn (insn);
7343 if (dump_file && (dump_flags & TDF_DETAILS))
7345 print_rtl_single (dump_file, insn);
7346 dump_cselib_table (dump_file);
7349 if (!cselib_hook_called)
7350 count_with_sets (insn, 0, 0);
7351 if (CALL_P (insn))
7353 VTI (bb)->n_mos++;
7354 if (dump_file && (dump_flags & TDF_DETAILS))
7355 log_op_type (PATTERN (insn), bb, insn,
7356 MO_CALL, dump_file);
7361 count = VTI (bb)->n_mos;
7363 if (MAY_HAVE_DEBUG_INSNS)
7365 cselib_preserve_only_values (false);
7366 next_value_after = cselib_get_next_unknown_value ();
7367 cselib_reset_table_with_next_value (next_value_before);
7368 cselib_record_sets_hook = add_with_sets;
7369 if (dump_file && (dump_flags & TDF_DETAILS))
7370 fprintf (dump_file, "first value: %i\n",
7371 cselib_get_next_unknown_value ());
7374 /* Add the micro-operations to the array. */
7375 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7376 VTI (bb)->n_mos = 0;
7377 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7378 insn = NEXT_INSN (insn))
7380 if (INSN_P (insn))
7382 if (!frame_pointer_needed)
7384 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7385 if (pre)
7387 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7389 mo->type = MO_ADJUST;
7390 mo->u.adjust = pre;
7391 mo->insn = insn;
7393 if (dump_file && (dump_flags & TDF_DETAILS))
7394 log_op_type (PATTERN (insn), bb, insn,
7395 MO_ADJUST, dump_file);
7399 cselib_hook_called = false;
7400 if (MAY_HAVE_DEBUG_INSNS)
7402 cselib_process_insn (insn);
7403 if (dump_file && (dump_flags & TDF_DETAILS))
7405 print_rtl_single (dump_file, insn);
7406 dump_cselib_table (dump_file);
7409 if (!cselib_hook_called)
7410 add_with_sets (insn, 0, 0);
7412 if (!frame_pointer_needed && post)
7414 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7416 mo->type = MO_ADJUST;
7417 mo->u.adjust = post;
7418 mo->insn = insn;
7420 if (dump_file && (dump_flags & TDF_DETAILS))
7421 log_op_type (PATTERN (insn), bb, insn,
7422 MO_ADJUST, dump_file);
7426 gcc_assert (count == VTI (bb)->n_mos);
7427 if (MAY_HAVE_DEBUG_INSNS)
7429 cselib_preserve_only_values (true);
7430 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7431 cselib_reset_table_with_next_value (next_value_after);
7432 cselib_record_sets_hook = NULL;
7436 attrs_pool = create_alloc_pool ("attrs_def pool",
7437 sizeof (struct attrs_def), 1024);
7438 var_pool = create_alloc_pool ("variable_def pool",
7439 sizeof (struct variable_def)
7440 + (MAX_VAR_PARTS - 1)
7441 * sizeof (((variable)NULL)->var_part[0]), 64);
7442 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7443 sizeof (struct location_chain_def),
7444 1024);
7445 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7446 sizeof (struct shared_hash_def), 256);
7447 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7448 empty_shared_hash->refcount = 1;
7449 empty_shared_hash->htab
7450 = htab_create (1, variable_htab_hash, variable_htab_eq,
7451 variable_htab_free);
7452 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7453 variable_htab_free);
7454 if (MAY_HAVE_DEBUG_INSNS)
7456 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7457 sizeof (struct value_chain_def),
7458 1024);
7459 value_chains = htab_create (32, value_chain_htab_hash,
7460 value_chain_htab_eq, NULL);
7463 /* Init the IN and OUT sets. */
7464 FOR_ALL_BB (bb)
7466 VTI (bb)->visited = false;
7467 VTI (bb)->flooded = false;
7468 dataflow_set_init (&VTI (bb)->in);
7469 dataflow_set_init (&VTI (bb)->out);
7470 VTI (bb)->permp = NULL;
7473 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7474 vt_add_function_parameters ();
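/* To illustrate the two scans above (an informal summary, nothing beyond
   what the code already does): the first walk over each basic block only
   counts micro operations via count_with_sets so that VTI (bb)->mos can be
   allocated with a single XNEWVEC; cselib is then rewound to the value
   numbers it had before the block (cselib_reset_table_with_next_value) and
   the second walk records the operations for real via add_with_sets.  The
   assertion count == VTI (bb)->n_mos checks that the two walks agreed.  */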
7477 /* Get rid of all debug insns from the insn stream. */
7479 static void
7480 delete_debug_insns (void)
7482 basic_block bb;
7483 rtx insn, next;
7485 if (!MAY_HAVE_DEBUG_INSNS)
7486 return;
7488 FOR_EACH_BB (bb)
7490 FOR_BB_INSNS_SAFE (bb, insn, next)
7491 if (DEBUG_INSN_P (insn))
7492 delete_insn (insn);
7496 /* Run a fast, BB-local-only version of var tracking, to take care of
7497 information that the global analysis does not cover, so that this
7498 information is not lost entirely. If SKIPPED holds, we're skipping the
7499 global pass entirely, so we should also try to use information that the
7500 global pass would have handled. */
7502 static void
7503 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7505 /* ??? Just skip it all for now. */
7506 delete_debug_insns ();
7509 /* Free the data structures needed for variable tracking. */
7511 static void
7512 vt_finalize (void)
7514 basic_block bb;
7516 FOR_EACH_BB (bb)
7518 free (VTI (bb)->mos);
7521 FOR_ALL_BB (bb)
7523 dataflow_set_destroy (&VTI (bb)->in);
7524 dataflow_set_destroy (&VTI (bb)->out);
7525 if (VTI (bb)->permp)
7527 dataflow_set_destroy (VTI (bb)->permp);
7528 XDELETE (VTI (bb)->permp);
7531 free_aux_for_blocks ();
7532 htab_delete (empty_shared_hash->htab);
7533 htab_delete (changed_variables);
7534 free_alloc_pool (attrs_pool);
7535 free_alloc_pool (var_pool);
7536 free_alloc_pool (loc_chain_pool);
7537 free_alloc_pool (shared_hash_pool);
7539 if (MAY_HAVE_DEBUG_INSNS)
7541 htab_delete (value_chains);
7542 free_alloc_pool (value_chain_pool);
7543 free_alloc_pool (valvar_pool);
7544 cselib_finish ();
7545 BITMAP_FREE (scratch_regs);
7546 scratch_regs = NULL;
7549 if (vui_vec)
7550 XDELETEVEC (vui_vec);
7551 vui_vec = NULL;
7552 vui_allocated = 0;
7555 /* The entry point to variable tracking pass. */
7557 unsigned int
7558 variable_tracking_main (void)
7560 if (flag_var_tracking_assignments < 0)
7562 delete_debug_insns ();
7563 return 0;
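/* Heuristic bail-out (assumed rationale): on very large and very densely
   connected CFGs the global dataflow iteration would be prohibitively
   expensive, so only the cheap block-local processing is attempted.  */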
7566 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7568 vt_debug_insns_local (true);
7569 return 0;
7572 mark_dfs_back_edges ();
7573 vt_initialize ();
7574 if (!frame_pointer_needed)
7576 if (!vt_stack_adjustments ())
7578 vt_finalize ();
7579 vt_debug_insns_local (true);
7580 return 0;
7584 vt_find_locations ();
7586 if (dump_file && (dump_flags & TDF_DETAILS))
7588 dump_dataflow_sets ();
7589 dump_flow_info (dump_file, dump_flags);
7592 vt_emit_notes ();
7594 vt_finalize ();
7595 vt_debug_insns_local (false);
7596 return 0;
7599 static bool
7600 gate_handle_var_tracking (void)
7602 return (flag_var_tracking);
7607 struct rtl_opt_pass pass_variable_tracking =
7610 RTL_PASS,
7611 "vartrack", /* name */
7612 gate_handle_var_tracking, /* gate */
7613 variable_tracking_main, /* execute */
7614 NULL, /* sub */
7615 NULL, /* next */
7616 0, /* static_pass_number */
7617 TV_VAR_TRACKING, /* tv_id */
7618 0, /* properties_required */
7619 0, /* properties_provided */
7620 0, /* properties_destroyed */
7621 0, /* todo_flags_start */
7622 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */