1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (in which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is then generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how the locations
40 of variables change through the code and to propagate the variable locations
41 along the control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting the offset of variables addressed using the stack pointer),
48 the table of structures describing the locations of parts of a variable,
49 and a linked list of attributes for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
53 for efficiently deleting the appropriate variable parts when we set or
54 clobber the register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a linked list is a good data structure here.
58 For example, in the following code the register allocator may assign the
59 same register to variables A and B, and both of them are then stored in
60 that register in CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in the RTL code.  Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is.  There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).  */
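/* A minimal sketch of the forward dataflow join described above (illustration
   only, not the pass's actual code; the function name vt_join_sketch and the
   "apply micro operations" placeholder are hypothetical).  It uses only
   helpers that really exist below: VTI, dataflow_set_clear,
   dataflow_set_union and dataflow_set_copy.  The real iteration lives in
   vt_find_locations and compute_bb_dataflow.  */
#if 0
static void
vt_join_sketch (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* IN[bb] = union of OUT[pred] over all predecessors of BB.  */
  dataflow_set_clear (&VTI (bb)->in);
  FOR_EACH_EDGE (e, ei, bb->preds)
    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);

  /* OUT[bb] starts as a copy of IN[bb] and is then transformed by the
     block's micro operations (MO_USE, MO_SET, MO_CLOBBER, ...).  */
  dataflow_set_copy (&VTI (bb)->out, &VTI (bb)->in);
  /* ... apply VTI (bb)->mos[0 .. n_mos - 1] here ...  */
}
#endif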
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
97 #include "flags.h"
98 #include "output.h"
99 #include "insn-config.h"
100 #include "reload.h"
101 #include "sbitmap.h"
102 #include "alloc-pool.h"
103 #include "fibheap.h"
104 #include "hashtab.h"
105 #include "regs.h"
106 #include "expr.h"
107 #include "timevar.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
110 #include "cselib.h"
111 #include "target.h"
113 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
114 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
115 Currently the value is the same as IDENTIFIER_NODE, which has such
116 a property. If this compile time assertion ever fails, make sure that
117 the new tree code that equals (int) VALUE has the same property. */
118 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
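/* The extern declaration above acts as a compile-time assertion: if the
   condition is false, the array is given the invalid size -1 and the
   translation unit fails to compile; no object is ever defined for it.  */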
120 /* Type of micro operation. */
121 enum micro_operation_type
123 MO_USE, /* Use location (REG or MEM). */
124 MO_USE_NO_VAR,/* Use location which is not associated with a variable
125 or the variable is not trackable. */
126 MO_VAL_USE, /* Use location which is associated with a value. */
127 MO_VAL_LOC, /* Use location which appears in a debug insn. */
128 MO_VAL_SET, /* Set location associated with a value. */
129 MO_SET, /* Set location. */
130 MO_COPY, /* Copy the same portion of a variable from one
131 location to another. */
132 MO_CLOBBER, /* Clobber location. */
133 MO_CALL, /* Call insn. */
134 MO_ADJUST /* Adjust stack pointer. */
138 static const char * const ATTRIBUTE_UNUSED
139 micro_operation_type_name[] = {
140 "MO_USE",
141 "MO_USE_NO_VAR",
142 "MO_VAL_USE",
143 "MO_VAL_LOC",
144 "MO_VAL_SET",
145 "MO_SET",
146 "MO_COPY",
147 "MO_CLOBBER",
148 "MO_CALL",
149 "MO_ADJUST"
152 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
153 Notes emitted as AFTER_CALL are to take effect during the call,
154 rather than after the call. */
155 enum emit_note_where
157 EMIT_NOTE_BEFORE_INSN,
158 EMIT_NOTE_AFTER_INSN,
159 EMIT_NOTE_AFTER_CALL_INSN
162 /* Structure holding information about micro operation. */
163 typedef struct micro_operation_def
165 /* Type of micro operation. */
166 enum micro_operation_type type;
168 union {
169 /* Location. For MO_SET and MO_COPY, this is the SET that
170 performs the assignment, if known, otherwise it is the target
171 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
172 CONCAT of the VALUE and the LOC associated with it. For
173 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
174 associated with it. */
175 rtx loc;
177 /* Stack adjustment. */
178 HOST_WIDE_INT adjust;
179 } u;
181 /* The instruction which the micro operation is in, for MO_USE,
182 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
183 instruction or note in the original flow (before any var-tracking
184 notes are inserted, to simplify emission of notes), for MO_SET
185 and MO_CLOBBER. */
186 rtx insn;
187 } micro_operation;
189 /* A declaration of a variable, or an RTL value being handled like a
190 declaration. */
191 typedef void *decl_or_value;
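/* A decl_or_value carries either a tree declaration or a VALUE rtx; the
   helpers dv_is_decl_p / dv_is_value_p below tell the two apart by the code
   stored at the start of the object, which the check_value_val assertion
   above makes unambiguous.  */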
193 /* Structure for passing some other parameters to function
194 emit_note_insn_var_location. */
195 typedef struct emit_note_data_def
197 /* The instruction which the note will be emitted before/after. */
198 rtx insn;
200 /* Where the note will be emitted (before/after insn)? */
201 enum emit_note_where where;
203 /* The variables and values active at this point. */
204 htab_t vars;
205 } emit_note_data;
207 /* Description of location of a part of a variable. The content of a physical
208 register is described by a chain of these structures.
209 The chains are pretty short (usually 1 or 2 elements) and thus
210 a chain is the best data structure here.  */
211 typedef struct attrs_def
213 /* Pointer to next member of the list. */
214 struct attrs_def *next;
216 /* The rtx of register. */
217 rtx loc;
219 /* The declaration corresponding to LOC. */
220 decl_or_value dv;
222 /* Offset from start of DECL. */
223 HOST_WIDE_INT offset;
224 } *attrs;
226 /* Structure holding a refcounted hash table. If refcount > 1,
227 it must be unshared before being modified.  */
228 typedef struct shared_hash_def
230 /* Reference count. */
231 int refcount;
233 /* Actual hash table. */
234 htab_t htab;
235 } *shared_hash;
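/* These tables are copy-on-write: shared_hash_copy below only increments the
   reference count, and a table is physically duplicated (shared_hash_unshare)
   the first time a shared instance needs to be modified.  */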
237 /* Structure holding the IN or OUT set for a basic block. */
238 typedef struct dataflow_set_def
240 /* Adjustment of stack offset. */
241 HOST_WIDE_INT stack_adjust;
243 /* Attributes for registers (lists of attrs). */
244 attrs regs[FIRST_PSEUDO_REGISTER];
246 /* Variable locations. */
247 shared_hash vars;
249 /* Vars that are currently being traversed.  */
250 shared_hash traversed_vars;
251 } dataflow_set;
253 /* The structure (one for each basic block) containing the information
254 needed for variable tracking. */
255 typedef struct variable_tracking_info_def
257 /* Number of micro operations stored in the MOS array. */
258 int n_mos;
260 /* The array of micro operations. */
261 micro_operation *mos;
263 /* The IN and OUT set for dataflow analysis. */
264 dataflow_set in;
265 dataflow_set out;
267 /* The permanent-in dataflow set for this block. This is used to
268 hold values for which we had to compute entry values. ??? This
269 should probably be dynamically allocated, to avoid using more
270 memory in non-debug builds. */
271 dataflow_set *permp;
273 /* Has the block been visited in DFS? */
274 bool visited;
276 /* Has the block been flooded in VTA? */
277 bool flooded;
279 } *variable_tracking_info;
281 /* Structure for chaining the locations. */
282 typedef struct location_chain_def
284 /* Next element in the chain. */
285 struct location_chain_def *next;
287 /* The location (REG, MEM or VALUE). */
288 rtx loc;
290 /* The "value" stored in this location. */
291 rtx set_src;
293 /* Initialized? */
294 enum var_init_status init;
295 } *location_chain;
297 /* Structure describing one part of variable. */
298 typedef struct variable_part_def
300 /* Chain of locations of the part. */
301 location_chain loc_chain;
303 /* Location which was last emitted to location list. */
304 rtx cur_loc;
306 /* The offset in the variable. */
307 HOST_WIDE_INT offset;
308 } variable_part;
310 /* Maximum number of location parts. */
311 #define MAX_VAR_PARTS 16
313 /* Structure describing where the variable is located. */
314 typedef struct variable_def
316 /* The declaration of the variable, or an RTL value being handled
317 like a declaration. */
318 decl_or_value dv;
320 /* Reference count. */
321 int refcount;
323 /* Number of variable parts. */
324 int n_var_parts;
326 /* The variable parts. */
327 variable_part var_part[1];
328 } *variable;
329 typedef const struct variable_def *const_variable;
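/* VAR_PART above is a trailing array: variables are allocated from var_pool
   with room for MAX_VAR_PARTS parts, or from valvar_pool with room for a
   single part when dv_onepart_p holds (see dv_pool below).  */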
331 /* Structure for chaining backlinks from referenced VALUEs to
332 DVs that are referencing them. */
333 typedef struct value_chain_def
335 /* Next value_chain entry. */
336 struct value_chain_def *next;
338 /* The declaration of the variable, or an RTL value
339 being handled like a declaration, whose var_parts[0].loc_chain
340 references the VALUE owning this value_chain. */
341 decl_or_value dv;
343 /* Reference count. */
344 int refcount;
345 } *value_chain;
346 typedef const struct value_chain_def *const_value_chain;
348 /* Pointer to the BB's information specific to variable tracking pass. */
349 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
351 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
352 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
354 /* Alloc pool for struct attrs_def. */
355 static alloc_pool attrs_pool;
357 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
358 static alloc_pool var_pool;
360 /* Alloc pool for struct variable_def with a single var_part entry. */
361 static alloc_pool valvar_pool;
363 /* Alloc pool for struct location_chain_def. */
364 static alloc_pool loc_chain_pool;
366 /* Alloc pool for struct shared_hash_def. */
367 static alloc_pool shared_hash_pool;
369 /* Alloc pool for struct value_chain_def. */
370 static alloc_pool value_chain_pool;
372 /* Changed variables, notes will be emitted for them. */
373 static htab_t changed_variables;
375 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
376 static htab_t value_chains;
378 /* Shall notes be emitted? */
379 static bool emit_notes;
381 /* Empty shared hashtable. */
382 static shared_hash empty_shared_hash;
384 /* Scratch register bitmap used by cselib_expand_value_rtx. */
385 static bitmap scratch_regs = NULL;
387 /* Variable used to tell whether cselib_process_insn called our hook. */
388 static bool cselib_hook_called;
390 /* Local function prototypes. */
391 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
392 HOST_WIDE_INT *);
393 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
394 HOST_WIDE_INT *);
395 static void bb_stack_adjust_offset (basic_block);
396 static bool vt_stack_adjustments (void);
397 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
398 static hashval_t variable_htab_hash (const void *);
399 static int variable_htab_eq (const void *, const void *);
400 static void variable_htab_free (void *);
402 static void init_attrs_list_set (attrs *);
403 static void attrs_list_clear (attrs *);
404 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
405 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
406 static void attrs_list_copy (attrs *, attrs);
407 static void attrs_list_union (attrs *, attrs);
409 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
410 enum var_init_status);
411 static int vars_copy_1 (void **, void *);
412 static void vars_copy (htab_t, htab_t);
413 static tree var_debug_decl (tree);
414 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
415 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
416 enum var_init_status, rtx);
417 static void var_reg_delete (dataflow_set *, rtx, bool);
418 static void var_regno_delete (dataflow_set *, int);
419 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
420 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
421 enum var_init_status, rtx);
422 static void var_mem_delete (dataflow_set *, rtx, bool);
424 static void dataflow_set_init (dataflow_set *);
425 static void dataflow_set_clear (dataflow_set *);
426 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
427 static int variable_union_info_cmp_pos (const void *, const void *);
428 static int variable_union (void **, void *);
429 static int variable_canonicalize (void **, void *);
430 static void dataflow_set_union (dataflow_set *, dataflow_set *);
431 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
432 static bool canon_value_cmp (rtx, rtx);
433 static int loc_cmp (rtx, rtx);
434 static bool variable_part_different_p (variable_part *, variable_part *);
435 static bool onepart_variable_different_p (variable, variable);
436 static bool variable_different_p (variable, variable, bool);
437 static int dataflow_set_different_1 (void **, void *);
438 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
439 static void dataflow_set_destroy (dataflow_set *);
441 static bool contains_symbol_ref (rtx);
442 static bool track_expr_p (tree, bool);
443 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
444 static int count_uses (rtx *, void *);
445 static void count_uses_1 (rtx *, void *);
446 static void count_stores (rtx, const_rtx, void *);
447 static int add_uses (rtx *, void *);
448 static void add_uses_1 (rtx *, void *);
449 static void add_stores (rtx, const_rtx, void *);
450 static bool compute_bb_dataflow (basic_block);
451 static void vt_find_locations (void);
453 static void dump_attrs_list (attrs);
454 static int dump_var_slot (void **, void *);
455 static void dump_var (variable);
456 static void dump_vars (htab_t);
457 static void dump_dataflow_set (dataflow_set *);
458 static void dump_dataflow_sets (void);
460 static void variable_was_changed (variable, dataflow_set *);
461 static void **set_slot_part (dataflow_set *, rtx, void **,
462 decl_or_value, HOST_WIDE_INT,
463 enum var_init_status, rtx);
464 static void set_variable_part (dataflow_set *, rtx,
465 decl_or_value, HOST_WIDE_INT,
466 enum var_init_status, rtx, enum insert_option);
467 static void **clobber_slot_part (dataflow_set *, rtx,
468 void **, HOST_WIDE_INT, rtx);
469 static void clobber_variable_part (dataflow_set *, rtx,
470 decl_or_value, HOST_WIDE_INT, rtx);
471 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
472 static void delete_variable_part (dataflow_set *, rtx,
473 decl_or_value, HOST_WIDE_INT);
474 static int emit_note_insn_var_location (void **, void *);
475 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
476 static int emit_notes_for_differences_1 (void **, void *);
477 static int emit_notes_for_differences_2 (void **, void *);
478 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
479 static void emit_notes_in_bb (basic_block, dataflow_set *);
480 static void vt_emit_notes (void);
482 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
483 static void vt_add_function_parameters (void);
484 static void vt_initialize (void);
485 static void vt_finalize (void);
487 /* Given a SET, calculate the amount of stack adjustment it contains,
488 PRE- and POST-modifying the stack pointer.
489 This function is similar to stack_adjust_offset. */
491 static void
492 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
493 HOST_WIDE_INT *post)
495 rtx src = SET_SRC (pattern);
496 rtx dest = SET_DEST (pattern);
497 enum rtx_code code;
499 if (dest == stack_pointer_rtx)
501 /* (set (reg sp) (plus (reg sp) (const_int))) */
502 code = GET_CODE (src);
503 if (! (code == PLUS || code == MINUS)
504 || XEXP (src, 0) != stack_pointer_rtx
505 || !CONST_INT_P (XEXP (src, 1)))
506 return;
508 if (code == MINUS)
509 *post += INTVAL (XEXP (src, 1));
510 else
511 *post -= INTVAL (XEXP (src, 1));
513 else if (MEM_P (dest))
515 /* (set (mem (pre_dec (reg sp))) (foo)) */
516 src = XEXP (dest, 0);
517 code = GET_CODE (src);
519 switch (code)
521 case PRE_MODIFY:
522 case POST_MODIFY:
523 if (XEXP (src, 0) == stack_pointer_rtx)
525 rtx val = XEXP (XEXP (src, 1), 1);
526 /* We handle only adjustments by constant amount. */
527 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
528 && CONST_INT_P (val));
530 if (code == PRE_MODIFY)
531 *pre -= INTVAL (val);
532 else
533 *post -= INTVAL (val);
534 break;
536 return;
538 case PRE_DEC:
539 if (XEXP (src, 0) == stack_pointer_rtx)
541 *pre += GET_MODE_SIZE (GET_MODE (dest));
542 break;
544 return;
546 case POST_DEC:
547 if (XEXP (src, 0) == stack_pointer_rtx)
549 *post += GET_MODE_SIZE (GET_MODE (dest));
550 break;
552 return;
554 case PRE_INC:
555 if (XEXP (src, 0) == stack_pointer_rtx)
557 *pre -= GET_MODE_SIZE (GET_MODE (dest));
558 break;
560 return;
562 case POST_INC:
563 if (XEXP (src, 0) == stack_pointer_rtx)
565 *post -= GET_MODE_SIZE (GET_MODE (dest));
566 break;
568 return;
570 default:
571 return;
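/* For illustration (hypothetical RTL, assuming SImode is 4 bytes on the
   target): (set (reg sp) (plus (reg sp) (const_int -16))) contributes +16 to
   *POST, and (set (mem:SI (pre_dec (reg sp))) (reg R)) contributes +4 to
   *PRE, i.e. the accumulated adjustment grows as the stack grows downward.
   bb_stack_adjust_offset below uses the running total to rewrite
   stack-pointer-based MEM addresses.  */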
576 /* Given an INSN, calculate the amount of stack adjustment it contains,
577 PRE- and POST-modifying the stack pointer. */
579 static void
580 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
581 HOST_WIDE_INT *post)
583 rtx pattern;
585 *pre = 0;
586 *post = 0;
588 pattern = PATTERN (insn);
589 if (RTX_FRAME_RELATED_P (insn))
591 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
592 if (expr)
593 pattern = XEXP (expr, 0);
596 if (GET_CODE (pattern) == SET)
597 stack_adjust_offset_pre_post (pattern, pre, post);
598 else if (GET_CODE (pattern) == PARALLEL
599 || GET_CODE (pattern) == SEQUENCE)
601 int i;
603 /* There may be stack adjustments inside compound insns. Search
604 for them. */
605 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
606 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
607 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
611 /* Compute stack adjustment in basic block BB. */
613 static void
614 bb_stack_adjust_offset (basic_block bb)
616 HOST_WIDE_INT offset;
617 int i;
619 offset = VTI (bb)->in.stack_adjust;
620 for (i = 0; i < VTI (bb)->n_mos; i++)
622 if (VTI (bb)->mos[i].type == MO_ADJUST)
623 offset += VTI (bb)->mos[i].u.adjust;
624 else if (VTI (bb)->mos[i].type != MO_CALL)
626 if (MEM_P (VTI (bb)->mos[i].u.loc))
628 VTI (bb)->mos[i].u.loc
629 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
633 VTI (bb)->out.stack_adjust = offset;
636 /* Compute stack adjustments for all blocks by traversing DFS tree.
637 Return true when the adjustments on all incoming edges are consistent.
638 Heavily borrowed from pre_and_rev_post_order_compute. */
640 static bool
641 vt_stack_adjustments (void)
643 edge_iterator *stack;
644 int sp;
646 /* Initialize entry block. */
647 VTI (ENTRY_BLOCK_PTR)->visited = true;
648 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
650 /* Allocate stack for back-tracking up CFG. */
651 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
652 sp = 0;
654 /* Push the first edge on to the stack. */
655 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
657 while (sp)
659 edge_iterator ei;
660 basic_block src;
661 basic_block dest;
663 /* Look at the edge on the top of the stack. */
664 ei = stack[sp - 1];
665 src = ei_edge (ei)->src;
666 dest = ei_edge (ei)->dest;
668 /* Check if the edge destination has been visited yet. */
669 if (!VTI (dest)->visited)
671 VTI (dest)->visited = true;
672 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
673 bb_stack_adjust_offset (dest);
675 if (EDGE_COUNT (dest->succs) > 0)
676 /* Since the DEST node has been visited for the first
677 time, check its successors. */
678 stack[sp++] = ei_start (dest->succs);
680 else
682 /* Check whether the adjustments on the edges are the same. */
683 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
685 free (stack);
686 return false;
689 if (! ei_one_before_end_p (ei))
690 /* Go to the next edge. */
691 ei_next (&stack[sp - 1]);
692 else
693 /* Return to previous level if there are no more edges. */
694 sp--;
698 free (stack);
699 return true;
702 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
703 to the argument pointer. Return the new rtx. */
705 static rtx
706 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
708 rtx addr, cfa, tmp;
710 #ifdef FRAME_POINTER_CFA_OFFSET
711 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
712 cfa = plus_constant (frame_pointer_rtx, adjustment);
713 #else
714 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
715 cfa = plus_constant (arg_pointer_rtx, adjustment);
716 #endif
718 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
719 tmp = simplify_rtx (addr);
720 if (tmp)
721 addr = tmp;
723 return replace_equiv_address_nv (mem, addr);
726 /* Return true if a decl_or_value DV is a DECL or NULL. */
727 static inline bool
728 dv_is_decl_p (decl_or_value dv)
730 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
733 /* Return true if a decl_or_value is a VALUE rtl. */
734 static inline bool
735 dv_is_value_p (decl_or_value dv)
737 return dv && !dv_is_decl_p (dv);
740 /* Return the decl in the decl_or_value. */
741 static inline tree
742 dv_as_decl (decl_or_value dv)
744 #ifdef ENABLE_CHECKING
745 gcc_assert (dv_is_decl_p (dv));
746 #endif
747 return (tree) dv;
750 /* Return the value in the decl_or_value. */
751 static inline rtx
752 dv_as_value (decl_or_value dv)
754 #ifdef ENABLE_CHECKING
755 gcc_assert (dv_is_value_p (dv));
756 #endif
757 return (rtx)dv;
760 /* Return the opaque pointer in the decl_or_value. */
761 static inline void *
762 dv_as_opaque (decl_or_value dv)
764 return dv;
767 /* Return true if a decl_or_value must not have more than one variable
768 part. */
769 static inline bool
770 dv_onepart_p (decl_or_value dv)
772 tree decl;
774 if (!MAY_HAVE_DEBUG_INSNS)
775 return false;
777 if (dv_is_value_p (dv))
778 return true;
780 decl = dv_as_decl (dv);
782 if (!decl)
783 return true;
785 return (target_for_debug_bind (decl) != NULL_TREE);
788 /* Return the variable pool to be used for dv, depending on whether it
789 can have multiple parts or not. */
790 static inline alloc_pool
791 dv_pool (decl_or_value dv)
793 return dv_onepart_p (dv) ? valvar_pool : var_pool;
796 /* Build a decl_or_value out of a decl. */
797 static inline decl_or_value
798 dv_from_decl (tree decl)
800 decl_or_value dv;
801 dv = decl;
802 #ifdef ENABLE_CHECKING
803 gcc_assert (dv_is_decl_p (dv));
804 #endif
805 return dv;
808 /* Build a decl_or_value out of a value. */
809 static inline decl_or_value
810 dv_from_value (rtx value)
812 decl_or_value dv;
813 dv = value;
814 #ifdef ENABLE_CHECKING
815 gcc_assert (dv_is_value_p (dv));
816 #endif
817 return dv;
820 typedef unsigned int dvuid;
822 /* Return the uid of DV. */
824 static inline dvuid
825 dv_uid (decl_or_value dv)
827 if (dv_is_value_p (dv))
828 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
829 else
830 return DECL_UID (dv_as_decl (dv));
833 /* Compute the hash from the uid. */
835 static inline hashval_t
836 dv_uid2hash (dvuid uid)
838 return uid;
841 /* The hash function for DV keys in the variable hash tables. */
843 static inline hashval_t
844 dv_htab_hash (decl_or_value dv)
846 return dv_uid2hash (dv_uid (dv));
849 /* The hash function for variable_htab, computes the hash value
850 from the declaration of variable X. */
852 static hashval_t
853 variable_htab_hash (const void *x)
855 const_variable const v = (const_variable) x;
857 return dv_htab_hash (v->dv);
860 /* Compare the declaration of variable X with declaration Y. */
862 static int
863 variable_htab_eq (const void *x, const void *y)
865 const_variable const v = (const_variable) x;
866 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
868 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
871 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
873 static void
874 variable_htab_free (void *elem)
876 int i;
877 variable var = (variable) elem;
878 location_chain node, next;
880 gcc_assert (var->refcount > 0);
882 var->refcount--;
883 if (var->refcount > 0)
884 return;
886 for (i = 0; i < var->n_var_parts; i++)
888 for (node = var->var_part[i].loc_chain; node; node = next)
890 next = node->next;
891 pool_free (loc_chain_pool, node);
893 var->var_part[i].loc_chain = NULL;
895 pool_free (dv_pool (var->dv), var);
898 /* The hash function for value_chains htab, computes the hash value
899 from the VALUE. */
901 static hashval_t
902 value_chain_htab_hash (const void *x)
904 const_value_chain const v = (const_value_chain) x;
906 return dv_htab_hash (v->dv);
909 /* Compare the VALUE X with VALUE Y. */
911 static int
912 value_chain_htab_eq (const void *x, const void *y)
914 const_value_chain const v = (const_value_chain) x;
915 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
917 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
920 /* Initialize the set (array) SET of attrs to empty lists. */
922 static void
923 init_attrs_list_set (attrs *set)
925 int i;
927 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
928 set[i] = NULL;
931 /* Make the list *LISTP empty. */
933 static void
934 attrs_list_clear (attrs *listp)
936 attrs list, next;
938 for (list = *listp; list; list = next)
940 next = list->next;
941 pool_free (attrs_pool, list);
943 *listp = NULL;
946 /* Return the element of LIST matching DV and OFFSET, or NULL if there is no such element. */
948 static attrs
949 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
951 for (; list; list = list->next)
952 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
953 return list;
954 return NULL;
957 /* Insert the triplet DV, OFFSET, LOC at the head of the list *LISTP. */
959 static void
960 attrs_list_insert (attrs *listp, decl_or_value dv,
961 HOST_WIDE_INT offset, rtx loc)
963 attrs list;
965 list = (attrs) pool_alloc (attrs_pool);
966 list->loc = loc;
967 list->dv = dv;
968 list->offset = offset;
969 list->next = *listp;
970 *listp = list;
973 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
975 static void
976 attrs_list_copy (attrs *dstp, attrs src)
978 attrs n;
980 attrs_list_clear (dstp);
981 for (; src; src = src->next)
983 n = (attrs) pool_alloc (attrs_pool);
984 n->loc = src->loc;
985 n->dv = src->dv;
986 n->offset = src->offset;
987 n->next = *dstp;
988 *dstp = n;
992 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
994 static void
995 attrs_list_union (attrs *dstp, attrs src)
997 for (; src; src = src->next)
999 if (!attrs_list_member (*dstp, src->dv, src->offset))
1000 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1004 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1005 *DSTP. */
1007 static void
1008 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1010 gcc_assert (!*dstp);
1011 for (; src; src = src->next)
1013 if (!dv_onepart_p (src->dv))
1014 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1016 for (src = src2; src; src = src->next)
1018 if (!dv_onepart_p (src->dv)
1019 && !attrs_list_member (*dstp, src->dv, src->offset))
1020 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1024 /* Shared hashtable support. */
1026 /* Return true if VARS is shared. */
1028 static inline bool
1029 shared_hash_shared (shared_hash vars)
1031 return vars->refcount > 1;
1034 /* Return the hash table for VARS. */
1036 static inline htab_t
1037 shared_hash_htab (shared_hash vars)
1039 return vars->htab;
1042 /* Copy variables into a new hash table. */
1044 static shared_hash
1045 shared_hash_unshare (shared_hash vars)
1047 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1048 gcc_assert (vars->refcount > 1);
1049 new_vars->refcount = 1;
1050 new_vars->htab
1051 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1052 variable_htab_eq, variable_htab_free);
1053 vars_copy (new_vars->htab, vars->htab);
1054 vars->refcount--;
1055 return new_vars;
1058 /* Increment reference counter on VARS and return it. */
1060 static inline shared_hash
1061 shared_hash_copy (shared_hash vars)
1063 vars->refcount++;
1064 return vars;
1067 /* Decrement reference counter and destroy hash table if not shared
1068 anymore. */
1070 static void
1071 shared_hash_destroy (shared_hash vars)
1073 gcc_assert (vars->refcount > 0);
1074 if (--vars->refcount == 0)
1076 htab_delete (vars->htab);
1077 pool_free (shared_hash_pool, vars);
1081 /* Unshare *PVARS if shared and return slot for DV. If INS is
1082 INSERT, insert it if not already present. */
1084 static inline void **
1085 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1086 hashval_t dvhash, enum insert_option ins)
1088 if (shared_hash_shared (*pvars))
1089 *pvars = shared_hash_unshare (*pvars);
1090 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1093 static inline void **
1094 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1095 enum insert_option ins)
1097 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1100 /* Return slot for DV, if it is already present in the hash table.
1101 If it is not present, insert it only if VARS is not shared, otherwise
1102 return NULL. */
1104 static inline void **
1105 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1107 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1108 shared_hash_shared (vars)
1109 ? NO_INSERT : INSERT);
1112 static inline void **
1113 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1115 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1118 /* Return slot for DV only if it is already present in the hash table. */
1120 static inline void **
1121 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1122 hashval_t dvhash)
1124 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1125 NO_INSERT);
1128 static inline void **
1129 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1131 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1134 /* Return variable for DV or NULL if not already present in the hash
1135 table. */
1137 static inline variable
1138 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1140 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1143 static inline variable
1144 shared_hash_find (shared_hash vars, decl_or_value dv)
1146 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1149 /* Return true if TVAL is better than CVAL as a canonical value.  We
1150 choose lowest-numbered VALUEs, using the RTX address as a
1151 tie-breaker. The idea is to arrange them into a star topology,
1152 such that all of them are at most one step away from the canonical
1153 value, and the canonical value has backlinks to all of them, in
1154 addition to all the actual locations. We don't enforce this
1155 topology throughout the entire dataflow analysis, though.
1158 static inline bool
1159 canon_value_cmp (rtx tval, rtx cval)
1161 return !cval
1162 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
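/* For example, if VALUEs with uids 5, 9 and 12 turn out to be equivalent,
   the one with uid 5 is preferred as the canonical value and the other two
   ideally end up one equivalence link away from it, as described above.  */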
1165 static bool dst_can_be_shared;
1167 /* Make a copy of variable VAR, insert it into dataflow set SET, and return the slot holding the copy. */
1169 static void **
1170 unshare_variable (dataflow_set *set, void **slot, variable var,
1171 enum var_init_status initialized)
1173 variable new_var;
1174 int i;
1176 new_var = (variable) pool_alloc (dv_pool (var->dv));
1177 new_var->dv = var->dv;
1178 new_var->refcount = 1;
1179 var->refcount--;
1180 new_var->n_var_parts = var->n_var_parts;
1182 if (! flag_var_tracking_uninit)
1183 initialized = VAR_INIT_STATUS_INITIALIZED;
1185 for (i = 0; i < var->n_var_parts; i++)
1187 location_chain node;
1188 location_chain *nextp;
1190 new_var->var_part[i].offset = var->var_part[i].offset;
1191 nextp = &new_var->var_part[i].loc_chain;
1192 for (node = var->var_part[i].loc_chain; node; node = node->next)
1194 location_chain new_lc;
1196 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1197 new_lc->next = NULL;
1198 if (node->init > initialized)
1199 new_lc->init = node->init;
1200 else
1201 new_lc->init = initialized;
1202 if (node->set_src && !(MEM_P (node->set_src)))
1203 new_lc->set_src = node->set_src;
1204 else
1205 new_lc->set_src = NULL;
1206 new_lc->loc = node->loc;
1208 *nextp = new_lc;
1209 nextp = &new_lc->next;
1212 /* We are at the basic block boundary when copying the variable description,
1213 so set the CUR_LOC to be the first element of the chain. */
1214 if (new_var->var_part[i].loc_chain)
1215 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1216 else
1217 new_var->var_part[i].cur_loc = NULL;
1220 dst_can_be_shared = false;
1221 if (shared_hash_shared (set->vars))
1222 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1223 else if (set->traversed_vars && set->vars != set->traversed_vars)
1224 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1225 *slot = new_var;
1226 return slot;
1229 /* Add a variable from *SLOT to hash table DATA and increase its reference
1230 count. */
1232 static int
1233 vars_copy_1 (void **slot, void *data)
1235 htab_t dst = (htab_t) data;
1236 variable src;
1237 void **dstp;
1239 src = (variable) *slot;
1240 src->refcount++;
1242 dstp = htab_find_slot_with_hash (dst, src->dv,
1243 dv_htab_hash (src->dv),
1244 INSERT);
1245 *dstp = src;
1247 /* Continue traversing the hash table. */
1248 return 1;
1251 /* Copy all variables from hash table SRC to hash table DST. */
1253 static void
1254 vars_copy (htab_t dst, htab_t src)
1256 htab_traverse_noresize (src, vars_copy_1, dst);
1259 /* Map a decl to its main debug decl. */
1261 static inline tree
1262 var_debug_decl (tree decl)
1264 if (decl && DECL_P (decl)
1265 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1266 && DECL_P (DECL_DEBUG_EXPR (decl)))
1267 decl = DECL_DEBUG_EXPR (decl);
1269 return decl;
1272 /* Set the register LOC to contain DV, OFFSET. */
1274 static void
1275 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1276 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1277 enum insert_option iopt)
1279 attrs node;
1280 bool decl_p = dv_is_decl_p (dv);
1282 if (decl_p)
1283 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1285 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1286 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1287 && node->offset == offset)
1288 break;
1289 if (!node)
1290 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1291 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1294 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1296 static void
1297 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1298 rtx set_src)
1300 tree decl = REG_EXPR (loc);
1301 HOST_WIDE_INT offset = REG_OFFSET (loc);
1303 var_reg_decl_set (set, loc, initialized,
1304 dv_from_decl (decl), offset, set_src, INSERT);
1307 static enum var_init_status
1308 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1310 variable var;
1311 int i;
1312 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1314 if (! flag_var_tracking_uninit)
1315 return VAR_INIT_STATUS_INITIALIZED;
1317 var = shared_hash_find (set->vars, dv);
1318 if (var)
1320 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1322 location_chain nextp;
1323 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1324 if (rtx_equal_p (nextp->loc, loc))
1326 ret_val = nextp->init;
1327 break;
1332 return ret_val;
1335 /* Delete current content of register LOC in dataflow set SET and set
1336 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1337 MODIFY is true, any other live copies of the same variable part are
1338 also deleted from the dataflow set, otherwise the variable part is
1339 assumed to be copied from another location holding the same
1340 part. */
1342 static void
1343 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1344 enum var_init_status initialized, rtx set_src)
1346 tree decl = REG_EXPR (loc);
1347 HOST_WIDE_INT offset = REG_OFFSET (loc);
1348 attrs node, next;
1349 attrs *nextp;
1351 decl = var_debug_decl (decl);
1353 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1354 initialized = get_init_value (set, loc, dv_from_decl (decl));
1356 nextp = &set->regs[REGNO (loc)];
1357 for (node = *nextp; node; node = next)
1359 next = node->next;
1360 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1362 delete_variable_part (set, node->loc, node->dv, node->offset);
1363 pool_free (attrs_pool, node);
1364 *nextp = next;
1366 else
1368 node->loc = loc;
1369 nextp = &node->next;
1372 if (modify)
1373 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1374 var_reg_set (set, loc, initialized, set_src);
1377 /* Delete the association of register LOC in dataflow set SET with any
1378 variables that aren't onepart. If CLOBBER is true, also delete any
1379 other live copies of the same variable part, and delete the
1380 association with onepart dvs too. */
1382 static void
1383 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1385 attrs *nextp = &set->regs[REGNO (loc)];
1386 attrs node, next;
1388 if (clobber)
1390 tree decl = REG_EXPR (loc);
1391 HOST_WIDE_INT offset = REG_OFFSET (loc);
1393 decl = var_debug_decl (decl);
1395 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1398 for (node = *nextp; node; node = next)
1400 next = node->next;
1401 if (clobber || !dv_onepart_p (node->dv))
1403 delete_variable_part (set, node->loc, node->dv, node->offset);
1404 pool_free (attrs_pool, node);
1405 *nextp = next;
1407 else
1408 nextp = &node->next;
1412 /* Delete content of register with number REGNO in dataflow set SET. */
1414 static void
1415 var_regno_delete (dataflow_set *set, int regno)
1417 attrs *reg = &set->regs[regno];
1418 attrs node, next;
1420 for (node = *reg; node; node = next)
1422 next = node->next;
1423 delete_variable_part (set, node->loc, node->dv, node->offset);
1424 pool_free (attrs_pool, node);
1426 *reg = NULL;
1429 /* Set the location of DV, OFFSET as the MEM LOC. */
1431 static void
1432 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1433 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1434 enum insert_option iopt)
1436 if (dv_is_decl_p (dv))
1437 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1439 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1442 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1443 SET to LOC.
1444 Adjust the address first if it is stack pointer based. */
1446 static void
1447 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1448 rtx set_src)
1450 tree decl = MEM_EXPR (loc);
1451 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1453 var_mem_decl_set (set, loc, initialized,
1454 dv_from_decl (decl), offset, set_src, INSERT);
1457 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1458 dataflow set SET to LOC. If MODIFY is true, any other live copies
1459 of the same variable part are also deleted from the dataflow set,
1460 otherwise the variable part is assumed to be copied from another
1461 location holding the same part.
1462 Adjust the address first if it is stack pointer based. */
1464 static void
1465 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1466 enum var_init_status initialized, rtx set_src)
1468 tree decl = MEM_EXPR (loc);
1469 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1471 decl = var_debug_decl (decl);
1473 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1474 initialized = get_init_value (set, loc, dv_from_decl (decl));
1476 if (modify)
1477 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1478 var_mem_set (set, loc, initialized, set_src);
1481 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1482 true, also delete any other live copies of the same variable part.
1483 Adjust the address first if it is stack pointer based. */
1485 static void
1486 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1488 tree decl = MEM_EXPR (loc);
1489 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1491 decl = var_debug_decl (decl);
1492 if (clobber)
1493 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1494 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1497 /* Bind a value to a location it was just stored in. If MODIFIED
1498 holds, assume the location was modified, detaching it from any
1499 values bound to it. */
1501 static void
1502 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1504 cselib_val *v = CSELIB_VAL_PTR (val);
1506 gcc_assert (cselib_preserved_value_p (v));
1508 if (dump_file)
1510 fprintf (dump_file, "%i: ", INSN_UID (insn));
1511 print_inline_rtx (dump_file, val, 0);
1512 fprintf (dump_file, " stored in ");
1513 print_inline_rtx (dump_file, loc, 0);
1514 if (v->locs)
1516 struct elt_loc_list *l;
1517 for (l = v->locs; l; l = l->next)
1519 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1520 print_inline_rtx (dump_file, l->loc, 0);
1523 fprintf (dump_file, "\n");
1526 if (REG_P (loc))
1528 if (modified)
1529 var_regno_delete (set, REGNO (loc));
1530 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1531 dv_from_value (val), 0, NULL_RTX, INSERT);
1533 else if (MEM_P (loc))
1534 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1535 dv_from_value (val), 0, NULL_RTX, INSERT);
1536 else
1537 set_variable_part (set, loc, dv_from_value (val), 0,
1538 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1541 /* Reset this node, detaching all its equivalences. */
1544 static void
1545 val_reset (dataflow_set *set, decl_or_value dv)
1547 variable var = shared_hash_find (set->vars, dv) ;
1548 location_chain node;
1549 rtx cval;
1551 if (!var || !var->n_var_parts)
1552 return;
1554 gcc_assert (var->n_var_parts == 1);
1556 cval = NULL;
1557 for (node = var->var_part[0].loc_chain; node; node = node->next)
1558 if (GET_CODE (node->loc) == VALUE
1559 && canon_value_cmp (node->loc, cval))
1560 cval = node->loc;
1562 for (node = var->var_part[0].loc_chain; node; node = node->next)
1563 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1565 /* Redirect the equivalence link to the new canonical
1566 value, or simply remove it if it would point at
1567 itself. */
1568 if (cval)
1569 set_variable_part (set, cval, dv_from_value (node->loc),
1570 0, node->init, node->set_src, NO_INSERT);
1571 delete_variable_part (set, dv_as_value (dv),
1572 dv_from_value (node->loc), 0);
1575 if (cval)
1577 decl_or_value cdv = dv_from_value (cval);
1579 /* Keep the remaining values connected, accumulating links
1580 in the canonical value. */
1581 for (node = var->var_part[0].loc_chain; node; node = node->next)
1583 if (node->loc == cval)
1584 continue;
1585 else if (GET_CODE (node->loc) == REG)
1586 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1587 node->set_src, NO_INSERT);
1588 else if (GET_CODE (node->loc) == MEM)
1589 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1590 node->set_src, NO_INSERT);
1591 else
1592 set_variable_part (set, node->loc, cdv, 0,
1593 node->init, node->set_src, NO_INSERT);
1597 /* We remove this last, to make sure that the canonical value is not
1598 removed to the point of requiring reinsertion. */
1599 if (cval)
1600 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1602 clobber_variable_part (set, NULL, dv, 0, NULL);
1604 /* ??? Should we make sure there aren't other available values or
1605 variables whose values involve this one other than by
1606 equivalence? E.g., at the very least we should reset MEMs, those
1607 shouldn't be too hard to find cselib-looking up the value as an
1608 address, then locating the resulting value in our own hash
1609 table. */
1612 /* Find the values in a given location and map the val to another
1613 value, if it is unique, or add the location as one holding the
1614 value. */
1616 static void
1617 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1619 decl_or_value dv = dv_from_value (val);
1621 if (dump_file && (dump_flags & TDF_DETAILS))
1623 if (insn)
1624 fprintf (dump_file, "%i: ", INSN_UID (insn));
1625 else
1626 fprintf (dump_file, "head: ");
1627 print_inline_rtx (dump_file, val, 0);
1628 fputs (" is at ", dump_file);
1629 print_inline_rtx (dump_file, loc, 0);
1630 fputc ('\n', dump_file);
1633 val_reset (set, dv);
1635 if (REG_P (loc))
1637 attrs node, found = NULL;
1639 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1640 if (dv_is_value_p (node->dv)
1641 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1643 found = node;
1645 /* Map incoming equivalences. ??? Wouldn't it be nice if
1646 we just started sharing the location lists? Maybe a
1647 circular list ending at the value itself or some
1648 such. */
1649 set_variable_part (set, dv_as_value (node->dv),
1650 dv_from_value (val), node->offset,
1651 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1652 set_variable_part (set, val, node->dv, node->offset,
1653 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1656 /* If we didn't find any equivalence, we need to remember that
1657 this value is held in the named register. */
1658 if (!found)
1659 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1660 dv_from_value (val), 0, NULL_RTX, INSERT);
1662 else if (MEM_P (loc))
1663 /* ??? Merge equivalent MEMs. */
1664 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1665 dv_from_value (val), 0, NULL_RTX, INSERT);
1666 else
1667 /* ??? Merge equivalent expressions. */
1668 set_variable_part (set, loc, dv_from_value (val), 0,
1669 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1672 /* Initialize dataflow set SET to be empty. */
1675 static void
1676 dataflow_set_init (dataflow_set *set)
1678 init_attrs_list_set (set->regs);
1679 set->vars = shared_hash_copy (empty_shared_hash);
1680 set->stack_adjust = 0;
1681 set->traversed_vars = NULL;
1684 /* Delete the contents of dataflow set SET. */
1686 static void
1687 dataflow_set_clear (dataflow_set *set)
1689 int i;
1691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1692 attrs_list_clear (&set->regs[i]);
1694 shared_hash_destroy (set->vars);
1695 set->vars = shared_hash_copy (empty_shared_hash);
1698 /* Copy the contents of dataflow set SRC to DST. */
1700 static void
1701 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1703 int i;
1705 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1706 attrs_list_copy (&dst->regs[i], src->regs[i]);
1708 shared_hash_destroy (dst->vars);
1709 dst->vars = shared_hash_copy (src->vars);
1710 dst->stack_adjust = src->stack_adjust;
1713 /* Information for merging lists of locations for a given offset of a variable. */
1715 struct variable_union_info
1717 /* Node of the location chain. */
1718 location_chain lc;
1720 /* The sum of positions in the input chains. */
1721 int pos;
1723 /* The position in the chain of DST dataflow set. */
1724 int pos_dst;
1727 /* Buffer for location list sorting and its allocated size. */
1728 static struct variable_union_info *vui_vec;
1729 static int vui_allocated;
1731 /* Comparison function for qsort, ordering the structures by the POS element. */
1733 static int
1734 variable_union_info_cmp_pos (const void *n1, const void *n2)
1736 const struct variable_union_info *const i1 =
1737 (const struct variable_union_info *) n1;
1738 const struct variable_union_info *const i2 =
1739 ( const struct variable_union_info *) n2;
1741 if (i1->pos != i2->pos)
1742 return i1->pos - i2->pos;
1744 return (i1->pos_dst - i2->pos_dst);
1747 /* Compute union of location parts of variable *SLOT and the same variable
1748 from the dataflow set DATA.  Compute "sorted" union of the location chains
1749 for common offsets, i.e. the locations of a variable part are sorted by
1750 a priority where the priority is the sum of the positions in the 2 chains
1751 (if a location is only in one list the position in the second list is
1752 defined to be larger than the length of the chains).
1753 When we are updating the location parts, the newest location is at the
1754 beginning of the chain, so when we do the described "sorted" union
1755 we keep the newest locations at the beginning. */
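/* A worked example of the priorities (illustration only): with DST chain
   (A, B) and SRC chain (B, C), src_l = dst_l = 2, so B (present in both)
   gets priority 1 + 0 = 1, A gets 0 + 4 and C gets 1 + 4, giving the union
   (B, A, C) with the common location first.  */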
1757 static int
1758 variable_union (void **slot, void *data)
1760 variable src, dst;
1761 void **dstp;
1762 dataflow_set *set = (dataflow_set *) data;
1763 int i, j, k;
1765 src = (variable) *slot;
1766 dstp = shared_hash_find_slot (set->vars, src->dv);
1767 if (!dstp || !*dstp)
1769 src->refcount++;
1771 dst_can_be_shared = false;
1772 if (!dstp)
1773 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1775 *dstp = src;
1777 /* If CUR_LOC of some variable part is not the first element of
1778 the location chain, we are going to change it, so we have to make
1779 a copy of the variable. */
1780 for (k = 0; k < src->n_var_parts; k++)
1782 gcc_assert (!src->var_part[k].loc_chain
1783 == !src->var_part[k].cur_loc);
1784 if (src->var_part[k].loc_chain)
1786 gcc_assert (src->var_part[k].cur_loc);
1787 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1788 break;
1791 if (k < src->n_var_parts)
1792 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1794 /* Continue traversing the hash table. */
1795 return 1;
1797 else
1798 dst = (variable) *dstp;
1800 gcc_assert (src->n_var_parts);
1802 /* We can combine one-part variables very efficiently, because their
1803 entries are in canonical order. */
1804 if (dv_onepart_p (src->dv))
1806 location_chain *nodep, dnode, snode;
1808 gcc_assert (src->n_var_parts == 1);
1809 gcc_assert (dst->n_var_parts == 1);
1811 snode = src->var_part[0].loc_chain;
1812 gcc_assert (snode);
1814 restart_onepart_unshared:
1815 nodep = &dst->var_part[0].loc_chain;
1816 dnode = *nodep;
1817 gcc_assert (dnode);
1819 while (snode)
1821 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1823 if (r > 0)
1825 location_chain nnode;
1827 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1829 dstp = unshare_variable (set, dstp, dst,
1830 VAR_INIT_STATUS_INITIALIZED);
1831 dst = (variable)*dstp;
1832 goto restart_onepart_unshared;
1835 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1836 nnode->loc = snode->loc;
1837 nnode->init = snode->init;
1838 if (!snode->set_src || MEM_P (snode->set_src))
1839 nnode->set_src = NULL;
1840 else
1841 nnode->set_src = snode->set_src;
1842 nnode->next = dnode;
1843 dnode = nnode;
1845 #ifdef ENABLE_CHECKING
1846 else if (r == 0)
1847 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1848 #endif
1850 if (r >= 0)
1851 snode = snode->next;
1853 nodep = &dnode->next;
1854 dnode = *nodep;
1857 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1859 return 1;
1862 /* Count the number of location parts, result is K. */
1863 for (i = 0, j = 0, k = 0;
1864 i < src->n_var_parts && j < dst->n_var_parts; k++)
1866 if (src->var_part[i].offset == dst->var_part[j].offset)
1868 i++;
1869 j++;
1871 else if (src->var_part[i].offset < dst->var_part[j].offset)
1872 i++;
1873 else
1874 j++;
1876 k += src->n_var_parts - i;
1877 k += dst->n_var_parts - j;
1879 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1880 and thus there are at most MAX_VAR_PARTS different offsets. */
1881 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1883 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1884 && dst->n_var_parts != k)
1886 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1887 dst = (variable)*dstp;
1890 i = src->n_var_parts - 1;
1891 j = dst->n_var_parts - 1;
1892 dst->n_var_parts = k;
1894 for (k--; k >= 0; k--)
1896 location_chain node, node2;
1898 if (i >= 0 && j >= 0
1899 && src->var_part[i].offset == dst->var_part[j].offset)
1901 /* Compute the "sorted" union of the chains, i.e. the locations which
1902 are in both chains go first, they are sorted by the sum of
1903 positions in the chains. */
1904 int dst_l, src_l;
1905 int ii, jj, n;
1906 struct variable_union_info *vui;
1908 /* If DST is shared, compare the location chains.
1909 If they are different, we will most likely modify the chain in DST,
1910 so make a copy of DST. */
1911 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1913 for (node = src->var_part[i].loc_chain,
1914 node2 = dst->var_part[j].loc_chain; node && node2;
1915 node = node->next, node2 = node2->next)
1917 if (!((REG_P (node2->loc)
1918 && REG_P (node->loc)
1919 && REGNO (node2->loc) == REGNO (node->loc))
1920 || rtx_equal_p (node2->loc, node->loc)))
1922 if (node2->init < node->init)
1923 node2->init = node->init;
1924 break;
1927 if (node || node2)
1929 dstp = unshare_variable (set, dstp, dst,
1930 VAR_INIT_STATUS_UNKNOWN);
1931 dst = (variable)*dstp;
1935 src_l = 0;
1936 for (node = src->var_part[i].loc_chain; node; node = node->next)
1937 src_l++;
1938 dst_l = 0;
1939 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1940 dst_l++;
1942 if (dst_l == 1)
1944 /* The most common case, much simpler, no qsort is needed. */
1945 location_chain dstnode = dst->var_part[j].loc_chain;
1946 dst->var_part[k].loc_chain = dstnode;
1947 dst->var_part[k].offset = dst->var_part[j].offset;
1948 node2 = dstnode;
1949 for (node = src->var_part[i].loc_chain; node; node = node->next)
1950 if (!((REG_P (dstnode->loc)
1951 && REG_P (node->loc)
1952 && REGNO (dstnode->loc) == REGNO (node->loc))
1953 || rtx_equal_p (dstnode->loc, node->loc)))
1955 location_chain new_node;
1957 /* Copy the location from SRC. */
1958 new_node = (location_chain) pool_alloc (loc_chain_pool);
1959 new_node->loc = node->loc;
1960 new_node->init = node->init;
1961 if (!node->set_src || MEM_P (node->set_src))
1962 new_node->set_src = NULL;
1963 else
1964 new_node->set_src = node->set_src;
1965 node2->next = new_node;
1966 node2 = new_node;
1968 node2->next = NULL;
1970 else
1972 if (src_l + dst_l > vui_allocated)
1974 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1975 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
1976 vui_allocated);
1978 vui = vui_vec;
1980 /* Fill in the locations from DST. */
1981 for (node = dst->var_part[j].loc_chain, jj = 0; node;
1982 node = node->next, jj++)
1984 vui[jj].lc = node;
1985 vui[jj].pos_dst = jj;
1987 /* Use a position larger than any sum of two valid positions. */
1988 vui[jj].pos = jj + src_l + dst_l;
1991 /* Fill in the locations from SRC. */
1992 n = dst_l;
1993 for (node = src->var_part[i].loc_chain, ii = 0; node;
1994 node = node->next, ii++)
1996 /* Find location from NODE. */
1997 for (jj = 0; jj < dst_l; jj++)
1999 if ((REG_P (vui[jj].lc->loc)
2000 && REG_P (node->loc)
2001 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2002 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2004 vui[jj].pos = jj + ii;
2005 break;
2008 if (jj >= dst_l) /* The location has not been found. */
2010 location_chain new_node;
2012 /* Copy the location from SRC. */
2013 new_node = (location_chain) pool_alloc (loc_chain_pool);
2014 new_node->loc = node->loc;
2015 new_node->init = node->init;
2016 if (!node->set_src || MEM_P (node->set_src))
2017 new_node->set_src = NULL;
2018 else
2019 new_node->set_src = node->set_src;
2020 vui[n].lc = new_node;
2021 vui[n].pos_dst = src_l + dst_l;
2022 vui[n].pos = ii + src_l + dst_l;
2023 n++;
2027 if (dst_l == 2)
2029 /* A special but still very common case. For dst_l == 2 all
2030 entries dst_l ... n-1 are already sorted, since for i >= dst_l
2031 vui[i].pos == i + src_l + dst_l. */
2032 if (vui[0].pos > vui[1].pos)
2034 /* Order should be 1, 0, 2... */
2035 dst->var_part[k].loc_chain = vui[1].lc;
2036 vui[1].lc->next = vui[0].lc;
2037 if (n >= 3)
2039 vui[0].lc->next = vui[2].lc;
2040 vui[n - 1].lc->next = NULL;
2042 else
2043 vui[0].lc->next = NULL;
2044 ii = 3;
2046 else
2048 dst->var_part[k].loc_chain = vui[0].lc;
2049 if (n >= 3 && vui[2].pos < vui[1].pos)
2051 /* Order should be 0, 2, 1, 3... */
2052 vui[0].lc->next = vui[2].lc;
2053 vui[2].lc->next = vui[1].lc;
2054 if (n >= 4)
2056 vui[1].lc->next = vui[3].lc;
2057 vui[n - 1].lc->next = NULL;
2059 else
2060 vui[1].lc->next = NULL;
2061 ii = 4;
2063 else
2065 /* Order should be 0, 1, 2... */
2066 ii = 1;
2067 vui[n - 1].lc->next = NULL;
2070 for (; ii < n; ii++)
2071 vui[ii - 1].lc->next = vui[ii].lc;
2073 else
2075 qsort (vui, n, sizeof (struct variable_union_info),
2076 variable_union_info_cmp_pos);
2078 /* Reconnect the nodes in sorted order. */
2079 for (ii = 1; ii < n; ii++)
2080 vui[ii - 1].lc->next = vui[ii].lc;
2081 vui[n - 1].lc->next = NULL;
2082 dst->var_part[k].loc_chain = vui[0].lc;
2085 dst->var_part[k].offset = dst->var_part[j].offset;
2087 i--;
2088 j--;
2090 else if ((i >= 0 && j >= 0
2091 && src->var_part[i].offset < dst->var_part[j].offset)
2092 || i < 0)
2094 dst->var_part[k] = dst->var_part[j];
2095 j--;
2097 else if ((i >= 0 && j >= 0
2098 && src->var_part[i].offset > dst->var_part[j].offset)
2099 || j < 0)
2101 location_chain *nextp;
2103 /* Copy the chain from SRC. */
2104 nextp = &dst->var_part[k].loc_chain;
2105 for (node = src->var_part[i].loc_chain; node; node = node->next)
2107 location_chain new_lc;
2109 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2110 new_lc->next = NULL;
2111 new_lc->init = node->init;
2112 if (!node->set_src || MEM_P (node->set_src))
2113 new_lc->set_src = NULL;
2114 else
2115 new_lc->set_src = node->set_src;
2116 new_lc->loc = node->loc;
2118 *nextp = new_lc;
2119 nextp = &new_lc->next;
2122 dst->var_part[k].offset = src->var_part[i].offset;
2123 i--;
2126 /* We are at the basic block boundary when computing union,
2127 so set the CUR_LOC to be the first element of the chain. */
2128 if (dst->var_part[k].loc_chain)
2129 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2130 else
2131 dst->var_part[k].cur_loc = NULL;
2134 if (flag_var_tracking_uninit)
2135 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2137 location_chain node, node2;
2138 for (node = src->var_part[i].loc_chain; node; node = node->next)
2139 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2140 if (rtx_equal_p (node->loc, node2->loc))
2142 if (node->init > node2->init)
2143 node2->init = node->init;
2147 /* Continue traversing the hash table. */
2148 return 1;
2151 /* Like variable_union, but only used when doing dataflow_set_union
2152 into an empty hashtab. To allow sharing, dst is initially shared
2153 with src (so all variables are "copied" from the src hashtab to
2154 the dst hashtab), and unshare_variable is needed only for the
2155 variables that require canonicalization. */
2157 static int
2158 variable_canonicalize (void **slot, void *data)
2160 variable src;
2161 dataflow_set *set = (dataflow_set *) data;
2162 int k;
2164 src = *(variable *) slot;
2166 /* If CUR_LOC of some variable part is not the first element of
2167 the location chain, we are going to change it, so we have to
2168 make a copy of the variable. */
2169 for (k = 0; k < src->n_var_parts; k++)
2171 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2172 if (src->var_part[k].loc_chain)
2174 gcc_assert (src->var_part[k].cur_loc);
2175 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2176 break;
2179 if (k < src->n_var_parts)
2180 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2181 return 1;
2184 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2186 static void
2187 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2189 int i;
2191 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2192 attrs_list_union (&dst->regs[i], src->regs[i]);
2194 if (dst->vars == empty_shared_hash)
2196 shared_hash_destroy (dst->vars);
2197 dst->vars = shared_hash_copy (src->vars);
2198 dst->traversed_vars = dst->vars;
2199 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2200 dst->traversed_vars = NULL;
2202 else
2203 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2206 /* Whether the value is currently being expanded. */
2207 #define VALUE_RECURSED_INTO(x) \
2208 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2209 /* Whether the value is in changed_variables hash table. */
2210 #define VALUE_CHANGED(x) \
2211 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2212 /* Whether the decl is in changed_variables hash table. */
2213 #define DECL_CHANGED(x) TREE_VISITED (x)
2215 /* Record that DV has been added to (NEWV true), resp. removed from
2216 (NEWV false), the changed_variables hash table. */
2218 static inline void
2219 set_dv_changed (decl_or_value dv, bool newv)
2221 if (dv_is_value_p (dv))
2222 VALUE_CHANGED (dv_as_value (dv)) = newv;
2223 else
2224 DECL_CHANGED (dv_as_decl (dv)) = newv;
2227 /* Return true if DV is present in changed_variables hash table. */
2229 static inline bool
2230 dv_changed_p (decl_or_value dv)
2232 return (dv_is_value_p (dv)
2233 ? VALUE_CHANGED (dv_as_value (dv))
2234 : DECL_CHANGED (dv_as_decl (dv)));
2237 /* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
2238 at the end of find_loc_in_1pdv. Not a static variable in find_loc_in_1pdv
2239 to avoid constant allocation/freeing of it. */
2240 static VEC(rtx, heap) *values_to_unmark;
2242 /* Helper function for find_loc_in_1pdv.
2243 Return a location list node whose loc is rtx_equal to LOC, in the
2244 location list of a one-part variable or value VAR, or in that of
2245 any values recursively mentioned in the location lists. */
2247 static location_chain
2248 find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
2250 location_chain node;
2252 if (!var)
2253 return NULL;
2255 gcc_assert (dv_onepart_p (var->dv));
2257 if (!var->n_var_parts)
2258 return NULL;
2260 gcc_assert (var->var_part[0].offset == 0);
2262 for (node = var->var_part[0].loc_chain; node; node = node->next)
2263 if (rtx_equal_p (loc, node->loc))
2264 return node;
2265 else if (GET_CODE (node->loc) == VALUE
2266 && !VALUE_RECURSED_INTO (node->loc))
2268 decl_or_value dv = dv_from_value (node->loc);
2269 variable var = (variable)
2270 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2272 if (var)
2274 location_chain where;
2275 VALUE_RECURSED_INTO (node->loc) = true;
2276 VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
2277 if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
2278 return where;
2282 return NULL;
2285 /* Return a location list node whose loc is rtx_equal to LOC, in the
2286 location list of a one-part variable or value VAR, or in that of
2287 any values recursively mentioned in the location lists. */
2289 static location_chain
2290 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2292 location_chain ret;
2293 unsigned int i;
2294 rtx value;
2296 ret = find_loc_in_1pdv_1 (loc, var, vars);
2297 for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
2298 VALUE_RECURSED_INTO (value) = false;
2299 VEC_truncate (rtx, values_to_unmark, 0);
2300 return ret;
2303 /* Hash table iteration argument passed to the variable_merge_over_* functions. */
2304 struct dfset_merge
2306 /* The set in which the merge is to be inserted. */
2307 dataflow_set *dst;
2308 /* The set that we're iterating in. */
2309 dataflow_set *cur;
2310 /* The set that may contain the other dv we are to merge with. */
2311 dataflow_set *src;
2312 /* Number of onepart dvs in src. */
2313 int src_onepart_cnt;
2316 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
2317 loc_cmp order, and it is maintained as such. */
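/* Note (added for clarity): if LOC is already present, only its
   initialization status is updated; MIN keeps the more conservative
   (less-initialized) of the two statuses.  */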
2319 static void
2320 insert_into_intersection (location_chain *nodep, rtx loc,
2321 enum var_init_status status)
2323 location_chain node;
2324 int r;
2326 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2327 if ((r = loc_cmp (node->loc, loc)) == 0)
2329 node->init = MIN (node->init, status);
2330 return;
2332 else if (r > 0)
2333 break;
2335 node = (location_chain) pool_alloc (loc_chain_pool);
2337 node->loc = loc;
2338 node->set_src = NULL;
2339 node->init = status;
2340 node->next = *nodep;
2341 *nodep = node;
2344 /* Insert in DEST the intersection of the locations present in both
2345 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2346 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2347 DSM->dst. */
2349 static void
2350 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2351 location_chain s1node, variable s2var)
2353 dataflow_set *s1set = dsm->cur;
2354 dataflow_set *s2set = dsm->src;
2355 location_chain found;
2357 for (; s1node; s1node = s1node->next)
2359 if (s1node->loc == val)
2360 continue;
2362 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2363 shared_hash_htab (s2set->vars))))
2365 insert_into_intersection (dest, s1node->loc,
2366 MIN (s1node->init, found->init));
2367 continue;
2370 if (GET_CODE (s1node->loc) == VALUE
2371 && !VALUE_RECURSED_INTO (s1node->loc))
2373 decl_or_value dv = dv_from_value (s1node->loc);
2374 variable svar = shared_hash_find (s1set->vars, dv);
2375 if (svar)
2377 if (svar->n_var_parts == 1)
2379 VALUE_RECURSED_INTO (s1node->loc) = true;
2380 intersect_loc_chains (val, dest, dsm,
2381 svar->var_part[0].loc_chain,
2382 s2var);
2383 VALUE_RECURSED_INTO (s1node->loc) = false;
2388 /* ??? if the location is equivalent to any location in src,
2389 searched recursively
2391 add to dst the values needed to represent the equivalence
2393 telling whether location S is equivalent to another dv's
2394 location list:
2396 for each location D in the list
2398 if S and D satisfy rtx_equal_p, then it is present
2400 else if D is a value, recurse without cycles
2402 else if S and D have the same CODE and MODE
2404 for each operand oS and the corresponding oD
2406 if oS and oD are not equivalent, then S and D are not equivalent
2408 else if they are RTX vectors
2410 if any vector oS element is not equivalent to its respective oD,
2411 then S and D are not equivalent
2419 /* Return -1 if X should be before Y in a location list for a 1-part
2420 variable, 1 if Y should be before X, and 0 if they're equivalent
2421 and should not appear in the list. */
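/* Roughly speaking (summary added for clarity): registers sort first
   (by REGNO), then MEMs (compared by their addresses), then VALUEs
   (by canon_value_cmp), and finally all other RTXes, ordered by code
   and then operand by operand.  */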
2423 static int
2424 loc_cmp (rtx x, rtx y)
2426 int i, j, r;
2427 RTX_CODE code = GET_CODE (x);
2428 const char *fmt;
2430 if (x == y)
2431 return 0;
2433 if (REG_P (x))
2435 if (!REG_P (y))
2436 return -1;
2437 gcc_assert (GET_MODE (x) == GET_MODE (y));
2438 if (REGNO (x) == REGNO (y))
2439 return 0;
2440 else if (REGNO (x) < REGNO (y))
2441 return -1;
2442 else
2443 return 1;
2446 if (REG_P (y))
2447 return 1;
2449 if (MEM_P (x))
2451 if (!MEM_P (y))
2452 return -1;
2453 gcc_assert (GET_MODE (x) == GET_MODE (y));
2454 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2457 if (MEM_P (y))
2458 return 1;
2460 if (GET_CODE (x) == VALUE)
2462 if (GET_CODE (y) != VALUE)
2463 return -1;
2464 gcc_assert (GET_MODE (x) == GET_MODE (y));
2465 if (canon_value_cmp (x, y))
2466 return -1;
2467 else
2468 return 1;
2471 if (GET_CODE (y) == VALUE)
2472 return 1;
2474 if (GET_CODE (x) == GET_CODE (y))
2475 /* Compare operands below. */;
2476 else if (GET_CODE (x) < GET_CODE (y))
2477 return -1;
2478 else
2479 return 1;
2481 gcc_assert (GET_MODE (x) == GET_MODE (y));
2483 fmt = GET_RTX_FORMAT (code);
2484 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2485 switch (fmt[i])
2487 case 'w':
2488 if (XWINT (x, i) == XWINT (y, i))
2489 break;
2490 else if (XWINT (x, i) < XWINT (y, i))
2491 return -1;
2492 else
2493 return 1;
2495 case 'n':
2496 case 'i':
2497 if (XINT (x, i) == XINT (y, i))
2498 break;
2499 else if (XINT (x, i) < XINT (y, i))
2500 return -1;
2501 else
2502 return 1;
2504 case 'V':
2505 case 'E':
2506 /* Compare the vector length first. */
2507 if (XVECLEN (x, i) == XVECLEN (y, i))
2508 /* Compare the vectors elements. */;
2509 else if (XVECLEN (x, i) < XVECLEN (y, i))
2510 return -1;
2511 else
2512 return 1;
2514 for (j = 0; j < XVECLEN (x, i); j++)
2515 if ((r = loc_cmp (XVECEXP (x, i, j),
2516 XVECEXP (y, i, j))))
2517 return r;
2518 break;
2520 case 'e':
2521 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2522 return r;
2523 break;
2525 case 'S':
2526 case 's':
2527 if (XSTR (x, i) == XSTR (y, i))
2528 break;
2529 if (!XSTR (x, i))
2530 return -1;
2531 if (!XSTR (y, i))
2532 return 1;
2533 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2534 break;
2535 else if (r < 0)
2536 return -1;
2537 else
2538 return 1;
2540 case 'u':
2541 /* These are just backpointers, so they don't matter. */
2542 break;
2544 case '0':
2545 case 't':
2546 break;
2548 /* It is believed that rtx's at this level will never
2549 contain anything but integers and other rtx's,
2550 except for within LABEL_REFs and SYMBOL_REFs. */
2551 default:
2552 gcc_unreachable ();
2555 return 0;
2558 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2559 from VALUE to DVP. */
2561 static int
2562 add_value_chain (rtx *loc, void *dvp)
2564 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2566 decl_or_value dv = (decl_or_value) dvp;
2567 decl_or_value ldv = dv_from_value (*loc);
2568 value_chain vc, nvc;
2569 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2570 dv_htab_hash (ldv), INSERT);
2571 if (!*slot)
2573 vc = (value_chain) pool_alloc (value_chain_pool);
2574 vc->dv = ldv;
2575 vc->next = NULL;
2576 vc->refcount = 0;
2577 *slot = (void *) vc;
2579 else
2581 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2582 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2583 break;
2584 if (vc)
2586 vc->refcount++;
2587 return 0;
2590 vc = (value_chain) *slot;
2591 nvc = (value_chain) pool_alloc (value_chain_pool);
2592 nvc->dv = dv;
2593 nvc->next = vc->next;
2594 nvc->refcount = 1;
2595 vc->next = nvc;
2597 return 0;
2600 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2601 from those VALUEs to DVP. */
2603 static void
2604 add_value_chains (decl_or_value dv, rtx loc)
2606 if (GET_CODE (loc) == VALUE)
2608 add_value_chain (&loc, dv_as_opaque (dv));
2609 return;
2611 if (REG_P (loc))
2612 return;
2613 if (MEM_P (loc))
2614 loc = XEXP (loc, 0);
2615 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2618 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2619 VALUEs to DV. */
2621 static void
2622 add_cselib_value_chains (decl_or_value dv)
2624 struct elt_loc_list *l;
2626 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2627 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2630 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2631 from VALUE to DVP. */
2633 static int
2634 remove_value_chain (rtx *loc, void *dvp)
2636 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2638 decl_or_value dv = (decl_or_value) dvp;
2639 decl_or_value ldv = dv_from_value (*loc);
2640 value_chain vc, dvc = NULL;
2641 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2642 dv_htab_hash (ldv), NO_INSERT);
2643 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2644 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2646 dvc = vc->next;
2647 gcc_assert (dvc->refcount > 0);
2648 if (--dvc->refcount == 0)
2650 vc->next = dvc->next;
2651 pool_free (value_chain_pool, dvc);
2652 if (vc->next == NULL && vc == (value_chain) *slot)
2654 pool_free (value_chain_pool, vc);
2655 htab_clear_slot (value_chains, slot);
2658 return 0;
2660 gcc_unreachable ();
2662 return 0;
2665 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2666 from those VALUEs to DVP. */
2668 static void
2669 remove_value_chains (decl_or_value dv, rtx loc)
2671 if (GET_CODE (loc) == VALUE)
2673 remove_value_chain (&loc, dv_as_opaque (dv));
2674 return;
2676 if (REG_P (loc))
2677 return;
2678 if (MEM_P (loc))
2679 loc = XEXP (loc, 0);
2680 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2683 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
2684 VALUEs to DV. */
2686 static void
2687 remove_cselib_value_chains (decl_or_value dv)
2689 struct elt_loc_list *l;
2691 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2692 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2695 #if ENABLE_CHECKING
2696 /* Check the order of entries in one-part variables. */
2698 static int
2699 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2701 variable var = (variable) *slot;
2702 decl_or_value dv = var->dv;
2703 location_chain node, next;
2705 if (!dv_onepart_p (dv))
2706 return 1;
2708 gcc_assert (var->n_var_parts == 1);
2709 node = var->var_part[0].loc_chain;
2710 gcc_assert (node);
2712 while ((next = node->next))
2714 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2715 node = next;
2718 return 1;
2720 #endif
2722 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2723 more likely to be chosen as canonical for an equivalence set.
2724 Ensure less likely values can reach more likely neighbors, making
2725 the connections bidirectional. */
2727 static int
2728 canonicalize_values_mark (void **slot, void *data)
2730 dataflow_set *set = (dataflow_set *)data;
2731 variable var = (variable) *slot;
2732 decl_or_value dv = var->dv;
2733 rtx val;
2734 location_chain node;
2736 if (!dv_is_value_p (dv))
2737 return 1;
2739 gcc_assert (var->n_var_parts == 1);
2741 val = dv_as_value (dv);
2743 for (node = var->var_part[0].loc_chain; node; node = node->next)
2744 if (GET_CODE (node->loc) == VALUE)
2746 if (canon_value_cmp (node->loc, val))
2747 VALUE_RECURSED_INTO (val) = true;
2748 else
2750 decl_or_value odv = dv_from_value (node->loc);
2751 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2753 oslot = set_slot_part (set, val, oslot, odv, 0,
2754 node->init, NULL_RTX);
2756 VALUE_RECURSED_INTO (node->loc) = true;
2760 return 1;
2763 /* Remove redundant entries from equivalence lists in onepart
2764 variables, canonicalizing equivalence sets into star shapes. */
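/* For illustration (note added for clarity, not from the original
   sources): if V1's list says V1 == V2 and V2's list says V2 == V3,
   with V3 the most canonical of the three, the "star" shape after
   canonicalization has V1 and V2 each pointing directly at V3, and V3
   accumulating the non-VALUE locations, instead of a chain
   V1 -> V2 -> V3.  */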
2766 static int
2767 canonicalize_values_star (void **slot, void *data)
2769 dataflow_set *set = (dataflow_set *)data;
2770 variable var = (variable) *slot;
2771 decl_or_value dv = var->dv;
2772 location_chain node;
2773 decl_or_value cdv;
2774 rtx val, cval;
2775 void **cslot;
2776 bool has_value;
2777 bool has_marks;
2779 if (!dv_onepart_p (dv))
2780 return 1;
2782 gcc_assert (var->n_var_parts == 1);
2784 if (dv_is_value_p (dv))
2786 cval = dv_as_value (dv);
2787 if (!VALUE_RECURSED_INTO (cval))
2788 return 1;
2789 VALUE_RECURSED_INTO (cval) = false;
2791 else
2792 cval = NULL_RTX;
2794 restart:
2795 val = cval;
2796 has_value = false;
2797 has_marks = false;
2799 gcc_assert (var->n_var_parts == 1);
2801 for (node = var->var_part[0].loc_chain; node; node = node->next)
2802 if (GET_CODE (node->loc) == VALUE)
2804 has_value = true;
2805 if (VALUE_RECURSED_INTO (node->loc))
2806 has_marks = true;
2807 if (canon_value_cmp (node->loc, cval))
2808 cval = node->loc;
2811 if (!has_value)
2812 return 1;
2814 if (cval == val)
2816 if (!has_marks || dv_is_decl_p (dv))
2817 return 1;
2819 /* Keep it marked so that we revisit it, either after visiting a
2820 child node, or after visiting a new parent that might be
2821 discovered later. */
2822 VALUE_RECURSED_INTO (val) = true;
2824 for (node = var->var_part[0].loc_chain; node; node = node->next)
2825 if (GET_CODE (node->loc) == VALUE
2826 && VALUE_RECURSED_INTO (node->loc))
2828 cval = node->loc;
2829 restart_with_cval:
2830 VALUE_RECURSED_INTO (cval) = false;
2831 dv = dv_from_value (cval);
2832 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2833 if (!slot)
2835 gcc_assert (dv_is_decl_p (var->dv));
2836 /* The canonical value was reset and dropped.
2837 Remove it. */
2838 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2839 return 1;
2841 var = (variable)*slot;
2842 gcc_assert (dv_is_value_p (var->dv));
2843 if (var->n_var_parts == 0)
2844 return 1;
2845 gcc_assert (var->n_var_parts == 1);
2846 goto restart;
2849 VALUE_RECURSED_INTO (val) = false;
2851 return 1;
2854 /* Push values to the canonical one. */
2855 cdv = dv_from_value (cval);
2856 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2858 for (node = var->var_part[0].loc_chain; node; node = node->next)
2859 if (node->loc != cval)
2861 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2862 node->init, NULL_RTX);
2863 if (GET_CODE (node->loc) == VALUE)
2865 decl_or_value ndv = dv_from_value (node->loc);
2867 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2868 NO_INSERT);
2870 if (canon_value_cmp (node->loc, val))
2872 /* If it could have been a local minimum, it's not any more,
2873 since it is now a neighbor of cval, so it may have to push
2874 to it. Conversely, if it wouldn't have prevailed over
2875 val, then whatever mark it has is fine: if it was to
2876 push, it will now push to a more canonical node, but if
2877 it wasn't, then it has already pushed any values it might
2878 have to. */
2879 VALUE_RECURSED_INTO (node->loc) = true;
2880 /* Make sure we visit node->loc by ensuring that cval is
2881 visited too. */
2882 VALUE_RECURSED_INTO (cval) = true;
2884 else if (!VALUE_RECURSED_INTO (node->loc))
2885 /* If we have no need to "recurse" into this node, it's
2886 already "canonicalized", so drop the link to the old
2887 parent. */
2888 clobber_variable_part (set, cval, ndv, 0, NULL);
2890 else if (GET_CODE (node->loc) == REG)
2892 attrs list = set->regs[REGNO (node->loc)], *listp;
2894 /* Change an existing attribute referring to dv so that it
2895 refers to cdv, removing any duplicate this might
2896 introduce, and checking that no previous duplicates
2897 existed, all in a single pass. */
2899 while (list)
2901 if (list->offset == 0
2902 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2903 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2904 break;
2906 list = list->next;
2909 gcc_assert (list);
2910 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2912 list->dv = cdv;
2913 for (listp = &list->next; (list = *listp); listp = &list->next)
2915 if (list->offset)
2916 continue;
2918 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2920 *listp = list->next;
2921 pool_free (attrs_pool, list);
2922 list = *listp;
2923 break;
2926 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2929 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2931 for (listp = &list->next; (list = *listp); listp = &list->next)
2933 if (list->offset)
2934 continue;
2936 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2938 *listp = list->next;
2939 pool_free (attrs_pool, list);
2940 list = *listp;
2941 break;
2944 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2947 else
2948 gcc_unreachable ();
2950 #if ENABLE_CHECKING
2951 while (list)
2953 if (list->offset == 0
2954 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2955 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2956 gcc_unreachable ();
2958 list = list->next;
2960 #endif
2964 if (val)
2965 cslot = set_slot_part (set, val, cslot, cdv, 0,
2966 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2968 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2970 /* Variable may have been unshared. */
2971 var = (variable)*slot;
2972 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2973 && var->var_part[0].loc_chain->next == NULL);
2975 if (VALUE_RECURSED_INTO (cval))
2976 goto restart_with_cval;
2978 return 1;
2981 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2982 corresponding entry in DSM->src. Multi-part variables are combined
2983 with variable_union, whereas onepart dvs are combined with
2984 intersection. */
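/* Note added for clarity: for a one-part dv, the intersection of the
   location chains from DSM->cur and DSM->src is computed with
   intersect_loc_chains, and the resulting variable is shared with
   DSM->src or DSM->cur when it turns out to be identical to theirs.  */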
2986 static int
2987 variable_merge_over_cur (void **s1slot, void *data)
2989 struct dfset_merge *dsm = (struct dfset_merge *)data;
2990 dataflow_set *dst = dsm->dst;
2991 void **dstslot;
2992 variable s1var = (variable) *s1slot;
2993 variable s2var, dvar = NULL;
2994 decl_or_value dv = s1var->dv;
2995 bool onepart = dv_onepart_p (dv);
2996 rtx val;
2997 hashval_t dvhash;
2998 location_chain node, *nodep;
3000 /* If the incoming onepart variable has an empty location list, then
3001 the intersection will be just as empty. For other variables,
3002 it's always union. */
3003 gcc_assert (s1var->n_var_parts);
3004 gcc_assert (s1var->var_part[0].loc_chain);
3006 if (!onepart)
3007 return variable_union (s1slot, dst);
3009 gcc_assert (s1var->n_var_parts == 1);
3010 gcc_assert (s1var->var_part[0].offset == 0);
3012 dvhash = dv_htab_hash (dv);
3013 if (dv_is_value_p (dv))
3014 val = dv_as_value (dv);
3015 else
3016 val = NULL;
3018 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3019 if (!s2var)
3021 dst_can_be_shared = false;
3022 return 1;
3025 dsm->src_onepart_cnt--;
3026 gcc_assert (s2var->var_part[0].loc_chain);
3027 gcc_assert (s2var->n_var_parts == 1);
3028 gcc_assert (s2var->var_part[0].offset == 0);
3030 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3031 if (dstslot)
3033 dvar = (variable)*dstslot;
3034 gcc_assert (dvar->refcount == 1);
3035 gcc_assert (dvar->n_var_parts == 1);
3036 gcc_assert (dvar->var_part[0].offset == 0);
3037 nodep = &dvar->var_part[0].loc_chain;
3039 else
3041 nodep = &node;
3042 node = NULL;
3045 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3047 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3048 dvhash, INSERT);
3049 *dstslot = dvar = s2var;
3050 dvar->refcount++;
3052 else
3054 dst_can_be_shared = false;
3056 intersect_loc_chains (val, nodep, dsm,
3057 s1var->var_part[0].loc_chain, s2var);
3059 if (!dstslot)
3061 if (node)
3063 dvar = (variable) pool_alloc (dv_pool (dv));
3064 dvar->dv = dv;
3065 dvar->refcount = 1;
3066 dvar->n_var_parts = 1;
3067 dvar->var_part[0].offset = 0;
3068 dvar->var_part[0].loc_chain = node;
3069 dvar->var_part[0].cur_loc = node->loc;
3071 dstslot
3072 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3073 INSERT);
3074 gcc_assert (!*dstslot);
3075 *dstslot = dvar;
3077 else
3078 return 1;
3082 nodep = &dvar->var_part[0].loc_chain;
3083 while ((node = *nodep))
3085 location_chain *nextp = &node->next;
3087 if (GET_CODE (node->loc) == REG)
3089 attrs list;
3091 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3092 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3093 && dv_is_value_p (list->dv))
3094 break;
3096 if (!list)
3097 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3098 dv, 0, node->loc);
3099 /* If this value became canonical for another value that had
3100 this register, we want to leave it alone. */
3101 else if (dv_as_value (list->dv) != val)
3103 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3104 dstslot, dv, 0,
3105 node->init, NULL_RTX);
3106 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3108 /* Since nextp points into the removed node, we can't
3109 use it. The pointer to the next node moved to nodep.
3110 However, if the variable we're walking is unshared
3111 during our walk, we'll keep walking the location list
3112 of the previously-shared variable, in which case the
3113 node won't have been removed, and we'll want to skip
3114 it. That's why we test *nodep here. */
3115 if (*nodep != node)
3116 nextp = nodep;
3119 else
3120 /* Canonicalization puts registers first, so we don't have to
3121 walk it all. */
3122 break;
3123 nodep = nextp;
3126 if (dvar != (variable)*dstslot)
3127 dvar = (variable)*dstslot;
3128 nodep = &dvar->var_part[0].loc_chain;
3130 if (val)
3132 /* Mark all referenced nodes for canonicalization, and make sure
3133 we have mutual equivalence links. */
3134 VALUE_RECURSED_INTO (val) = true;
3135 for (node = *nodep; node; node = node->next)
3136 if (GET_CODE (node->loc) == VALUE)
3138 VALUE_RECURSED_INTO (node->loc) = true;
3139 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3140 node->init, NULL, INSERT);
3143 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3144 gcc_assert (*dstslot == dvar);
3145 canonicalize_values_star (dstslot, dst);
3146 #ifdef ENABLE_CHECKING
3147 gcc_assert (dstslot
3148 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3149 #endif
3150 dvar = (variable)*dstslot;
3152 else
3154 bool has_value = false, has_other = false;
3156 /* If we have one value and anything else, we're going to
3157 canonicalize this, so make sure all values have an entry in
3158 the table and are marked for canonicalization. */
3159 for (node = *nodep; node; node = node->next)
3161 if (GET_CODE (node->loc) == VALUE)
3163 /* If this was marked during register canonicalization,
3164 we know we have to canonicalize values. */
3165 if (has_value)
3166 has_other = true;
3167 has_value = true;
3168 if (has_other)
3169 break;
3171 else
3173 has_other = true;
3174 if (has_value)
3175 break;
3179 if (has_value && has_other)
3181 for (node = *nodep; node; node = node->next)
3183 if (GET_CODE (node->loc) == VALUE)
3185 decl_or_value dv = dv_from_value (node->loc);
3186 void **slot = NULL;
3188 if (shared_hash_shared (dst->vars))
3189 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3190 if (!slot)
3191 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3192 INSERT);
3193 if (!*slot)
3195 variable var = (variable) pool_alloc (dv_pool (dv));
3196 var->dv = dv;
3197 var->refcount = 1;
3198 var->n_var_parts = 1;
3199 var->var_part[0].offset = 0;
3200 var->var_part[0].loc_chain = NULL;
3201 var->var_part[0].cur_loc = NULL;
3202 *slot = var;
3205 VALUE_RECURSED_INTO (node->loc) = true;
3209 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3210 gcc_assert (*dstslot == dvar);
3211 canonicalize_values_star (dstslot, dst);
3212 #ifdef ENABLE_CHECKING
3213 gcc_assert (dstslot
3214 == shared_hash_find_slot_noinsert_1 (dst->vars,
3215 dv, dvhash));
3216 #endif
3217 dvar = (variable)*dstslot;
3221 if (!onepart_variable_different_p (dvar, s2var))
3223 variable_htab_free (dvar);
3224 *dstslot = dvar = s2var;
3225 dvar->refcount++;
3227 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3229 variable_htab_free (dvar);
3230 *dstslot = dvar = s1var;
3231 dvar->refcount++;
3232 dst_can_be_shared = false;
3234 else
3236 if (dvar->refcount == 1)
3237 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3238 dst_can_be_shared = false;
3241 return 1;
3244 /* Combine variable in *S2SLOT (in DSM->src) with the corresponding
3245 entry in DSM->dst. Only multi-part variables are combined, using
3246 variable_union. onepart dvs were already combined with
3247 intersection in variable_merge_over_cur(). */
3249 static int
3250 variable_merge_over_src (void **s2slot, void *data)
3252 struct dfset_merge *dsm = (struct dfset_merge *)data;
3253 dataflow_set *dst = dsm->dst;
3254 variable s2var = (variable) *s2slot;
3255 decl_or_value dv = s2var->dv;
3256 bool onepart = dv_onepart_p (dv);
3258 if (!onepart)
3260 void **dstp = shared_hash_find_slot (dst->vars, dv);
3261 *dstp = s2var;
3262 s2var->refcount++;
3263 return variable_canonicalize (dstp, dst);
3266 dsm->src_onepart_cnt++;
3267 return 1;
3270 /* Combine dataflow set information from SRC into DST. */
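/* Summary added for clarity: the old contents of DST are saved as SRC2
   and DST is rebuilt from scratch; register attributes are merged with
   attrs_list_mpdv_union, multi-part variables end up as the union of
   SRC and SRC2 (variable_merge_over_src / variable_union), and one-part
   dvs as the intersection of the two (variable_merge_over_cur).  */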
3273 static void
3274 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
3276 dataflow_set src2 = *dst;
3277 struct dfset_merge dsm;
3278 int i;
3279 size_t src_elems, dst_elems;
3281 src_elems = htab_elements (shared_hash_htab (src->vars));
3282 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3283 dataflow_set_init (dst);
3284 dst->stack_adjust = src2.stack_adjust;
3285 shared_hash_destroy (dst->vars);
3286 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3287 dst->vars->refcount = 1;
3288 dst->vars->htab
3289 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3290 variable_htab_eq, variable_htab_free);
3292 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3293 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3295 dsm.dst = dst;
3296 dsm.src = &src2;
3297 dsm.cur = src;
3298 dsm.src_onepart_cnt = 0;
3300 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3301 &dsm);
3302 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3303 &dsm);
3305 if (dsm.src_onepart_cnt)
3306 dst_can_be_shared = false;
3308 dataflow_set_destroy (&src2);
3311 /* Mark register equivalences. */
3313 static void
3314 dataflow_set_equiv_regs (dataflow_set *set)
3316 int i;
3317 attrs list, *listp;
3319 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3321 rtx canon[NUM_MACHINE_MODES];
3323 memset (canon, 0, sizeof (canon));
3325 for (list = set->regs[i]; list; list = list->next)
3326 if (list->offset == 0 && dv_is_value_p (list->dv))
3328 rtx val = dv_as_value (list->dv);
3329 rtx *cvalp = &canon[(int)GET_MODE (val)];
3330 rtx cval = *cvalp;
3332 if (canon_value_cmp (val, cval))
3333 *cvalp = val;
3336 for (list = set->regs[i]; list; list = list->next)
3337 if (list->offset == 0 && dv_onepart_p (list->dv))
3339 rtx cval = canon[(int)GET_MODE (list->loc)];
3341 if (!cval)
3342 continue;
3344 if (dv_is_value_p (list->dv))
3346 rtx val = dv_as_value (list->dv);
3348 if (val == cval)
3349 continue;
3351 VALUE_RECURSED_INTO (val) = true;
3352 set_variable_part (set, val, dv_from_value (cval), 0,
3353 VAR_INIT_STATUS_INITIALIZED,
3354 NULL, NO_INSERT);
3357 VALUE_RECURSED_INTO (cval) = true;
3358 set_variable_part (set, cval, list->dv, 0,
3359 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3362 for (listp = &set->regs[i]; (list = *listp);
3363 listp = list ? &list->next : listp)
3364 if (list->offset == 0 && dv_onepart_p (list->dv))
3366 rtx cval = canon[(int)GET_MODE (list->loc)];
3367 void **slot;
3369 if (!cval)
3370 continue;
3372 if (dv_is_value_p (list->dv))
3374 rtx val = dv_as_value (list->dv);
3375 if (!VALUE_RECURSED_INTO (val))
3376 continue;
3379 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3380 canonicalize_values_star (slot, set);
3381 if (*listp != list)
3382 list = NULL;
3387 /* Remove any redundant values in the location list of VAR, which must
3388 be unshared and 1-part. */
3390 static void
3391 remove_duplicate_values (variable var)
3393 location_chain node, *nodep;
3395 gcc_assert (dv_onepart_p (var->dv));
3396 gcc_assert (var->n_var_parts == 1);
3397 gcc_assert (var->refcount == 1);
3399 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3401 if (GET_CODE (node->loc) == VALUE)
3403 if (VALUE_RECURSED_INTO (node->loc))
3405 /* Remove duplicate value node. */
3406 *nodep = node->next;
3407 pool_free (loc_chain_pool, node);
3408 continue;
3410 else
3411 VALUE_RECURSED_INTO (node->loc) = true;
3413 nodep = &node->next;
3416 for (node = var->var_part[0].loc_chain; node; node = node->next)
3417 if (GET_CODE (node->loc) == VALUE)
3419 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3420 VALUE_RECURSED_INTO (node->loc) = false;
3425 /* Hash table iteration argument passed to the variable_post_merge_* functions. */
3426 struct dfset_post_merge
3428 /* The new input set for the current block. */
3429 dataflow_set *set;
3430 /* Pointer to the permanent input set for the current block, or
3431 NULL. */
3432 dataflow_set **permp;
3435 /* Create values for incoming expressions associated with one-part
3436 variables that don't have value numbers for them. */
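/* In other words (note added for clarity): when a tracked decl's
   location list mentions a register that has no VALUE bound to it in
   SET, a fresh cselib VALUE is created for that register, preserved,
   and recorded in the permanent set (*dfpm->permp), so that the same
   incoming register is given the same VALUE on every pass.  */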
3438 static int
3439 variable_post_merge_new_vals (void **slot, void *info)
3441 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3442 dataflow_set *set = dfpm->set;
3443 variable var = (variable)*slot;
3444 location_chain node;
3446 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3447 return 1;
3449 gcc_assert (var->n_var_parts == 1);
3451 if (dv_is_decl_p (var->dv))
3453 bool check_dupes = false;
3455 restart:
3456 for (node = var->var_part[0].loc_chain; node; node = node->next)
3458 if (GET_CODE (node->loc) == VALUE)
3459 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3460 else if (GET_CODE (node->loc) == REG)
3462 attrs att, *attp, *curp = NULL;
3464 if (var->refcount != 1)
3466 slot = unshare_variable (set, slot, var,
3467 VAR_INIT_STATUS_INITIALIZED);
3468 var = (variable)*slot;
3469 goto restart;
3472 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3473 attp = &att->next)
3474 if (att->offset == 0
3475 && GET_MODE (att->loc) == GET_MODE (node->loc))
3477 if (dv_is_value_p (att->dv))
3479 rtx cval = dv_as_value (att->dv);
3480 node->loc = cval;
3481 check_dupes = true;
3482 break;
3484 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3485 curp = attp;
3488 if (!curp)
3490 curp = attp;
3491 while (*curp)
3492 if ((*curp)->offset == 0
3493 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3494 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3495 break;
3496 else
3497 curp = &(*curp)->next;
3498 gcc_assert (*curp);
3501 if (!att)
3503 decl_or_value cdv;
3504 rtx cval;
3506 if (!*dfpm->permp)
3508 *dfpm->permp = XNEW (dataflow_set);
3509 dataflow_set_init (*dfpm->permp);
3512 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3513 att; att = att->next)
3514 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3516 gcc_assert (att->offset == 0);
3517 gcc_assert (dv_is_value_p (att->dv));
3518 val_reset (set, att->dv);
3519 break;
3522 if (att)
3524 cdv = att->dv;
3525 cval = dv_as_value (cdv);
3527 else
3529 /* Create a unique value to hold this register,
3530 that ought to be found and reused in
3531 subsequent rounds. */
3532 cselib_val *v;
3533 gcc_assert (!cselib_lookup (node->loc,
3534 GET_MODE (node->loc), 0));
3535 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3536 cselib_preserve_value (v);
3537 cselib_invalidate_rtx (node->loc);
3538 cval = v->val_rtx;
3539 cdv = dv_from_value (cval);
3540 if (dump_file)
3541 fprintf (dump_file,
3542 "Created new value %u:%u for reg %i\n",
3543 v->uid, v->hash, REGNO (node->loc));
3546 var_reg_decl_set (*dfpm->permp, node->loc,
3547 VAR_INIT_STATUS_INITIALIZED,
3548 cdv, 0, NULL, INSERT);
3550 node->loc = cval;
3551 check_dupes = true;
3554 /* Remove the attribute referring to the decl, which now
3555 uses the value for the register, whether already existing
3556 or to be added when we bring perm in. */
3557 att = *curp;
3558 *curp = att->next;
3559 pool_free (attrs_pool, att);
3563 if (check_dupes)
3564 remove_duplicate_values (var);
3567 return 1;
3570 /* Reset values in the permanent set that are not associated with the
3571 chosen expression. */
3573 static int
3574 variable_post_merge_perm_vals (void **pslot, void *info)
3576 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3577 dataflow_set *set = dfpm->set;
3578 variable pvar = (variable)*pslot, var;
3579 location_chain pnode;
3580 decl_or_value dv;
3581 attrs att;
3583 gcc_assert (dv_is_value_p (pvar->dv));
3584 gcc_assert (pvar->n_var_parts == 1);
3585 pnode = pvar->var_part[0].loc_chain;
3586 gcc_assert (pnode);
3587 gcc_assert (!pnode->next);
3588 gcc_assert (REG_P (pnode->loc));
3590 dv = pvar->dv;
3592 var = shared_hash_find (set->vars, dv);
3593 if (var)
3595 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3596 return 1;
3597 val_reset (set, dv);
3600 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3601 if (att->offset == 0
3602 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3603 && dv_is_value_p (att->dv))
3604 break;
3606 /* If there is a value associated with this register already, create
3607 an equivalence. */
3608 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3610 rtx cval = dv_as_value (att->dv);
3611 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3612 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3613 NULL, INSERT);
3615 else if (!att)
3617 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3618 dv, 0, pnode->loc);
3619 variable_union (pslot, set);
3622 return 1;
3625 /* Adjust SET after a merge: create values where needed, merge in values
3626 from the permanent set *PERMP, and canonicalize the equivalences. */
3628 static void
3629 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3631 struct dfset_post_merge dfpm;
3633 dfpm.set = set;
3634 dfpm.permp = permp;
3636 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3637 &dfpm);
3638 if (*permp)
3639 htab_traverse (shared_hash_htab ((*permp)->vars),
3640 variable_post_merge_perm_vals, &dfpm);
3641 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3644 /* Return a node whose loc is a MEM that refers to EXPR in the
3645 location list of a one-part variable or value VAR, or in that of
3646 any values recursively mentioned in the location lists. */
3648 static location_chain
3649 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3651 location_chain node;
3652 decl_or_value dv;
3653 variable var;
3654 location_chain where = NULL;
3656 if (!val)
3657 return NULL;
3659 gcc_assert (GET_CODE (val) == VALUE);
3661 gcc_assert (!VALUE_RECURSED_INTO (val));
3663 dv = dv_from_value (val);
3664 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3666 if (!var)
3667 return NULL;
3669 gcc_assert (dv_onepart_p (var->dv));
3671 if (!var->n_var_parts)
3672 return NULL;
3674 gcc_assert (var->var_part[0].offset == 0);
3676 VALUE_RECURSED_INTO (val) = true;
3678 for (node = var->var_part[0].loc_chain; node; node = node->next)
3679 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3680 && MEM_OFFSET (node->loc) == 0)
3682 where = node;
3683 break;
3685 else if (GET_CODE (node->loc) == VALUE
3686 && !VALUE_RECURSED_INTO (node->loc)
3687 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3688 break;
3690 VALUE_RECURSED_INTO (val) = false;
3692 return where;
3695 /* Return TRUE if the value of MEM may vary across a call. */
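/* Put differently (note added for clarity): the MEM is assumed to
   survive a call only when its base decl is known, is not aliased,
   and is either read-only or not a global variable.  */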
3697 static bool
3698 mem_dies_at_call (rtx mem)
3700 tree expr = MEM_EXPR (mem);
3701 tree decl;
3703 if (!expr)
3704 return true;
3706 decl = get_base_address (expr);
3708 if (!decl)
3709 return true;
3711 if (!DECL_P (decl))
3712 return true;
3714 return (may_be_aliased (decl)
3715 || (!TREE_READONLY (decl) && is_global_var (decl)));
3718 /* Remove all MEMs from the location list of a hash table entry for a
3719 one-part variable, except those whose MEM attributes map back to
3720 the variable itself, directly or within a VALUE. */
3722 static int
3723 dataflow_set_preserve_mem_locs (void **slot, void *data)
3725 dataflow_set *set = (dataflow_set *) data;
3726 variable var = (variable) *slot;
3728 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3730 tree decl = dv_as_decl (var->dv);
3731 location_chain loc, *locp;
3733 if (!var->n_var_parts)
3734 return 1;
3736 gcc_assert (var->n_var_parts == 1);
3738 if (var->refcount > 1 || shared_hash_shared (set->vars))
3740 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3742 /* We want to remove dying MEMs that don't refer to
3743 DECL. */
3744 if (GET_CODE (loc->loc) == MEM
3745 && (MEM_EXPR (loc->loc) != decl
3746 || MEM_OFFSET (loc->loc))
3747 && !mem_dies_at_call (loc->loc))
3748 break;
3749 /* We want to move here MEMs that do refer to DECL. */
3750 else if (GET_CODE (loc->loc) == VALUE
3751 && find_mem_expr_in_1pdv (decl, loc->loc,
3752 shared_hash_htab (set->vars)))
3753 break;
3756 if (!loc)
3757 return 1;
3759 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3760 var = (variable)*slot;
3761 gcc_assert (var->n_var_parts == 1);
3764 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3765 loc; loc = *locp)
3767 rtx old_loc = loc->loc;
3768 if (GET_CODE (old_loc) == VALUE)
3770 location_chain mem_node
3771 = find_mem_expr_in_1pdv (decl, loc->loc,
3772 shared_hash_htab (set->vars));
3774 /* ??? This picks up only one out of multiple MEMs that
3775 refer to the same variable. Do we ever need to be
3776 concerned about dealing with more than one, or, given
3777 that they should all map to the same variable
3778 location, their addresses will have been merged and
3779 they will be regarded as equivalent? */
3780 if (mem_node)
3782 loc->loc = mem_node->loc;
3783 loc->set_src = mem_node->set_src;
3784 loc->init = MIN (loc->init, mem_node->init);
3788 if (GET_CODE (loc->loc) != MEM
3789 || (MEM_EXPR (loc->loc) == decl
3790 && MEM_OFFSET (loc->loc) == 0)
3791 || !mem_dies_at_call (loc->loc))
3793 if (old_loc != loc->loc && emit_notes)
3795 add_value_chains (var->dv, loc->loc);
3796 remove_value_chains (var->dv, old_loc);
3798 locp = &loc->next;
3799 continue;
3802 if (emit_notes)
3803 remove_value_chains (var->dv, old_loc);
3804 *locp = loc->next;
3805 pool_free (loc_chain_pool, loc);
3808 if (!var->var_part[0].loc_chain)
3810 var->n_var_parts--;
3811 if (emit_notes && dv_is_value_p (var->dv))
3812 remove_cselib_value_chains (var->dv);
3813 variable_was_changed (var, set);
3817 return 1;
3820 /* Remove all MEMs from the location list of a hash table entry for a
3821 value. */
3823 static int
3824 dataflow_set_remove_mem_locs (void **slot, void *data)
3826 dataflow_set *set = (dataflow_set *) data;
3827 variable var = (variable) *slot;
3829 if (dv_is_value_p (var->dv))
3831 location_chain loc, *locp;
3832 bool changed = false;
3834 gcc_assert (var->n_var_parts == 1);
3836 if (var->refcount > 1 || shared_hash_shared (set->vars))
3838 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3839 if (GET_CODE (loc->loc) == MEM
3840 && mem_dies_at_call (loc->loc))
3841 break;
3843 if (!loc)
3844 return 1;
3846 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3847 var = (variable)*slot;
3848 gcc_assert (var->n_var_parts == 1);
3851 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3852 loc; loc = *locp)
3854 if (GET_CODE (loc->loc) != MEM
3855 || !mem_dies_at_call (loc->loc))
3857 locp = &loc->next;
3858 continue;
3861 if (emit_notes)
3862 remove_value_chains (var->dv, loc->loc);
3863 *locp = loc->next;
3864 /* If we have deleted the location which was last emitted
3865 we have to emit a new location, so add the variable to the
3866 set of changed variables. */
3867 if (var->var_part[0].cur_loc
3868 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3869 changed = true;
3870 pool_free (loc_chain_pool, loc);
3873 if (!var->var_part[0].loc_chain)
3875 var->n_var_parts--;
3876 if (emit_notes && dv_is_value_p (var->dv))
3877 remove_cselib_value_chains (var->dv);
3878 gcc_assert (changed);
3880 if (changed)
3882 if (var->n_var_parts && var->var_part[0].loc_chain)
3883 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3884 variable_was_changed (var, set);
3888 return 1;
3891 /* Remove all variable-location information about call-clobbered
3892 registers, as well as associations between MEMs and VALUEs. */
3894 static void
3895 dataflow_set_clear_at_call (dataflow_set *set)
3897 int r;
3899 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3900 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3901 var_regno_delete (set, r);
3903 if (MAY_HAVE_DEBUG_INSNS)
3905 set->traversed_vars = set->vars;
3906 htab_traverse (shared_hash_htab (set->vars),
3907 dataflow_set_preserve_mem_locs, set);
3908 set->traversed_vars = set->vars;
3909 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3910 set);
3911 set->traversed_vars = NULL;
3915 /* Flag whether two dataflow sets being compared contain different data. */
3916 static bool
3917 dataflow_set_different_value;
3919 static bool
3920 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3922 location_chain lc1, lc2;
3924 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3926 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3928 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3930 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3931 break;
3933 if (rtx_equal_p (lc1->loc, lc2->loc))
3934 break;
3936 if (!lc2)
3937 return true;
3939 return false;
3942 /* Return true if one-part variables VAR1 and VAR2 are different.
3943 They must be in canonical order. */
3945 static bool
3946 onepart_variable_different_p (variable var1, variable var2)
3948 location_chain lc1, lc2;
3950 if (var1 == var2)
3951 return false;
3953 gcc_assert (var1->n_var_parts == 1);
3954 gcc_assert (var2->n_var_parts == 1);
3956 lc1 = var1->var_part[0].loc_chain;
3957 lc2 = var2->var_part[0].loc_chain;
3959 gcc_assert (lc1);
3960 gcc_assert (lc2);
3962 while (lc1 && lc2)
3964 if (loc_cmp (lc1->loc, lc2->loc))
3965 return true;
3966 lc1 = lc1->next;
3967 lc2 = lc2->next;
3970 return lc1 != lc2;
3973 /* Return true if variables VAR1 and VAR2 are different.
3974 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
3975 variable part. */
3977 static bool
3978 variable_different_p (variable var1, variable var2,
3979 bool compare_current_location)
3981 int i;
3983 if (var1 == var2)
3984 return false;
3986 if (var1->n_var_parts != var2->n_var_parts)
3987 return true;
3989 for (i = 0; i < var1->n_var_parts; i++)
3991 if (var1->var_part[i].offset != var2->var_part[i].offset)
3992 return true;
3993 if (compare_current_location)
3995 if (!((REG_P (var1->var_part[i].cur_loc)
3996 && REG_P (var2->var_part[i].cur_loc)
3997 && (REGNO (var1->var_part[i].cur_loc)
3998 == REGNO (var2->var_part[i].cur_loc)))
3999 || rtx_equal_p (var1->var_part[i].cur_loc,
4000 var2->var_part[i].cur_loc)))
4001 return true;
4003 /* One-part values have locations in a canonical order. */
4004 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4006 gcc_assert (var1->n_var_parts == 1);
4007 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4008 return onepart_variable_different_p (var1, var2);
4010 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4011 return true;
4012 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4013 return true;
4015 return false;
4018 /* Compare variable *SLOT with the same variable in hash table DATA
4019 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4021 static int
4022 dataflow_set_different_1 (void **slot, void *data)
4024 htab_t htab = (htab_t) data;
4025 variable var1, var2;
4027 var1 = (variable) *slot;
4028 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4029 dv_htab_hash (var1->dv));
4030 if (!var2)
4032 dataflow_set_different_value = true;
4034 if (dump_file && (dump_flags & TDF_DETAILS))
4036 fprintf (dump_file, "dataflow difference found: removal of:\n");
4037 dump_var (var1);
4040 /* Stop traversing the hash table. */
4041 return 0;
4044 if (variable_different_p (var1, var2, false))
4046 dataflow_set_different_value = true;
4048 if (dump_file && (dump_flags & TDF_DETAILS))
4050 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4051 dump_var (var1);
4052 dump_var (var2);
4055 /* Stop traversing the hash table. */
4056 return 0;
4059 /* Continue traversing the hash table. */
4060 return 1;
4063 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4065 static bool
4066 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4068 if (old_set->vars == new_set->vars)
4069 return false;
4071 if (htab_elements (shared_hash_htab (old_set->vars))
4072 != htab_elements (shared_hash_htab (new_set->vars)))
4073 return true;
4075 dataflow_set_different_value = false;
4077 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4078 shared_hash_htab (new_set->vars));
4079 /* No need to traverse the second hashtab: if both have the same number
4080 of elements and every entry of the first one was found in the second,
4081 then the second one can't have any extra entries. */
4082 return dataflow_set_different_value;
4085 /* Free the contents of dataflow set SET. */
4087 static void
4088 dataflow_set_destroy (dataflow_set *set)
4090 int i;
4092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4093 attrs_list_clear (&set->regs[i]);
4095 shared_hash_destroy (set->vars);
4096 set->vars = NULL;
4099 /* Return true if RTL X contains a SYMBOL_REF. */
4101 static bool
4102 contains_symbol_ref (rtx x)
4104 const char *fmt;
4105 RTX_CODE code;
4106 int i;
4108 if (!x)
4109 return false;
4111 code = GET_CODE (x);
4112 if (code == SYMBOL_REF)
4113 return true;
4115 fmt = GET_RTX_FORMAT (code);
4116 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4118 if (fmt[i] == 'e')
4120 if (contains_symbol_ref (XEXP (x, i)))
4121 return true;
4123 else if (fmt[i] == 'E')
4125 int j;
4126 for (j = 0; j < XVECLEN (x, i); j++)
4127 if (contains_symbol_ref (XVECEXP (x, i, j)))
4128 return true;
4132 return false;
4135 /* Shall EXPR be tracked? */
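/* Summary added for clarity: EXPR is tracked only if it is a named
   VAR_DECL or PARM_DECL (or a DEBUG_EXPR_DECL with RTL), is not
   ignored for debugging, is not TREE_STATIC, is not an alias whose
   DECL_RTL contains a SYMBOL_REF, and, when it lives in memory, is
   neither an aggregate nor larger than MAX_VAR_PARTS bytes.  */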
4137 static bool
4138 track_expr_p (tree expr, bool need_rtl)
4140 rtx decl_rtl;
4141 tree realdecl;
4143 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4144 return DECL_RTL_SET_P (expr);
4146 /* If EXPR is not a parameter or a variable do not track it. */
4147 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4148 return 0;
4150 /* It also must have a name... */
4151 if (!DECL_NAME (expr))
4152 return 0;
4154 /* ... and a RTL assigned to it. */
4155 decl_rtl = DECL_RTL_IF_SET (expr);
4156 if (!decl_rtl && need_rtl)
4157 return 0;
4159 /* If this expression is really a debug alias of some other declaration, we
4160 don't need to track this expression if the ultimate declaration is
4161 ignored. */
4162 realdecl = expr;
4163 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4165 realdecl = DECL_DEBUG_EXPR (realdecl);
4166 /* ??? We don't yet know how to emit DW_OP_piece for a variable
4167 that has been SRA'ed. */
4168 if (!DECL_P (realdecl))
4169 return 0;
4172 /* Do not track EXPR if REALDECL should be ignored for debugging
4173 purposes. */
4174 if (DECL_IGNORED_P (realdecl))
4175 return 0;
4177 /* Do not track global variables until we are able to emit correct location
4178 list for them. */
4179 if (TREE_STATIC (realdecl))
4180 return 0;
4182 /* When EXPR is a DECL for an alias of some variable (see the example
4183 below) the TREE_STATIC flag is not used. Disable tracking of all
4184 DECLs whose DECL_RTL contains a SYMBOL_REF.
4186 Example:
4187 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4188 char **_dl_argv;
4190 if (decl_rtl && MEM_P (decl_rtl)
4191 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4192 return 0;
4194 /* If the RTX is a MEM, it should not be very large (because it would
4195 be an array or a struct). */
4196 if (decl_rtl && MEM_P (decl_rtl))
4198 /* Do not track structures and arrays. */
4199 if (GET_MODE (decl_rtl) == BLKmode
4200 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4201 return 0;
4202 if (MEM_SIZE (decl_rtl)
4203 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4204 return 0;
4207 DECL_CHANGED (expr) = 0;
4208 DECL_CHANGED (realdecl) = 0;
4209 return 1;
4212 /* Determine whether a given LOC refers to the same variable part as
4213 EXPR+OFFSET. */
4215 static bool
4216 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4218 tree expr2;
4219 HOST_WIDE_INT offset2;
4221 if (! DECL_P (expr))
4222 return false;
4224 if (REG_P (loc))
4226 expr2 = REG_EXPR (loc);
4227 offset2 = REG_OFFSET (loc);
4229 else if (MEM_P (loc))
4231 expr2 = MEM_EXPR (loc);
4232 offset2 = INT_MEM_OFFSET (loc);
4234 else
4235 return false;
4237 if (! expr2 || ! DECL_P (expr2))
4238 return false;
4240 expr = var_debug_decl (expr);
4241 expr2 = var_debug_decl (expr2);
4243 return (expr == expr2 && offset == offset2);
4246 /* LOC is a REG or MEM that we would like to track if possible.
4247 If EXPR is null, we don't know what expression LOC refers to,
4248 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4249 LOC is an lvalue register.
4251 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4252 is something we can track. When returning true, store the mode of
4253 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4254 from EXPR in *OFFSET_OUT (if nonnull). */
4256 static bool
4257 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4258 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4260 enum machine_mode mode;
4262 if (expr == NULL || !track_expr_p (expr, true))
4263 return false;
4265 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4266 whole subreg, but only the old inner part is really relevant. */
4267 mode = GET_MODE (loc);
4268 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4270 enum machine_mode pseudo_mode;
4272 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4273 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4275 offset += byte_lowpart_offset (pseudo_mode, mode);
4276 mode = pseudo_mode;
4280 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4281 Do the same if we are storing to a register and EXPR occupies
4282 the whole of register LOC; in that case, the whole of EXPR is
4283 being changed. We exclude complex modes from the second case
4284 because the real and imaginary parts are represented as separate
4285 pseudo registers, even if the whole complex value fits into one
4286 hard register. */
4287 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4288 || (store_reg_p
4289 && !COMPLEX_MODE_P (DECL_MODE (expr))
4290 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4291 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4293 mode = DECL_MODE (expr);
4294 offset = 0;
4297 if (offset < 0 || offset >= MAX_VAR_PARTS)
4298 return false;
4300 if (mode_out)
4301 *mode_out = mode;
4302 if (offset_out)
4303 *offset_out = offset;
4304 return true;
4307 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4308 want to track. When returning nonnull, make sure that the attributes
4309 on the returned value are updated. */
4311 static rtx
4312 var_lowpart (enum machine_mode mode, rtx loc)
4314 unsigned int offset, reg_offset, regno;
4316 if (!REG_P (loc) && !MEM_P (loc))
4317 return NULL;
4319 if (GET_MODE (loc) == mode)
4320 return loc;
4322 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4324 if (MEM_P (loc))
4325 return adjust_address_nv (loc, mode, offset);
4327 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4328 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4329 reg_offset, mode);
4330 return gen_rtx_REG_offset (loc, mode, regno, offset);
4333 /* Carry information about uses and stores while walking rtx. */
4335 struct count_use_info
4337 /* The insn where the RTX is. */
4338 rtx insn;
4340 /* The basic block where insn is. */
4341 basic_block bb;
4343 /* The array of n_sets sets in the insn, as determined by cselib. */
4344 struct cselib_set *sets;
4345 int n_sets;
4347 /* True if we're counting stores, false otherwise. */
4348 bool store_p;
4351 /* Find a VALUE corresponding to X. */
4353 static inline cselib_val *
4354 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4356 int i;
4358 if (cui->sets)
4360 /* This is called after uses are set up and before stores are
4361 processed by cselib, so it's safe to look up srcs, but not
4362 dsts. So we look up expressions that appear in srcs or in
4363 dest expressions, but we search the sets array for dests of
4364 stores. */
4365 if (cui->store_p)
4367 for (i = 0; i < cui->n_sets; i++)
4368 if (cui->sets[i].dest == x)
4369 return cui->sets[i].src_elt;
4371 else
4372 return cselib_lookup (x, mode, 0);
4375 return NULL;
4378 /* Replace all registers and addresses in an expression with VALUE
4379 expressions that map back to them, unless the expression is a
4380 register. If no mapping is or can be performed, returns NULL. */
4382 static rtx
4383 replace_expr_with_values (rtx loc)
4385 if (REG_P (loc))
4386 return NULL;
4387 else if (MEM_P (loc))
4389 enum machine_mode address_mode
4390 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4391 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4392 if (addr)
4393 return replace_equiv_address_nv (loc, addr->val_rtx);
4394 else
4395 return NULL;
4397 else
4398 return cselib_subst_to_values (loc);
4401 /* Determine what kind of micro operation to choose for a USE. Return
4402 MO_CLOBBER if no micro operation is to be generated. */
4404 static enum micro_operation_type
4405 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4407 tree expr;
4409 if (cui && cui->sets)
4411 if (GET_CODE (loc) == VAR_LOCATION)
4413 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4415 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4416 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4418 /* ??? flag_float_store and volatile mems are never
4419 given values, but we could in theory use them for
4420 locations. */
4421 gcc_assert (val || 1);
4422 return MO_VAL_LOC;
4424 else
4425 return MO_CLOBBER;
4428 if (REG_P (loc) || MEM_P (loc))
4430 if (modep)
4431 *modep = GET_MODE (loc);
4432 if (cui->store_p)
4434 if (REG_P (loc)
4435 || (find_use_val (loc, GET_MODE (loc), cui)
4436 && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
4437 return MO_VAL_SET;
4439 else
4441 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4443 if (val && !cselib_preserved_value_p (val))
4444 return MO_VAL_USE;
4449 if (REG_P (loc))
4451 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4453 expr = REG_EXPR (loc);
4455 if (!expr)
4456 return MO_USE_NO_VAR;
4457 else if (target_for_debug_bind (var_debug_decl (expr)))
4458 return MO_CLOBBER;
4459 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4460 false, modep, NULL))
4461 return MO_USE;
4462 else
4463 return MO_USE_NO_VAR;
4465 else if (MEM_P (loc))
4467 expr = MEM_EXPR (loc);
4469 if (!expr)
4470 return MO_CLOBBER;
4471 else if (target_for_debug_bind (var_debug_decl (expr)))
4472 return MO_CLOBBER;
4473 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4474 false, modep, NULL))
4475 return MO_USE;
4476 else
4477 return MO_CLOBBER;
4480 return MO_CLOBBER;
4483 /* Log to OUT information about micro-operation MOPT involving X in
4484 INSN of BB. */
4486 static inline void
4487 log_op_type (rtx x, basic_block bb, rtx insn,
4488 enum micro_operation_type mopt, FILE *out)
4490 fprintf (out, "bb %i op %i insn %i %s ",
4491 bb->index, VTI (bb)->n_mos - 1,
4492 INSN_UID (insn), micro_operation_type_name[mopt]);
4493 print_inline_rtx (out, x, 2);
4494 fputc ('\n', out);
4497 /* Count uses (register and memory references) LOC which will be tracked.
4498 CUIP is a count_use_info containing the instruction which LOC is part of. */
4500 static int
4501 count_uses (rtx *ploc, void *cuip)
4503 rtx loc = *ploc;
4504 struct count_use_info *cui = (struct count_use_info *) cuip;
4505 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4507 if (mopt != MO_CLOBBER)
4509 cselib_val *val;
4510 enum machine_mode mode = GET_MODE (loc);
4512 switch (mopt)
4514 case MO_VAL_LOC:
4515 loc = PAT_VAR_LOCATION_LOC (loc);
4516 if (VAR_LOC_UNKNOWN_P (loc))
4517 break;
4518 /* Fall through. */
4520 case MO_VAL_USE:
4521 case MO_VAL_SET:
4522 if (MEM_P (loc)
4523 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4525 enum machine_mode address_mode
4526 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4527 val = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4529 if (val && !cselib_preserved_value_p (val))
4531 VTI (cui->bb)->n_mos++;
4532 cselib_preserve_value (val);
4533 if (dump_file && (dump_flags & TDF_DETAILS))
4534 log_op_type (XEXP (loc, 0), cui->bb, cui->insn,
4535 MO_VAL_USE, dump_file);
4539 val = find_use_val (loc, mode, cui);
4540 if (val)
4542 if (mopt == MO_VAL_SET
4543 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4544 && (REG_P (loc)
4545 || (MEM_P (loc)
4546 && (use_type (loc, NULL, NULL) == MO_USE
4547 || cui->sets))))
4549 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4551 gcc_assert (oval != val);
4552 gcc_assert (REG_P (loc) || MEM_P (loc));
4554 if (!cselib_preserved_value_p (oval))
4556 VTI (cui->bb)->n_mos++;
4557 cselib_preserve_value (oval);
4558 if (dump_file && (dump_flags & TDF_DETAILS))
4559 log_op_type (loc, cui->bb, cui->insn,
4560 MO_VAL_USE, dump_file);
4564 cselib_preserve_value (val);
4566 else
4567 gcc_assert (mopt == MO_VAL_LOC
4568 || (mopt == MO_VAL_SET && cui->store_p));
4570 break;
4572 default:
4573 break;
4576 VTI (cui->bb)->n_mos++;
4577 if (dump_file && (dump_flags & TDF_DETAILS))
4578 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
4581 return 0;
4584 /* Helper function for finding all uses of REG/MEM in X in CUI's
4585 insn. */
4587 static void
4588 count_uses_1 (rtx *x, void *cui)
4590 for_each_rtx (x, count_uses, cui);
4593 /* Count stores (register and memory references) LOC which will be
4594 tracked. CUI is a count_use_info object containing the instruction
4595 which LOC is part of. */
4597 static void
4598 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4600 count_uses (&loc, cui);
4603 /* Callback for cselib_record_sets_hook that counts how many micro
4604 operations the uses and stores in an insn will take. It is called
4605 after cselib_record_sets has analyzed the sets in the insn, but
4606 before it modifies the stored values in its internal tables. If
4607 cselib is not being used at all, this function is called directly
4608 instead (in which case SETS and N_SETS will be 0). */
4611 static void
4612 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4614 basic_block bb = BLOCK_FOR_INSN (insn);
4615 struct count_use_info cui;
4617 cselib_hook_called = true;
4619 cui.insn = insn;
4620 cui.bb = bb;
4621 cui.sets = sets;
4622 cui.n_sets = n_sets;
4624 cui.store_p = false;
4625 note_uses (&PATTERN (insn), count_uses_1, &cui);
4626 cui.store_p = true;
4627 note_stores (PATTERN (insn), count_stores, &cui);
4630 /* Tell whether the CONCAT used to hold a VALUE and its location
4631 needs value resolution, i.e., an attempt to map the location
4632 back to other incoming values. */
4633 #define VAL_NEEDS_RESOLUTION(x) \
4634 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4635 /* Whether the location in the CONCAT is a tracked expression that
4636 should also be handled like a MO_USE. */
4637 #define VAL_HOLDS_TRACK_EXPR(x) \
4638 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4639 /* Whether the location in the CONCAT should be handled like a MO_COPY
4640 as well. */
4641 #define VAL_EXPR_IS_COPIED(x) \
4642 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4643 /* Whether the location in the CONCAT should be handled like a
4644 MO_CLOBBER as well. */
4645 #define VAL_EXPR_IS_CLOBBERED(x) \
4646 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4648 /* Add uses (register and memory references) LOC which will be tracked
4649 to VTI (bb)->mos. DATA points to the count_use_info holding the instruction which LOC is part of. */
4651 static int
4652 add_uses (rtx *ploc, void *data)
4654 rtx loc = *ploc;
4655 enum machine_mode mode = VOIDmode;
4656 struct count_use_info *cui = (struct count_use_info *)data;
4657 enum micro_operation_type type = use_type (loc, cui, &mode);
4659 if (type != MO_CLOBBER)
4661 basic_block bb = cui->bb;
4662 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4664 mo->type = type;
4665 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4666 mo->insn = cui->insn;
4668 if (type == MO_VAL_LOC)
4670 rtx oloc = loc;
4671 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4672 cselib_val *val;
4674 gcc_assert (cui->sets);
4676 if (MEM_P (vloc)
4677 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4679 rtx mloc = vloc;
4680 enum machine_mode address_mode
4681 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4682 cselib_val *val
4683 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4685 if (val && !cselib_preserved_value_p (val))
4687 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4688 mon->type = mo->type;
4689 mon->u.loc = mo->u.loc;
4690 mon->insn = mo->insn;
4691 cselib_preserve_value (val);
4692 mo->type = MO_VAL_USE;
4693 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4694 mo->u.loc = gen_rtx_CONCAT (address_mode,
4695 val->val_rtx, mloc);
4696 if (dump_file && (dump_flags & TDF_DETAILS))
4697 log_op_type (mo->u.loc, cui->bb, cui->insn,
4698 mo->type, dump_file);
4699 mo = mon;
4703 if (!VAR_LOC_UNKNOWN_P (vloc)
4704 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4706 enum machine_mode mode2;
4707 enum micro_operation_type type2;
4708 rtx nloc = replace_expr_with_values (vloc);
4710 if (nloc)
4712 oloc = shallow_copy_rtx (oloc);
4713 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4716 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4718 type2 = use_type (vloc, 0, &mode2);
4720 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4721 || type2 == MO_CLOBBER);
4723 if (type2 == MO_CLOBBER
4724 && !cselib_preserved_value_p (val))
4726 VAL_NEEDS_RESOLUTION (oloc) = 1;
4727 cselib_preserve_value (val);
4730 else if (!VAR_LOC_UNKNOWN_P (vloc))
4732 oloc = shallow_copy_rtx (oloc);
4733 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4736 mo->u.loc = oloc;
4738 else if (type == MO_VAL_USE)
4740 enum machine_mode mode2 = VOIDmode;
4741 enum micro_operation_type type2;
4742 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4743 rtx vloc, oloc = loc, nloc;
4745 gcc_assert (cui->sets);
4747 if (MEM_P (oloc)
4748 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4750 rtx mloc = oloc;
4751 enum machine_mode address_mode
4752 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4753 cselib_val *val
4754 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4756 if (val && !cselib_preserved_value_p (val))
4758 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4759 mon->type = mo->type;
4760 mon->u.loc = mo->u.loc;
4761 mon->insn = mo->insn;
4762 cselib_preserve_value (val);
4763 mo->type = MO_VAL_USE;
4764 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4765 mo->u.loc = gen_rtx_CONCAT (address_mode,
4766 val->val_rtx, mloc);
4767 mo->insn = cui->insn;
4768 if (dump_file && (dump_flags & TDF_DETAILS))
4769 log_op_type (mo->u.loc, cui->bb, cui->insn,
4770 mo->type, dump_file);
4771 mo = mon;
4775 type2 = use_type (loc, 0, &mode2);
4777 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4778 || type2 == MO_CLOBBER);
4780 if (type2 == MO_USE)
4781 vloc = var_lowpart (mode2, loc);
4782 else
4783 vloc = oloc;
4785 /* The loc of a MO_VAL_USE may have two forms:
4787 (concat val src): val is at src, a value-based
4788 representation.
4790 (concat (concat val use) src): same as above, with use as
4791 the MO_USE tracked value, if it differs from src.
4795 nloc = replace_expr_with_values (loc);
4796 if (!nloc)
4797 nloc = oloc;
4799 if (vloc != nloc)
4800 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4801 else
4802 oloc = val->val_rtx;
4804 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4806 if (type2 == MO_USE)
4807 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4808 if (!cselib_preserved_value_p (val))
4810 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4811 cselib_preserve_value (val);
4814 else
4815 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4817 if (dump_file && (dump_flags & TDF_DETAILS))
4818 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4821 return 0;
4824 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4826 static void
4827 add_uses_1 (rtx *x, void *cui)
4829 for_each_rtx (x, add_uses, cui);
4832 /* Add stores (register and memory references) LOC which will be tracked
4833 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4834 CUIP->insn is the instruction which LOC is part of. */
4836 static void
4837 add_stores (rtx loc, const_rtx expr, void *cuip)
4839 enum machine_mode mode = VOIDmode, mode2;
4840 struct count_use_info *cui = (struct count_use_info *)cuip;
4841 basic_block bb = cui->bb;
4842 micro_operation *mo;
4843 rtx oloc = loc, nloc, src = NULL;
4844 enum micro_operation_type type = use_type (loc, cui, &mode);
4845 bool track_p = false;
4846 cselib_val *v;
4847 bool resolve, preserve;
4849 if (type == MO_CLOBBER)
4850 return;
4852 mode2 = mode;
4854 if (REG_P (loc))
4856 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4858 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4859 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4860 || GET_CODE (expr) == CLOBBER)
4862 mo->type = MO_CLOBBER;
4863 mo->u.loc = loc;
4865 else
4867 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4868 src = var_lowpart (mode2, SET_SRC (expr));
4869 loc = var_lowpart (mode2, loc);
4871 if (src == NULL)
4873 mo->type = MO_SET;
4874 mo->u.loc = loc;
4876 else
4878 rtx xexpr = CONST_CAST_RTX (expr);
4880 if (SET_SRC (expr) != src)
4881 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4882 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4883 mo->type = MO_COPY;
4884 else
4885 mo->type = MO_SET;
4886 mo->u.loc = xexpr;
4889 mo->insn = cui->insn;
4891 else if (MEM_P (loc)
4892 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4893 || cui->sets))
4895 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4897 if (MEM_P (loc) && type == MO_VAL_SET
4898 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4900 rtx mloc = loc;
4901 enum machine_mode address_mode
4902 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4903 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4905 if (val && !cselib_preserved_value_p (val))
4907 cselib_preserve_value (val);
4908 mo->type = MO_VAL_USE;
4909 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4910 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4911 mo->insn = cui->insn;
4912 if (dump_file && (dump_flags & TDF_DETAILS))
4913 log_op_type (mo->u.loc, cui->bb, cui->insn,
4914 mo->type, dump_file);
4915 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4919 if (GET_CODE (expr) == CLOBBER || !track_p)
4921 mo->type = MO_CLOBBER;
4922 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4924 else
4926 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4927 src = var_lowpart (mode2, SET_SRC (expr));
4928 loc = var_lowpart (mode2, loc);
4930 if (src == NULL)
4932 mo->type = MO_SET;
4933 mo->u.loc = loc;
4935 else
4937 rtx xexpr = CONST_CAST_RTX (expr);
4939 if (SET_SRC (expr) != src)
4940 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4941 if (same_variable_part_p (SET_SRC (xexpr),
4942 MEM_EXPR (loc),
4943 INT_MEM_OFFSET (loc)))
4944 mo->type = MO_COPY;
4945 else
4946 mo->type = MO_SET;
4947 mo->u.loc = xexpr;
4950 mo->insn = cui->insn;
4952 else
4953 return;
4955 if (type != MO_VAL_SET)
4956 goto log_and_return;
4958 v = find_use_val (oloc, mode, cui);
4960 if (!v)
4961 goto log_and_return;
4963 resolve = preserve = !cselib_preserved_value_p (v);
4965 nloc = replace_expr_with_values (oloc);
4966 if (nloc)
4967 oloc = nloc;
4969 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4971 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4973 gcc_assert (oval != v);
4974 gcc_assert (REG_P (oloc) || MEM_P (oloc));
4976 if (!cselib_preserved_value_p (oval))
4978 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
4980 cselib_preserve_value (oval);
4982 nmo->type = MO_VAL_USE;
4983 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
4984 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
4985 nmo->insn = mo->insn;
4987 if (dump_file && (dump_flags & TDF_DETAILS))
4988 log_op_type (nmo->u.loc, cui->bb, cui->insn,
4989 nmo->type, dump_file);
4992 resolve = false;
4994 else if (resolve && GET_CODE (mo->u.loc) == SET)
4996 nloc = replace_expr_with_values (SET_SRC (expr));
4998 /* Avoid the mode mismatch between oexpr and expr. */
4999 if (!nloc && mode != mode2)
5001 nloc = SET_SRC (expr);
5002 gcc_assert (oloc == SET_DEST (expr));
5005 if (nloc)
5006 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
5007 else
5009 if (oloc == SET_DEST (mo->u.loc))
5010 /* No point in duplicating. */
5011 oloc = mo->u.loc;
5012 if (!REG_P (SET_SRC (mo->u.loc)))
5013 resolve = false;
5016 else if (!resolve)
5018 if (GET_CODE (mo->u.loc) == SET
5019 && oloc == SET_DEST (mo->u.loc))
5020 /* No point in duplicating. */
5021 oloc = mo->u.loc;
5023 else
5024 resolve = false;
5026 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5028 if (mo->u.loc != oloc)
5029 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
5031 /* The loc of a MO_VAL_SET may have various forms:
5033 (concat val dst): dst now holds val
5035 (concat val (set dst src)): dst now holds val, copied from src
5037 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5038 after replacing mems and non-top-level regs with values.
5040 (concat (concat val dstv) (set dst src)): dst now holds val,
5041 copied from src. dstv is a value-based representation of dst, if
5042 it differs from dst. If resolution is needed, src is a REG, and
5043 its mode is the same as that of val.
5045 (concat (concat val (set dstv srcv)) (set dst src)): src
5046 copied to dst, holding val. dstv and srcv are value-based
5047 representations of dst and src, respectively.
5051 mo->u.loc = loc;
5053 if (track_p)
5054 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5055 if (preserve)
5057 VAL_NEEDS_RESOLUTION (loc) = resolve;
5058 cselib_preserve_value (v);
5060 if (mo->type == MO_CLOBBER)
5061 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5062 if (mo->type == MO_COPY)
5063 VAL_EXPR_IS_COPIED (loc) = 1;
5065 mo->type = MO_VAL_SET;
5067 log_and_return:
5068 if (dump_file && (dump_flags & TDF_DETAILS))
5069 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5072 /* Callback for cselib_record_sets_hook that records the uses and
5073 stores in an insn as micro operations. It is called after
5074 cselib_record_sets has analyzed the sets in the insn, but before it
5075 modifies the stored values in its internal tables. If cselib is
5076 not being used at all, this function is called directly instead
5077 (in which case SETS and N_SETS will be 0). */
5079 static void
5080 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5082 basic_block bb = BLOCK_FOR_INSN (insn);
5083 int n1, n2;
5084 struct count_use_info cui;
5086 cselib_hook_called = true;
5088 cui.insn = insn;
5089 cui.bb = bb;
5090 cui.sets = sets;
5091 cui.n_sets = n_sets;
5093 n1 = VTI (bb)->n_mos;
5094 cui.store_p = false;
5095 note_uses (&PATTERN (insn), add_uses_1, &cui);
5096 n2 = VTI (bb)->n_mos - 1;
5098 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5099 MO_VAL_LOC last. */
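/* Two-index partition: N1 advances over entries that are already in
   place, N2 retreats over entries that belong later, and out-of-place
   pairs are swapped. */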
5100 while (n1 < n2)
5102 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5103 n1++;
5104 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5105 n2--;
5106 if (n1 < n2)
5108 micro_operation sw;
5110 sw = VTI (bb)->mos[n1];
5111 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5112 VTI (bb)->mos[n2] = sw;
5116 n2 = VTI (bb)->n_mos - 1;
5118 while (n1 < n2)
5120 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5121 n1++;
5122 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5123 n2--;
5124 if (n1 < n2)
5126 micro_operation sw;
5128 sw = VTI (bb)->mos[n1];
5129 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5130 VTI (bb)->mos[n2] = sw;
5134 if (CALL_P (insn))
5136 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5138 mo->type = MO_CALL;
5139 mo->insn = insn;
5141 if (dump_file && (dump_flags & TDF_DETAILS))
5142 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5145 n1 = VTI (bb)->n_mos;
5146 /* This will record NEXT_INSN (insn), such that we can
5147 insert notes before it without worrying about any
5148 notes that MO_USEs might emit after the insn. */
5149 cui.store_p = true;
5150 note_stores (PATTERN (insn), add_stores, &cui);
5151 n2 = VTI (bb)->n_mos - 1;
5153 /* Order the MO_CLOBBERs to be before MO_SETs. */
5154 while (n1 < n2)
5156 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5157 n1++;
5158 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5159 n2--;
5160 if (n1 < n2)
5162 micro_operation sw;
5164 sw = VTI (bb)->mos[n1];
5165 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5166 VTI (bb)->mos[n2] = sw;
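/* Return the initialization status recorded in dataflow set IN for the
   variable whose location SRC (a REG or MEM) is; default to
   VAR_INIT_STATUS_UNINITIALIZED (or INITIALIZED when uninitialized
   tracking is disabled) if nothing better is known. */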
5171 static enum var_init_status
5172 find_src_status (dataflow_set *in, rtx src)
5174 tree decl = NULL_TREE;
5175 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5177 if (! flag_var_tracking_uninit)
5178 status = VAR_INIT_STATUS_INITIALIZED;
5180 if (src && REG_P (src))
5181 decl = var_debug_decl (REG_EXPR (src));
5182 else if (src && MEM_P (src))
5183 decl = var_debug_decl (MEM_EXPR (src));
5185 if (src && decl)
5186 status = get_init_value (in, src, dv_from_decl (decl));
5188 return status;
5191 /* SRC is the source of an assignment. Use SET to try to find what
5192 was ultimately assigned to SRC. Return that value if known,
5193 otherwise return SRC itself. */
5195 static rtx
5196 find_src_set_src (dataflow_set *set, rtx src)
5198 tree decl = NULL_TREE; /* The variable being copied around. */
5199 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5200 variable var;
5201 location_chain nextp;
5202 int i;
5203 bool found;
5205 if (src && REG_P (src))
5206 decl = var_debug_decl (REG_EXPR (src));
5207 else if (src && MEM_P (src))
5208 decl = var_debug_decl (MEM_EXPR (src));
5210 if (src && decl)
5212 decl_or_value dv = dv_from_decl (decl);
5214 var = shared_hash_find (set->vars, dv);
5215 if (var)
5217 found = false;
5218 for (i = 0; i < var->n_var_parts && !found; i++)
5219 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5220 nextp = nextp->next)
5221 if (rtx_equal_p (nextp->loc, src))
5223 set_src = nextp->set_src;
5224 found = true;
5230 return set_src;
5233 /* Compute the changes of variable locations in the basic block BB. */
5235 static bool
5236 compute_bb_dataflow (basic_block bb)
5238 int i, n;
5239 bool changed;
5240 dataflow_set old_out;
5241 dataflow_set *in = &VTI (bb)->in;
5242 dataflow_set *out = &VTI (bb)->out;
5244 dataflow_set_init (&old_out);
5245 dataflow_set_copy (&old_out, out);
5246 dataflow_set_copy (out, in);
5248 n = VTI (bb)->n_mos;
5249 for (i = 0; i < n; i++)
5251 rtx insn = VTI (bb)->mos[i].insn;
5253 switch (VTI (bb)->mos[i].type)
5255 case MO_CALL:
5256 dataflow_set_clear_at_call (out);
5257 break;
5259 case MO_USE:
5261 rtx loc = VTI (bb)->mos[i].u.loc;
5263 if (REG_P (loc))
5264 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5265 else if (MEM_P (loc))
5266 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5268 break;
5270 case MO_VAL_LOC:
5272 rtx loc = VTI (bb)->mos[i].u.loc;
5273 rtx val, vloc;
5274 tree var;
5276 if (GET_CODE (loc) == CONCAT)
5278 val = XEXP (loc, 0);
5279 vloc = XEXP (loc, 1);
5281 else
5283 val = NULL_RTX;
5284 vloc = loc;
5287 var = PAT_VAR_LOCATION_DECL (vloc);
5289 clobber_variable_part (out, NULL_RTX,
5290 dv_from_decl (var), 0, NULL_RTX);
5291 if (val)
5293 if (VAL_NEEDS_RESOLUTION (loc))
5294 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5295 set_variable_part (out, val, dv_from_decl (var), 0,
5296 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5297 INSERT);
5300 break;
5302 case MO_VAL_USE:
5304 rtx loc = VTI (bb)->mos[i].u.loc;
5305 rtx val, vloc, uloc;
5307 vloc = uloc = XEXP (loc, 1);
5308 val = XEXP (loc, 0);
5310 if (GET_CODE (val) == CONCAT)
5312 uloc = XEXP (val, 1);
5313 val = XEXP (val, 0);
5316 if (VAL_NEEDS_RESOLUTION (loc))
5317 val_resolve (out, val, vloc, insn);
5318 else
5319 val_store (out, val, uloc, insn, false);
5321 if (VAL_HOLDS_TRACK_EXPR (loc))
5323 if (GET_CODE (uloc) == REG)
5324 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5325 NULL);
5326 else if (GET_CODE (uloc) == MEM)
5327 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5328 NULL);
5331 break;
5333 case MO_VAL_SET:
5335 rtx loc = VTI (bb)->mos[i].u.loc;
5336 rtx val, vloc, uloc;
5338 vloc = uloc = XEXP (loc, 1);
5339 val = XEXP (loc, 0);
5341 if (GET_CODE (val) == CONCAT)
5343 vloc = XEXP (val, 1);
5344 val = XEXP (val, 0);
5347 if (GET_CODE (vloc) == SET)
5349 rtx vsrc = SET_SRC (vloc);
5351 gcc_assert (val != vsrc);
5352 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5354 vloc = SET_DEST (vloc);
5356 if (VAL_NEEDS_RESOLUTION (loc))
5357 val_resolve (out, val, vsrc, insn);
5359 else if (VAL_NEEDS_RESOLUTION (loc))
5361 gcc_assert (GET_CODE (uloc) == SET
5362 && GET_CODE (SET_SRC (uloc)) == REG);
5363 val_resolve (out, val, SET_SRC (uloc), insn);
5366 if (VAL_HOLDS_TRACK_EXPR (loc))
5368 if (VAL_EXPR_IS_CLOBBERED (loc))
5370 if (REG_P (uloc))
5371 var_reg_delete (out, uloc, true);
5372 else if (MEM_P (uloc))
5373 var_mem_delete (out, uloc, true);
5375 else
5377 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5378 rtx set_src = NULL;
5379 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5381 if (GET_CODE (uloc) == SET)
5383 set_src = SET_SRC (uloc);
5384 uloc = SET_DEST (uloc);
5387 if (copied_p)
5389 if (flag_var_tracking_uninit)
5391 status = find_src_status (in, set_src);
5393 if (status == VAR_INIT_STATUS_UNKNOWN)
5394 status = find_src_status (out, set_src);
5397 set_src = find_src_set_src (in, set_src);
5400 if (REG_P (uloc))
5401 var_reg_delete_and_set (out, uloc, !copied_p,
5402 status, set_src);
5403 else if (MEM_P (uloc))
5404 var_mem_delete_and_set (out, uloc, !copied_p,
5405 status, set_src);
5408 else if (REG_P (uloc))
5409 var_regno_delete (out, REGNO (uloc));
5411 val_store (out, val, vloc, insn, true);
5413 break;
5415 case MO_SET:
5417 rtx loc = VTI (bb)->mos[i].u.loc;
5418 rtx set_src = NULL;
5420 if (GET_CODE (loc) == SET)
5422 set_src = SET_SRC (loc);
5423 loc = SET_DEST (loc);
5426 if (REG_P (loc))
5427 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5428 set_src);
5429 else if (MEM_P (loc))
5430 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5431 set_src);
5433 break;
5435 case MO_COPY:
5437 rtx loc = VTI (bb)->mos[i].u.loc;
5438 enum var_init_status src_status;
5439 rtx set_src = NULL;
5441 if (GET_CODE (loc) == SET)
5443 set_src = SET_SRC (loc);
5444 loc = SET_DEST (loc);
5447 if (! flag_var_tracking_uninit)
5448 src_status = VAR_INIT_STATUS_INITIALIZED;
5449 else
5451 src_status = find_src_status (in, set_src);
5453 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5454 src_status = find_src_status (out, set_src);
5457 set_src = find_src_set_src (in, set_src);
5459 if (REG_P (loc))
5460 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5461 else if (MEM_P (loc))
5462 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5464 break;
5466 case MO_USE_NO_VAR:
5468 rtx loc = VTI (bb)->mos[i].u.loc;
5470 if (REG_P (loc))
5471 var_reg_delete (out, loc, false);
5472 else if (MEM_P (loc))
5473 var_mem_delete (out, loc, false);
5475 break;
5477 case MO_CLOBBER:
5479 rtx loc = VTI (bb)->mos[i].u.loc;
5481 if (REG_P (loc))
5482 var_reg_delete (out, loc, true);
5483 else if (MEM_P (loc))
5484 var_mem_delete (out, loc, true);
5486 break;
5488 case MO_ADJUST:
5489 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5490 break;
5494 if (MAY_HAVE_DEBUG_INSNS)
5496 dataflow_set_equiv_regs (out);
5497 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5498 out);
5499 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5500 out);
5501 #if ENABLE_CHECKING
5502 htab_traverse (shared_hash_htab (out->vars),
5503 canonicalize_loc_order_check, out);
5504 #endif
5506 changed = dataflow_set_different (&old_out, out);
5507 dataflow_set_destroy (&old_out);
5508 return changed;
5511 /* Find the locations of variables in the whole function. */
5513 static void
5514 vt_find_locations (void)
5516 fibheap_t worklist, pending, fibheap_swap;
5517 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5518 basic_block bb;
5519 edge e;
5520 int *bb_order;
5521 int *rc_order;
5522 int i;
5523 int htabsz = 0;
5525 /* Compute reverse completion order of depth first search of the CFG
5526 so that the data-flow runs faster. */
5527 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5528 bb_order = XNEWVEC (int, last_basic_block);
5529 pre_and_rev_post_order_compute (NULL, rc_order, false);
5530 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5531 bb_order[rc_order[i]] = i;
5532 free (rc_order);
5534 worklist = fibheap_new ();
5535 pending = fibheap_new ();
5536 visited = sbitmap_alloc (last_basic_block);
5537 in_worklist = sbitmap_alloc (last_basic_block);
5538 in_pending = sbitmap_alloc (last_basic_block);
5539 sbitmap_zero (in_worklist);
5541 FOR_EACH_BB (bb)
5542 fibheap_insert (pending, bb_order[bb->index], bb);
5543 sbitmap_ones (in_pending);
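/* Iterate until the IN/OUT sets reach a fixed point. WORKLIST holds the
   blocks of the current round, PENDING those queued for the next round;
   both heaps are keyed by BB_ORDER, so each round visits blocks in
   reverse postorder. */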
5545 while (!fibheap_empty (pending))
5547 fibheap_swap = pending;
5548 pending = worklist;
5549 worklist = fibheap_swap;
5550 sbitmap_swap = in_pending;
5551 in_pending = in_worklist;
5552 in_worklist = sbitmap_swap;
5554 sbitmap_zero (visited);
5556 while (!fibheap_empty (worklist))
5558 bb = (basic_block) fibheap_extract_min (worklist);
5559 RESET_BIT (in_worklist, bb->index);
5560 if (!TEST_BIT (visited, bb->index))
5562 bool changed;
5563 edge_iterator ei;
5564 int oldinsz, oldoutsz;
5566 SET_BIT (visited, bb->index);
5568 if (dump_file && VTI (bb)->in.vars)
5570 htabsz
5571 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5572 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5573 oldinsz
5574 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5575 oldoutsz
5576 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5578 else
5579 oldinsz = oldoutsz = 0;
5581 if (MAY_HAVE_DEBUG_INSNS)
5583 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5584 bool first = true, adjust = false;
5586 /* Calculate the IN set as the intersection of
5587 predecessor OUT sets. */
5589 dataflow_set_clear (in);
5590 dst_can_be_shared = true;
5592 FOR_EACH_EDGE (e, ei, bb->preds)
5593 if (!VTI (e->src)->flooded)
5594 gcc_assert (bb_order[bb->index]
5595 <= bb_order[e->src->index]);
5596 else if (first)
5598 dataflow_set_copy (in, &VTI (e->src)->out);
5599 first_out = &VTI (e->src)->out;
5600 first = false;
5602 else
5604 dataflow_set_merge (in, &VTI (e->src)->out);
5605 adjust = true;
5608 if (adjust)
5610 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5611 #if ENABLE_CHECKING
5612 /* Merge and merge_adjust should keep entries in
5613 canonical order. */
5614 htab_traverse (shared_hash_htab (in->vars),
5615 canonicalize_loc_order_check,
5616 in);
5617 #endif
5618 if (dst_can_be_shared)
5620 shared_hash_destroy (in->vars);
5621 in->vars = shared_hash_copy (first_out->vars);
5625 VTI (bb)->flooded = true;
5627 else
5629 /* Calculate the IN set as the union of predecessor OUT sets. */
5630 dataflow_set_clear (&VTI (bb)->in);
5631 FOR_EACH_EDGE (e, ei, bb->preds)
5632 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5635 changed = compute_bb_dataflow (bb);
5636 if (dump_file)
5637 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5638 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5640 if (changed)
5642 FOR_EACH_EDGE (e, ei, bb->succs)
5644 if (e->dest == EXIT_BLOCK_PTR)
5645 continue;
5647 if (TEST_BIT (visited, e->dest->index))
5649 if (!TEST_BIT (in_pending, e->dest->index))
5651 /* Send E->DEST to next round. */
5652 SET_BIT (in_pending, e->dest->index);
5653 fibheap_insert (pending,
5654 bb_order[e->dest->index],
5655 e->dest);
5658 else if (!TEST_BIT (in_worklist, e->dest->index))
5660 /* Add E->DEST to current round. */
5661 SET_BIT (in_worklist, e->dest->index);
5662 fibheap_insert (worklist, bb_order[e->dest->index],
5663 e->dest);
5668 if (dump_file)
5669 fprintf (dump_file,
5670 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5671 bb->index,
5672 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5673 oldinsz,
5674 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5675 oldoutsz,
5676 (int)worklist->nodes, (int)pending->nodes, htabsz);
5678 if (dump_file && (dump_flags & TDF_DETAILS))
5680 fprintf (dump_file, "BB %i IN:\n", bb->index);
5681 dump_dataflow_set (&VTI (bb)->in);
5682 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5683 dump_dataflow_set (&VTI (bb)->out);
5689 if (MAY_HAVE_DEBUG_INSNS)
5690 FOR_EACH_BB (bb)
5691 gcc_assert (VTI (bb)->flooded);
5693 VEC_free (rtx, heap, values_to_unmark);
5694 free (bb_order);
5695 fibheap_delete (worklist);
5696 fibheap_delete (pending);
5697 sbitmap_free (visited);
5698 sbitmap_free (in_worklist);
5699 sbitmap_free (in_pending);
5702 /* Print the content of the LIST to dump file. */
5704 static void
5705 dump_attrs_list (attrs list)
5707 for (; list; list = list->next)
5709 if (dv_is_decl_p (list->dv))
5710 print_mem_expr (dump_file, dv_as_decl (list->dv));
5711 else
5712 print_rtl_single (dump_file, dv_as_value (list->dv));
5713 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5715 fprintf (dump_file, "\n");
5718 /* Print the information about variable *SLOT to dump file. */
5720 static int
5721 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5723 variable var = (variable) *slot;
5725 dump_var (var);
5727 /* Continue traversing the hash table. */
5728 return 1;
5731 /* Print the information about variable VAR to dump file. */
5733 static void
5734 dump_var (variable var)
5736 int i;
5737 location_chain node;
5739 if (dv_is_decl_p (var->dv))
5741 const_tree decl = dv_as_decl (var->dv);
5743 if (DECL_NAME (decl))
5744 fprintf (dump_file, " name: %s",
5745 IDENTIFIER_POINTER (DECL_NAME (decl)));
5746 else
5747 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5748 if (dump_flags & TDF_UID)
5749 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5750 else
5751 fprintf (dump_file, "\n");
5753 else
5755 fputc (' ', dump_file);
5756 print_rtl_single (dump_file, dv_as_value (var->dv));
5759 for (i = 0; i < var->n_var_parts; i++)
5761 fprintf (dump_file, " offset %ld\n",
5762 (long) var->var_part[i].offset);
5763 for (node = var->var_part[i].loc_chain; node; node = node->next)
5765 fprintf (dump_file, " ");
5766 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5767 fprintf (dump_file, "[uninit]");
5768 print_rtl_single (dump_file, node->loc);
5773 /* Print the information about variables from hash table VARS to dump file. */
5775 static void
5776 dump_vars (htab_t vars)
5778 if (htab_elements (vars) > 0)
5780 fprintf (dump_file, "Variables:\n");
5781 htab_traverse (vars, dump_var_slot, NULL);
5785 /* Print the dataflow set SET to dump file. */
5787 static void
5788 dump_dataflow_set (dataflow_set *set)
5790 int i;
5792 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
5793 set->stack_adjust);
5794 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5796 if (set->regs[i])
5798 fprintf (dump_file, "Reg %d:", i);
5799 dump_attrs_list (set->regs[i]);
5802 dump_vars (shared_hash_htab (set->vars));
5803 fprintf (dump_file, "\n");
5806 /* Print the IN and OUT sets for each basic block to dump file. */
5808 static void
5809 dump_dataflow_sets (void)
5811 basic_block bb;
5813 FOR_EACH_BB (bb)
5815 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5816 fprintf (dump_file, "IN:\n");
5817 dump_dataflow_set (&VTI (bb)->in);
5818 fprintf (dump_file, "OUT:\n");
5819 dump_dataflow_set (&VTI (bb)->out);
5823 /* Add variable VAR to the hash table of changed variables and,
5824 if it has no locations, delete it from SET's hash table. */
5826 static void
5827 variable_was_changed (variable var, dataflow_set *set)
5829 hashval_t hash = dv_htab_hash (var->dv);
5831 if (emit_notes)
5833 void **slot;
5835 /* Remember this decl or VALUE has been added to changed_variables. */
5836 set_dv_changed (var->dv, true);
5838 slot = htab_find_slot_with_hash (changed_variables,
5839 var->dv,
5840 hash, INSERT);
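/* If the variable has no locations left, record an empty placeholder in
   changed_variables so that a note clearing its location is emitted,
   and drop the variable from SET below. */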
5842 if (set && var->n_var_parts == 0)
5844 variable empty_var;
5846 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5847 empty_var->dv = var->dv;
5848 empty_var->refcount = 1;
5849 empty_var->n_var_parts = 0;
5850 *slot = empty_var;
5851 goto drop_var;
5853 else
5855 var->refcount++;
5856 *slot = var;
5859 else
5861 gcc_assert (set);
5862 if (var->n_var_parts == 0)
5864 void **slot;
5866 drop_var:
5867 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5868 if (slot)
5870 if (shared_hash_shared (set->vars))
5871 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5872 NO_INSERT);
5873 htab_clear_slot (shared_hash_htab (set->vars), slot);
5879 /* Look for the index in VAR->var_part corresponding to OFFSET.
5880 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5881 referenced int will be set to the index that the part has or should
5882 have, if it should be inserted. */
5884 static inline int
5885 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5886 int *insertion_point)
5888 int pos, low, high;
5890 /* Find the location part. */
5891 low = 0;
5892 high = var->n_var_parts;
5893 while (low != high)
5895 pos = (low + high) / 2;
5896 if (var->var_part[pos].offset < offset)
5897 low = pos + 1;
5898 else
5899 high = pos;
5901 pos = low;
5903 if (insertion_point)
5904 *insertion_point = pos;
5906 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
5907 return pos;
5909 return -1;
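/* Set the location part of the variable in *SLOT of dataflow set SET:
   the part is identified by DV and OFFSET, LOC is its new location,
   INITIALIZED its initialization status and SET_SRC what it was copied
   from, if anything.  Keeps one-part location chains in canonical order,
   unshares the variable when needed, and returns the resulting slot. */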
5912 static void **
5913 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5914 decl_or_value dv, HOST_WIDE_INT offset,
5915 enum var_init_status initialized, rtx set_src)
5917 int pos;
5918 location_chain node, next;
5919 location_chain *nextp;
5920 variable var;
5921 bool onepart = dv_onepart_p (dv);
5923 gcc_assert (offset == 0 || !onepart);
5924 gcc_assert (loc != dv_as_opaque (dv));
5926 var = (variable) *slot;
5928 if (! flag_var_tracking_uninit)
5929 initialized = VAR_INIT_STATUS_INITIALIZED;
5931 if (!var)
5933 /* Create new variable information. */
5934 var = (variable) pool_alloc (dv_pool (dv));
5935 var->dv = dv;
5936 var->refcount = 1;
5937 var->n_var_parts = 1;
5938 var->var_part[0].offset = offset;
5939 var->var_part[0].loc_chain = NULL;
5940 var->var_part[0].cur_loc = NULL;
5941 *slot = var;
5942 pos = 0;
5943 nextp = &var->var_part[0].loc_chain;
5944 if (emit_notes && dv_is_value_p (dv))
5945 add_cselib_value_chains (dv);
5947 else if (onepart)
5949 int r = -1, c = 0;
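/* R becomes 0 if LOC is already present in the chain and positive once
   the first node that should follow LOC is found; C counts the nodes
   that canonically precede LOC, i.e. the insertion position. */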
5951 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5953 pos = 0;
5955 if (GET_CODE (loc) == VALUE)
5957 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5958 nextp = &node->next)
5959 if (GET_CODE (node->loc) == VALUE)
5961 if (node->loc == loc)
5963 r = 0;
5964 break;
5966 if (canon_value_cmp (node->loc, loc))
5967 c++;
5968 else
5970 r = 1;
5971 break;
5974 else if (REG_P (node->loc) || MEM_P (node->loc))
5975 c++;
5976 else
5978 r = 1;
5979 break;
5982 else if (REG_P (loc))
5984 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5985 nextp = &node->next)
5986 if (REG_P (node->loc))
5988 if (REGNO (node->loc) < REGNO (loc))
5989 c++;
5990 else
5992 if (REGNO (node->loc) == REGNO (loc))
5993 r = 0;
5994 else
5995 r = 1;
5996 break;
5999 else
6001 r = 1;
6002 break;
6005 else if (MEM_P (loc))
6007 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6008 nextp = &node->next)
6009 if (REG_P (node->loc))
6010 c++;
6011 else if (MEM_P (node->loc))
6013 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6014 break;
6015 else
6016 c++;
6018 else
6020 r = 1;
6021 break;
6024 else
6025 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6026 nextp = &node->next)
6027 if ((r = loc_cmp (node->loc, loc)) >= 0)
6028 break;
6029 else
6030 c++;
6032 if (r == 0)
6033 return slot;
6035 if (var->refcount > 1 || shared_hash_shared (set->vars))
6037 slot = unshare_variable (set, slot, var, initialized);
6038 var = (variable)*slot;
6039 for (nextp = &var->var_part[0].loc_chain; c;
6040 nextp = &(*nextp)->next)
6041 c--;
6042 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6045 else
6047 int inspos = 0;
6049 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6051 pos = find_variable_location_part (var, offset, &inspos);
6053 if (pos >= 0)
6055 node = var->var_part[pos].loc_chain;
6057 if (node
6058 && ((REG_P (node->loc) && REG_P (loc)
6059 && REGNO (node->loc) == REGNO (loc))
6060 || rtx_equal_p (node->loc, loc)))
6062 /* LOC is at the beginning of the chain, so we have nothing
6063 to do. */
6064 if (node->init < initialized)
6065 node->init = initialized;
6066 if (set_src != NULL)
6067 node->set_src = set_src;
6069 return slot;
6071 else
6073 /* We have to make a copy of a shared variable. */
6074 if (var->refcount > 1 || shared_hash_shared (set->vars))
6076 slot = unshare_variable (set, slot, var, initialized);
6077 var = (variable)*slot;
6081 else
6083 /* We have not found the location part, so a new one will be created. */
6085 /* We have to make a copy of the shared variable. */
6086 if (var->refcount > 1 || shared_hash_shared (set->vars))
6088 slot = unshare_variable (set, slot, var, initialized);
6089 var = (variable)*slot;
6092 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
6093 thus there are at most MAX_VAR_PARTS different offsets. */
6094 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6095 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6097 /* We have to move the elements of the array starting at index
6098 INSPOS to the next position. */
6099 for (pos = var->n_var_parts; pos > inspos; pos--)
6100 var->var_part[pos] = var->var_part[pos - 1];
6102 var->n_var_parts++;
6103 var->var_part[pos].offset = offset;
6104 var->var_part[pos].loc_chain = NULL;
6105 var->var_part[pos].cur_loc = NULL;
6108 /* Delete the location from the list. */
6109 nextp = &var->var_part[pos].loc_chain;
6110 for (node = var->var_part[pos].loc_chain; node; node = next)
6112 next = node->next;
6113 if ((REG_P (node->loc) && REG_P (loc)
6114 && REGNO (node->loc) == REGNO (loc))
6115 || rtx_equal_p (node->loc, loc))
6117 /* Save these values, to assign to the new node, before
6118 deleting this one. */
6119 if (node->init > initialized)
6120 initialized = node->init;
6121 if (node->set_src != NULL && set_src == NULL)
6122 set_src = node->set_src;
6123 pool_free (loc_chain_pool, node);
6124 *nextp = next;
6125 break;
6127 else
6128 nextp = &node->next;
6131 nextp = &var->var_part[pos].loc_chain;
6134 /* Add the location to the beginning. */
6135 node = (location_chain) pool_alloc (loc_chain_pool);
6136 node->loc = loc;
6137 node->init = initialized;
6138 node->set_src = set_src;
6139 node->next = *nextp;
6140 *nextp = node;
6142 if (onepart && emit_notes)
6143 add_value_chains (var->dv, loc);
6145 /* If no location has been emitted for this part yet, do so now. */
6146 if (var->var_part[pos].cur_loc == NULL)
6148 var->var_part[pos].cur_loc = loc;
6149 variable_was_changed (var, set);
6152 return slot;
6155 /* Set the part of variable's location in the dataflow set SET. The
6156 variable part is specified by variable's declaration in DV and
6157 offset OFFSET and the part's location by LOC. IOPT should be
6158 NO_INSERT if the variable is known to be in SET already and the
6159 variable hash table must not be resized, and INSERT otherwise. */
6161 static void
6162 set_variable_part (dataflow_set *set, rtx loc,
6163 decl_or_value dv, HOST_WIDE_INT offset,
6164 enum var_init_status initialized, rtx set_src,
6165 enum insert_option iopt)
6167 void **slot;
6169 if (iopt == NO_INSERT)
6170 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6171 else
6173 slot = shared_hash_find_slot (set->vars, dv);
6174 if (!slot)
6175 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6177 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6180 /* Remove all recorded register locations for the given variable part
6181 from dataflow set SET, except for those that are identical to loc.
6182 The variable part is specified by variable's declaration or value
6183 DV and offset OFFSET. */
6185 static void **
6186 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6187 HOST_WIDE_INT offset, rtx set_src)
6189 variable var = (variable) *slot;
6190 int pos = find_variable_location_part (var, offset, NULL);
6192 if (pos >= 0)
6194 location_chain node, next;
6196 /* Remove the register locations from the dataflow set. */
6197 next = var->var_part[pos].loc_chain;
6198 for (node = next; node; node = next)
6200 next = node->next;
6201 if (node->loc != loc
6202 && (!flag_var_tracking_uninit
6203 || !set_src
6204 || MEM_P (set_src)
6205 || !rtx_equal_p (set_src, node->set_src)))
6207 if (REG_P (node->loc))
6209 attrs anode, anext;
6210 attrs *anextp;
6212 /* Remove the variable part from the register's
6213 list, but preserve any other variable parts
6214 that might be regarded as live in that same
6215 register. */
6216 anextp = &set->regs[REGNO (node->loc)];
6217 for (anode = *anextp; anode; anode = anext)
6219 anext = anode->next;
6220 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6221 && anode->offset == offset)
6223 pool_free (attrs_pool, anode);
6224 *anextp = anext;
6226 else
6227 anextp = &anode->next;
6231 slot = delete_slot_part (set, node->loc, slot, offset);
6236 return slot;
6239 /* Remove all recorded register locations for the given variable part
6240 from dataflow set SET, except for those that are identical to loc.
6241 The variable part is specified by variable's declaration or value
6242 DV and offset OFFSET. */
6244 static void
6245 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6246 HOST_WIDE_INT offset, rtx set_src)
6248 void **slot;
6250 if (!dv_as_opaque (dv)
6251 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6252 return;
6254 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6255 if (!slot)
6256 return;
6258 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6261 /* Delete the part of variable's location from dataflow set SET. The
6262 variable part is specified by its SET->vars slot SLOT and offset
6263 OFFSET and the part's location by LOC. */
6265 static void **
6266 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6267 HOST_WIDE_INT offset)
6269 variable var = (variable) *slot;
6270 int pos = find_variable_location_part (var, offset, NULL);
6272 if (pos >= 0)
6274 location_chain node, next;
6275 location_chain *nextp;
6276 bool changed;
6278 if (var->refcount > 1 || shared_hash_shared (set->vars))
6280 /* If the shared variable's chain contains LOC, we have to
6281 make a private copy of the variable before deleting from it. */
6282 for (node = var->var_part[pos].loc_chain; node;
6283 node = node->next)
6285 if ((REG_P (node->loc) && REG_P (loc)
6286 && REGNO (node->loc) == REGNO (loc))
6287 || rtx_equal_p (node->loc, loc))
6289 slot = unshare_variable (set, slot, var,
6290 VAR_INIT_STATUS_UNKNOWN);
6291 var = (variable)*slot;
6292 break;
6297 /* Delete the location part. */
6298 nextp = &var->var_part[pos].loc_chain;
6299 for (node = *nextp; node; node = next)
6301 next = node->next;
6302 if ((REG_P (node->loc) && REG_P (loc)
6303 && REGNO (node->loc) == REGNO (loc))
6304 || rtx_equal_p (node->loc, loc))
6306 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6307 remove_value_chains (var->dv, node->loc);
6308 pool_free (loc_chain_pool, node);
6309 *nextp = next;
6310 break;
6312 else
6313 nextp = &node->next;
6316 /* If we have deleted the location which was last emitted
6317 we have to emit a new location, so add the variable to the set
6318 of changed variables. */
6319 if (var->var_part[pos].cur_loc
6320 && ((REG_P (loc)
6321 && REG_P (var->var_part[pos].cur_loc)
6322 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6323 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6325 changed = true;
6326 if (var->var_part[pos].loc_chain)
6327 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
6329 else
6330 changed = false;
6332 if (var->var_part[pos].loc_chain == NULL)
6334 gcc_assert (changed);
6335 var->n_var_parts--;
6336 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6337 remove_cselib_value_chains (var->dv);
6338 while (pos < var->n_var_parts)
6340 var->var_part[pos] = var->var_part[pos + 1];
6341 pos++;
6344 if (changed)
6345 variable_was_changed (var, set);
6348 return slot;
6351 /* Delete the part of variable's location from dataflow set SET. The
6352 variable part is specified by variable's declaration or value DV
6353 and offset OFFSET and the part's location by LOC. */
6355 static void
6356 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6357 HOST_WIDE_INT offset)
6359 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6360 if (!slot)
6361 return;
6363 slot = delete_slot_part (set, loc, slot, offset);
6366 /* Callback for cselib_expand_value, that looks for expressions
6367 holding the value in the var-tracking hash tables. Return X for
6368 standard processing; anything else is used as-is. */
6370 static rtx
6371 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6373 htab_t vars = (htab_t)data;
6374 decl_or_value dv;
6375 variable var;
6376 location_chain loc;
6377 rtx result, subreg, xret;
6379 switch (GET_CODE (x))
6381 case SUBREG:
6382 subreg = SUBREG_REG (x);
6384 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6385 return x;
6387 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6388 max_depth - 1,
6389 vt_expand_loc_callback, data);
6391 if (!subreg)
6392 return NULL;
6394 result = simplify_gen_subreg (GET_MODE (x), subreg,
6395 GET_MODE (SUBREG_REG (x)),
6396 SUBREG_BYTE (x));
6398 /* Invalid SUBREGs are ok in debug info. ??? We could try
6399 alternate expansions for the VALUE as well. */
6400 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6401 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6403 return result;
6405 case DEBUG_EXPR:
6406 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6407 xret = NULL;
6408 break;
6410 case VALUE:
6411 dv = dv_from_value (x);
6412 xret = x;
6413 break;
6415 default:
6416 return x;
6419 if (VALUE_RECURSED_INTO (x))
6420 return NULL;
6422 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6424 if (!var)
6425 return xret;
6427 if (var->n_var_parts == 0)
6428 return xret;
6430 gcc_assert (var->n_var_parts == 1);
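/* Mark X as being expanded so that cyclic VALUE equivalences do not
   recurse forever, then try each location in the chain until one of
   them expands successfully. */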
6432 VALUE_RECURSED_INTO (x) = true;
6433 result = NULL;
6435 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6437 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6438 vt_expand_loc_callback, vars);
6439 if (result)
6440 break;
6443 VALUE_RECURSED_INTO (x) = false;
6444 if (result)
6445 return result;
6446 else
6447 return xret;
6450 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6451 tables. */
6453 static rtx
6454 vt_expand_loc (rtx loc, htab_t vars)
6456 if (!MAY_HAVE_DEBUG_INSNS)
6457 return loc;
6459 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6460 vt_expand_loc_callback, vars);
6462 if (loc && MEM_P (loc))
6463 loc = targetm.delegitimize_address (loc);
6465 return loc;
6468 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6469 additional parameters: WHERE specifies whether the note shall be emitted
6470 before or after instruction INSN. */
6472 static int
6473 emit_note_insn_var_location (void **varp, void *data)
6475 variable var = (variable) *varp;
6476 rtx insn = ((emit_note_data *)data)->insn;
6477 enum emit_note_where where = ((emit_note_data *)data)->where;
6478 htab_t vars = ((emit_note_data *)data)->vars;
6479 rtx note;
6480 int i, j, n_var_parts;
6481 bool complete;
6482 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6483 HOST_WIDE_INT last_limit;
6484 tree type_size_unit;
6485 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6486 rtx loc[MAX_VAR_PARTS];
6487 tree decl;
6489 if (dv_is_value_p (var->dv))
6490 goto clear;
6492 decl = dv_as_decl (var->dv);
6494 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6495 goto clear;
6497 gcc_assert (decl);
6499 complete = true;
6500 last_limit = 0;
6501 n_var_parts = 0;
6502 for (i = 0; i < var->n_var_parts; i++)
6504 enum machine_mode mode, wider_mode;
6505 rtx loc2;
6507 if (last_limit < var->var_part[i].offset)
6509 complete = false;
6510 break;
6512 else if (last_limit > var->var_part[i].offset)
6513 continue;
6514 offsets[n_var_parts] = var->var_part[i].offset;
6515 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6516 if (!loc2)
6518 complete = false;
6519 continue;
6521 loc[n_var_parts] = loc2;
6522 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6523 initialized = var->var_part[i].loc_chain->init;
6524 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6526 /* Attempt to merge adjacent registers or memory. */
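/* Two consecutive parts can be merged when they have the same mode,
   contiguous offsets, and locations that are either consecutive hard
   registers or adjacent memory; the pair is then described by a single
   location in the next wider mode. */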
6527 wider_mode = GET_MODE_WIDER_MODE (mode);
6528 for (j = i + 1; j < var->n_var_parts; j++)
6529 if (last_limit <= var->var_part[j].offset)
6530 break;
6531 if (j < var->n_var_parts
6532 && wider_mode != VOIDmode
6533 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6534 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6535 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6536 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6537 && last_limit == var->var_part[j].offset)
6539 rtx new_loc = NULL;
6541 if (REG_P (loc[n_var_parts])
6542 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6543 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6544 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6545 == REGNO (loc2))
6547 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6548 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6549 mode, 0);
6550 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6551 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6552 if (new_loc)
6554 if (!REG_P (new_loc)
6555 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6556 new_loc = NULL;
6557 else
6558 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
6561 else if (MEM_P (loc[n_var_parts])
6562 && GET_CODE (XEXP (loc2, 0)) == PLUS
6563 && REG_P (XEXP (XEXP (loc2, 0), 0))
6564 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6566 if ((REG_P (XEXP (loc[n_var_parts], 0))
6567 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6568 XEXP (XEXP (loc2, 0), 0))
6569 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6570 == GET_MODE_SIZE (mode))
6571 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6572 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6573 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6574 XEXP (XEXP (loc2, 0), 0))
6575 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6576 + GET_MODE_SIZE (mode)
6577 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6578 new_loc = adjust_address_nv (loc[n_var_parts],
6579 wider_mode, 0);
6582 if (new_loc)
6584 loc[n_var_parts] = new_loc;
6585 mode = wider_mode;
6586 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6587 i = j;
6590 ++n_var_parts;
6592 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6593 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6594 complete = false;
6596 if (where != EMIT_NOTE_BEFORE_INSN)
6598 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6599 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6600 NOTE_DURING_CALL_P (note) = true;
6602 else
6603 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6605 if (! flag_var_tracking_uninit)
6606 initialized = VAR_INIT_STATUS_INITIALIZED;
6608 if (!complete)
6610 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6611 NULL_RTX, (int) initialized);
6613 else if (n_var_parts == 1)
6615 rtx expr_list
6616 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6618 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6619 expr_list,
6620 (int) initialized);
6622 else if (n_var_parts)
6624 rtx parallel;
6626 for (i = 0; i < n_var_parts; i++)
6627 loc[i]
6628 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6630 parallel = gen_rtx_PARALLEL (VOIDmode,
6631 gen_rtvec_v (n_var_parts, loc));
6632 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6633 parallel,
6634 (int) initialized);
6637 clear:
6638 set_dv_changed (var->dv, false);
6639 htab_clear_slot (changed_variables, varp);
6641 /* Continue traversing the hash table. */
6642 return 1;
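/* Illustrative sketch, not part of this file: the register/memory
   merging in emit_note_insn_var_location above combines two adjacent
   variable parts into one wider location when the second part starts
   exactly where the first ends and the two locations are consecutive.
   The same idea on a plain offset/size array, with hypothetical types
   (loc_id is a stand-in for the RTL location):  */

struct var_part_sketch
{
  long offset;			/* Byte offset within the variable.  */
  long size;			/* Size of this part in bytes.  */
  int loc_id;			/* Stand-in for the RTL location.  */
};

/* Return true if A and B can be described by a single wider location:
   B starts exactly where A ends and the locations are adjacent
   (approximated here by consecutive ids).  */

static bool
parts_mergeable_p (const struct var_part_sketch *a,
		   const struct var_part_sketch *b)
{
  return a->offset + a->size == b->offset && a->loc_id + 1 == b->loc_id;
}

/* Merge adjacent mergeable parts of PARTS in place; return the new
   number of parts.  */

static int
merge_parts (struct var_part_sketch *parts, int n)
{
  int i, n_out = 0;

  for (i = 0; i < n; i++)
    {
      parts[n_out] = parts[i];
      while (i + 1 < n && parts_mergeable_p (&parts[n_out], &parts[i + 1]))
        {
          parts[n_out].size += parts[i + 1].size;
          i++;
        }
      n_out++;
    }

  return n_out;
}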
6645 DEF_VEC_P (variable);
6646 DEF_VEC_ALLOC_P (variable, heap);
6648 /* Stack of variable_def pointers that need processing with
6649 check_changed_vars_2. */
6651 static VEC (variable, heap) *changed_variables_stack;
6653 /* Populate changed_variables_stack with variable_def pointers
6654 that need variable_was_changed called on them. */
6656 static int
6657 check_changed_vars_1 (void **slot, void *data)
6659 variable var = (variable) *slot;
6660 htab_t htab = (htab_t) data;
6662 if (dv_is_value_p (var->dv))
6664 value_chain vc
6665 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6666 dv_htab_hash (var->dv));
6668 if (vc == NULL)
6669 return 1;
6670 for (vc = vc->next; vc; vc = vc->next)
6671 if (!dv_changed_p (vc->dv))
6673 variable vcvar
6674 = (variable) htab_find_with_hash (htab, vc->dv,
6675 dv_htab_hash (vc->dv));
6676 if (vcvar)
6677 VEC_safe_push (variable, heap, changed_variables_stack,
6678 vcvar);
6681 return 1;
6684 /* Add VAR to changed_variables and also for VALUEs add recursively
6685 all DVs that aren't in changed_variables yet but reference the
6686 VALUE from its loc_chain. */
6688 static void
6689 check_changed_vars_2 (variable var, htab_t htab)
6691 variable_was_changed (var, NULL);
6692 if (dv_is_value_p (var->dv))
6694 value_chain vc
6695 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6696 dv_htab_hash (var->dv));
6698 if (vc == NULL)
6699 return;
6700 for (vc = vc->next; vc; vc = vc->next)
6701 if (!dv_changed_p (vc->dv))
6703 variable vcvar
6704 = (variable) htab_find_with_hash (htab, vc->dv,
6705 dv_htab_hash (vc->dv));
6706 if (vcvar)
6707 check_changed_vars_2 (vcvar, htab);
6712 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
6713 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6714    shall be emitted before or after instruction INSN.  */
6716 static void
6717 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6718 shared_hash vars)
6720 emit_note_data data;
6721 htab_t htab = shared_hash_htab (vars);
6723 if (!htab_elements (changed_variables))
6724 return;
6726 if (MAY_HAVE_DEBUG_INSNS)
6728 /* Unfortunately this has to be done in two steps, because
6729 we can't traverse a hashtab into which we are inserting
6730 through variable_was_changed. */
6731 htab_traverse (changed_variables, check_changed_vars_1, htab);
6732 while (VEC_length (variable, changed_variables_stack) > 0)
6733 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6734 htab);
6737 data.insn = insn;
6738 data.where = where;
6739 data.vars = htab;
6741 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
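/* Illustrative sketch, not part of this file: the two-step scheme above
   exists because variable_was_changed may insert into
   changed_variables, and a hash table must not be modified while it is
   being traversed; check_changed_vars_1 therefore only collects work
   into changed_variables_stack and check_changed_vars_2 performs it
   afterwards.  A generic collect-then-process worklist, with
   hypothetical names:  */

struct work_item
{
  bool needs_processing;
};

static void
process_all (struct work_item *items, int n)
{
  struct work_item **worklist = XNEWVEC (struct work_item *, n);
  int top = 0, i;

  /* Pass 1: only collect; do not mutate the container being walked.  */
  for (i = 0; i < n; i++)
    if (items[i].needs_processing)
      worklist[top++] = &items[i];

  /* Pass 2: now it is safe to do work that may add or remove entries,
     since the walk over the original container has finished.  */
  while (top > 0)
    {
      struct work_item *it = worklist[--top];
      it->needs_processing = false;
      /* ... the real work would go here ...  */
    }

  XDELETEVEC (worklist);
}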
6744 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6745 same variable in hash table DATA or is not there at all. */
6747 static int
6748 emit_notes_for_differences_1 (void **slot, void *data)
6750 htab_t new_vars = (htab_t) data;
6751 variable old_var, new_var;
6753 old_var = (variable) *slot;
6754 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6755 dv_htab_hash (old_var->dv));
6757 if (!new_var)
6759 /* Variable has disappeared. */
6760 variable empty_var;
6762 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6763 empty_var->dv = old_var->dv;
6764 empty_var->refcount = 0;
6765 empty_var->n_var_parts = 0;
6766 if (dv_onepart_p (old_var->dv))
6768 location_chain lc;
6770 gcc_assert (old_var->n_var_parts == 1);
6771 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6772 remove_value_chains (old_var->dv, lc->loc);
6773 if (dv_is_value_p (old_var->dv))
6774 remove_cselib_value_chains (old_var->dv);
6776 variable_was_changed (empty_var, NULL);
6778 else if (variable_different_p (old_var, new_var, true))
6780 if (dv_onepart_p (old_var->dv))
6782 location_chain lc1, lc2;
6784 gcc_assert (old_var->n_var_parts == 1);
6785 gcc_assert (new_var->n_var_parts == 1);
6786 lc1 = old_var->var_part[0].loc_chain;
6787 lc2 = new_var->var_part[0].loc_chain;
6788 while (lc1
6789 && lc2
6790 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6791 || rtx_equal_p (lc1->loc, lc2->loc)))
6793 lc1 = lc1->next;
6794 lc2 = lc2->next;
6796 for (; lc2; lc2 = lc2->next)
6797 add_value_chains (old_var->dv, lc2->loc);
6798 for (; lc1; lc1 = lc1->next)
6799 remove_value_chains (old_var->dv, lc1->loc);
6801 variable_was_changed (new_var, NULL);
6804 /* Continue traversing the hash table. */
6805 return 1;
6808 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6809 table DATA. */
6811 static int
6812 emit_notes_for_differences_2 (void **slot, void *data)
6814 htab_t old_vars = (htab_t) data;
6815 variable old_var, new_var;
6817 new_var = (variable) *slot;
6818 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6819 dv_htab_hash (new_var->dv));
6820 if (!old_var)
6822 /* Variable has appeared. */
6823 if (dv_onepart_p (new_var->dv))
6825 location_chain lc;
6827 gcc_assert (new_var->n_var_parts == 1);
6828 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6829 add_value_chains (new_var->dv, lc->loc);
6830 if (dv_is_value_p (new_var->dv))
6831 add_cselib_value_chains (new_var->dv);
6833 variable_was_changed (new_var, NULL);
6836 /* Continue traversing the hash table. */
6837 return 1;
6840 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6841 NEW_SET. */
6843 static void
6844 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6845 dataflow_set *new_set)
6847 htab_traverse (shared_hash_htab (old_set->vars),
6848 emit_notes_for_differences_1,
6849 shared_hash_htab (new_set->vars));
6850 htab_traverse (shared_hash_htab (new_set->vars),
6851 emit_notes_for_differences_2,
6852 shared_hash_htab (old_set->vars));
6853 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
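/* Illustrative sketch, not part of this file: emit_notes_for_differences
   above is a symmetric diff of two variable tables; the first traversal
   finds variables that disappeared or changed, the second finds ones
   that only exist in the new table.  The same two-pass shape over plain
   integer sets, with hypothetical names (the real code queues each hit
   via variable_was_changed instead of counting):  */

/* Crude stand-in for htab_find_with_hash: linear membership test.  */

static bool
set_contains (const int *set, int n, int id)
{
  int i;

  for (i = 0; i < n; i++)
    if (set[i] == id)
      return true;
  return false;
}

static void
diff_sets (const int *old_set, int n_old, const int *new_set, int n_new,
	   int *n_disappeared, int *n_appeared)
{
  int i;

  *n_disappeared = *n_appeared = 0;

  /* Pass 1: entries of the old set missing from the new one.  */
  for (i = 0; i < n_old; i++)
    if (!set_contains (new_set, n_new, old_set[i]))
      (*n_disappeared)++;

  /* Pass 2: entries of the new set missing from the old one.  */
  for (i = 0; i < n_new; i++)
    if (!set_contains (old_set, n_old, new_set[i]))
      (*n_appeared)++;
}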
6856 /* Emit the notes for changes of location parts in the basic block BB. */
6858 static void
6859 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6861 int i;
6863 dataflow_set_clear (set);
6864 dataflow_set_copy (set, &VTI (bb)->in);
6866 for (i = 0; i < VTI (bb)->n_mos; i++)
6868 rtx insn = VTI (bb)->mos[i].insn;
6870 switch (VTI (bb)->mos[i].type)
6872 case MO_CALL:
6873 dataflow_set_clear_at_call (set);
6874 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
6875 break;
6877 case MO_USE:
6879 rtx loc = VTI (bb)->mos[i].u.loc;
6881 if (REG_P (loc))
6882 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6883 else
6884 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6886 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6888 break;
6890 case MO_VAL_LOC:
6892 rtx loc = VTI (bb)->mos[i].u.loc;
6893 rtx val, vloc;
6894 tree var;
6896 if (GET_CODE (loc) == CONCAT)
6898 val = XEXP (loc, 0);
6899 vloc = XEXP (loc, 1);
6901 else
6903 val = NULL_RTX;
6904 vloc = loc;
6907 var = PAT_VAR_LOCATION_DECL (vloc);
6909 clobber_variable_part (set, NULL_RTX,
6910 dv_from_decl (var), 0, NULL_RTX);
6911 if (val)
6913 if (VAL_NEEDS_RESOLUTION (loc))
6914 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6915 set_variable_part (set, val, dv_from_decl (var), 0,
6916 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6917 INSERT);
6920 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6922 break;
6924 case MO_VAL_USE:
6926 rtx loc = VTI (bb)->mos[i].u.loc;
6927 rtx val, vloc, uloc;
6929 vloc = uloc = XEXP (loc, 1);
6930 val = XEXP (loc, 0);
6932 if (GET_CODE (val) == CONCAT)
6934 uloc = XEXP (val, 1);
6935 val = XEXP (val, 0);
6938 if (VAL_NEEDS_RESOLUTION (loc))
6939 val_resolve (set, val, vloc, insn);
6940 else
6941 val_store (set, val, uloc, insn, false);
6943 if (VAL_HOLDS_TRACK_EXPR (loc))
6945 if (GET_CODE (uloc) == REG)
6946 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6947 NULL);
6948 else if (GET_CODE (uloc) == MEM)
6949 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6950 NULL);
6953 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
6955 break;
6957 case MO_VAL_SET:
6959 rtx loc = VTI (bb)->mos[i].u.loc;
6960 rtx val, vloc, uloc;
6962 vloc = uloc = XEXP (loc, 1);
6963 val = XEXP (loc, 0);
6965 if (GET_CODE (val) == CONCAT)
6967 vloc = XEXP (val, 1);
6968 val = XEXP (val, 0);
6971 if (GET_CODE (vloc) == SET)
6973 rtx vsrc = SET_SRC (vloc);
6975 gcc_assert (val != vsrc);
6976 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6978 vloc = SET_DEST (vloc);
6980 if (VAL_NEEDS_RESOLUTION (loc))
6981 val_resolve (set, val, vsrc, insn);
6983 else if (VAL_NEEDS_RESOLUTION (loc))
6985 gcc_assert (GET_CODE (uloc) == SET
6986 && GET_CODE (SET_SRC (uloc)) == REG);
6987 val_resolve (set, val, SET_SRC (uloc), insn);
6990 if (VAL_HOLDS_TRACK_EXPR (loc))
6992 if (VAL_EXPR_IS_CLOBBERED (loc))
6994 if (REG_P (uloc))
6995 var_reg_delete (set, uloc, true);
6996 else if (MEM_P (uloc))
6997 var_mem_delete (set, uloc, true);
6999 else
7001 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7002 rtx set_src = NULL;
7003 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7005 if (GET_CODE (uloc) == SET)
7007 set_src = SET_SRC (uloc);
7008 uloc = SET_DEST (uloc);
7011 if (copied_p)
7013 status = find_src_status (set, set_src);
7015 set_src = find_src_set_src (set, set_src);
7018 if (REG_P (uloc))
7019 var_reg_delete_and_set (set, uloc, !copied_p,
7020 status, set_src);
7021 else if (MEM_P (uloc))
7022 var_mem_delete_and_set (set, uloc, !copied_p,
7023 status, set_src);
7026 else if (REG_P (uloc))
7027 var_regno_delete (set, REGNO (uloc));
7029 val_store (set, val, vloc, insn, true);
7031 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7032 set->vars);
7034 break;
7036 case MO_SET:
7038 rtx loc = VTI (bb)->mos[i].u.loc;
7039 rtx set_src = NULL;
7041 if (GET_CODE (loc) == SET)
7043 set_src = SET_SRC (loc);
7044 loc = SET_DEST (loc);
7047 if (REG_P (loc))
7048 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7049 set_src);
7050 else
7051 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7052 set_src);
7054 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7055 set->vars);
7057 break;
7059 case MO_COPY:
7061 rtx loc = VTI (bb)->mos[i].u.loc;
7062 enum var_init_status src_status;
7063 rtx set_src = NULL;
7065 if (GET_CODE (loc) == SET)
7067 set_src = SET_SRC (loc);
7068 loc = SET_DEST (loc);
7071 src_status = find_src_status (set, set_src);
7072 set_src = find_src_set_src (set, set_src);
7074 if (REG_P (loc))
7075 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7076 else
7077 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7079 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7080 set->vars);
7082 break;
7084 case MO_USE_NO_VAR:
7086 rtx loc = VTI (bb)->mos[i].u.loc;
7088 if (REG_P (loc))
7089 var_reg_delete (set, loc, false);
7090 else
7091 var_mem_delete (set, loc, false);
7093 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7095 break;
7097 case MO_CLOBBER:
7099 rtx loc = VTI (bb)->mos[i].u.loc;
7101 if (REG_P (loc))
7102 var_reg_delete (set, loc, true);
7103 else
7104 var_mem_delete (set, loc, true);
7106 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7107 set->vars);
7109 break;
7111 case MO_ADJUST:
7112 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7113 break;
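/* Summary of where the notes land for the micro operations handled
   above: plain uses (MO_USE, MO_VAL_LOC, MO_USE_NO_VAR) emit after the
   insn, MO_VAL_USE emits before it, stores and clobbers (MO_SET,
   MO_COPY, MO_VAL_SET, MO_CLOBBER) emit before the following insn,
   i.e. once the effect of the store is visible, MO_CALL emits a note
   marked NOTE_DURING_CALL_P right after the call, and MO_ADJUST only
   updates the tracked stack offset without emitting anything.  */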
7118 /* Emit notes for the whole function. */
7120 static void
7121 vt_emit_notes (void)
7123 basic_block bb;
7124 dataflow_set cur;
7126 gcc_assert (!htab_elements (changed_variables));
7128 /* Free memory occupied by the out hash tables, as they aren't used
7129 anymore. */
7130 FOR_EACH_BB (bb)
7131 dataflow_set_clear (&VTI (bb)->out);
7133 /* Enable emitting notes by functions (mainly by set_variable_part and
7134 delete_variable_part). */
7135 emit_notes = true;
7137 if (MAY_HAVE_DEBUG_INSNS)
7138 changed_variables_stack = VEC_alloc (variable, heap, 40);
7140 dataflow_set_init (&cur);
7142 FOR_EACH_BB (bb)
7144 /* Emit the notes for changes of variable locations between two
7145 subsequent basic blocks. */
7146 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7148 /* Emit the notes for the changes in the basic block itself. */
7149 emit_notes_in_bb (bb, &cur);
7151          /* Free memory occupied by the in hash table, as we won't need it
7152             again.  */
7153 dataflow_set_clear (&VTI (bb)->in);
7155 #ifdef ENABLE_CHECKING
7156 htab_traverse (shared_hash_htab (cur.vars),
7157 emit_notes_for_differences_1,
7158 shared_hash_htab (empty_shared_hash));
7159 if (MAY_HAVE_DEBUG_INSNS)
7160 gcc_assert (htab_elements (value_chains) == 0);
7161 #endif
7162 dataflow_set_destroy (&cur);
7164 if (MAY_HAVE_DEBUG_INSNS)
7165 VEC_free (variable, heap, changed_variables_stack);
7167 emit_notes = false;
7170    /* If there is a declaration and offset associated with register or memory RTL,
7171       assign the declaration to *DECLP and the offset to *OFFSETP, and return true.  */
7173 static bool
7174 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7176 if (REG_P (rtl))
7178 if (REG_ATTRS (rtl))
7180 *declp = REG_EXPR (rtl);
7181 *offsetp = REG_OFFSET (rtl);
7182 return true;
7185 else if (MEM_P (rtl))
7187 if (MEM_ATTRS (rtl))
7189 *declp = MEM_EXPR (rtl);
7190 *offsetp = INT_MEM_OFFSET (rtl);
7191 return true;
7194 return false;
7197 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
7199 static void
7200 vt_add_function_parameters (void)
7202 tree parm;
7204 for (parm = DECL_ARGUMENTS (current_function_decl);
7205 parm; parm = TREE_CHAIN (parm))
7207 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7208 rtx incoming = DECL_INCOMING_RTL (parm);
7209 tree decl;
7210 enum machine_mode mode;
7211 HOST_WIDE_INT offset;
7212 dataflow_set *out;
7213 decl_or_value dv;
7215 if (TREE_CODE (parm) != PARM_DECL)
7216 continue;
7218 if (!DECL_NAME (parm))
7219 continue;
7221 if (!decl_rtl || !incoming)
7222 continue;
7224 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7225 continue;
7227 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7229 if (REG_P (incoming) || MEM_P (incoming))
7231              /* This means the argument is passed by invisible reference.  */
7232 offset = 0;
7233 decl = parm;
7234 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7236 else
7238 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7239 continue;
7240 offset += byte_lowpart_offset (GET_MODE (incoming),
7241 GET_MODE (decl_rtl));
7245 if (!decl)
7246 continue;
7248 if (parm != decl)
7250 /* Assume that DECL_RTL was a pseudo that got spilled to
7251 memory. The spill slot sharing code will force the
7252 memory to reference spill_slot_decl (%sfp), so we don't
7253 match above. That's ok, the pseudo must have referenced
7254 the entire parameter, so just reset OFFSET. */
7255 gcc_assert (decl == get_spill_slot_decl (false));
7256 offset = 0;
7259 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7260 continue;
7262 out = &VTI (ENTRY_BLOCK_PTR)->out;
7264 dv = dv_from_decl (parm);
7266 if (target_for_debug_bind (parm)
7267 /* We can't deal with these right now, because this kind of
7268 variable is single-part. ??? We could handle parallels
7269 that describe multiple locations for the same single
7270 value, but ATM we don't. */
7271 && GET_CODE (incoming) != PARALLEL)
7273 cselib_val *val;
7275 /* ??? We shouldn't ever hit this, but it may happen because
7276 arguments passed by invisible reference aren't dealt with
7277 above: incoming-rtl will have Pmode rather than the
7278 expected mode for the type. */
7279 if (offset)
7280 continue;
7282 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7284 /* ??? Float-typed values in memory are not handled by
7285 cselib. */
7286 if (val)
7288 cselib_preserve_value (val);
7289 set_variable_part (out, val->val_rtx, dv, offset,
7290 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7291 dv = dv_from_value (val->val_rtx);
7295 if (REG_P (incoming))
7297 incoming = var_lowpart (mode, incoming);
7298 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7299 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7300 incoming);
7301 set_variable_part (out, incoming, dv, offset,
7302 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7304 else if (MEM_P (incoming))
7306 incoming = var_lowpart (mode, incoming);
7307 set_variable_part (out, incoming, dv, offset,
7308 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7312 if (MAY_HAVE_DEBUG_INSNS)
7314 cselib_preserve_only_values (true);
7315 cselib_reset_table (cselib_get_next_uid ());
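/* For example, for a parameter arriving in a hard register the loop
   above records the (register, decl, offset) binding in the register's
   attribute list of ENTRY_BLOCK's out set and adds the register as the
   parameter's initial location, so the dataflow propagation starts with
   the parameter already bound to its incoming location.  */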
7320 /* Allocate and initialize the data structures for variable tracking
7321 and parse the RTL to get the micro operations. */
7323 static void
7324 vt_initialize (void)
7326 basic_block bb;
7328 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7330 if (MAY_HAVE_DEBUG_INSNS)
7332 cselib_init (true);
7333 scratch_regs = BITMAP_ALLOC (NULL);
7334 valvar_pool = create_alloc_pool ("small variable_def pool",
7335 sizeof (struct variable_def), 256);
7337 else
7339 scratch_regs = NULL;
7340 valvar_pool = NULL;
7343 FOR_EACH_BB (bb)
7345 rtx insn;
7346 HOST_WIDE_INT pre, post = 0;
7347 int count;
7348 unsigned int next_uid_before = cselib_get_next_uid ();
7349 unsigned int next_uid_after = next_uid_before;
7351 if (MAY_HAVE_DEBUG_INSNS)
7353 cselib_record_sets_hook = count_with_sets;
7354 if (dump_file && (dump_flags & TDF_DETAILS))
7355 fprintf (dump_file, "first value: %i\n",
7356 cselib_get_next_uid ());
7359 /* Count the number of micro operations. */
7360 VTI (bb)->n_mos = 0;
7361 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7362 insn = NEXT_INSN (insn))
7364 if (INSN_P (insn))
7366 if (!frame_pointer_needed)
7368 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7369 if (pre)
7371 VTI (bb)->n_mos++;
7372 if (dump_file && (dump_flags & TDF_DETAILS))
7373 log_op_type (GEN_INT (pre), bb, insn,
7374 MO_ADJUST, dump_file);
7376 if (post)
7378 VTI (bb)->n_mos++;
7379 if (dump_file && (dump_flags & TDF_DETAILS))
7380 log_op_type (GEN_INT (post), bb, insn,
7381 MO_ADJUST, dump_file);
7384 cselib_hook_called = false;
7385 if (MAY_HAVE_DEBUG_INSNS)
7387 cselib_process_insn (insn);
7388 if (dump_file && (dump_flags & TDF_DETAILS))
7390 print_rtl_single (dump_file, insn);
7391 dump_cselib_table (dump_file);
7394 if (!cselib_hook_called)
7395 count_with_sets (insn, 0, 0);
7396 if (CALL_P (insn))
7398 VTI (bb)->n_mos++;
7399 if (dump_file && (dump_flags & TDF_DETAILS))
7400 log_op_type (PATTERN (insn), bb, insn,
7401 MO_CALL, dump_file);
7406 count = VTI (bb)->n_mos;
7408 if (MAY_HAVE_DEBUG_INSNS)
7410 cselib_preserve_only_values (false);
7411 next_uid_after = cselib_get_next_uid ();
7412 cselib_reset_table (next_uid_before);
7413 cselib_record_sets_hook = add_with_sets;
7414 if (dump_file && (dump_flags & TDF_DETAILS))
7415 fprintf (dump_file, "first value: %i\n",
7416 cselib_get_next_uid ());
7419 /* Add the micro-operations to the array. */
7420 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7421 VTI (bb)->n_mos = 0;
7422 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7423 insn = NEXT_INSN (insn))
7425 if (INSN_P (insn))
7427 if (!frame_pointer_needed)
7429 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7430 if (pre)
7432 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7434 mo->type = MO_ADJUST;
7435 mo->u.adjust = pre;
7436 mo->insn = insn;
7438 if (dump_file && (dump_flags & TDF_DETAILS))
7439 log_op_type (PATTERN (insn), bb, insn,
7440 MO_ADJUST, dump_file);
7444 cselib_hook_called = false;
7445 if (MAY_HAVE_DEBUG_INSNS)
7447 cselib_process_insn (insn);
7448 if (dump_file && (dump_flags & TDF_DETAILS))
7450 print_rtl_single (dump_file, insn);
7451 dump_cselib_table (dump_file);
7454 if (!cselib_hook_called)
7455 add_with_sets (insn, 0, 0);
7457 if (!frame_pointer_needed && post)
7459 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7461 mo->type = MO_ADJUST;
7462 mo->u.adjust = post;
7463 mo->insn = insn;
7465 if (dump_file && (dump_flags & TDF_DETAILS))
7466 log_op_type (PATTERN (insn), bb, insn,
7467 MO_ADJUST, dump_file);
7471 gcc_assert (count == VTI (bb)->n_mos);
7472 if (MAY_HAVE_DEBUG_INSNS)
7474 cselib_preserve_only_values (true);
7475 gcc_assert (next_uid_after == cselib_get_next_uid ());
7476 cselib_reset_table (next_uid_after);
7477 cselib_record_sets_hook = NULL;
7481 attrs_pool = create_alloc_pool ("attrs_def pool",
7482 sizeof (struct attrs_def), 1024);
7483 var_pool = create_alloc_pool ("variable_def pool",
7484 sizeof (struct variable_def)
7485 + (MAX_VAR_PARTS - 1)
7486 * sizeof (((variable)NULL)->var_part[0]), 64);
7487 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7488 sizeof (struct location_chain_def),
7489 1024);
7490 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7491 sizeof (struct shared_hash_def), 256);
7492 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7493 empty_shared_hash->refcount = 1;
7494 empty_shared_hash->htab
7495 = htab_create (1, variable_htab_hash, variable_htab_eq,
7496 variable_htab_free);
7497 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7498 variable_htab_free);
7499 if (MAY_HAVE_DEBUG_INSNS)
7501 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7502 sizeof (struct value_chain_def),
7503 1024);
7504 value_chains = htab_create (32, value_chain_htab_hash,
7505 value_chain_htab_eq, NULL);
7508 /* Init the IN and OUT sets. */
7509 FOR_ALL_BB (bb)
7511 VTI (bb)->visited = false;
7512 VTI (bb)->flooded = false;
7513 dataflow_set_init (&VTI (bb)->in);
7514 dataflow_set_init (&VTI (bb)->out);
7515 VTI (bb)->permp = NULL;
7518 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7519 vt_add_function_parameters ();
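/* Illustrative sketch, not part of this file: vt_initialize walks each
   basic block twice, first only counting the micro operations (and,
   with debug insns, letting cselib assign VALUEs so the counts match),
   then allocating an exactly-sized array and filling it.  The bare
   count-then-fill allocation pattern, with hypothetical names:  */

/* Hypothetical predicate: does INSN contribute a micro operation?  */

static bool
insn_interesting_p (int insn)
{
  return (insn & 1) != 0;
}

static int *
collect_ops (const int *insns, int n_insns, int *n_ops_out)
{
  int i, n_ops = 0;
  int *ops;

  /* Pass 1: count, so the array can be sized exactly.  */
  for (i = 0; i < n_insns; i++)
    if (insn_interesting_p (insns[i]))
      n_ops++;

  /* Pass 2: allocate once and fill; both passes must agree, which the
     real code checks with gcc_assert (count == VTI (bb)->n_mos).  */
  ops = XNEWVEC (int, n_ops);
  *n_ops_out = 0;
  for (i = 0; i < n_insns; i++)
    if (insn_interesting_p (insns[i]))
      ops[(*n_ops_out)++] = insns[i];

  return ops;
}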
7522 /* Get rid of all debug insns from the insn stream. */
7524 static void
7525 delete_debug_insns (void)
7527 basic_block bb;
7528 rtx insn, next;
7530 if (!MAY_HAVE_DEBUG_INSNS)
7531 return;
7533 FOR_EACH_BB (bb)
7535 FOR_BB_INSNS_SAFE (bb, insn, next)
7536 if (DEBUG_INSN_P (insn))
7537 delete_insn (insn);
7541    /* Run a fast, BB-local-only version of var tracking, to take care of
7542       information that we don't do global analysis on, so that not all
7543       information is lost.  If SKIPPED holds, we're skipping the global
7544       pass entirely, so we should try to use information it would have
7545       handled as well.  */
7547 static void
7548 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7550 /* ??? Just skip it all for now. */
7551 delete_debug_insns ();
7554 /* Free the data structures needed for variable tracking. */
7556 static void
7557 vt_finalize (void)
7559 basic_block bb;
7561 FOR_EACH_BB (bb)
7563 free (VTI (bb)->mos);
7566 FOR_ALL_BB (bb)
7568 dataflow_set_destroy (&VTI (bb)->in);
7569 dataflow_set_destroy (&VTI (bb)->out);
7570 if (VTI (bb)->permp)
7572 dataflow_set_destroy (VTI (bb)->permp);
7573 XDELETE (VTI (bb)->permp);
7576 free_aux_for_blocks ();
7577 htab_delete (empty_shared_hash->htab);
7578 htab_delete (changed_variables);
7579 free_alloc_pool (attrs_pool);
7580 free_alloc_pool (var_pool);
7581 free_alloc_pool (loc_chain_pool);
7582 free_alloc_pool (shared_hash_pool);
7584 if (MAY_HAVE_DEBUG_INSNS)
7586 htab_delete (value_chains);
7587 free_alloc_pool (value_chain_pool);
7588 free_alloc_pool (valvar_pool);
7589 cselib_finish ();
7590 BITMAP_FREE (scratch_regs);
7591 scratch_regs = NULL;
7594 if (vui_vec)
7595 XDELETEVEC (vui_vec);
7596 vui_vec = NULL;
7597 vui_allocated = 0;
7600    /* The entry point to the variable tracking pass.  */
7602 unsigned int
7603 variable_tracking_main (void)
7605 if (flag_var_tracking_assignments < 0)
7607 delete_debug_insns ();
7608 return 0;
7611 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7613 vt_debug_insns_local (true);
7614 return 0;
7617 mark_dfs_back_edges ();
7618 vt_initialize ();
7619 if (!frame_pointer_needed)
7621 if (!vt_stack_adjustments ())
7623 vt_finalize ();
7624 vt_debug_insns_local (true);
7625 return 0;
7629 vt_find_locations ();
7631 if (dump_file && (dump_flags & TDF_DETAILS))
7633 dump_dataflow_sets ();
7634 dump_flow_info (dump_file, dump_flags);
7637 vt_emit_notes ();
7639 vt_finalize ();
7640 vt_debug_insns_local (false);
7641 return 0;
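/* Note the early exits above: a negative flag_var_tracking_assignments
   means only the debug insns are removed, a CFG with more than 500
   basic blocks and an average of at least 20 edges per block is
   considered too dense and only gets the BB-local handling, and a
   failing vt_stack_adjustments when no frame pointer is used likewise
   abandons the global analysis.  */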
7644 static bool
7645 gate_handle_var_tracking (void)
7647 return (flag_var_tracking);
7652 struct rtl_opt_pass pass_variable_tracking =
7655 RTL_PASS,
7656 "vartrack", /* name */
7657 gate_handle_var_tracking, /* gate */
7658 variable_tracking_main, /* execute */
7659 NULL, /* sub */
7660 NULL, /* next */
7661 0, /* static_pass_number */
7662 TV_VAR_TRACKING, /* tv_id */
7663 0, /* properties_required */
7664 0, /* properties_provided */
7665 0, /* properties_destroyed */
7666 0, /* todo_flags_start */
7667 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */