2011-08-19 Andrew Stubbs <ams@codesourcery.com>
[official-gcc.git] / gcc / var-tracking.c
blobf67b3f5d183bba5c33101ff27a8f619f1c6fc628
1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < clobber < set < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
54 register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
60 register in CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "tm_p.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
98 #include "flags.h"
99 #include "output.h"
100 #include "insn-config.h"
101 #include "reload.h"
102 #include "sbitmap.h"
103 #include "alloc-pool.h"
104 #include "fibheap.h"
105 #include "hashtab.h"
106 #include "regs.h"
107 #include "expr.h"
108 #include "timevar.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
111 #include "cselib.h"
112 #include "target.h"
113 #include "params.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
117 #include "recog.h"
118 #include "tm_p.h"
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
127 /* Type of micro operation. */
128 enum micro_operation_type
130 MO_USE, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR,/* Use location which is not associated with a variable
132 or the variable is not trackable. */
133 MO_VAL_USE, /* Use location which is associated with a value. */
134 MO_VAL_LOC, /* Use location which appears in a debug insn. */
135 MO_VAL_SET, /* Set location associated with a value. */
136 MO_SET, /* Set location. */
137 MO_COPY, /* Copy the same portion of a variable from one
138 location to another. */
139 MO_CLOBBER, /* Clobber location. */
140 MO_CALL, /* Call insn. */
141 MO_ADJUST /* Adjust stack pointer. */
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name[] = {
147 "MO_USE",
148 "MO_USE_NO_VAR",
149 "MO_VAL_USE",
150 "MO_VAL_LOC",
151 "MO_VAL_SET",
152 "MO_SET",
153 "MO_COPY",
154 "MO_CLOBBER",
155 "MO_CALL",
156 "MO_ADJUST"
159 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
162 enum emit_note_where
164 EMIT_NOTE_BEFORE_INSN,
165 EMIT_NOTE_AFTER_INSN,
166 EMIT_NOTE_AFTER_CALL_INSN
169 /* Structure holding information about micro operation. */
170 typedef struct micro_operation_def
172 /* Type of micro operation. */
173 enum micro_operation_type type;
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
179 and MO_CLOBBER. */
180 rtx insn;
182 union {
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
189 rtx loc;
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
193 } u;
194 } micro_operation;
196 DEF_VEC_O(micro_operation);
197 DEF_VEC_ALLOC_O(micro_operation,heap);
199 /* A declaration of a variable, or an RTL value being handled like a
200 declaration. */
201 typedef void *decl_or_value;
203 /* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
207 /* The instruction which the note will be emitted before/after. */
208 rtx insn;
210 /* Where the note will be emitted (before/after insn)? */
211 enum emit_note_where where;
213 /* The variables and values active at this point. */
214 htab_t vars;
215 } emit_note_data;
217 /* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 chain is the best data structure. */
221 typedef struct attrs_def
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
226 /* The rtx of register. */
227 rtx loc;
229 /* The declaration corresponding to LOC. */
230 decl_or_value dv;
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
234 } *attrs;
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must be first unshared before modified. */
238 typedef struct shared_hash_def
240 /* Reference count. */
241 int refcount;
243 /* Actual hash table. */
244 htab_t htab;
245 } *shared_hash;
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
256 /* Variable locations. */
257 shared_hash vars;
259 /* Vars that is being traversed. */
260 shared_hash traversed_vars;
261 } dataflow_set;
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
270 /* The IN and OUT set for dataflow analysis. */
271 dataflow_set in;
272 dataflow_set out;
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
278 dataflow_set *permp;
280 /* Has the block been visited in DFS? */
281 bool visited;
283 /* Has the block been flooded in VTA? */
284 bool flooded;
286 } *variable_tracking_info;
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
291 /* Next element in the chain. */
292 struct location_chain_def *next;
294 /* The location (REG, MEM or VALUE). */
295 rtx loc;
297 /* The "value" stored in this location. */
298 rtx set_src;
300 /* Initialized? */
301 enum var_init_status init;
302 } *location_chain;
304 /* Structure describing one part of variable. */
305 typedef struct variable_part_def
307 /* Chain of locations of the part. */
308 location_chain loc_chain;
310 /* Location which was last emitted to location list. */
311 rtx cur_loc;
313 /* The offset in the variable. */
314 HOST_WIDE_INT offset;
315 } variable_part;
317 /* Maximum number of location parts. */
318 #define MAX_VAR_PARTS 16
320 /* Structure describing where the variable is located. */
321 typedef struct variable_def
323 /* The declaration of the variable, or an RTL value being handled
324 like a declaration. */
325 decl_or_value dv;
327 /* Reference count. */
328 int refcount;
330 /* Number of variable parts. */
331 char n_var_parts;
333 /* True if this variable changed (any of its) cur_loc fields
334 during the current emit_notes_for_changes resp.
335 emit_notes_for_differences call. */
336 bool cur_loc_changed;
338 /* True if this variable_def struct is currently in the
339 changed_variables hash table. */
340 bool in_changed_variables;
342 /* The variable parts. */
343 variable_part var_part[1];
344 } *variable;
345 typedef const struct variable_def *const_variable;
347 /* Structure for chaining backlinks from referenced VALUEs to
348 DVs that are referencing them. */
349 typedef struct value_chain_def
351 /* Next value_chain entry. */
352 struct value_chain_def *next;
354 /* The declaration of the variable, or an RTL value
355 being handled like a declaration, whose var_parts[0].loc_chain
356 references the VALUE owning this value_chain. */
357 decl_or_value dv;
359 /* Reference count. */
360 int refcount;
361 } *value_chain;
362 typedef const struct value_chain_def *const_value_chain;
364 /* Pointer to the BB's information specific to variable tracking pass. */
365 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
367 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
368 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
370 /* Alloc pool for struct attrs_def. */
371 static alloc_pool attrs_pool;
373 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
374 static alloc_pool var_pool;
376 /* Alloc pool for struct variable_def with a single var_part entry. */
377 static alloc_pool valvar_pool;
379 /* Alloc pool for struct location_chain_def. */
380 static alloc_pool loc_chain_pool;
382 /* Alloc pool for struct shared_hash_def. */
383 static alloc_pool shared_hash_pool;
385 /* Alloc pool for struct value_chain_def. */
386 static alloc_pool value_chain_pool;
388 /* Changed variables, notes will be emitted for them. */
389 static htab_t changed_variables;
391 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
392 static htab_t value_chains;
394 /* Shall notes be emitted? */
395 static bool emit_notes;
397 /* Empty shared hashtable. */
398 static shared_hash empty_shared_hash;
400 /* Scratch register bitmap used by cselib_expand_value_rtx. */
401 static bitmap scratch_regs = NULL;
403 typedef struct GTY(()) parm_reg {
404 rtx outgoing;
405 rtx incoming;
406 } parm_reg_t;
408 DEF_VEC_O(parm_reg_t);
409 DEF_VEC_ALLOC_O(parm_reg_t, gc);
411 /* Vector of windowed parameter registers, if any. */
412 static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
414 /* Variable used to tell whether cselib_process_insn called our hook. */
415 static bool cselib_hook_called;
417 /* Local function prototypes. */
418 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
419 HOST_WIDE_INT *);
420 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
421 HOST_WIDE_INT *);
422 static bool vt_stack_adjustments (void);
423 static void note_register_arguments (rtx);
424 static hashval_t variable_htab_hash (const void *);
425 static int variable_htab_eq (const void *, const void *);
426 static void variable_htab_free (void *);
428 static void init_attrs_list_set (attrs *);
429 static void attrs_list_clear (attrs *);
430 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
431 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
432 static void attrs_list_copy (attrs *, attrs);
433 static void attrs_list_union (attrs *, attrs);
435 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
436 enum var_init_status);
437 static void vars_copy (htab_t, htab_t);
438 static tree var_debug_decl (tree);
439 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
440 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
441 enum var_init_status, rtx);
442 static void var_reg_delete (dataflow_set *, rtx, bool);
443 static void var_regno_delete (dataflow_set *, int);
444 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
445 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
446 enum var_init_status, rtx);
447 static void var_mem_delete (dataflow_set *, rtx, bool);
449 static void dataflow_set_init (dataflow_set *);
450 static void dataflow_set_clear (dataflow_set *);
451 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
452 static int variable_union_info_cmp_pos (const void *, const void *);
453 static void dataflow_set_union (dataflow_set *, dataflow_set *);
454 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
455 static bool canon_value_cmp (rtx, rtx);
456 static int loc_cmp (rtx, rtx);
457 static bool variable_part_different_p (variable_part *, variable_part *);
458 static bool onepart_variable_different_p (variable, variable);
459 static bool variable_different_p (variable, variable);
460 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
461 static void dataflow_set_destroy (dataflow_set *);
463 static bool contains_symbol_ref (rtx);
464 static bool track_expr_p (tree, bool);
465 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
466 static int add_uses (rtx *, void *);
467 static void add_uses_1 (rtx *, void *);
468 static void add_stores (rtx, const_rtx, void *);
469 static bool compute_bb_dataflow (basic_block);
470 static bool vt_find_locations (void);
472 static void dump_attrs_list (attrs);
473 static int dump_var_slot (void **, void *);
474 static void dump_var (variable);
475 static void dump_vars (htab_t);
476 static void dump_dataflow_set (dataflow_set *);
477 static void dump_dataflow_sets (void);
479 static void variable_was_changed (variable, dataflow_set *);
480 static void **set_slot_part (dataflow_set *, rtx, void **,
481 decl_or_value, HOST_WIDE_INT,
482 enum var_init_status, rtx);
483 static void set_variable_part (dataflow_set *, rtx,
484 decl_or_value, HOST_WIDE_INT,
485 enum var_init_status, rtx, enum insert_option);
486 static void **clobber_slot_part (dataflow_set *, rtx,
487 void **, HOST_WIDE_INT, rtx);
488 static void clobber_variable_part (dataflow_set *, rtx,
489 decl_or_value, HOST_WIDE_INT, rtx);
490 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
491 static void delete_variable_part (dataflow_set *, rtx,
492 decl_or_value, HOST_WIDE_INT);
493 static int emit_note_insn_var_location (void **, void *);
494 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
495 static int emit_notes_for_differences_1 (void **, void *);
496 static int emit_notes_for_differences_2 (void **, void *);
497 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
498 static void emit_notes_in_bb (basic_block, dataflow_set *);
499 static void vt_emit_notes (void);
501 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
502 static void vt_add_function_parameters (void);
503 static bool vt_initialize (void);
504 static void vt_finalize (void);
506 /* Given a SET, calculate the amount of stack adjustment it contains
507 PRE- and POST-modifying stack pointer.
508 This function is similar to stack_adjust_offset. */
510 static void
511 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
512 HOST_WIDE_INT *post)
514 rtx src = SET_SRC (pattern);
515 rtx dest = SET_DEST (pattern);
516 enum rtx_code code;
518 if (dest == stack_pointer_rtx)
520 /* (set (reg sp) (plus (reg sp) (const_int))) */
521 code = GET_CODE (src);
522 if (! (code == PLUS || code == MINUS)
523 || XEXP (src, 0) != stack_pointer_rtx
524 || !CONST_INT_P (XEXP (src, 1)))
525 return;
527 if (code == MINUS)
528 *post += INTVAL (XEXP (src, 1));
529 else
530 *post -= INTVAL (XEXP (src, 1));
532 else if (MEM_P (dest))
534 /* (set (mem (pre_dec (reg sp))) (foo)) */
535 src = XEXP (dest, 0);
536 code = GET_CODE (src);
538 switch (code)
540 case PRE_MODIFY:
541 case POST_MODIFY:
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 rtx val = XEXP (XEXP (src, 1), 1);
545 /* We handle only adjustments by constant amount. */
546 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
547 CONST_INT_P (val));
549 if (code == PRE_MODIFY)
550 *pre -= INTVAL (val);
551 else
552 *post -= INTVAL (val);
553 break;
555 return;
557 case PRE_DEC:
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *pre += GET_MODE_SIZE (GET_MODE (dest));
561 break;
563 return;
565 case POST_DEC:
566 if (XEXP (src, 0) == stack_pointer_rtx)
568 *post += GET_MODE_SIZE (GET_MODE (dest));
569 break;
571 return;
573 case PRE_INC:
574 if (XEXP (src, 0) == stack_pointer_rtx)
576 *pre -= GET_MODE_SIZE (GET_MODE (dest));
577 break;
579 return;
581 case POST_INC:
582 if (XEXP (src, 0) == stack_pointer_rtx)
584 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 break;
587 return;
589 default:
590 return;
595 /* Given an INSN, calculate the amount of stack adjustment it contains
596 PRE- and POST-modifying stack pointer. */
598 static void
599 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
600 HOST_WIDE_INT *post)
602 rtx pattern;
604 *pre = 0;
605 *post = 0;
607 pattern = PATTERN (insn);
608 if (RTX_FRAME_RELATED_P (insn))
610 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
611 if (expr)
612 pattern = XEXP (expr, 0);
615 if (GET_CODE (pattern) == SET)
616 stack_adjust_offset_pre_post (pattern, pre, post);
617 else if (GET_CODE (pattern) == PARALLEL
618 || GET_CODE (pattern) == SEQUENCE)
620 int i;
622 /* There may be stack adjustments inside compound insns. Search
623 for them. */
624 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
625 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
626 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
630 /* Compute stack adjustments for all blocks by traversing DFS tree.
631 Return true when the adjustments on all incoming edges are consistent.
632 Heavily borrowed from pre_and_rev_post_order_compute. */
634 static bool
635 vt_stack_adjustments (void)
637 edge_iterator *stack;
638 int sp;
640 /* Initialize entry block. */
641 VTI (ENTRY_BLOCK_PTR)->visited = true;
642 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
647 sp = 0;
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
652 while (sp)
654 edge_iterator ei;
655 basic_block src;
656 basic_block dest;
658 /* Look at the edge on the top of the stack. */
659 ei = stack[sp - 1];
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
666 rtx insn;
667 HOST_WIDE_INT pre, post, offset;
668 VTI (dest)->visited = true;
669 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
671 if (dest != EXIT_BLOCK_PTR)
672 for (insn = BB_HEAD (dest);
673 insn != NEXT_INSN (BB_END (dest));
674 insn = NEXT_INSN (insn))
676 if (INSN_P (insn))
678 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
679 offset += pre + post;
681 if (CALL_P (insn))
682 note_register_arguments (insn);
685 VTI (dest)->out.stack_adjust = offset;
687 if (EDGE_COUNT (dest->succs) > 0)
688 /* Since the DEST node has been visited for the first
689 time, check its successors. */
690 stack[sp++] = ei_start (dest->succs);
692 else
694 /* Check whether the adjustments on the edges are the same. */
695 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
697 free (stack);
698 return false;
701 if (! ei_one_before_end_p (ei))
702 /* Go to the next edge. */
703 ei_next (&stack[sp - 1]);
704 else
705 /* Return to previous level if there are no more edges. */
706 sp--;
710 free (stack);
711 return true;
714 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
715 hard_frame_pointer_rtx is being mapped to it and offset for it. */
716 static rtx cfa_base_rtx;
717 static HOST_WIDE_INT cfa_base_offset;
719 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
720 or hard_frame_pointer_rtx. */
722 static inline rtx
723 compute_cfa_pointer (HOST_WIDE_INT adjustment)
725 return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
728 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
729 or -1 if the replacement shouldn't be done. */
730 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
732 /* Data for adjust_mems callback. */
734 struct adjust_mem_data
736 bool store;
737 enum machine_mode mem_mode;
738 HOST_WIDE_INT stack_adjust;
739 rtx side_effects;
742 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
743 transformation of wider mode arithmetics to narrower mode,
744 -1 if it is suitable and subexpressions shouldn't be
745 traversed and 0 if it is suitable and subexpressions should
746 be traversed. Called through for_each_rtx. */
748 static int
749 use_narrower_mode_test (rtx *loc, void *data)
751 rtx subreg = (rtx) data;
753 if (CONSTANT_P (*loc))
754 return -1;
755 switch (GET_CODE (*loc))
757 case REG:
758 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
759 return 1;
760 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
761 *loc, subreg_lowpart_offset (GET_MODE (subreg),
762 GET_MODE (*loc))))
763 return 1;
764 return -1;
765 case PLUS:
766 case MINUS:
767 case MULT:
768 return 0;
769 case ASHIFT:
770 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
771 return 1;
772 else
773 return -1;
774 default:
775 return 1;
779 /* Transform X into narrower mode MODE from wider mode WMODE. */
781 static rtx
782 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
784 rtx op0, op1;
785 if (CONSTANT_P (x))
786 return lowpart_subreg (mode, x, wmode);
787 switch (GET_CODE (x))
789 case REG:
790 return lowpart_subreg (mode, x, wmode);
791 case PLUS:
792 case MINUS:
793 case MULT:
794 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
795 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
796 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
797 case ASHIFT:
798 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
799 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
800 default:
801 gcc_unreachable ();
805 /* Helper function for adjusting used MEMs. */
807 static rtx
808 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
810 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
811 rtx mem, addr = loc, tem;
812 enum machine_mode mem_mode_save;
813 bool store_save;
814 switch (GET_CODE (loc))
816 case REG:
817 /* Don't do any sp or fp replacements outside of MEM addresses
818 on the LHS. */
819 if (amd->mem_mode == VOIDmode && amd->store)
820 return loc;
821 if (loc == stack_pointer_rtx
822 && !frame_pointer_needed
823 && cfa_base_rtx)
824 return compute_cfa_pointer (amd->stack_adjust);
825 else if (loc == hard_frame_pointer_rtx
826 && frame_pointer_needed
827 && hard_frame_pointer_adjustment != -1
828 && cfa_base_rtx)
829 return compute_cfa_pointer (hard_frame_pointer_adjustment);
830 gcc_checking_assert (loc != virtual_incoming_args_rtx);
831 return loc;
832 case MEM:
833 mem = loc;
834 if (!amd->store)
836 mem = targetm.delegitimize_address (mem);
837 if (mem != loc && !MEM_P (mem))
838 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
841 addr = XEXP (mem, 0);
842 mem_mode_save = amd->mem_mode;
843 amd->mem_mode = GET_MODE (mem);
844 store_save = amd->store;
845 amd->store = false;
846 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
847 amd->store = store_save;
848 amd->mem_mode = mem_mode_save;
849 if (mem == loc)
850 addr = targetm.delegitimize_address (addr);
851 if (addr != XEXP (mem, 0))
852 mem = replace_equiv_address_nv (mem, addr);
853 if (!amd->store)
854 mem = avoid_constant_pool_reference (mem);
855 return mem;
856 case PRE_INC:
857 case PRE_DEC:
858 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
859 GEN_INT (GET_CODE (loc) == PRE_INC
860 ? GET_MODE_SIZE (amd->mem_mode)
861 : -GET_MODE_SIZE (amd->mem_mode)));
862 case POST_INC:
863 case POST_DEC:
864 if (addr == loc)
865 addr = XEXP (loc, 0);
866 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
867 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
868 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
869 GEN_INT ((GET_CODE (loc) == PRE_INC
870 || GET_CODE (loc) == POST_INC)
871 ? GET_MODE_SIZE (amd->mem_mode)
872 : -GET_MODE_SIZE (amd->mem_mode)));
873 amd->side_effects = alloc_EXPR_LIST (0,
874 gen_rtx_SET (VOIDmode,
875 XEXP (loc, 0),
876 tem),
877 amd->side_effects);
878 return addr;
879 case PRE_MODIFY:
880 addr = XEXP (loc, 1);
881 case POST_MODIFY:
882 if (addr == loc)
883 addr = XEXP (loc, 0);
884 gcc_assert (amd->mem_mode != VOIDmode);
885 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
886 amd->side_effects = alloc_EXPR_LIST (0,
887 gen_rtx_SET (VOIDmode,
888 XEXP (loc, 0),
889 XEXP (loc, 1)),
890 amd->side_effects);
891 return addr;
892 case SUBREG:
893 /* First try without delegitimization of whole MEMs and
894 avoid_constant_pool_reference, which is more likely to succeed. */
895 store_save = amd->store;
896 amd->store = true;
897 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
898 data);
899 amd->store = store_save;
900 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
901 if (mem == SUBREG_REG (loc))
903 tem = loc;
904 goto finish_subreg;
906 tem = simplify_gen_subreg (GET_MODE (loc), mem,
907 GET_MODE (SUBREG_REG (loc)),
908 SUBREG_BYTE (loc));
909 if (tem)
910 goto finish_subreg;
911 tem = simplify_gen_subreg (GET_MODE (loc), addr,
912 GET_MODE (SUBREG_REG (loc)),
913 SUBREG_BYTE (loc));
914 if (tem == NULL_RTX)
915 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
916 finish_subreg:
917 if (MAY_HAVE_DEBUG_INSNS
918 && GET_CODE (tem) == SUBREG
919 && (GET_CODE (SUBREG_REG (tem)) == PLUS
920 || GET_CODE (SUBREG_REG (tem)) == MINUS
921 || GET_CODE (SUBREG_REG (tem)) == MULT
922 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
923 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
924 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
925 && GET_MODE_SIZE (GET_MODE (tem))
926 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
927 && subreg_lowpart_p (tem)
928 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
929 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
930 GET_MODE (SUBREG_REG (tem)));
931 return tem;
932 case ASM_OPERANDS:
933 /* Don't do any replacements in second and following
934 ASM_OPERANDS of inline-asm with multiple sets.
935 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
936 and ASM_OPERANDS_LABEL_VEC need to be equal between
937 all the ASM_OPERANDs in the insn and adjust_insn will
938 fix this up. */
939 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
940 return loc;
941 break;
942 default:
943 break;
945 return NULL_RTX;
948 /* Helper function for replacement of uses. */
950 static void
951 adjust_mem_uses (rtx *x, void *data)
953 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
954 if (new_x != *x)
955 validate_change (NULL_RTX, x, new_x, true);
958 /* Helper function for replacement of stores. */
960 static void
961 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
963 if (MEM_P (loc))
965 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
966 adjust_mems, data);
967 if (new_dest != SET_DEST (expr))
969 rtx xexpr = CONST_CAST_RTX (expr);
970 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
975 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
976 replace them with their value in the insn and add the side-effects
977 as other sets to the insn. */
979 static void
980 adjust_insn (basic_block bb, rtx insn)
982 struct adjust_mem_data amd;
983 rtx set;
985 #ifdef HAVE_window_save
986 /* If the target machine has an explicit window save instruction, the
987 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
988 if (RTX_FRAME_RELATED_P (insn)
989 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
991 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
992 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
993 parm_reg_t *p;
995 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
997 XVECEXP (rtl, 0, i * 2)
998 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
999 /* Do not clobber the attached DECL, but only the REG. */
1000 XVECEXP (rtl, 0, i * 2 + 1)
1001 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1002 gen_raw_REG (GET_MODE (p->outgoing),
1003 REGNO (p->outgoing)));
1006 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1007 return;
1009 #endif
1011 amd.mem_mode = VOIDmode;
1012 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1013 amd.side_effects = NULL_RTX;
1015 amd.store = true;
1016 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1018 amd.store = false;
1019 if (GET_CODE (PATTERN (insn)) == PARALLEL
1020 && asm_noperands (PATTERN (insn)) > 0
1021 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1023 rtx body, set0;
1024 int i;
1026 /* inline-asm with multiple sets is tiny bit more complicated,
1027 because the 3 vectors in ASM_OPERANDS need to be shared between
1028 all ASM_OPERANDS in the instruction. adjust_mems will
1029 not touch ASM_OPERANDS other than the first one, asm_noperands
1030 test above needs to be called before that (otherwise it would fail)
1031 and afterwards this code fixes it up. */
1032 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1033 body = PATTERN (insn);
1034 set0 = XVECEXP (body, 0, 0);
1035 gcc_checking_assert (GET_CODE (set0) == SET
1036 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1037 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1038 for (i = 1; i < XVECLEN (body, 0); i++)
1039 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1040 break;
1041 else
1043 set = XVECEXP (body, 0, i);
1044 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1045 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1046 == i);
1047 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1048 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1049 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1050 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1051 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1052 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1054 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1055 ASM_OPERANDS_INPUT_VEC (newsrc)
1056 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1057 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1058 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1059 ASM_OPERANDS_LABEL_VEC (newsrc)
1060 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1061 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1065 else
1066 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1068 /* For read-only MEMs containing some constant, prefer those
1069 constants. */
1070 set = single_set (insn);
1071 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1073 rtx note = find_reg_equal_equiv_note (insn);
1075 if (note && CONSTANT_P (XEXP (note, 0)))
1076 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1079 if (amd.side_effects)
1081 rtx *pat, new_pat, s;
1082 int i, oldn, newn;
1084 pat = &PATTERN (insn);
1085 if (GET_CODE (*pat) == COND_EXEC)
1086 pat = &COND_EXEC_CODE (*pat);
1087 if (GET_CODE (*pat) == PARALLEL)
1088 oldn = XVECLEN (*pat, 0);
1089 else
1090 oldn = 1;
1091 for (s = amd.side_effects, newn = 0; s; newn++)
1092 s = XEXP (s, 1);
1093 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1094 if (GET_CODE (*pat) == PARALLEL)
1095 for (i = 0; i < oldn; i++)
1096 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1097 else
1098 XVECEXP (new_pat, 0, 0) = *pat;
1099 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1100 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1101 free_EXPR_LIST_list (&amd.side_effects);
1102 validate_change (NULL_RTX, pat, new_pat, true);
1106 /* Return true if a decl_or_value DV is a DECL or NULL. */
1107 static inline bool
1108 dv_is_decl_p (decl_or_value dv)
1110 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1113 /* Return true if a decl_or_value is a VALUE rtl. */
1114 static inline bool
1115 dv_is_value_p (decl_or_value dv)
1117 return dv && !dv_is_decl_p (dv);
1120 /* Return the decl in the decl_or_value. */
1121 static inline tree
1122 dv_as_decl (decl_or_value dv)
1124 gcc_checking_assert (dv_is_decl_p (dv));
1125 return (tree) dv;
1128 /* Return the value in the decl_or_value. */
1129 static inline rtx
1130 dv_as_value (decl_or_value dv)
1132 gcc_checking_assert (dv_is_value_p (dv));
1133 return (rtx)dv;
1136 /* Return the opaque pointer in the decl_or_value. */
1137 static inline void *
1138 dv_as_opaque (decl_or_value dv)
1140 return dv;
1143 /* Return true if a decl_or_value must not have more than one variable
1144 part. */
1145 static inline bool
1146 dv_onepart_p (decl_or_value dv)
1148 tree decl;
1150 if (!MAY_HAVE_DEBUG_INSNS)
1151 return false;
1153 if (dv_is_value_p (dv))
1154 return true;
1156 decl = dv_as_decl (dv);
1158 if (!decl)
1159 return true;
1161 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1162 return true;
1164 return (target_for_debug_bind (decl) != NULL_TREE);
1167 /* Return the variable pool to be used for dv, depending on whether it
1168 can have multiple parts or not. */
1169 static inline alloc_pool
1170 dv_pool (decl_or_value dv)
1172 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1175 /* Build a decl_or_value out of a decl. */
1176 static inline decl_or_value
1177 dv_from_decl (tree decl)
1179 decl_or_value dv;
1180 dv = decl;
1181 gcc_checking_assert (dv_is_decl_p (dv));
1182 return dv;
1185 /* Build a decl_or_value out of a value. */
1186 static inline decl_or_value
1187 dv_from_value (rtx value)
1189 decl_or_value dv;
1190 dv = value;
1191 gcc_checking_assert (dv_is_value_p (dv));
1192 return dv;
1195 extern void debug_dv (decl_or_value dv);
1197 DEBUG_FUNCTION void
1198 debug_dv (decl_or_value dv)
1200 if (dv_is_value_p (dv))
1201 debug_rtx (dv_as_value (dv));
1202 else
1203 debug_generic_stmt (dv_as_decl (dv));
1206 typedef unsigned int dvuid;
1208 /* Return the uid of DV. */
1210 static inline dvuid
1211 dv_uid (decl_or_value dv)
1213 if (dv_is_value_p (dv))
1214 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1215 else
1216 return DECL_UID (dv_as_decl (dv));
1219 /* Compute the hash from the uid. */
1221 static inline hashval_t
1222 dv_uid2hash (dvuid uid)
1224 return uid;
1227 /* The hash function for a mask table in a shared_htab chain. */
1229 static inline hashval_t
1230 dv_htab_hash (decl_or_value dv)
1232 return dv_uid2hash (dv_uid (dv));
1235 /* The hash function for variable_htab, computes the hash value
1236 from the declaration of variable X. */
1238 static hashval_t
1239 variable_htab_hash (const void *x)
1241 const_variable const v = (const_variable) x;
1243 return dv_htab_hash (v->dv);
1246 /* Compare the declaration of variable X with declaration Y. */
1248 static int
1249 variable_htab_eq (const void *x, const void *y)
1251 const_variable const v = (const_variable) x;
1252 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1254 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1257 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1259 static void
1260 variable_htab_free (void *elem)
1262 int i;
1263 variable var = (variable) elem;
1264 location_chain node, next;
1266 gcc_checking_assert (var->refcount > 0);
1268 var->refcount--;
1269 if (var->refcount > 0)
1270 return;
1272 for (i = 0; i < var->n_var_parts; i++)
1274 for (node = var->var_part[i].loc_chain; node; node = next)
1276 next = node->next;
1277 pool_free (loc_chain_pool, node);
1279 var->var_part[i].loc_chain = NULL;
1281 pool_free (dv_pool (var->dv), var);
1284 /* The hash function for value_chains htab, computes the hash value
1285 from the VALUE. */
1287 static hashval_t
1288 value_chain_htab_hash (const void *x)
1290 const_value_chain const v = (const_value_chain) x;
1292 return dv_htab_hash (v->dv);
1295 /* Compare the VALUE X with VALUE Y. */
1297 static int
1298 value_chain_htab_eq (const void *x, const void *y)
1300 const_value_chain const v = (const_value_chain) x;
1301 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1303 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1306 /* Initialize the set (array) SET of attrs to empty lists. */
1308 static void
1309 init_attrs_list_set (attrs *set)
1311 int i;
1313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1314 set[i] = NULL;
1317 /* Make the list *LISTP empty. */
1319 static void
1320 attrs_list_clear (attrs *listp)
1322 attrs list, next;
1324 for (list = *listp; list; list = next)
1326 next = list->next;
1327 pool_free (attrs_pool, list);
1329 *listp = NULL;
1332 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1334 static attrs
1335 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1337 for (; list; list = list->next)
1338 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1339 return list;
1340 return NULL;
1343 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1345 static void
1346 attrs_list_insert (attrs *listp, decl_or_value dv,
1347 HOST_WIDE_INT offset, rtx loc)
1349 attrs list;
1351 list = (attrs) pool_alloc (attrs_pool);
1352 list->loc = loc;
1353 list->dv = dv;
1354 list->offset = offset;
1355 list->next = *listp;
1356 *listp = list;
1359 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1361 static void
1362 attrs_list_copy (attrs *dstp, attrs src)
1364 attrs n;
1366 attrs_list_clear (dstp);
1367 for (; src; src = src->next)
1369 n = (attrs) pool_alloc (attrs_pool);
1370 n->loc = src->loc;
1371 n->dv = src->dv;
1372 n->offset = src->offset;
1373 n->next = *dstp;
1374 *dstp = n;
1378 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1380 static void
1381 attrs_list_union (attrs *dstp, attrs src)
1383 for (; src; src = src->next)
1385 if (!attrs_list_member (*dstp, src->dv, src->offset))
1386 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1390 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1391 *DSTP. */
1393 static void
1394 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1396 gcc_assert (!*dstp);
1397 for (; src; src = src->next)
1399 if (!dv_onepart_p (src->dv))
1400 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1402 for (src = src2; src; src = src->next)
1404 if (!dv_onepart_p (src->dv)
1405 && !attrs_list_member (*dstp, src->dv, src->offset))
1406 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1410 /* Shared hashtable support. */
1412 /* Return true if VARS is shared. */
1414 static inline bool
1415 shared_hash_shared (shared_hash vars)
1417 return vars->refcount > 1;
1420 /* Return the hash table for VARS. */
1422 static inline htab_t
1423 shared_hash_htab (shared_hash vars)
1425 return vars->htab;
1428 /* Return true if VAR is shared, or maybe because VARS is shared. */
1430 static inline bool
1431 shared_var_p (variable var, shared_hash vars)
1433 /* Don't count an entry in the changed_variables table as a duplicate. */
1434 return ((var->refcount > 1 + (int) var->in_changed_variables)
1435 || shared_hash_shared (vars));
1438 /* Copy variables into a new hash table. */
1440 static shared_hash
1441 shared_hash_unshare (shared_hash vars)
1443 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1444 gcc_assert (vars->refcount > 1);
1445 new_vars->refcount = 1;
1446 new_vars->htab
1447 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1448 variable_htab_eq, variable_htab_free);
1449 vars_copy (new_vars->htab, vars->htab);
1450 vars->refcount--;
1451 return new_vars;
1454 /* Increment reference counter on VARS and return it. */
1456 static inline shared_hash
1457 shared_hash_copy (shared_hash vars)
1459 vars->refcount++;
1460 return vars;
1463 /* Decrement reference counter and destroy hash table if not shared
1464 anymore. */
1466 static void
1467 shared_hash_destroy (shared_hash vars)
1469 gcc_checking_assert (vars->refcount > 0);
1470 if (--vars->refcount == 0)
1472 htab_delete (vars->htab);
1473 pool_free (shared_hash_pool, vars);
1477 /* Unshare *PVARS if shared and return slot for DV. If INS is
1478 INSERT, insert it if not already present. */
1480 static inline void **
1481 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1482 hashval_t dvhash, enum insert_option ins)
1484 if (shared_hash_shared (*pvars))
1485 *pvars = shared_hash_unshare (*pvars);
1486 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1489 static inline void **
1490 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1491 enum insert_option ins)
1493 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1496 /* Return slot for DV, if it is already present in the hash table.
1497 If it is not present, insert it only VARS is not shared, otherwise
1498 return NULL. */
1500 static inline void **
1501 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1503 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1504 shared_hash_shared (vars)
1505 ? NO_INSERT : INSERT);
1508 static inline void **
1509 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1511 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1514 /* Return slot for DV only if it is already present in the hash table. */
1516 static inline void **
1517 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1518 hashval_t dvhash)
1520 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1521 NO_INSERT);
1524 static inline void **
1525 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1527 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1530 /* Return variable for DV or NULL if not already present in the hash
1531 table. */
1533 static inline variable
1534 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1536 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1539 static inline variable
1540 shared_hash_find (shared_hash vars, decl_or_value dv)
1542 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1545 /* Return true if TVAL is better than CVAL as a canonival value. We
1546 choose lowest-numbered VALUEs, using the RTX address as a
1547 tie-breaker. The idea is to arrange them into a star topology,
1548 such that all of them are at most one step away from the canonical
1549 value, and the canonical value has backlinks to all of them, in
1550 addition to all the actual locations. We don't enforce this
1551 topology throughout the entire dataflow analysis, though.
1554 static inline bool
1555 canon_value_cmp (rtx tval, rtx cval)
1557 return !cval
1558 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1561 static bool dst_can_be_shared;
1563 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1565 static void **
1566 unshare_variable (dataflow_set *set, void **slot, variable var,
1567 enum var_init_status initialized)
1569 variable new_var;
1570 int i;
1572 new_var = (variable) pool_alloc (dv_pool (var->dv));
1573 new_var->dv = var->dv;
1574 new_var->refcount = 1;
1575 var->refcount--;
1576 new_var->n_var_parts = var->n_var_parts;
1577 new_var->cur_loc_changed = var->cur_loc_changed;
1578 var->cur_loc_changed = false;
1579 new_var->in_changed_variables = false;
1581 if (! flag_var_tracking_uninit)
1582 initialized = VAR_INIT_STATUS_INITIALIZED;
1584 for (i = 0; i < var->n_var_parts; i++)
1586 location_chain node;
1587 location_chain *nextp;
1589 new_var->var_part[i].offset = var->var_part[i].offset;
1590 nextp = &new_var->var_part[i].loc_chain;
1591 for (node = var->var_part[i].loc_chain; node; node = node->next)
1593 location_chain new_lc;
1595 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1596 new_lc->next = NULL;
1597 if (node->init > initialized)
1598 new_lc->init = node->init;
1599 else
1600 new_lc->init = initialized;
1601 if (node->set_src && !(MEM_P (node->set_src)))
1602 new_lc->set_src = node->set_src;
1603 else
1604 new_lc->set_src = NULL;
1605 new_lc->loc = node->loc;
1607 *nextp = new_lc;
1608 nextp = &new_lc->next;
1611 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1614 dst_can_be_shared = false;
1615 if (shared_hash_shared (set->vars))
1616 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1617 else if (set->traversed_vars && set->vars != set->traversed_vars)
1618 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1619 *slot = new_var;
1620 if (var->in_changed_variables)
1622 void **cslot
1623 = htab_find_slot_with_hash (changed_variables, var->dv,
1624 dv_htab_hash (var->dv), NO_INSERT);
1625 gcc_assert (*cslot == (void *) var);
1626 var->in_changed_variables = false;
1627 variable_htab_free (var);
1628 *cslot = new_var;
1629 new_var->in_changed_variables = true;
1631 return slot;
1634 /* Copy all variables from hash table SRC to hash table DST. */
1636 static void
1637 vars_copy (htab_t dst, htab_t src)
1639 htab_iterator hi;
1640 variable var;
1642 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1644 void **dstp;
1645 var->refcount++;
1646 dstp = htab_find_slot_with_hash (dst, var->dv,
1647 dv_htab_hash (var->dv),
1648 INSERT);
1649 *dstp = var;
1653 /* Map a decl to its main debug decl. */
1655 static inline tree
1656 var_debug_decl (tree decl)
1658 if (decl && DECL_P (decl)
1659 && DECL_DEBUG_EXPR_IS_FROM (decl))
1661 tree debugdecl = DECL_DEBUG_EXPR (decl);
1662 if (debugdecl && DECL_P (debugdecl))
1663 decl = debugdecl;
1666 return decl;
1669 /* Set the register LOC to contain DV, OFFSET. */
1671 static void
1672 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1673 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1674 enum insert_option iopt)
1676 attrs node;
1677 bool decl_p = dv_is_decl_p (dv);
1679 if (decl_p)
1680 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1682 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1683 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1684 && node->offset == offset)
1685 break;
1686 if (!node)
1687 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1688 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1691 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1693 static void
1694 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1695 rtx set_src)
1697 tree decl = REG_EXPR (loc);
1698 HOST_WIDE_INT offset = REG_OFFSET (loc);
1700 var_reg_decl_set (set, loc, initialized,
1701 dv_from_decl (decl), offset, set_src, INSERT);
1704 static enum var_init_status
1705 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1707 variable var;
1708 int i;
1709 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1711 if (! flag_var_tracking_uninit)
1712 return VAR_INIT_STATUS_INITIALIZED;
1714 var = shared_hash_find (set->vars, dv);
1715 if (var)
1717 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1719 location_chain nextp;
1720 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1721 if (rtx_equal_p (nextp->loc, loc))
1723 ret_val = nextp->init;
1724 break;
1729 return ret_val;
1732 /* Delete current content of register LOC in dataflow set SET and set
1733 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1734 MODIFY is true, any other live copies of the same variable part are
1735 also deleted from the dataflow set, otherwise the variable part is
1736 assumed to be copied from another location holding the same
1737 part. */
1739 static void
1740 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1741 enum var_init_status initialized, rtx set_src)
1743 tree decl = REG_EXPR (loc);
1744 HOST_WIDE_INT offset = REG_OFFSET (loc);
1745 attrs node, next;
1746 attrs *nextp;
1748 decl = var_debug_decl (decl);
1750 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1751 initialized = get_init_value (set, loc, dv_from_decl (decl));
1753 nextp = &set->regs[REGNO (loc)];
1754 for (node = *nextp; node; node = next)
1756 next = node->next;
1757 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1759 delete_variable_part (set, node->loc, node->dv, node->offset);
1760 pool_free (attrs_pool, node);
1761 *nextp = next;
1763 else
1765 node->loc = loc;
1766 nextp = &node->next;
1769 if (modify)
1770 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1771 var_reg_set (set, loc, initialized, set_src);
1774 /* Delete the association of register LOC in dataflow set SET with any
1775 variables that aren't onepart. If CLOBBER is true, also delete any
1776 other live copies of the same variable part, and delete the
1777 association with onepart dvs too. */
1779 static void
1780 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1782 attrs *nextp = &set->regs[REGNO (loc)];
1783 attrs node, next;
1785 if (clobber)
1787 tree decl = REG_EXPR (loc);
1788 HOST_WIDE_INT offset = REG_OFFSET (loc);
1790 decl = var_debug_decl (decl);
1792 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1795 for (node = *nextp; node; node = next)
1797 next = node->next;
1798 if (clobber || !dv_onepart_p (node->dv))
1800 delete_variable_part (set, node->loc, node->dv, node->offset);
1801 pool_free (attrs_pool, node);
1802 *nextp = next;
1804 else
1805 nextp = &node->next;
1809 /* Delete content of register with number REGNO in dataflow set SET. */
1811 static void
1812 var_regno_delete (dataflow_set *set, int regno)
1814 attrs *reg = &set->regs[regno];
1815 attrs node, next;
1817 for (node = *reg; node; node = next)
1819 next = node->next;
1820 delete_variable_part (set, node->loc, node->dv, node->offset);
1821 pool_free (attrs_pool, node);
1823 *reg = NULL;
1826 /* Set the location of DV, OFFSET as the MEM LOC. */
1828 static void
1829 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1830 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1831 enum insert_option iopt)
1833 if (dv_is_decl_p (dv))
1834 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1836 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1839 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1840 SET to LOC.
1841 Adjust the address first if it is stack pointer based. */
1843 static void
1844 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1845 rtx set_src)
1847 tree decl = MEM_EXPR (loc);
1848 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1850 var_mem_decl_set (set, loc, initialized,
1851 dv_from_decl (decl), offset, set_src, INSERT);
1854 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1855 dataflow set SET to LOC. If MODIFY is true, any other live copies
1856 of the same variable part are also deleted from the dataflow set,
1857 otherwise the variable part is assumed to be copied from another
1858 location holding the same part.
1859 Adjust the address first if it is stack pointer based. */
1861 static void
1862 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1863 enum var_init_status initialized, rtx set_src)
1865 tree decl = MEM_EXPR (loc);
1866 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1868 decl = var_debug_decl (decl);
1870 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1871 initialized = get_init_value (set, loc, dv_from_decl (decl));
1873 if (modify)
1874 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1875 var_mem_set (set, loc, initialized, set_src);
1878 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1879 true, also delete any other live copies of the same variable part.
1880 Adjust the address first if it is stack pointer based. */
1882 static void
1883 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1885 tree decl = MEM_EXPR (loc);
1886 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1888 decl = var_debug_decl (decl);
1889 if (clobber)
1890 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1891 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1894 /* Bind a value to a location it was just stored in. If MODIFIED
1895 holds, assume the location was modified, detaching it from any
1896 values bound to it. */
1898 static void
1899 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1901 cselib_val *v = CSELIB_VAL_PTR (val);
1903 gcc_assert (cselib_preserved_value_p (v));
1905 if (dump_file)
1907 fprintf (dump_file, "%i: ", INSN_UID (insn));
1908 print_inline_rtx (dump_file, val, 0);
1909 fprintf (dump_file, " stored in ");
1910 print_inline_rtx (dump_file, loc, 0);
1911 if (v->locs)
1913 struct elt_loc_list *l;
1914 for (l = v->locs; l; l = l->next)
1916 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1917 print_inline_rtx (dump_file, l->loc, 0);
1920 fprintf (dump_file, "\n");
1923 if (REG_P (loc))
1925 if (modified)
1926 var_regno_delete (set, REGNO (loc));
1927 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1928 dv_from_value (val), 0, NULL_RTX, INSERT);
1930 else if (MEM_P (loc))
1931 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1932 dv_from_value (val), 0, NULL_RTX, INSERT);
1933 else
1934 set_variable_part (set, loc, dv_from_value (val), 0,
1935 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1938 /* Reset this node, detaching all its equivalences. Return the slot
1939 in the variable hash table that holds dv, if there is one. */
1941 static void
1942 val_reset (dataflow_set *set, decl_or_value dv)
1944 variable var = shared_hash_find (set->vars, dv) ;
1945 location_chain node;
1946 rtx cval;
1948 if (!var || !var->n_var_parts)
1949 return;
1951 gcc_assert (var->n_var_parts == 1);
1953 cval = NULL;
1954 for (node = var->var_part[0].loc_chain; node; node = node->next)
1955 if (GET_CODE (node->loc) == VALUE
1956 && canon_value_cmp (node->loc, cval))
1957 cval = node->loc;
1959 for (node = var->var_part[0].loc_chain; node; node = node->next)
1960 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1962 /* Redirect the equivalence link to the new canonical
1963 value, or simply remove it if it would point at
1964 itself. */
1965 if (cval)
1966 set_variable_part (set, cval, dv_from_value (node->loc),
1967 0, node->init, node->set_src, NO_INSERT);
1968 delete_variable_part (set, dv_as_value (dv),
1969 dv_from_value (node->loc), 0);
1972 if (cval)
1974 decl_or_value cdv = dv_from_value (cval);
1976 /* Keep the remaining values connected, accummulating links
1977 in the canonical value. */
1978 for (node = var->var_part[0].loc_chain; node; node = node->next)
1980 if (node->loc == cval)
1981 continue;
1982 else if (GET_CODE (node->loc) == REG)
1983 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1984 node->set_src, NO_INSERT);
1985 else if (GET_CODE (node->loc) == MEM)
1986 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1987 node->set_src, NO_INSERT);
1988 else
1989 set_variable_part (set, node->loc, cdv, 0,
1990 node->init, node->set_src, NO_INSERT);
1994 /* We remove this last, to make sure that the canonical value is not
1995 removed to the point of requiring reinsertion. */
1996 if (cval)
1997 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1999 clobber_variable_part (set, NULL, dv, 0, NULL);
2001 /* ??? Should we make sure there aren't other available values or
2002 variables whose values involve this one other than by
2003 equivalence? E.g., at the very least we should reset MEMs, those
2004 shouldn't be too hard to find cselib-looking up the value as an
2005 address, then locating the resulting value in our own hash
2006 table. */
2009 /* Find the values in a given location and map the val to another
2010 value, if it is unique, or add the location as one holding the
2011 value. */
2013 static void
2014 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2016 decl_or_value dv = dv_from_value (val);
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2020 if (insn)
2021 fprintf (dump_file, "%i: ", INSN_UID (insn));
2022 else
2023 fprintf (dump_file, "head: ");
2024 print_inline_rtx (dump_file, val, 0);
2025 fputs (" is at ", dump_file);
2026 print_inline_rtx (dump_file, loc, 0);
2027 fputc ('\n', dump_file);
2030 val_reset (set, dv);
2032 if (REG_P (loc))
2034 attrs node, found = NULL;
2036 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2037 if (dv_is_value_p (node->dv)
2038 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2040 found = node;
2042 /* Map incoming equivalences. ??? Wouldn't it be nice if
2043 we just started sharing the location lists? Maybe a
2044 circular list ending at the value itself or some
2045 such. */
2046 set_variable_part (set, dv_as_value (node->dv),
2047 dv_from_value (val), node->offset,
2048 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2049 set_variable_part (set, val, node->dv, node->offset,
2050 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2053 /* If we didn't find any equivalence, we need to remember that
2054 this value is held in the named register. */
2055 if (!found)
2056 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2057 dv_from_value (val), 0, NULL_RTX, INSERT);
2059 else if (MEM_P (loc))
2060 /* ??? Merge equivalent MEMs. */
2061 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2062 dv_from_value (val), 0, NULL_RTX, INSERT);
2063 else
2064 /* ??? Merge equivalent expressions. */
2065 set_variable_part (set, loc, dv_from_value (val), 0,
2066 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2069 /* Initialize dataflow set SET to be empty.
2070 VARS_SIZE is the initial size of hash table VARS. */
2072 static void
2073 dataflow_set_init (dataflow_set *set)
2075 init_attrs_list_set (set->regs);
2076 set->vars = shared_hash_copy (empty_shared_hash);
2077 set->stack_adjust = 0;
2078 set->traversed_vars = NULL;
2081 /* Delete the contents of dataflow set SET. */
2083 static void
2084 dataflow_set_clear (dataflow_set *set)
2086 int i;
2088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2089 attrs_list_clear (&set->regs[i]);
2091 shared_hash_destroy (set->vars);
2092 set->vars = shared_hash_copy (empty_shared_hash);
2095 /* Copy the contents of dataflow set SRC to DST. */
2097 static void
2098 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2100 int i;
2102 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2103 attrs_list_copy (&dst->regs[i], src->regs[i]);
2105 shared_hash_destroy (dst->vars);
2106 dst->vars = shared_hash_copy (src->vars);
2107 dst->stack_adjust = src->stack_adjust;
2110 /* Information for merging lists of locations for a given offset of variable.
2112 struct variable_union_info
2114 /* Node of the location chain. */
2115 location_chain lc;
2117 /* The sum of positions in the input chains. */
2118 int pos;
2120 /* The position in the chain of DST dataflow set. */
2121 int pos_dst;
2124 /* Buffer for location list sorting and its allocated size. */
2125 static struct variable_union_info *vui_vec;
2126 static int vui_allocated;
2128 /* Compare function for qsort, order the structures by POS element. */
2130 static int
2131 variable_union_info_cmp_pos (const void *n1, const void *n2)
2133 const struct variable_union_info *const i1 =
2134 (const struct variable_union_info *) n1;
2135 const struct variable_union_info *const i2 =
2136 ( const struct variable_union_info *) n2;
2138 if (i1->pos != i2->pos)
2139 return i1->pos - i2->pos;
2141 return (i1->pos_dst - i2->pos_dst);
2144 /* Compute union of location parts of variable *SLOT and the same variable
2145 from hash table DATA. Compute "sorted" union of the location chains
2146 for common offsets, i.e. the locations of a variable part are sorted by
2147 a priority where the priority is the sum of the positions in the 2 chains
2148 (if a location is only in one list the position in the second list is
2149 defined to be larger than the length of the chains).
2150 When we are updating the location parts the newest location is in the
2151 beginning of the chain, so when we do the described "sorted" union
2152 we keep the newest locations in the beginning. */
2154 static int
2155 variable_union (variable src, dataflow_set *set)
2157 variable dst;
2158 void **dstp;
2159 int i, j, k;
2161 dstp = shared_hash_find_slot (set->vars, src->dv);
2162 if (!dstp || !*dstp)
2164 src->refcount++;
2166 dst_can_be_shared = false;
2167 if (!dstp)
2168 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2170 *dstp = src;
2172 /* Continue traversing the hash table. */
2173 return 1;
2175 else
2176 dst = (variable) *dstp;
2178 gcc_assert (src->n_var_parts);
2180 /* We can combine one-part variables very efficiently, because their
2181 entries are in canonical order. */
2182 if (dv_onepart_p (src->dv))
2184 location_chain *nodep, dnode, snode;
2186 gcc_assert (src->n_var_parts == 1
2187 && dst->n_var_parts == 1);
2189 snode = src->var_part[0].loc_chain;
2190 gcc_assert (snode);
2192 restart_onepart_unshared:
2193 nodep = &dst->var_part[0].loc_chain;
2194 dnode = *nodep;
2195 gcc_assert (dnode);
2197 while (snode)
2199 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2201 if (r > 0)
2203 location_chain nnode;
2205 if (shared_var_p (dst, set->vars))
2207 dstp = unshare_variable (set, dstp, dst,
2208 VAR_INIT_STATUS_INITIALIZED);
2209 dst = (variable)*dstp;
2210 goto restart_onepart_unshared;
2213 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2214 nnode->loc = snode->loc;
2215 nnode->init = snode->init;
2216 if (!snode->set_src || MEM_P (snode->set_src))
2217 nnode->set_src = NULL;
2218 else
2219 nnode->set_src = snode->set_src;
2220 nnode->next = dnode;
2221 dnode = nnode;
2223 else if (r == 0)
2224 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2226 if (r >= 0)
2227 snode = snode->next;
2229 nodep = &dnode->next;
2230 dnode = *nodep;
2233 return 1;
2236 /* Count the number of location parts, result is K. */
2237 for (i = 0, j = 0, k = 0;
2238 i < src->n_var_parts && j < dst->n_var_parts; k++)
2240 if (src->var_part[i].offset == dst->var_part[j].offset)
2242 i++;
2243 j++;
2245 else if (src->var_part[i].offset < dst->var_part[j].offset)
2246 i++;
2247 else
2248 j++;
2250 k += src->n_var_parts - i;
2251 k += dst->n_var_parts - j;
2253 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2254 thus there are at most MAX_VAR_PARTS different offsets. */
2255 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2257 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2259 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2260 dst = (variable)*dstp;
2263 i = src->n_var_parts - 1;
2264 j = dst->n_var_parts - 1;
2265 dst->n_var_parts = k;
2267 for (k--; k >= 0; k--)
2269 location_chain node, node2;
2271 if (i >= 0 && j >= 0
2272 && src->var_part[i].offset == dst->var_part[j].offset)
2274 /* Compute the "sorted" union of the chains, i.e. the locations which
2275 are in both chains go first, they are sorted by the sum of
2276 positions in the chains. */
2277 int dst_l, src_l;
2278 int ii, jj, n;
2279 struct variable_union_info *vui;
2281 /* If DST is shared compare the location chains.
2282 If they are different we will modify the chain in DST with
2283 high probability so make a copy of DST. */
2284 if (shared_var_p (dst, set->vars))
2286 for (node = src->var_part[i].loc_chain,
2287 node2 = dst->var_part[j].loc_chain; node && node2;
2288 node = node->next, node2 = node2->next)
2290 if (!((REG_P (node2->loc)
2291 && REG_P (node->loc)
2292 && REGNO (node2->loc) == REGNO (node->loc))
2293 || rtx_equal_p (node2->loc, node->loc)))
2295 if (node2->init < node->init)
2296 node2->init = node->init;
2297 break;
2300 if (node || node2)
2302 dstp = unshare_variable (set, dstp, dst,
2303 VAR_INIT_STATUS_UNKNOWN);
2304 dst = (variable)*dstp;
2308 src_l = 0;
2309 for (node = src->var_part[i].loc_chain; node; node = node->next)
2310 src_l++;
2311 dst_l = 0;
2312 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2313 dst_l++;
2315 if (dst_l == 1)
2317 /* The most common case, much simpler, no qsort is needed. */
2318 location_chain dstnode = dst->var_part[j].loc_chain;
2319 dst->var_part[k].loc_chain = dstnode;
2320 dst->var_part[k].offset = dst->var_part[j].offset;
2321 node2 = dstnode;
2322 for (node = src->var_part[i].loc_chain; node; node = node->next)
2323 if (!((REG_P (dstnode->loc)
2324 && REG_P (node->loc)
2325 && REGNO (dstnode->loc) == REGNO (node->loc))
2326 || rtx_equal_p (dstnode->loc, node->loc)))
2328 location_chain new_node;
2330 /* Copy the location from SRC. */
2331 new_node = (location_chain) pool_alloc (loc_chain_pool);
2332 new_node->loc = node->loc;
2333 new_node->init = node->init;
2334 if (!node->set_src || MEM_P (node->set_src))
2335 new_node->set_src = NULL;
2336 else
2337 new_node->set_src = node->set_src;
2338 node2->next = new_node;
2339 node2 = new_node;
2341 node2->next = NULL;
2343 else
2345 if (src_l + dst_l > vui_allocated)
2347 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2348 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2349 vui_allocated);
2351 vui = vui_vec;
2353 /* Fill in the locations from DST. */
2354 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2355 node = node->next, jj++)
2357 vui[jj].lc = node;
2358 vui[jj].pos_dst = jj;
2360 /* Pos plus value larger than a sum of 2 valid positions. */
2361 vui[jj].pos = jj + src_l + dst_l;
2364 /* Fill in the locations from SRC. */
2365 n = dst_l;
2366 for (node = src->var_part[i].loc_chain, ii = 0; node;
2367 node = node->next, ii++)
2369 /* Find location from NODE. */
2370 for (jj = 0; jj < dst_l; jj++)
2372 if ((REG_P (vui[jj].lc->loc)
2373 && REG_P (node->loc)
2374 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2375 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2377 vui[jj].pos = jj + ii;
2378 break;
2381 if (jj >= dst_l) /* The location has not been found. */
2383 location_chain new_node;
2385 /* Copy the location from SRC. */
2386 new_node = (location_chain) pool_alloc (loc_chain_pool);
2387 new_node->loc = node->loc;
2388 new_node->init = node->init;
2389 if (!node->set_src || MEM_P (node->set_src))
2390 new_node->set_src = NULL;
2391 else
2392 new_node->set_src = node->set_src;
2393 vui[n].lc = new_node;
2394 vui[n].pos_dst = src_l + dst_l;
2395 vui[n].pos = ii + src_l + dst_l;
2396 n++;
2400 if (dst_l == 2)
2402 /* Special case still very common case. For dst_l == 2
2403 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2404 vui[i].pos == i + src_l + dst_l. */
2405 if (vui[0].pos > vui[1].pos)
2407 /* Order should be 1, 0, 2... */
2408 dst->var_part[k].loc_chain = vui[1].lc;
2409 vui[1].lc->next = vui[0].lc;
2410 if (n >= 3)
2412 vui[0].lc->next = vui[2].lc;
2413 vui[n - 1].lc->next = NULL;
2415 else
2416 vui[0].lc->next = NULL;
2417 ii = 3;
2419 else
2421 dst->var_part[k].loc_chain = vui[0].lc;
2422 if (n >= 3 && vui[2].pos < vui[1].pos)
2424 /* Order should be 0, 2, 1, 3... */
2425 vui[0].lc->next = vui[2].lc;
2426 vui[2].lc->next = vui[1].lc;
2427 if (n >= 4)
2429 vui[1].lc->next = vui[3].lc;
2430 vui[n - 1].lc->next = NULL;
2432 else
2433 vui[1].lc->next = NULL;
2434 ii = 4;
2436 else
2438 /* Order should be 0, 1, 2... */
2439 ii = 1;
2440 vui[n - 1].lc->next = NULL;
2443 for (; ii < n; ii++)
2444 vui[ii - 1].lc->next = vui[ii].lc;
2446 else
2448 qsort (vui, n, sizeof (struct variable_union_info),
2449 variable_union_info_cmp_pos);
2451 /* Reconnect the nodes in sorted order. */
2452 for (ii = 1; ii < n; ii++)
2453 vui[ii - 1].lc->next = vui[ii].lc;
2454 vui[n - 1].lc->next = NULL;
2455 dst->var_part[k].loc_chain = vui[0].lc;
2458 dst->var_part[k].offset = dst->var_part[j].offset;
2460 i--;
2461 j--;
2463 else if ((i >= 0 && j >= 0
2464 && src->var_part[i].offset < dst->var_part[j].offset)
2465 || i < 0)
2467 dst->var_part[k] = dst->var_part[j];
2468 j--;
2470 else if ((i >= 0 && j >= 0
2471 && src->var_part[i].offset > dst->var_part[j].offset)
2472 || j < 0)
2474 location_chain *nextp;
2476 /* Copy the chain from SRC. */
2477 nextp = &dst->var_part[k].loc_chain;
2478 for (node = src->var_part[i].loc_chain; node; node = node->next)
2480 location_chain new_lc;
2482 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2483 new_lc->next = NULL;
2484 new_lc->init = node->init;
2485 if (!node->set_src || MEM_P (node->set_src))
2486 new_lc->set_src = NULL;
2487 else
2488 new_lc->set_src = node->set_src;
2489 new_lc->loc = node->loc;
2491 *nextp = new_lc;
2492 nextp = &new_lc->next;
2495 dst->var_part[k].offset = src->var_part[i].offset;
2496 i--;
2498 dst->var_part[k].cur_loc = NULL;
2501 if (flag_var_tracking_uninit)
2502 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2504 location_chain node, node2;
2505 for (node = src->var_part[i].loc_chain; node; node = node->next)
2506 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2507 if (rtx_equal_p (node->loc, node2->loc))
2509 if (node->init > node2->init)
2510 node2->init = node->init;
2514 /* Continue traversing the hash table. */
2515 return 1;
2518 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2520 static void
2521 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2523 int i;
2525 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2526 attrs_list_union (&dst->regs[i], src->regs[i]);
2528 if (dst->vars == empty_shared_hash)
2530 shared_hash_destroy (dst->vars);
2531 dst->vars = shared_hash_copy (src->vars);
2533 else
2535 htab_iterator hi;
2536 variable var;
2538 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2539 variable_union (var, dst);
2543 /* Whether the value is currently being expanded. */
2544 #define VALUE_RECURSED_INTO(x) \
2545 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2546 /* Whether the value is in changed_variables hash table. */
2547 #define VALUE_CHANGED(x) \
2548 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2549 /* Whether the decl is in changed_variables hash table. */
2550 #define DECL_CHANGED(x) TREE_VISITED (x)
2552 /* Record that DV has been added into resp. removed from changed_variables
2553 hashtable. */
2555 static inline void
2556 set_dv_changed (decl_or_value dv, bool newv)
2558 if (dv_is_value_p (dv))
2559 VALUE_CHANGED (dv_as_value (dv)) = newv;
2560 else
2561 DECL_CHANGED (dv_as_decl (dv)) = newv;
2564 /* Return true if DV is present in changed_variables hash table. */
2566 static inline bool
2567 dv_changed_p (decl_or_value dv)
2569 return (dv_is_value_p (dv)
2570 ? VALUE_CHANGED (dv_as_value (dv))
2571 : DECL_CHANGED (dv_as_decl (dv)));
2574 /* Return a location list node whose loc is rtx_equal to LOC, in the
2575 location list of a one-part variable or value VAR, or in that of
2576 any values recursively mentioned in the location lists. VARS must
2577 be in star-canonical form. */
2579 static location_chain
2580 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2582 location_chain node;
2583 enum rtx_code loc_code;
2585 if (!var)
2586 return NULL;
2588 gcc_checking_assert (dv_onepart_p (var->dv));
2590 if (!var->n_var_parts)
2591 return NULL;
2593 gcc_checking_assert (var->var_part[0].offset == 0);
2594 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2596 loc_code = GET_CODE (loc);
2597 for (node = var->var_part[0].loc_chain; node; node = node->next)
2599 decl_or_value dv;
2600 variable rvar;
2602 if (GET_CODE (node->loc) != loc_code)
2604 if (GET_CODE (node->loc) != VALUE)
2605 continue;
2607 else if (loc == node->loc)
2608 return node;
2609 else if (loc_code != VALUE)
2611 if (rtx_equal_p (loc, node->loc))
2612 return node;
2613 continue;
2616 /* Since we're in star-canonical form, we don't need to visit
2617 non-canonical nodes: one-part variables and non-canonical
2618 values would only point back to the canonical node. */
2619 if (dv_is_value_p (var->dv)
2620 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2622 /* Skip all subsequent VALUEs. */
2623 while (node->next && GET_CODE (node->next->loc) == VALUE)
2625 node = node->next;
2626 gcc_checking_assert (!canon_value_cmp (node->loc,
2627 dv_as_value (var->dv)));
2628 if (loc == node->loc)
2629 return node;
2631 continue;
2634 gcc_checking_assert (node == var->var_part[0].loc_chain);
2635 gcc_checking_assert (!node->next);
2637 dv = dv_from_value (node->loc);
2638 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2639 return find_loc_in_1pdv (loc, rvar, vars);
2642 return NULL;
2645 /* Hash table iteration argument passed to variable_merge. */
2646 struct dfset_merge
2648 /* The set in which the merge is to be inserted. */
2649 dataflow_set *dst;
2650 /* The set that we're iterating in. */
2651 dataflow_set *cur;
2652 /* The set that may contain the other dv we are to merge with. */
2653 dataflow_set *src;
2654 /* Number of onepart dvs in src. */
2655 int src_onepart_cnt;
2658 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2659 loc_cmp order, and it is maintained as such. */
2661 static void
2662 insert_into_intersection (location_chain *nodep, rtx loc,
2663 enum var_init_status status)
2665 location_chain node;
2666 int r;
2668 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2669 if ((r = loc_cmp (node->loc, loc)) == 0)
2671 node->init = MIN (node->init, status);
2672 return;
2674 else if (r > 0)
2675 break;
2677 node = (location_chain) pool_alloc (loc_chain_pool);
2679 node->loc = loc;
2680 node->set_src = NULL;
2681 node->init = status;
2682 node->next = *nodep;
2683 *nodep = node;
2686 /* Insert in DEST the intersection the locations present in both
2687 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2688 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2689 DSM->dst. */
2691 static void
2692 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2693 location_chain s1node, variable s2var)
2695 dataflow_set *s1set = dsm->cur;
2696 dataflow_set *s2set = dsm->src;
2697 location_chain found;
2699 if (s2var)
2701 location_chain s2node;
2703 gcc_checking_assert (dv_onepart_p (s2var->dv));
2705 if (s2var->n_var_parts)
2707 gcc_checking_assert (s2var->var_part[0].offset == 0);
2708 s2node = s2var->var_part[0].loc_chain;
2710 for (; s1node && s2node;
2711 s1node = s1node->next, s2node = s2node->next)
2712 if (s1node->loc != s2node->loc)
2713 break;
2714 else if (s1node->loc == val)
2715 continue;
2716 else
2717 insert_into_intersection (dest, s1node->loc,
2718 MIN (s1node->init, s2node->init));
2722 for (; s1node; s1node = s1node->next)
2724 if (s1node->loc == val)
2725 continue;
2727 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2728 shared_hash_htab (s2set->vars))))
2730 insert_into_intersection (dest, s1node->loc,
2731 MIN (s1node->init, found->init));
2732 continue;
2735 if (GET_CODE (s1node->loc) == VALUE
2736 && !VALUE_RECURSED_INTO (s1node->loc))
2738 decl_or_value dv = dv_from_value (s1node->loc);
2739 variable svar = shared_hash_find (s1set->vars, dv);
2740 if (svar)
2742 if (svar->n_var_parts == 1)
2744 VALUE_RECURSED_INTO (s1node->loc) = true;
2745 intersect_loc_chains (val, dest, dsm,
2746 svar->var_part[0].loc_chain,
2747 s2var);
2748 VALUE_RECURSED_INTO (s1node->loc) = false;
2753 /* ??? if the location is equivalent to any location in src,
2754 searched recursively
2756 add to dst the values needed to represent the equivalence
2758 telling whether locations S is equivalent to another dv's
2759 location list:
2761 for each location D in the list
2763 if S and D satisfy rtx_equal_p, then it is present
2765 else if D is a value, recurse without cycles
2767 else if S and D have the same CODE and MODE
2769 for each operand oS and the corresponding oD
2771 if oS and oD are not equivalent, then S an D are not equivalent
2773 else if they are RTX vectors
2775 if any vector oS element is not equivalent to its respective oD,
2776 then S and D are not equivalent
2784 /* Return -1 if X should be before Y in a location list for a 1-part
2785 variable, 1 if Y should be before X, and 0 if they're equivalent
2786 and should not appear in the list. */
2788 static int
2789 loc_cmp (rtx x, rtx y)
2791 int i, j, r;
2792 RTX_CODE code = GET_CODE (x);
2793 const char *fmt;
2795 if (x == y)
2796 return 0;
2798 if (REG_P (x))
2800 if (!REG_P (y))
2801 return -1;
2802 gcc_assert (GET_MODE (x) == GET_MODE (y));
2803 if (REGNO (x) == REGNO (y))
2804 return 0;
2805 else if (REGNO (x) < REGNO (y))
2806 return -1;
2807 else
2808 return 1;
2811 if (REG_P (y))
2812 return 1;
2814 if (MEM_P (x))
2816 if (!MEM_P (y))
2817 return -1;
2818 gcc_assert (GET_MODE (x) == GET_MODE (y));
2819 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2822 if (MEM_P (y))
2823 return 1;
2825 if (GET_CODE (x) == VALUE)
2827 if (GET_CODE (y) != VALUE)
2828 return -1;
2829 /* Don't assert the modes are the same, that is true only
2830 when not recursing. (subreg:QI (value:SI 1:1) 0)
2831 and (subreg:QI (value:DI 2:2) 0) can be compared,
2832 even when the modes are different. */
2833 if (canon_value_cmp (x, y))
2834 return -1;
2835 else
2836 return 1;
2839 if (GET_CODE (y) == VALUE)
2840 return 1;
2842 if (GET_CODE (x) == GET_CODE (y))
2843 /* Compare operands below. */;
2844 else if (GET_CODE (x) < GET_CODE (y))
2845 return -1;
2846 else
2847 return 1;
2849 gcc_assert (GET_MODE (x) == GET_MODE (y));
2851 if (GET_CODE (x) == DEBUG_EXPR)
2853 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2854 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2855 return -1;
2856 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2857 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2858 return 1;
2861 fmt = GET_RTX_FORMAT (code);
2862 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2863 switch (fmt[i])
2865 case 'w':
2866 if (XWINT (x, i) == XWINT (y, i))
2867 break;
2868 else if (XWINT (x, i) < XWINT (y, i))
2869 return -1;
2870 else
2871 return 1;
2873 case 'n':
2874 case 'i':
2875 if (XINT (x, i) == XINT (y, i))
2876 break;
2877 else if (XINT (x, i) < XINT (y, i))
2878 return -1;
2879 else
2880 return 1;
2882 case 'V':
2883 case 'E':
2884 /* Compare the vector length first. */
2885 if (XVECLEN (x, i) == XVECLEN (y, i))
2886 /* Compare the vectors elements. */;
2887 else if (XVECLEN (x, i) < XVECLEN (y, i))
2888 return -1;
2889 else
2890 return 1;
2892 for (j = 0; j < XVECLEN (x, i); j++)
2893 if ((r = loc_cmp (XVECEXP (x, i, j),
2894 XVECEXP (y, i, j))))
2895 return r;
2896 break;
2898 case 'e':
2899 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2900 return r;
2901 break;
2903 case 'S':
2904 case 's':
2905 if (XSTR (x, i) == XSTR (y, i))
2906 break;
2907 if (!XSTR (x, i))
2908 return -1;
2909 if (!XSTR (y, i))
2910 return 1;
2911 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2912 break;
2913 else if (r < 0)
2914 return -1;
2915 else
2916 return 1;
2918 case 'u':
2919 /* These are just backpointers, so they don't matter. */
2920 break;
2922 case '0':
2923 case 't':
2924 break;
2926 /* It is believed that rtx's at this level will never
2927 contain anything but integers and other rtx's,
2928 except for within LABEL_REFs and SYMBOL_REFs. */
2929 default:
2930 gcc_unreachable ();
2933 return 0;
2936 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2937 from VALUE to DVP. */
2939 static int
2940 add_value_chain (rtx *loc, void *dvp)
2942 decl_or_value dv, ldv;
2943 value_chain vc, nvc;
2944 void **slot;
2946 if (GET_CODE (*loc) == VALUE)
2947 ldv = dv_from_value (*loc);
2948 else if (GET_CODE (*loc) == DEBUG_EXPR)
2949 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2950 else
2951 return 0;
2953 if (dv_as_opaque (ldv) == dvp)
2954 return 0;
2956 dv = (decl_or_value) dvp;
2957 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2958 INSERT);
2959 if (!*slot)
2961 vc = (value_chain) pool_alloc (value_chain_pool);
2962 vc->dv = ldv;
2963 vc->next = NULL;
2964 vc->refcount = 0;
2965 *slot = (void *) vc;
2967 else
2969 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2970 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2971 break;
2972 if (vc)
2974 vc->refcount++;
2975 return 0;
2978 vc = (value_chain) *slot;
2979 nvc = (value_chain) pool_alloc (value_chain_pool);
2980 nvc->dv = dv;
2981 nvc->next = vc->next;
2982 nvc->refcount = 1;
2983 vc->next = nvc;
2984 return 0;
2987 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2988 from those VALUEs to DVP. */
2990 static void
2991 add_value_chains (decl_or_value dv, rtx loc)
2993 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2995 add_value_chain (&loc, dv_as_opaque (dv));
2996 return;
2998 if (REG_P (loc))
2999 return;
3000 if (MEM_P (loc))
3001 loc = XEXP (loc, 0);
3002 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
3005 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
3006 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
3007 that is something we never can express in .debug_info and can prevent
3008 reverse ops from being used. */
3010 static void
3011 add_cselib_value_chains (decl_or_value dv)
3013 struct elt_loc_list **l;
3015 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
3016 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
3017 *l = (*l)->next;
3018 else
3020 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
3021 l = &(*l)->next;
3025 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
3026 from VALUE to DVP. */
3028 static int
3029 remove_value_chain (rtx *loc, void *dvp)
3031 decl_or_value dv, ldv;
3032 value_chain vc;
3033 void **slot;
3035 if (GET_CODE (*loc) == VALUE)
3036 ldv = dv_from_value (*loc);
3037 else if (GET_CODE (*loc) == DEBUG_EXPR)
3038 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
3039 else
3040 return 0;
3042 if (dv_as_opaque (ldv) == dvp)
3043 return 0;
3045 dv = (decl_or_value) dvp;
3046 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
3047 NO_INSERT);
3048 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
3049 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
3051 value_chain dvc = vc->next;
3052 gcc_assert (dvc->refcount > 0);
3053 if (--dvc->refcount == 0)
3055 vc->next = dvc->next;
3056 pool_free (value_chain_pool, dvc);
3057 if (vc->next == NULL && vc == (value_chain) *slot)
3059 pool_free (value_chain_pool, vc);
3060 htab_clear_slot (value_chains, slot);
3063 return 0;
3065 gcc_unreachable ();
3068 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3069 from those VALUEs to DVP. */
3071 static void
3072 remove_value_chains (decl_or_value dv, rtx loc)
3074 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
3076 remove_value_chain (&loc, dv_as_opaque (dv));
3077 return;
3079 if (REG_P (loc))
3080 return;
3081 if (MEM_P (loc))
3082 loc = XEXP (loc, 0);
3083 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
3086 #if ENABLE_CHECKING
3087 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
3088 VALUEs to DV. */
3090 static void
3091 remove_cselib_value_chains (decl_or_value dv)
3093 struct elt_loc_list *l;
3095 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
3096 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
3099 /* Check the order of entries in one-part variables. */
3101 static int
3102 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3104 variable var = (variable) *slot;
3105 decl_or_value dv = var->dv;
3106 location_chain node, next;
3108 #ifdef ENABLE_RTL_CHECKING
3109 int i;
3110 for (i = 0; i < var->n_var_parts; i++)
3111 gcc_assert (var->var_part[0].cur_loc == NULL);
3112 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
3113 #endif
3115 if (!dv_onepart_p (dv))
3116 return 1;
3118 gcc_assert (var->n_var_parts == 1);
3119 node = var->var_part[0].loc_chain;
3120 gcc_assert (node);
3122 while ((next = node->next))
3124 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3125 node = next;
3128 return 1;
3130 #endif
3132 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3133 more likely to be chosen as canonical for an equivalence set.
3134 Ensure less likely values can reach more likely neighbors, making
3135 the connections bidirectional. */
3137 static int
3138 canonicalize_values_mark (void **slot, void *data)
3140 dataflow_set *set = (dataflow_set *)data;
3141 variable var = (variable) *slot;
3142 decl_or_value dv = var->dv;
3143 rtx val;
3144 location_chain node;
3146 if (!dv_is_value_p (dv))
3147 return 1;
3149 gcc_checking_assert (var->n_var_parts == 1);
3151 val = dv_as_value (dv);
3153 for (node = var->var_part[0].loc_chain; node; node = node->next)
3154 if (GET_CODE (node->loc) == VALUE)
3156 if (canon_value_cmp (node->loc, val))
3157 VALUE_RECURSED_INTO (val) = true;
3158 else
3160 decl_or_value odv = dv_from_value (node->loc);
3161 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3163 set_slot_part (set, val, oslot, odv, 0,
3164 node->init, NULL_RTX);
3166 VALUE_RECURSED_INTO (node->loc) = true;
3170 return 1;
3173 /* Remove redundant entries from equivalence lists in onepart
3174 variables, canonicalizing equivalence sets into star shapes. */
3176 static int
3177 canonicalize_values_star (void **slot, void *data)
3179 dataflow_set *set = (dataflow_set *)data;
3180 variable var = (variable) *slot;
3181 decl_or_value dv = var->dv;
3182 location_chain node;
3183 decl_or_value cdv;
3184 rtx val, cval;
3185 void **cslot;
3186 bool has_value;
3187 bool has_marks;
3189 if (!dv_onepart_p (dv))
3190 return 1;
3192 gcc_checking_assert (var->n_var_parts == 1);
3194 if (dv_is_value_p (dv))
3196 cval = dv_as_value (dv);
3197 if (!VALUE_RECURSED_INTO (cval))
3198 return 1;
3199 VALUE_RECURSED_INTO (cval) = false;
3201 else
3202 cval = NULL_RTX;
3204 restart:
3205 val = cval;
3206 has_value = false;
3207 has_marks = false;
3209 gcc_assert (var->n_var_parts == 1);
3211 for (node = var->var_part[0].loc_chain; node; node = node->next)
3212 if (GET_CODE (node->loc) == VALUE)
3214 has_value = true;
3215 if (VALUE_RECURSED_INTO (node->loc))
3216 has_marks = true;
3217 if (canon_value_cmp (node->loc, cval))
3218 cval = node->loc;
3221 if (!has_value)
3222 return 1;
3224 if (cval == val)
3226 if (!has_marks || dv_is_decl_p (dv))
3227 return 1;
3229 /* Keep it marked so that we revisit it, either after visiting a
3230 child node, or after visiting a new parent that might be
3231 found out. */
3232 VALUE_RECURSED_INTO (val) = true;
3234 for (node = var->var_part[0].loc_chain; node; node = node->next)
3235 if (GET_CODE (node->loc) == VALUE
3236 && VALUE_RECURSED_INTO (node->loc))
3238 cval = node->loc;
3239 restart_with_cval:
3240 VALUE_RECURSED_INTO (cval) = false;
3241 dv = dv_from_value (cval);
3242 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3243 if (!slot)
3245 gcc_assert (dv_is_decl_p (var->dv));
3246 /* The canonical value was reset and dropped.
3247 Remove it. */
3248 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3249 return 1;
3251 var = (variable)*slot;
3252 gcc_assert (dv_is_value_p (var->dv));
3253 if (var->n_var_parts == 0)
3254 return 1;
3255 gcc_assert (var->n_var_parts == 1);
3256 goto restart;
3259 VALUE_RECURSED_INTO (val) = false;
3261 return 1;
3264 /* Push values to the canonical one. */
3265 cdv = dv_from_value (cval);
3266 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3268 for (node = var->var_part[0].loc_chain; node; node = node->next)
3269 if (node->loc != cval)
3271 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3272 node->init, NULL_RTX);
3273 if (GET_CODE (node->loc) == VALUE)
3275 decl_or_value ndv = dv_from_value (node->loc);
3277 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3278 NO_INSERT);
3280 if (canon_value_cmp (node->loc, val))
3282 /* If it could have been a local minimum, it's not any more,
3283 since it's now neighbor to cval, so it may have to push
3284 to it. Conversely, if it wouldn't have prevailed over
3285 val, then whatever mark it has is fine: if it was to
3286 push, it will now push to a more canonical node, but if
3287 it wasn't, then it has already pushed any values it might
3288 have to. */
3289 VALUE_RECURSED_INTO (node->loc) = true;
3290 /* Make sure we visit node->loc by ensuring we cval is
3291 visited too. */
3292 VALUE_RECURSED_INTO (cval) = true;
3294 else if (!VALUE_RECURSED_INTO (node->loc))
3295 /* If we have no need to "recurse" into this node, it's
3296 already "canonicalized", so drop the link to the old
3297 parent. */
3298 clobber_variable_part (set, cval, ndv, 0, NULL);
3300 else if (GET_CODE (node->loc) == REG)
3302 attrs list = set->regs[REGNO (node->loc)], *listp;
3304 /* Change an existing attribute referring to dv so that it
3305 refers to cdv, removing any duplicate this might
3306 introduce, and checking that no previous duplicates
3307 existed, all in a single pass. */
3309 while (list)
3311 if (list->offset == 0
3312 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3313 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3314 break;
3316 list = list->next;
3319 gcc_assert (list);
3320 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3322 list->dv = cdv;
3323 for (listp = &list->next; (list = *listp); listp = &list->next)
3325 if (list->offset)
3326 continue;
3328 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3330 *listp = list->next;
3331 pool_free (attrs_pool, list);
3332 list = *listp;
3333 break;
3336 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3339 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3341 for (listp = &list->next; (list = *listp); listp = &list->next)
3343 if (list->offset)
3344 continue;
3346 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3348 *listp = list->next;
3349 pool_free (attrs_pool, list);
3350 list = *listp;
3351 break;
3354 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3357 else
3358 gcc_unreachable ();
3360 #if ENABLE_CHECKING
3361 while (list)
3363 if (list->offset == 0
3364 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3365 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3366 gcc_unreachable ();
3368 list = list->next;
3370 #endif
3374 if (val)
3375 set_slot_part (set, val, cslot, cdv, 0,
3376 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3378 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3380 /* Variable may have been unshared. */
3381 var = (variable)*slot;
3382 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3383 && var->var_part[0].loc_chain->next == NULL);
3385 if (VALUE_RECURSED_INTO (cval))
3386 goto restart_with_cval;
3388 return 1;
3391 /* Bind one-part variables to the canonical value in an equivalence
3392 set. Not doing this causes dataflow convergence failure in rare
3393 circumstances, see PR42873. Unfortunately we can't do this
3394 efficiently as part of canonicalize_values_star, since we may not
3395 have determined or even seen the canonical value of a set when we
3396 get to a variable that references another member of the set. */
3398 static int
3399 canonicalize_vars_star (void **slot, void *data)
3401 dataflow_set *set = (dataflow_set *)data;
3402 variable var = (variable) *slot;
3403 decl_or_value dv = var->dv;
3404 location_chain node;
3405 rtx cval;
3406 decl_or_value cdv;
3407 void **cslot;
3408 variable cvar;
3409 location_chain cnode;
3411 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3412 return 1;
3414 gcc_assert (var->n_var_parts == 1);
3416 node = var->var_part[0].loc_chain;
3418 if (GET_CODE (node->loc) != VALUE)
3419 return 1;
3421 gcc_assert (!node->next);
3422 cval = node->loc;
3424 /* Push values to the canonical one. */
3425 cdv = dv_from_value (cval);
3426 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3427 if (!cslot)
3428 return 1;
3429 cvar = (variable)*cslot;
3430 gcc_assert (cvar->n_var_parts == 1);
3432 cnode = cvar->var_part[0].loc_chain;
3434 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3435 that are not “more canonical” than it. */
3436 if (GET_CODE (cnode->loc) != VALUE
3437 || !canon_value_cmp (cnode->loc, cval))
3438 return 1;
3440 /* CVAL was found to be non-canonical. Change the variable to point
3441 to the canonical VALUE. */
3442 gcc_assert (!cnode->next);
3443 cval = cnode->loc;
3445 slot = set_slot_part (set, cval, slot, dv, 0,
3446 node->init, node->set_src);
3447 clobber_slot_part (set, cval, slot, 0, node->set_src);
3449 return 1;
3452 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3453 corresponding entry in DSM->src. Multi-part variables are combined
3454 with variable_union, whereas onepart dvs are combined with
3455 intersection. */
3457 static int
3458 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3460 dataflow_set *dst = dsm->dst;
3461 void **dstslot;
3462 variable s2var, dvar = NULL;
3463 decl_or_value dv = s1var->dv;
3464 bool onepart = dv_onepart_p (dv);
3465 rtx val;
3466 hashval_t dvhash;
3467 location_chain node, *nodep;
3469 /* If the incoming onepart variable has an empty location list, then
3470 the intersection will be just as empty. For other variables,
3471 it's always union. */
3472 gcc_checking_assert (s1var->n_var_parts
3473 && s1var->var_part[0].loc_chain);
3475 if (!onepart)
3476 return variable_union (s1var, dst);
3478 gcc_checking_assert (s1var->n_var_parts == 1
3479 && s1var->var_part[0].offset == 0);
3481 dvhash = dv_htab_hash (dv);
3482 if (dv_is_value_p (dv))
3483 val = dv_as_value (dv);
3484 else
3485 val = NULL;
3487 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3488 if (!s2var)
3490 dst_can_be_shared = false;
3491 return 1;
3494 dsm->src_onepart_cnt--;
3495 gcc_assert (s2var->var_part[0].loc_chain
3496 && s2var->n_var_parts == 1
3497 && s2var->var_part[0].offset == 0);
3499 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3500 if (dstslot)
3502 dvar = (variable)*dstslot;
3503 gcc_assert (dvar->refcount == 1
3504 && dvar->n_var_parts == 1
3505 && dvar->var_part[0].offset == 0);
3506 nodep = &dvar->var_part[0].loc_chain;
3508 else
3510 nodep = &node;
3511 node = NULL;
3514 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3516 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3517 dvhash, INSERT);
3518 *dstslot = dvar = s2var;
3519 dvar->refcount++;
3521 else
3523 dst_can_be_shared = false;
3525 intersect_loc_chains (val, nodep, dsm,
3526 s1var->var_part[0].loc_chain, s2var);
3528 if (!dstslot)
3530 if (node)
3532 dvar = (variable) pool_alloc (dv_pool (dv));
3533 dvar->dv = dv;
3534 dvar->refcount = 1;
3535 dvar->n_var_parts = 1;
3536 dvar->cur_loc_changed = false;
3537 dvar->in_changed_variables = false;
3538 dvar->var_part[0].offset = 0;
3539 dvar->var_part[0].loc_chain = node;
3540 dvar->var_part[0].cur_loc = NULL;
3542 dstslot
3543 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3544 INSERT);
3545 gcc_assert (!*dstslot);
3546 *dstslot = dvar;
3548 else
3549 return 1;
3553 nodep = &dvar->var_part[0].loc_chain;
3554 while ((node = *nodep))
3556 location_chain *nextp = &node->next;
3558 if (GET_CODE (node->loc) == REG)
3560 attrs list;
3562 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3563 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3564 && dv_is_value_p (list->dv))
3565 break;
3567 if (!list)
3568 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3569 dv, 0, node->loc);
3570 /* If this value became canonical for another value that had
3571 this register, we want to leave it alone. */
3572 else if (dv_as_value (list->dv) != val)
3574 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3575 dstslot, dv, 0,
3576 node->init, NULL_RTX);
3577 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3579 /* Since nextp points into the removed node, we can't
3580 use it. The pointer to the next node moved to nodep.
3581 However, if the variable we're walking is unshared
3582 during our walk, we'll keep walking the location list
3583 of the previously-shared variable, in which case the
3584 node won't have been removed, and we'll want to skip
3585 it. That's why we test *nodep here. */
3586 if (*nodep != node)
3587 nextp = nodep;
3590 else
3591 /* Canonicalization puts registers first, so we don't have to
3592 walk it all. */
3593 break;
3594 nodep = nextp;
3597 if (dvar != (variable)*dstslot)
3598 dvar = (variable)*dstslot;
3599 nodep = &dvar->var_part[0].loc_chain;
3601 if (val)
3603 /* Mark all referenced nodes for canonicalization, and make sure
3604 we have mutual equivalence links. */
3605 VALUE_RECURSED_INTO (val) = true;
3606 for (node = *nodep; node; node = node->next)
3607 if (GET_CODE (node->loc) == VALUE)
3609 VALUE_RECURSED_INTO (node->loc) = true;
3610 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3611 node->init, NULL, INSERT);
3614 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3615 gcc_assert (*dstslot == dvar);
3616 canonicalize_values_star (dstslot, dst);
3617 gcc_checking_assert (dstslot
3618 == shared_hash_find_slot_noinsert_1 (dst->vars,
3619 dv, dvhash));
3620 dvar = (variable)*dstslot;
3622 else
3624 bool has_value = false, has_other = false;
3626 /* If we have one value and anything else, we're going to
3627 canonicalize this, so make sure all values have an entry in
3628 the table and are marked for canonicalization. */
3629 for (node = *nodep; node; node = node->next)
3631 if (GET_CODE (node->loc) == VALUE)
3633 /* If this was marked during register canonicalization,
3634 we know we have to canonicalize values. */
3635 if (has_value)
3636 has_other = true;
3637 has_value = true;
3638 if (has_other)
3639 break;
3641 else
3643 has_other = true;
3644 if (has_value)
3645 break;
3649 if (has_value && has_other)
3651 for (node = *nodep; node; node = node->next)
3653 if (GET_CODE (node->loc) == VALUE)
3655 decl_or_value dv = dv_from_value (node->loc);
3656 void **slot = NULL;
3658 if (shared_hash_shared (dst->vars))
3659 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3660 if (!slot)
3661 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3662 INSERT);
3663 if (!*slot)
3665 variable var = (variable) pool_alloc (dv_pool (dv));
3666 var->dv = dv;
3667 var->refcount = 1;
3668 var->n_var_parts = 1;
3669 var->cur_loc_changed = false;
3670 var->in_changed_variables = false;
3671 var->var_part[0].offset = 0;
3672 var->var_part[0].loc_chain = NULL;
3673 var->var_part[0].cur_loc = NULL;
3674 *slot = var;
3677 VALUE_RECURSED_INTO (node->loc) = true;
3681 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3682 gcc_assert (*dstslot == dvar);
3683 canonicalize_values_star (dstslot, dst);
3684 gcc_checking_assert (dstslot
3685 == shared_hash_find_slot_noinsert_1 (dst->vars,
3686 dv, dvhash));
3687 dvar = (variable)*dstslot;
3691 if (!onepart_variable_different_p (dvar, s2var))
3693 variable_htab_free (dvar);
3694 *dstslot = dvar = s2var;
3695 dvar->refcount++;
3697 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3699 variable_htab_free (dvar);
3700 *dstslot = dvar = s1var;
3701 dvar->refcount++;
3702 dst_can_be_shared = false;
3704 else
3705 dst_can_be_shared = false;
3707 return 1;
3710 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3711 multi-part variable. Unions of multi-part variables and
3712 intersections of one-part ones will be handled in
3713 variable_merge_over_cur(). */
3715 static int
3716 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3718 dataflow_set *dst = dsm->dst;
3719 decl_or_value dv = s2var->dv;
3720 bool onepart = dv_onepart_p (dv);
3722 if (!onepart)
3724 void **dstp = shared_hash_find_slot (dst->vars, dv);
3725 *dstp = s2var;
3726 s2var->refcount++;
3727 return 1;
3730 dsm->src_onepart_cnt++;
3731 return 1;
3734 /* Combine dataflow set information from SRC2 into DST, using PDST
3735 to carry over information across passes. */
3737 static void
3738 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3740 dataflow_set cur = *dst;
3741 dataflow_set *src1 = &cur;
3742 struct dfset_merge dsm;
3743 int i;
3744 size_t src1_elems, src2_elems;
3745 htab_iterator hi;
3746 variable var;
3748 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3749 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3750 dataflow_set_init (dst);
3751 dst->stack_adjust = cur.stack_adjust;
3752 shared_hash_destroy (dst->vars);
3753 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3754 dst->vars->refcount = 1;
3755 dst->vars->htab
3756 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3757 variable_htab_eq, variable_htab_free);
3759 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3760 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3762 dsm.dst = dst;
3763 dsm.src = src2;
3764 dsm.cur = src1;
3765 dsm.src_onepart_cnt = 0;
3767 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3768 variable_merge_over_src (var, &dsm);
3769 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3770 variable_merge_over_cur (var, &dsm);
3772 if (dsm.src_onepart_cnt)
3773 dst_can_be_shared = false;
3775 dataflow_set_destroy (src1);
3778 /* Mark register equivalences. */
3780 static void
3781 dataflow_set_equiv_regs (dataflow_set *set)
3783 int i;
3784 attrs list, *listp;
3786 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3788 rtx canon[NUM_MACHINE_MODES];
3790 /* If the list is empty or one entry, no need to canonicalize
3791 anything. */
3792 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3793 continue;
3795 memset (canon, 0, sizeof (canon));
3797 for (list = set->regs[i]; list; list = list->next)
3798 if (list->offset == 0 && dv_is_value_p (list->dv))
3800 rtx val = dv_as_value (list->dv);
3801 rtx *cvalp = &canon[(int)GET_MODE (val)];
3802 rtx cval = *cvalp;
3804 if (canon_value_cmp (val, cval))
3805 *cvalp = val;
3808 for (list = set->regs[i]; list; list = list->next)
3809 if (list->offset == 0 && dv_onepart_p (list->dv))
3811 rtx cval = canon[(int)GET_MODE (list->loc)];
3813 if (!cval)
3814 continue;
3816 if (dv_is_value_p (list->dv))
3818 rtx val = dv_as_value (list->dv);
3820 if (val == cval)
3821 continue;
3823 VALUE_RECURSED_INTO (val) = true;
3824 set_variable_part (set, val, dv_from_value (cval), 0,
3825 VAR_INIT_STATUS_INITIALIZED,
3826 NULL, NO_INSERT);
3829 VALUE_RECURSED_INTO (cval) = true;
3830 set_variable_part (set, cval, list->dv, 0,
3831 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3834 for (listp = &set->regs[i]; (list = *listp);
3835 listp = list ? &list->next : listp)
3836 if (list->offset == 0 && dv_onepart_p (list->dv))
3838 rtx cval = canon[(int)GET_MODE (list->loc)];
3839 void **slot;
3841 if (!cval)
3842 continue;
3844 if (dv_is_value_p (list->dv))
3846 rtx val = dv_as_value (list->dv);
3847 if (!VALUE_RECURSED_INTO (val))
3848 continue;
3851 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3852 canonicalize_values_star (slot, set);
3853 if (*listp != list)
3854 list = NULL;
3859 /* Remove any redundant values in the location list of VAR, which must
3860 be unshared and 1-part. */
3862 static void
3863 remove_duplicate_values (variable var)
3865 location_chain node, *nodep;
3867 gcc_assert (dv_onepart_p (var->dv));
3868 gcc_assert (var->n_var_parts == 1);
3869 gcc_assert (var->refcount == 1);
3871 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3873 if (GET_CODE (node->loc) == VALUE)
3875 if (VALUE_RECURSED_INTO (node->loc))
3877 /* Remove duplicate value node. */
3878 *nodep = node->next;
3879 pool_free (loc_chain_pool, node);
3880 continue;
3882 else
3883 VALUE_RECURSED_INTO (node->loc) = true;
3885 nodep = &node->next;
3888 for (node = var->var_part[0].loc_chain; node; node = node->next)
3889 if (GET_CODE (node->loc) == VALUE)
3891 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3892 VALUE_RECURSED_INTO (node->loc) = false;
3897 /* Hash table iteration argument passed to variable_post_merge. */
3898 struct dfset_post_merge
3900 /* The new input set for the current block. */
3901 dataflow_set *set;
3902 /* Pointer to the permanent input set for the current block, or
3903 NULL. */
3904 dataflow_set **permp;
3907 /* Create values for incoming expressions associated with one-part
3908 variables that don't have value numbers for them. */
3910 static int
3911 variable_post_merge_new_vals (void **slot, void *info)
3913 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3914 dataflow_set *set = dfpm->set;
3915 variable var = (variable)*slot;
3916 location_chain node;
3918 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3919 return 1;
3921 gcc_assert (var->n_var_parts == 1);
3923 if (dv_is_decl_p (var->dv))
3925 bool check_dupes = false;
3927 restart:
3928 for (node = var->var_part[0].loc_chain; node; node = node->next)
3930 if (GET_CODE (node->loc) == VALUE)
3931 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3932 else if (GET_CODE (node->loc) == REG)
3934 attrs att, *attp, *curp = NULL;
3936 if (var->refcount != 1)
3938 slot = unshare_variable (set, slot, var,
3939 VAR_INIT_STATUS_INITIALIZED);
3940 var = (variable)*slot;
3941 goto restart;
3944 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3945 attp = &att->next)
3946 if (att->offset == 0
3947 && GET_MODE (att->loc) == GET_MODE (node->loc))
3949 if (dv_is_value_p (att->dv))
3951 rtx cval = dv_as_value (att->dv);
3952 node->loc = cval;
3953 check_dupes = true;
3954 break;
3956 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3957 curp = attp;
3960 if (!curp)
3962 curp = attp;
3963 while (*curp)
3964 if ((*curp)->offset == 0
3965 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3966 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3967 break;
3968 else
3969 curp = &(*curp)->next;
3970 gcc_assert (*curp);
3973 if (!att)
3975 decl_or_value cdv;
3976 rtx cval;
3978 if (!*dfpm->permp)
3980 *dfpm->permp = XNEW (dataflow_set);
3981 dataflow_set_init (*dfpm->permp);
3984 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3985 att; att = att->next)
3986 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3988 gcc_assert (att->offset == 0
3989 && dv_is_value_p (att->dv));
3990 val_reset (set, att->dv);
3991 break;
3994 if (att)
3996 cdv = att->dv;
3997 cval = dv_as_value (cdv);
3999 else
4001 /* Create a unique value to hold this register,
4002 that ought to be found and reused in
4003 subsequent rounds. */
4004 cselib_val *v;
4005 gcc_assert (!cselib_lookup (node->loc,
4006 GET_MODE (node->loc), 0,
4007 VOIDmode));
4008 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4009 VOIDmode);
4010 cselib_preserve_value (v);
4011 cselib_invalidate_rtx (node->loc);
4012 cval = v->val_rtx;
4013 cdv = dv_from_value (cval);
4014 if (dump_file)
4015 fprintf (dump_file,
4016 "Created new value %u:%u for reg %i\n",
4017 v->uid, v->hash, REGNO (node->loc));
4020 var_reg_decl_set (*dfpm->permp, node->loc,
4021 VAR_INIT_STATUS_INITIALIZED,
4022 cdv, 0, NULL, INSERT);
4024 node->loc = cval;
4025 check_dupes = true;
4028 /* Remove attribute referring to the decl, which now
4029 uses the value for the register, already existing or
4030 to be added when we bring perm in. */
4031 att = *curp;
4032 *curp = att->next;
4033 pool_free (attrs_pool, att);
4037 if (check_dupes)
4038 remove_duplicate_values (var);
4041 return 1;
4044 /* Reset values in the permanent set that are not associated with the
4045 chosen expression. */
4047 static int
4048 variable_post_merge_perm_vals (void **pslot, void *info)
4050 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4051 dataflow_set *set = dfpm->set;
4052 variable pvar = (variable)*pslot, var;
4053 location_chain pnode;
4054 decl_or_value dv;
4055 attrs att;
4057 gcc_assert (dv_is_value_p (pvar->dv)
4058 && pvar->n_var_parts == 1);
4059 pnode = pvar->var_part[0].loc_chain;
4060 gcc_assert (pnode
4061 && !pnode->next
4062 && REG_P (pnode->loc));
4064 dv = pvar->dv;
4066 var = shared_hash_find (set->vars, dv);
4067 if (var)
4069 /* Although variable_post_merge_new_vals may have made decls
4070 non-star-canonical, values that pre-existed in canonical form
4071 remain canonical, and newly-created values reference a single
4072 REG, so they are canonical as well. Since VAR has the
4073 location list for a VALUE, using find_loc_in_1pdv for it is
4074 fine, since VALUEs don't map back to DECLs. */
4075 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4076 return 1;
4077 val_reset (set, dv);
4080 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4081 if (att->offset == 0
4082 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4083 && dv_is_value_p (att->dv))
4084 break;
4086 /* If there is a value associated with this register already, create
4087 an equivalence. */
4088 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4090 rtx cval = dv_as_value (att->dv);
4091 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4092 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4093 NULL, INSERT);
4095 else if (!att)
4097 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4098 dv, 0, pnode->loc);
4099 variable_union (pvar, set);
4102 return 1;
4105 /* Just checking stuff and registering register attributes for
4106 now. */
4108 static void
4109 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4111 struct dfset_post_merge dfpm;
4113 dfpm.set = set;
4114 dfpm.permp = permp;
4116 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4117 &dfpm);
4118 if (*permp)
4119 htab_traverse (shared_hash_htab ((*permp)->vars),
4120 variable_post_merge_perm_vals, &dfpm);
4121 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4122 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4125 /* Return a node whose loc is a MEM that refers to EXPR in the
4126 location list of a one-part variable or value VAR, or in that of
4127 any values recursively mentioned in the location lists. */
4129 static location_chain
4130 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4132 location_chain node;
4133 decl_or_value dv;
4134 variable var;
4135 location_chain where = NULL;
4137 if (!val)
4138 return NULL;
4140 gcc_assert (GET_CODE (val) == VALUE
4141 && !VALUE_RECURSED_INTO (val));
4143 dv = dv_from_value (val);
4144 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4146 if (!var)
4147 return NULL;
4149 gcc_assert (dv_onepart_p (var->dv));
4151 if (!var->n_var_parts)
4152 return NULL;
4154 gcc_assert (var->var_part[0].offset == 0);
4156 VALUE_RECURSED_INTO (val) = true;
4158 for (node = var->var_part[0].loc_chain; node; node = node->next)
4159 if (MEM_P (node->loc)
4160 && MEM_EXPR (node->loc) == expr
4161 && INT_MEM_OFFSET (node->loc) == 0)
4163 where = node;
4164 break;
4166 else if (GET_CODE (node->loc) == VALUE
4167 && !VALUE_RECURSED_INTO (node->loc)
4168 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4169 break;
4171 VALUE_RECURSED_INTO (val) = false;
4173 return where;
4176 /* Return TRUE if the value of MEM may vary across a call. */
4178 static bool
4179 mem_dies_at_call (rtx mem)
4181 tree expr = MEM_EXPR (mem);
4182 tree decl;
4184 if (!expr)
4185 return true;
4187 decl = get_base_address (expr);
4189 if (!decl)
4190 return true;
4192 if (!DECL_P (decl))
4193 return true;
4195 return (may_be_aliased (decl)
4196 || (!TREE_READONLY (decl) && is_global_var (decl)));
4199 /* Remove all MEMs from the location list of a hash table entry for a
4200 one-part variable, except those whose MEM attributes map back to
4201 the variable itself, directly or within a VALUE. */
4203 static int
4204 dataflow_set_preserve_mem_locs (void **slot, void *data)
4206 dataflow_set *set = (dataflow_set *) data;
4207 variable var = (variable) *slot;
4209 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4211 tree decl = dv_as_decl (var->dv);
4212 location_chain loc, *locp;
4213 bool changed = false;
4215 if (!var->n_var_parts)
4216 return 1;
4218 gcc_assert (var->n_var_parts == 1);
4220 if (shared_var_p (var, set->vars))
4222 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4224 /* We want to remove dying MEMs that doesn't refer to DECL. */
4225 if (GET_CODE (loc->loc) == MEM
4226 && (MEM_EXPR (loc->loc) != decl
4227 || INT_MEM_OFFSET (loc->loc) != 0)
4228 && !mem_dies_at_call (loc->loc))
4229 break;
4230 /* We want to move here MEMs that do refer to DECL. */
4231 else if (GET_CODE (loc->loc) == VALUE
4232 && find_mem_expr_in_1pdv (decl, loc->loc,
4233 shared_hash_htab (set->vars)))
4234 break;
4237 if (!loc)
4238 return 1;
4240 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4241 var = (variable)*slot;
4242 gcc_assert (var->n_var_parts == 1);
4245 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4246 loc; loc = *locp)
4248 rtx old_loc = loc->loc;
4249 if (GET_CODE (old_loc) == VALUE)
4251 location_chain mem_node
4252 = find_mem_expr_in_1pdv (decl, loc->loc,
4253 shared_hash_htab (set->vars));
4255 /* ??? This picks up only one out of multiple MEMs that
4256 refer to the same variable. Do we ever need to be
4257 concerned about dealing with more than one, or, given
4258 that they should all map to the same variable
4259 location, their addresses will have been merged and
4260 they will be regarded as equivalent? */
4261 if (mem_node)
4263 loc->loc = mem_node->loc;
4264 loc->set_src = mem_node->set_src;
4265 loc->init = MIN (loc->init, mem_node->init);
4269 if (GET_CODE (loc->loc) != MEM
4270 || (MEM_EXPR (loc->loc) == decl
4271 && INT_MEM_OFFSET (loc->loc) == 0)
4272 || !mem_dies_at_call (loc->loc))
4274 if (old_loc != loc->loc && emit_notes)
4276 if (old_loc == var->var_part[0].cur_loc)
4278 changed = true;
4279 var->var_part[0].cur_loc = NULL;
4280 var->cur_loc_changed = true;
4282 add_value_chains (var->dv, loc->loc);
4283 remove_value_chains (var->dv, old_loc);
4285 locp = &loc->next;
4286 continue;
4289 if (emit_notes)
4291 remove_value_chains (var->dv, old_loc);
4292 if (old_loc == var->var_part[0].cur_loc)
4294 changed = true;
4295 var->var_part[0].cur_loc = NULL;
4296 var->cur_loc_changed = true;
4299 *locp = loc->next;
4300 pool_free (loc_chain_pool, loc);
4303 if (!var->var_part[0].loc_chain)
4305 var->n_var_parts--;
4306 changed = true;
4308 if (changed)
4309 variable_was_changed (var, set);
4312 return 1;
4315 /* Remove all MEMs from the location list of a hash table entry for a
4316 value. */
4318 static int
4319 dataflow_set_remove_mem_locs (void **slot, void *data)
4321 dataflow_set *set = (dataflow_set *) data;
4322 variable var = (variable) *slot;
4324 if (dv_is_value_p (var->dv))
4326 location_chain loc, *locp;
4327 bool changed = false;
4329 gcc_assert (var->n_var_parts == 1);
4331 if (shared_var_p (var, set->vars))
4333 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4334 if (GET_CODE (loc->loc) == MEM
4335 && mem_dies_at_call (loc->loc))
4336 break;
4338 if (!loc)
4339 return 1;
4341 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4342 var = (variable)*slot;
4343 gcc_assert (var->n_var_parts == 1);
4346 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4347 loc; loc = *locp)
4349 if (GET_CODE (loc->loc) != MEM
4350 || !mem_dies_at_call (loc->loc))
4352 locp = &loc->next;
4353 continue;
4356 if (emit_notes)
4357 remove_value_chains (var->dv, loc->loc);
4358 *locp = loc->next;
4359 /* If we have deleted the location which was last emitted
4360 we have to emit new location so add the variable to set
4361 of changed variables. */
4362 if (var->var_part[0].cur_loc == loc->loc)
4364 changed = true;
4365 var->var_part[0].cur_loc = NULL;
4366 var->cur_loc_changed = true;
4368 pool_free (loc_chain_pool, loc);
4371 if (!var->var_part[0].loc_chain)
4373 var->n_var_parts--;
4374 changed = true;
4376 if (changed)
4377 variable_was_changed (var, set);
4380 return 1;
4383 /* Remove all variable-location information about call-clobbered
4384 registers, as well as associations between MEMs and VALUEs. */
4386 static void
4387 dataflow_set_clear_at_call (dataflow_set *set)
4389 int r;
4391 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4392 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4393 var_regno_delete (set, r);
4395 if (MAY_HAVE_DEBUG_INSNS)
4397 set->traversed_vars = set->vars;
4398 htab_traverse (shared_hash_htab (set->vars),
4399 dataflow_set_preserve_mem_locs, set);
4400 set->traversed_vars = set->vars;
4401 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4402 set);
4403 set->traversed_vars = NULL;
4407 static bool
4408 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4410 location_chain lc1, lc2;
4412 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4414 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4416 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4418 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4419 break;
4421 if (rtx_equal_p (lc1->loc, lc2->loc))
4422 break;
4424 if (!lc2)
4425 return true;
4427 return false;
4430 /* Return true if one-part variables VAR1 and VAR2 are different.
4431 They must be in canonical order. */
4433 static bool
4434 onepart_variable_different_p (variable var1, variable var2)
4436 location_chain lc1, lc2;
4438 if (var1 == var2)
4439 return false;
4441 gcc_assert (var1->n_var_parts == 1
4442 && var2->n_var_parts == 1);
4444 lc1 = var1->var_part[0].loc_chain;
4445 lc2 = var2->var_part[0].loc_chain;
4447 gcc_assert (lc1 && lc2);
4449 while (lc1 && lc2)
4451 if (loc_cmp (lc1->loc, lc2->loc))
4452 return true;
4453 lc1 = lc1->next;
4454 lc2 = lc2->next;
4457 return lc1 != lc2;
4460 /* Return true if variables VAR1 and VAR2 are different. */
4462 static bool
4463 variable_different_p (variable var1, variable var2)
4465 int i;
4467 if (var1 == var2)
4468 return false;
4470 if (var1->n_var_parts != var2->n_var_parts)
4471 return true;
4473 for (i = 0; i < var1->n_var_parts; i++)
4475 if (var1->var_part[i].offset != var2->var_part[i].offset)
4476 return true;
4477 /* One-part values have locations in a canonical order. */
4478 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4480 gcc_assert (var1->n_var_parts == 1
4481 && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4482 return onepart_variable_different_p (var1, var2);
4484 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4485 return true;
4486 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4487 return true;
4489 return false;
4492 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4494 static bool
4495 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4497 htab_iterator hi;
4498 variable var1;
4500 if (old_set->vars == new_set->vars)
4501 return false;
4503 if (htab_elements (shared_hash_htab (old_set->vars))
4504 != htab_elements (shared_hash_htab (new_set->vars)))
4505 return true;
4507 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4509 htab_t htab = shared_hash_htab (new_set->vars);
4510 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4511 dv_htab_hash (var1->dv));
4512 if (!var2)
4514 if (dump_file && (dump_flags & TDF_DETAILS))
4516 fprintf (dump_file, "dataflow difference found: removal of:\n");
4517 dump_var (var1);
4519 return true;
4522 if (variable_different_p (var1, var2))
4524 if (dump_file && (dump_flags & TDF_DETAILS))
4526 fprintf (dump_file, "dataflow difference found: "
4527 "old and new follow:\n");
4528 dump_var (var1);
4529 dump_var (var2);
4531 return true;
4535 /* No need to traverse the second hashtab, if both have the same number
4536 of elements and the second one had all entries found in the first one,
4537 then it can't have any extra entries. */
4538 return false;
4541 /* Free the contents of dataflow set SET. */
4543 static void
4544 dataflow_set_destroy (dataflow_set *set)
4546 int i;
4548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4549 attrs_list_clear (&set->regs[i]);
4551 shared_hash_destroy (set->vars);
4552 set->vars = NULL;
4555 /* Return true if RTL X contains a SYMBOL_REF. */
4557 static bool
4558 contains_symbol_ref (rtx x)
4560 const char *fmt;
4561 RTX_CODE code;
4562 int i;
4564 if (!x)
4565 return false;
4567 code = GET_CODE (x);
4568 if (code == SYMBOL_REF)
4569 return true;
4571 fmt = GET_RTX_FORMAT (code);
4572 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4574 if (fmt[i] == 'e')
4576 if (contains_symbol_ref (XEXP (x, i)))
4577 return true;
4579 else if (fmt[i] == 'E')
4581 int j;
4582 for (j = 0; j < XVECLEN (x, i); j++)
4583 if (contains_symbol_ref (XVECEXP (x, i, j)))
4584 return true;
4588 return false;
4591 /* Shall EXPR be tracked? */
4593 static bool
4594 track_expr_p (tree expr, bool need_rtl)
4596 rtx decl_rtl;
4597 tree realdecl;
4599 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4600 return DECL_RTL_SET_P (expr);
4602 /* If EXPR is not a parameter or a variable do not track it. */
4603 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4604 return 0;
4606 /* It also must have a name... */
4607 if (!DECL_NAME (expr) && need_rtl)
4608 return 0;
4610 /* ... and a RTL assigned to it. */
4611 decl_rtl = DECL_RTL_IF_SET (expr);
4612 if (!decl_rtl && need_rtl)
4613 return 0;
4615 /* If this expression is really a debug alias of some other declaration, we
4616 don't need to track this expression if the ultimate declaration is
4617 ignored. */
4618 realdecl = expr;
4619 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4621 realdecl = DECL_DEBUG_EXPR (realdecl);
4622 if (realdecl == NULL_TREE)
4623 realdecl = expr;
4624 else if (!DECL_P (realdecl))
4626 if (handled_component_p (realdecl))
4628 HOST_WIDE_INT bitsize, bitpos, maxsize;
4629 tree innerdecl
4630 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4631 &maxsize);
4632 if (!DECL_P (innerdecl)
4633 || DECL_IGNORED_P (innerdecl)
4634 || TREE_STATIC (innerdecl)
4635 || bitsize <= 0
4636 || bitpos + bitsize > 256
4637 || bitsize != maxsize)
4638 return 0;
4639 else
4640 realdecl = expr;
4642 else
4643 return 0;
4647 /* Do not track EXPR if REALDECL it should be ignored for debugging
4648 purposes. */
4649 if (DECL_IGNORED_P (realdecl))
4650 return 0;
4652 /* Do not track global variables until we are able to emit correct location
4653 list for them. */
4654 if (TREE_STATIC (realdecl))
4655 return 0;
4657 /* When the EXPR is a DECL for alias of some variable (see example)
4658 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4659 DECL_RTL contains SYMBOL_REF.
4661 Example:
4662 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4663 char **_dl_argv;
4665 if (decl_rtl && MEM_P (decl_rtl)
4666 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4667 return 0;
4669 /* If RTX is a memory it should not be very large (because it would be
4670 an array or struct). */
4671 if (decl_rtl && MEM_P (decl_rtl))
4673 /* Do not track structures and arrays. */
4674 if (GET_MODE (decl_rtl) == BLKmode
4675 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4676 return 0;
4677 if (MEM_SIZE_KNOWN_P (decl_rtl)
4678 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4679 return 0;
4682 DECL_CHANGED (expr) = 0;
4683 DECL_CHANGED (realdecl) = 0;
4684 return 1;
4687 /* Determine whether a given LOC refers to the same variable part as
4688 EXPR+OFFSET. */
4690 static bool
4691 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4693 tree expr2;
4694 HOST_WIDE_INT offset2;
4696 if (! DECL_P (expr))
4697 return false;
4699 if (REG_P (loc))
4701 expr2 = REG_EXPR (loc);
4702 offset2 = REG_OFFSET (loc);
4704 else if (MEM_P (loc))
4706 expr2 = MEM_EXPR (loc);
4707 offset2 = INT_MEM_OFFSET (loc);
4709 else
4710 return false;
4712 if (! expr2 || ! DECL_P (expr2))
4713 return false;
4715 expr = var_debug_decl (expr);
4716 expr2 = var_debug_decl (expr2);
4718 return (expr == expr2 && offset == offset2);
4721 /* LOC is a REG or MEM that we would like to track if possible.
4722 If EXPR is null, we don't know what expression LOC refers to,
4723 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4724 LOC is an lvalue register.
4726 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4727 is something we can track. When returning true, store the mode of
4728 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4729 from EXPR in *OFFSET_OUT (if nonnull). */
4731 static bool
4732 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4733 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4735 enum machine_mode mode;
4737 if (expr == NULL || !track_expr_p (expr, true))
4738 return false;
4740 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4741 whole subreg, but only the old inner part is really relevant. */
4742 mode = GET_MODE (loc);
4743 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4745 enum machine_mode pseudo_mode;
4747 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4748 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4750 offset += byte_lowpart_offset (pseudo_mode, mode);
4751 mode = pseudo_mode;
4755 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4756 Do the same if we are storing to a register and EXPR occupies
4757 the whole of register LOC; in that case, the whole of EXPR is
4758 being changed. We exclude complex modes from the second case
4759 because the real and imaginary parts are represented as separate
4760 pseudo registers, even if the whole complex value fits into one
4761 hard register. */
4762 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4763 || (store_reg_p
4764 && !COMPLEX_MODE_P (DECL_MODE (expr))
4765 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4766 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4768 mode = DECL_MODE (expr);
4769 offset = 0;
4772 if (offset < 0 || offset >= MAX_VAR_PARTS)
4773 return false;
4775 if (mode_out)
4776 *mode_out = mode;
4777 if (offset_out)
4778 *offset_out = offset;
4779 return true;
4782 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4783 want to track. When returning nonnull, make sure that the attributes
4784 on the returned value are updated. */
4786 static rtx
4787 var_lowpart (enum machine_mode mode, rtx loc)
4789 unsigned int offset, reg_offset, regno;
4791 if (!REG_P (loc) && !MEM_P (loc))
4792 return NULL;
4794 if (GET_MODE (loc) == mode)
4795 return loc;
4797 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4799 if (MEM_P (loc))
4800 return adjust_address_nv (loc, mode, offset);
4802 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4803 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4804 reg_offset, mode);
4805 return gen_rtx_REG_offset (loc, mode, regno, offset);
4808 /* Carry information about uses and stores while walking rtx. */
4810 struct count_use_info
4812 /* The insn where the RTX is. */
4813 rtx insn;
4815 /* The basic block where insn is. */
4816 basic_block bb;
4818 /* The array of n_sets sets in the insn, as determined by cselib. */
4819 struct cselib_set *sets;
4820 int n_sets;
4822 /* True if we're counting stores, false otherwise. */
4823 bool store_p;
4826 /* Find a VALUE corresponding to X. */
4828 static inline cselib_val *
4829 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4831 int i;
4833 if (cui->sets)
4835 /* This is called after uses are set up and before stores are
4836 processed by cselib, so it's safe to look up srcs, but not
4837 dsts. So we look up expressions that appear in srcs or in
4838 dest expressions, but we search the sets array for dests of
4839 stores. */
4840 if (cui->store_p)
4842 /* Some targets represent memset and memcpy patterns
4843 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4844 (set (mem:BLK ...) (const_int ...)) or
4845 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4846 in that case, otherwise we end up with mode mismatches. */
4847 if (mode == BLKmode && MEM_P (x))
4848 return NULL;
4849 for (i = 0; i < cui->n_sets; i++)
4850 if (cui->sets[i].dest == x)
4851 return cui->sets[i].src_elt;
4853 else
4854 return cselib_lookup (x, mode, 0, VOIDmode);
4857 return NULL;
4860 /* Helper function to get mode of MEM's address. */
4862 static inline enum machine_mode
4863 get_address_mode (rtx mem)
4865 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4866 if (mode != VOIDmode)
4867 return mode;
4868 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4871 /* Replace all registers and addresses in an expression with VALUE
4872 expressions that map back to them, unless the expression is a
4873 register. If no mapping is or can be performed, returns NULL. */
4875 static rtx
4876 replace_expr_with_values (rtx loc)
4878 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
4879 return NULL;
4880 else if (MEM_P (loc))
4882 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4883 get_address_mode (loc), 0,
4884 GET_MODE (loc));
4885 if (addr)
4886 return replace_equiv_address_nv (loc, addr->val_rtx);
4887 else
4888 return NULL;
4890 else
4891 return cselib_subst_to_values (loc, VOIDmode);
4894 /* Determine what kind of micro operation to choose for a USE. Return
4895 MO_CLOBBER if no micro operation is to be generated. */
4897 static enum micro_operation_type
4898 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4900 tree expr;
4902 if (cui && cui->sets)
4904 if (GET_CODE (loc) == VAR_LOCATION)
4906 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4908 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4909 if (! VAR_LOC_UNKNOWN_P (ploc))
4911 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
4912 VOIDmode);
4914 /* ??? flag_float_store and volatile mems are never
4915 given values, but we could in theory use them for
4916 locations. */
4917 gcc_assert (val || 1);
4919 return MO_VAL_LOC;
4921 else
4922 return MO_CLOBBER;
4925 if (REG_P (loc) || MEM_P (loc))
4927 if (modep)
4928 *modep = GET_MODE (loc);
4929 if (cui->store_p)
4931 if (REG_P (loc)
4932 || (find_use_val (loc, GET_MODE (loc), cui)
4933 && cselib_lookup (XEXP (loc, 0),
4934 get_address_mode (loc), 0,
4935 GET_MODE (loc))))
4936 return MO_VAL_SET;
4938 else
4940 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4942 if (val && !cselib_preserved_value_p (val))
4943 return MO_VAL_USE;
4948 if (REG_P (loc))
4950 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4952 if (loc == cfa_base_rtx)
4953 return MO_CLOBBER;
4954 expr = REG_EXPR (loc);
4956 if (!expr)
4957 return MO_USE_NO_VAR;
4958 else if (target_for_debug_bind (var_debug_decl (expr)))
4959 return MO_CLOBBER;
4960 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4961 false, modep, NULL))
4962 return MO_USE;
4963 else
4964 return MO_USE_NO_VAR;
4966 else if (MEM_P (loc))
4968 expr = MEM_EXPR (loc);
4970 if (!expr)
4971 return MO_CLOBBER;
4972 else if (target_for_debug_bind (var_debug_decl (expr)))
4973 return MO_CLOBBER;
4974 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4975 false, modep, NULL))
4976 return MO_USE;
4977 else
4978 return MO_CLOBBER;
4981 return MO_CLOBBER;
4984 /* Log to OUT information about micro-operation MOPT involving X in
4985 INSN of BB. */
4987 static inline void
4988 log_op_type (rtx x, basic_block bb, rtx insn,
4989 enum micro_operation_type mopt, FILE *out)
4991 fprintf (out, "bb %i op %i insn %i %s ",
4992 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4993 INSN_UID (insn), micro_operation_type_name[mopt]);
4994 print_inline_rtx (out, x, 2);
4995 fputc ('\n', out);
4998 /* Tell whether the CONCAT used to holds a VALUE and its location
4999 needs value resolution, i.e., an attempt of mapping the location
5000 back to other incoming values. */
5001 #define VAL_NEEDS_RESOLUTION(x) \
5002 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5003 /* Whether the location in the CONCAT is a tracked expression, that
5004 should also be handled like a MO_USE. */
5005 #define VAL_HOLDS_TRACK_EXPR(x) \
5006 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5007 /* Whether the location in the CONCAT should be handled like a MO_COPY
5008 as well. */
5009 #define VAL_EXPR_IS_COPIED(x) \
5010 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5011 /* Whether the location in the CONCAT should be handled like a
5012 MO_CLOBBER as well. */
5013 #define VAL_EXPR_IS_CLOBBERED(x) \
5014 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5015 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
5016 a reverse operation that should be handled afterwards. */
5017 #define VAL_EXPR_HAS_REVERSE(x) \
5018 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
5020 /* All preserved VALUEs. */
5021 static VEC (rtx, heap) *preserved_values;
5023 /* Registers used in the current function for passing parameters. */
5024 static HARD_REG_SET argument_reg_set;
5026 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5028 static void
5029 preserve_value (cselib_val *val)
5031 cselib_preserve_value (val);
5032 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5035 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5036 any rtxes not suitable for CONST use not replaced by VALUEs
5037 are discovered. */
5039 static int
5040 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5042 if (*x == NULL_RTX)
5043 return 0;
5045 switch (GET_CODE (*x))
5047 case REG:
5048 case DEBUG_EXPR:
5049 case PC:
5050 case SCRATCH:
5051 case CC0:
5052 case ASM_INPUT:
5053 case ASM_OPERANDS:
5054 return 1;
5055 case MEM:
5056 return !MEM_READONLY_P (*x);
5057 default:
5058 return 0;
5062 /* Add uses (register and memory references) LOC which will be tracked
5063 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5065 static int
5066 add_uses (rtx *ploc, void *data)
5068 rtx loc = *ploc;
5069 enum machine_mode mode = VOIDmode;
5070 struct count_use_info *cui = (struct count_use_info *)data;
5071 enum micro_operation_type type = use_type (loc, cui, &mode);
5073 if (type != MO_CLOBBER)
5075 basic_block bb = cui->bb;
5076 micro_operation mo;
5078 mo.type = type;
5079 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5080 mo.insn = cui->insn;
5082 if (type == MO_VAL_LOC)
5084 rtx oloc = loc;
5085 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5086 cselib_val *val;
5088 gcc_assert (cui->sets);
5090 if (MEM_P (vloc)
5091 && !REG_P (XEXP (vloc, 0))
5092 && !MEM_P (XEXP (vloc, 0))
5093 && GET_CODE (XEXP (vloc, 0)) != ENTRY_VALUE
5094 && (GET_CODE (XEXP (vloc, 0)) != PLUS
5095 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
5096 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
5098 rtx mloc = vloc;
5099 enum machine_mode address_mode = get_address_mode (mloc);
5100 cselib_val *val
5101 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5102 GET_MODE (mloc));
5104 if (val && !cselib_preserved_value_p (val))
5106 micro_operation moa;
5107 preserve_value (val);
5108 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5109 GET_MODE (mloc));
5110 moa.type = MO_VAL_USE;
5111 moa.insn = cui->insn;
5112 moa.u.loc = gen_rtx_CONCAT (address_mode,
5113 val->val_rtx, mloc);
5114 if (dump_file && (dump_flags & TDF_DETAILS))
5115 log_op_type (moa.u.loc, cui->bb, cui->insn,
5116 moa.type, dump_file);
5117 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5121 if (CONSTANT_P (vloc)
5122 && (GET_CODE (vloc) != CONST
5123 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5124 /* For constants don't look up any value. */;
5125 else if (!VAR_LOC_UNKNOWN_P (vloc)
5126 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5128 enum machine_mode mode2;
5129 enum micro_operation_type type2;
5130 rtx nloc = replace_expr_with_values (vloc);
5132 if (nloc)
5134 oloc = shallow_copy_rtx (oloc);
5135 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5138 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5140 type2 = use_type (vloc, 0, &mode2);
5142 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5143 || type2 == MO_CLOBBER);
5145 if (type2 == MO_CLOBBER
5146 && !cselib_preserved_value_p (val))
5148 VAL_NEEDS_RESOLUTION (oloc) = 1;
5149 preserve_value (val);
5152 else if (!VAR_LOC_UNKNOWN_P (vloc))
5154 oloc = shallow_copy_rtx (oloc);
5155 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5158 mo.u.loc = oloc;
5160 else if (type == MO_VAL_USE)
5162 enum machine_mode mode2 = VOIDmode;
5163 enum micro_operation_type type2;
5164 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5165 rtx vloc, oloc = loc, nloc;
5167 gcc_assert (cui->sets);
5169 if (MEM_P (oloc)
5170 && !REG_P (XEXP (oloc, 0))
5171 && !MEM_P (XEXP (oloc, 0))
5172 && GET_CODE (XEXP (oloc, 0)) != ENTRY_VALUE
5173 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5174 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5175 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5177 rtx mloc = oloc;
5178 enum machine_mode address_mode = get_address_mode (mloc);
5179 cselib_val *val
5180 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5181 GET_MODE (mloc));
5183 if (val && !cselib_preserved_value_p (val))
5185 micro_operation moa;
5186 preserve_value (val);
5187 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5188 GET_MODE (mloc));
5189 moa.type = MO_VAL_USE;
5190 moa.insn = cui->insn;
5191 moa.u.loc = gen_rtx_CONCAT (address_mode,
5192 val->val_rtx, mloc);
5193 if (dump_file && (dump_flags & TDF_DETAILS))
5194 log_op_type (moa.u.loc, cui->bb, cui->insn,
5195 moa.type, dump_file);
5196 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5200 type2 = use_type (loc, 0, &mode2);
5202 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5203 || type2 == MO_CLOBBER);
5205 if (type2 == MO_USE)
5206 vloc = var_lowpart (mode2, loc);
5207 else
5208 vloc = oloc;
5210 /* The loc of a MO_VAL_USE may have two forms:
5212 (concat val src): val is at src, a value-based
5213 representation.
5215 (concat (concat val use) src): same as above, with use as
5216 the MO_USE tracked value, if it differs from src.
5220 nloc = replace_expr_with_values (loc);
5221 if (!nloc)
5222 nloc = oloc;
5224 if (vloc != nloc)
5225 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5226 else
5227 oloc = val->val_rtx;
5229 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5231 if (type2 == MO_USE)
5232 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5233 if (!cselib_preserved_value_p (val))
5235 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5236 preserve_value (val);
5239 else
5240 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5242 if (dump_file && (dump_flags & TDF_DETAILS))
5243 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5244 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5247 return 0;
5250 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5252 static void
5253 add_uses_1 (rtx *x, void *cui)
5255 for_each_rtx (x, add_uses, cui);
5258 #define EXPR_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5260 /* Attempt to reverse the EXPR operation in the debug info. Say for
5261 reg1 = reg2 + 6 even when reg2 is no longer live we
5262 can express its value as VAL - 6. */
5264 static rtx
5265 reverse_op (rtx val, const_rtx expr)
5267 rtx src, arg, ret;
5268 cselib_val *v;
5269 enum rtx_code code;
5271 if (GET_CODE (expr) != SET)
5272 return NULL_RTX;
5274 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5275 return NULL_RTX;
5277 src = SET_SRC (expr);
5278 switch (GET_CODE (src))
5280 case PLUS:
5281 case MINUS:
5282 case XOR:
5283 case NOT:
5284 case NEG:
5285 if (!REG_P (XEXP (src, 0)))
5286 return NULL_RTX;
5287 break;
5288 case SIGN_EXTEND:
5289 case ZERO_EXTEND:
5290 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5291 return NULL_RTX;
5292 break;
5293 default:
5294 return NULL_RTX;
5297 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5298 return NULL_RTX;
5300 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5301 if (!v || !cselib_preserved_value_p (v))
5302 return NULL_RTX;
5304 switch (GET_CODE (src))
5306 case NOT:
5307 case NEG:
5308 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5309 return NULL_RTX;
5310 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5311 break;
5312 case SIGN_EXTEND:
5313 case ZERO_EXTEND:
5314 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5315 break;
5316 case XOR:
5317 code = XOR;
5318 goto binary;
5319 case PLUS:
5320 code = MINUS;
5321 goto binary;
5322 case MINUS:
5323 code = PLUS;
5324 goto binary;
5325 binary:
5326 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5327 return NULL_RTX;
5328 arg = XEXP (src, 1);
5329 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5331 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5332 if (arg == NULL_RTX)
5333 return NULL_RTX;
5334 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5335 return NULL_RTX;
5337 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5338 if (ret == val)
5339 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5340 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5341 breaks a lot of routines during var-tracking. */
5342 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5343 break;
5344 default:
5345 gcc_unreachable ();
5348 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5351 /* Add stores (register and memory references) LOC which will be tracked
5352 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5353 CUIP->insn is instruction which the LOC is part of. */
5355 static void
5356 add_stores (rtx loc, const_rtx expr, void *cuip)
5358 enum machine_mode mode = VOIDmode, mode2;
5359 struct count_use_info *cui = (struct count_use_info *)cuip;
5360 basic_block bb = cui->bb;
5361 micro_operation mo;
5362 rtx oloc = loc, nloc, src = NULL;
5363 enum micro_operation_type type = use_type (loc, cui, &mode);
5364 bool track_p = false;
5365 cselib_val *v;
5366 bool resolve, preserve;
5367 rtx reverse;
5369 if (type == MO_CLOBBER)
5370 return;
5372 mode2 = mode;
5374 if (REG_P (loc))
5376 gcc_assert (loc != cfa_base_rtx);
5377 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5378 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5379 || GET_CODE (expr) == CLOBBER)
5381 mo.type = MO_CLOBBER;
5382 mo.u.loc = loc;
5383 if (GET_CODE (expr) == SET
5384 && SET_DEST (expr) == loc
5385 && REGNO (loc) < FIRST_PSEUDO_REGISTER
5386 && TEST_HARD_REG_BIT (argument_reg_set, REGNO (loc))
5387 && find_use_val (loc, mode, cui)
5388 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5390 gcc_checking_assert (type == MO_VAL_SET);
5391 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5394 else
5396 if (GET_CODE (expr) == SET
5397 && SET_DEST (expr) == loc
5398 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5399 src = var_lowpart (mode2, SET_SRC (expr));
5400 loc = var_lowpart (mode2, loc);
5402 if (src == NULL)
5404 mo.type = MO_SET;
5405 mo.u.loc = loc;
5407 else
5409 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5410 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5411 mo.type = MO_COPY;
5412 else
5413 mo.type = MO_SET;
5414 mo.u.loc = xexpr;
5417 mo.insn = cui->insn;
5419 else if (MEM_P (loc)
5420 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5421 || cui->sets))
5423 if (MEM_P (loc) && type == MO_VAL_SET
5424 && !REG_P (XEXP (loc, 0))
5425 && !MEM_P (XEXP (loc, 0))
5426 && GET_CODE (XEXP (loc, 0)) != ENTRY_VALUE
5427 && (GET_CODE (XEXP (loc, 0)) != PLUS
5428 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5429 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5431 rtx mloc = loc;
5432 enum machine_mode address_mode = get_address_mode (mloc);
5433 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5434 address_mode, 0,
5435 GET_MODE (mloc));
5437 if (val && !cselib_preserved_value_p (val))
5439 preserve_value (val);
5440 mo.type = MO_VAL_USE;
5441 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5442 GET_MODE (mloc));
5443 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5444 mo.insn = cui->insn;
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5446 log_op_type (mo.u.loc, cui->bb, cui->insn,
5447 mo.type, dump_file);
5448 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5452 if (GET_CODE (expr) == CLOBBER || !track_p)
5454 mo.type = MO_CLOBBER;
5455 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5457 else
5459 if (GET_CODE (expr) == SET
5460 && SET_DEST (expr) == loc
5461 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5462 src = var_lowpart (mode2, SET_SRC (expr));
5463 loc = var_lowpart (mode2, loc);
5465 if (src == NULL)
5467 mo.type = MO_SET;
5468 mo.u.loc = loc;
5470 else
5472 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5473 if (same_variable_part_p (SET_SRC (xexpr),
5474 MEM_EXPR (loc),
5475 INT_MEM_OFFSET (loc)))
5476 mo.type = MO_COPY;
5477 else
5478 mo.type = MO_SET;
5479 mo.u.loc = xexpr;
5482 mo.insn = cui->insn;
5484 else
5485 return;
5487 if (type != MO_VAL_SET)
5488 goto log_and_return;
5490 v = find_use_val (oloc, mode, cui);
5492 if (!v)
5493 goto log_and_return;
5495 resolve = preserve = !cselib_preserved_value_p (v);
5497 nloc = replace_expr_with_values (oloc);
5498 if (nloc)
5499 oloc = nloc;
5501 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5503 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5505 gcc_assert (oval != v);
5506 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5508 if (!cselib_preserved_value_p (oval))
5510 micro_operation moa;
5512 preserve_value (oval);
5514 moa.type = MO_VAL_USE;
5515 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5516 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5517 moa.insn = cui->insn;
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5520 log_op_type (moa.u.loc, cui->bb, cui->insn,
5521 moa.type, dump_file);
5522 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5525 resolve = false;
5527 else if (resolve && GET_CODE (mo.u.loc) == SET)
5529 nloc = replace_expr_with_values (SET_SRC (expr));
5531 /* Avoid the mode mismatch between oexpr and expr. */
5532 if (!nloc && mode != mode2)
5534 nloc = SET_SRC (expr);
5535 gcc_assert (oloc == SET_DEST (expr));
5538 if (nloc)
5539 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5540 else
5542 if (oloc == SET_DEST (mo.u.loc))
5543 /* No point in duplicating. */
5544 oloc = mo.u.loc;
5545 if (!REG_P (SET_SRC (mo.u.loc)))
5546 resolve = false;
5549 else if (!resolve)
5551 if (GET_CODE (mo.u.loc) == SET
5552 && oloc == SET_DEST (mo.u.loc))
5553 /* No point in duplicating. */
5554 oloc = mo.u.loc;
5556 else
5557 resolve = false;
5559 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5561 if (mo.u.loc != oloc)
5562 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5564 /* The loc of a MO_VAL_SET may have various forms:
5566 (concat val dst): dst now holds val
5568 (concat val (set dst src)): dst now holds val, copied from src
5570 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5571 after replacing mems and non-top-level regs with values.
5573 (concat (concat val dstv) (set dst src)): dst now holds val,
5574 copied from src. dstv is a value-based representation of dst, if
5575 it differs from dst. If resolution is needed, src is a REG, and
5576 its mode is the same as that of val.
5578 (concat (concat val (set dstv srcv)) (set dst src)): src
5579 copied to dst, holding val. dstv and srcv are value-based
5580 representations of dst and src, respectively.
5584 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5586 reverse = reverse_op (v->val_rtx, expr);
5587 if (reverse)
5589 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5590 VAL_EXPR_HAS_REVERSE (loc) = 1;
5594 mo.u.loc = loc;
5596 if (track_p)
5597 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5598 if (preserve)
5600 VAL_NEEDS_RESOLUTION (loc) = resolve;
5601 preserve_value (v);
5603 if (mo.type == MO_CLOBBER)
5604 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5605 if (mo.type == MO_COPY)
5606 VAL_EXPR_IS_COPIED (loc) = 1;
5608 mo.type = MO_VAL_SET;
5610 log_and_return:
5611 if (dump_file && (dump_flags & TDF_DETAILS))
5612 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5613 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5616 /* Arguments to the call. */
5617 static rtx call_arguments;
5619 /* Compute call_arguments. */
5621 static void
5622 prepare_call_arguments (basic_block bb, rtx insn)
5624 rtx link, x;
5625 rtx prev, cur, next;
5626 rtx call = PATTERN (insn);
5627 rtx this_arg = NULL_RTX;
5628 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5629 tree obj_type_ref = NULL_TREE;
5630 CUMULATIVE_ARGS args_so_far_v;
5631 cumulative_args_t args_so_far;
5633 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5634 args_so_far = pack_cumulative_args (&args_so_far_v);
5635 if (GET_CODE (call) == PARALLEL)
5636 call = XVECEXP (call, 0, 0);
5637 if (GET_CODE (call) == SET)
5638 call = SET_SRC (call);
5639 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5641 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5643 rtx symbol = XEXP (XEXP (call, 0), 0);
5644 if (SYMBOL_REF_DECL (symbol))
5645 fndecl = SYMBOL_REF_DECL (symbol);
5647 if (fndecl == NULL_TREE)
5648 fndecl = MEM_EXPR (XEXP (call, 0));
5649 if (fndecl
5650 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5651 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5652 fndecl = NULL_TREE;
5653 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5654 type = TREE_TYPE (fndecl);
5655 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5657 if (TREE_CODE (fndecl) == INDIRECT_REF
5658 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5659 obj_type_ref = TREE_OPERAND (fndecl, 0);
5660 fndecl = NULL_TREE;
5662 if (type)
5664 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5665 t = TREE_CHAIN (t))
5666 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5667 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5668 break;
5669 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5670 type = NULL;
5671 else
5673 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5674 link = CALL_INSN_FUNCTION_USAGE (insn);
5675 #ifndef PCC_STATIC_STRUCT_RETURN
5676 if (aggregate_value_p (TREE_TYPE (type), type)
5677 && targetm.calls.struct_value_rtx (type, 0) == 0)
5679 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5680 enum machine_mode mode = TYPE_MODE (struct_addr);
5681 rtx reg;
5682 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5683 nargs + 1);
5684 reg = targetm.calls.function_arg (args_so_far, mode,
5685 struct_addr, true);
5686 targetm.calls.function_arg_advance (args_so_far, mode,
5687 struct_addr, true);
5688 if (reg == NULL_RTX)
5690 for (; link; link = XEXP (link, 1))
5691 if (GET_CODE (XEXP (link, 0)) == USE
5692 && MEM_P (XEXP (XEXP (link, 0), 0)))
5694 link = XEXP (link, 1);
5695 break;
5699 else
5700 #endif
5701 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5702 nargs);
5703 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5705 enum machine_mode mode;
5706 t = TYPE_ARG_TYPES (type);
5707 mode = TYPE_MODE (TREE_VALUE (t));
5708 this_arg = targetm.calls.function_arg (args_so_far, mode,
5709 TREE_VALUE (t), true);
5710 if (this_arg && !REG_P (this_arg))
5711 this_arg = NULL_RTX;
5712 else if (this_arg == NULL_RTX)
5714 for (; link; link = XEXP (link, 1))
5715 if (GET_CODE (XEXP (link, 0)) == USE
5716 && MEM_P (XEXP (XEXP (link, 0), 0)))
5718 this_arg = XEXP (XEXP (link, 0), 0);
5719 break;
5726 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5728 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5729 if (GET_CODE (XEXP (link, 0)) == USE)
5731 rtx item = NULL_RTX;
5732 x = XEXP (XEXP (link, 0), 0);
5733 if (REG_P (x))
5735 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5736 if (val && cselib_preserved_value_p (val))
5737 item = gen_rtx_CONCAT (GET_MODE (x), x, val->val_rtx);
5738 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5740 enum machine_mode mode = GET_MODE (x);
5742 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5743 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5745 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5747 if (reg == NULL_RTX || !REG_P (reg))
5748 continue;
5749 val = cselib_lookup (reg, mode, 0, VOIDmode);
5750 if (val && cselib_preserved_value_p (val))
5752 item = gen_rtx_CONCAT (GET_MODE (x), x,
5753 lowpart_subreg (GET_MODE (x),
5754 val->val_rtx,
5755 mode));
5756 break;
5761 else if (MEM_P (x))
5763 rtx mem = x;
5764 cselib_val *val;
5766 if (!frame_pointer_needed)
5768 struct adjust_mem_data amd;
5769 amd.mem_mode = VOIDmode;
5770 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5771 amd.side_effects = NULL_RTX;
5772 amd.store = true;
5773 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5774 &amd);
5775 gcc_assert (amd.side_effects == NULL_RTX);
5777 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5778 if (val && cselib_preserved_value_p (val))
5779 item = gen_rtx_CONCAT (GET_MODE (x), copy_rtx (x), val->val_rtx);
5780 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
5782 /* For non-integer stack argument see also if they weren't
5783 initialized by integers. */
5784 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
5785 if (imode != GET_MODE (mem) && imode != BLKmode)
5787 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
5788 imode, 0, VOIDmode);
5789 if (val && cselib_preserved_value_p (val))
5790 item = gen_rtx_CONCAT (GET_MODE (x), copy_rtx (x),
5791 lowpart_subreg (GET_MODE (x),
5792 val->val_rtx,
5793 imode));
5797 if (item)
5798 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5799 if (t && t != void_list_node)
5801 tree argtype = TREE_VALUE (t);
5802 enum machine_mode mode = TYPE_MODE (argtype);
5803 rtx reg;
5804 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5806 argtype = build_pointer_type (argtype);
5807 mode = TYPE_MODE (argtype);
5809 reg = targetm.calls.function_arg (args_so_far, mode,
5810 argtype, true);
5811 if (TREE_CODE (argtype) == REFERENCE_TYPE
5812 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5813 && reg
5814 && REG_P (reg)
5815 && GET_MODE (reg) == mode
5816 && GET_MODE_CLASS (mode) == MODE_INT
5817 && REG_P (x)
5818 && REGNO (x) == REGNO (reg)
5819 && GET_MODE (x) == mode
5820 && item)
5822 enum machine_mode indmode
5823 = TYPE_MODE (TREE_TYPE (argtype));
5824 rtx mem = gen_rtx_MEM (indmode, x);
5825 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5826 if (val && cselib_preserved_value_p (val))
5828 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5829 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5830 call_arguments);
5832 else
5834 struct elt_loc_list *l;
5835 tree initial;
5837 /* Try harder, when passing address of a constant
5838 pool integer it can be easily read back. */
5839 item = XEXP (item, 1);
5840 if (GET_CODE (item) == SUBREG)
5841 item = SUBREG_REG (item);
5842 gcc_assert (GET_CODE (item) == VALUE);
5843 val = CSELIB_VAL_PTR (item);
5844 for (l = val->locs; l; l = l->next)
5845 if (GET_CODE (l->loc) == SYMBOL_REF
5846 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5847 && SYMBOL_REF_DECL (l->loc)
5848 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5850 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
5851 if (host_integerp (initial, 0))
5853 item = GEN_INT (tree_low_cst (initial, 0));
5854 item = gen_rtx_CONCAT (indmode, mem, item);
5855 call_arguments
5856 = gen_rtx_EXPR_LIST (VOIDmode, item,
5857 call_arguments);
5859 break;
5863 targetm.calls.function_arg_advance (args_so_far, mode,
5864 argtype, true);
5865 t = TREE_CHAIN (t);
5869 /* Add debug arguments. */
5870 if (fndecl
5871 && TREE_CODE (fndecl) == FUNCTION_DECL
5872 && DECL_HAS_DEBUG_ARGS_P (fndecl))
5874 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
5875 if (debug_args)
5877 unsigned int ix;
5878 tree param;
5879 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
5881 rtx item;
5882 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
5883 enum machine_mode mode = DECL_MODE (dtemp);
5884 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
5885 item = gen_rtx_CONCAT (mode, item, DECL_RTL (dtemp));
5886 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5887 call_arguments);
5892 /* Reverse call_arguments chain. */
5893 prev = NULL_RTX;
5894 for (cur = call_arguments; cur; cur = next)
5896 next = XEXP (cur, 1);
5897 XEXP (cur, 1) = prev;
5898 prev = cur;
5900 call_arguments = prev;
5902 x = PATTERN (insn);
5903 if (GET_CODE (x) == PARALLEL)
5904 x = XVECEXP (x, 0, 0);
5905 if (GET_CODE (x) == SET)
5906 x = SET_SRC (x);
5907 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
5909 x = XEXP (XEXP (x, 0), 0);
5910 if (GET_CODE (x) == SYMBOL_REF)
5911 /* Don't record anything. */;
5912 else if (CONSTANT_P (x))
5914 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
5915 pc_rtx, x);
5916 call_arguments
5917 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5919 else
5921 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5922 if (val && cselib_preserved_value_p (val))
5924 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
5925 call_arguments
5926 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5930 if (this_arg)
5932 enum machine_mode mode
5933 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
5934 rtx clobbered = gen_rtx_MEM (mode, this_arg);
5935 HOST_WIDE_INT token
5936 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
5937 if (token)
5938 clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
5939 clobbered = gen_rtx_MEM (mode, clobbered);
5940 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
5941 call_arguments
5942 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5946 /* Callback for cselib_record_sets_hook, that records as micro
5947 operations uses and stores in an insn after cselib_record_sets has
5948 analyzed the sets in an insn, but before it modifies the stored
5949 values in the internal tables, unless cselib_record_sets doesn't
5950 call it directly (perhaps because we're not doing cselib in the
5951 first place, in which case sets and n_sets will be 0). */
5953 static void
5954 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5956 basic_block bb = BLOCK_FOR_INSN (insn);
5957 int n1, n2;
5958 struct count_use_info cui;
5959 micro_operation *mos;
5961 cselib_hook_called = true;
5963 cui.insn = insn;
5964 cui.bb = bb;
5965 cui.sets = sets;
5966 cui.n_sets = n_sets;
5968 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5969 cui.store_p = false;
5970 note_uses (&PATTERN (insn), add_uses_1, &cui);
5971 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5972 mos = VEC_address (micro_operation, VTI (bb)->mos);
5974 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5975 MO_VAL_LOC last. */
5976 while (n1 < n2)
5978 while (n1 < n2 && mos[n1].type == MO_USE)
5979 n1++;
5980 while (n1 < n2 && mos[n2].type != MO_USE)
5981 n2--;
5982 if (n1 < n2)
5984 micro_operation sw;
5986 sw = mos[n1];
5987 mos[n1] = mos[n2];
5988 mos[n2] = sw;
5992 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5993 while (n1 < n2)
5995 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5996 n1++;
5997 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5998 n2--;
5999 if (n1 < n2)
6001 micro_operation sw;
6003 sw = mos[n1];
6004 mos[n1] = mos[n2];
6005 mos[n2] = sw;
6009 if (CALL_P (insn))
6011 micro_operation mo;
6013 mo.type = MO_CALL;
6014 mo.insn = insn;
6015 mo.u.loc = call_arguments;
6016 call_arguments = NULL_RTX;
6018 if (dump_file && (dump_flags & TDF_DETAILS))
6019 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6020 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6023 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6024 /* This will record NEXT_INSN (insn), such that we can
6025 insert notes before it without worrying about any
6026 notes that MO_USEs might emit after the insn. */
6027 cui.store_p = true;
6028 note_stores (PATTERN (insn), add_stores, &cui);
6029 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6030 mos = VEC_address (micro_operation, VTI (bb)->mos);
6032 /* Order the MO_VAL_USEs first (note_stores does nothing
6033 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6034 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6035 while (n1 < n2)
6037 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6038 n1++;
6039 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6040 n2--;
6041 if (n1 < n2)
6043 micro_operation sw;
6045 sw = mos[n1];
6046 mos[n1] = mos[n2];
6047 mos[n2] = sw;
6051 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6052 while (n1 < n2)
6054 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6055 n1++;
6056 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6057 n2--;
6058 if (n1 < n2)
6060 micro_operation sw;
6062 sw = mos[n1];
6063 mos[n1] = mos[n2];
6064 mos[n2] = sw;
6069 static enum var_init_status
6070 find_src_status (dataflow_set *in, rtx src)
6072 tree decl = NULL_TREE;
6073 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6075 if (! flag_var_tracking_uninit)
6076 status = VAR_INIT_STATUS_INITIALIZED;
6078 if (src && REG_P (src))
6079 decl = var_debug_decl (REG_EXPR (src));
6080 else if (src && MEM_P (src))
6081 decl = var_debug_decl (MEM_EXPR (src));
6083 if (src && decl)
6084 status = get_init_value (in, src, dv_from_decl (decl));
6086 return status;
6089 /* SRC is the source of an assignment. Use SET to try to find what
6090 was ultimately assigned to SRC. Return that value if known,
6091 otherwise return SRC itself. */
6093 static rtx
6094 find_src_set_src (dataflow_set *set, rtx src)
6096 tree decl = NULL_TREE; /* The variable being copied around. */
6097 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6098 variable var;
6099 location_chain nextp;
6100 int i;
6101 bool found;
6103 if (src && REG_P (src))
6104 decl = var_debug_decl (REG_EXPR (src));
6105 else if (src && MEM_P (src))
6106 decl = var_debug_decl (MEM_EXPR (src));
6108 if (src && decl)
6110 decl_or_value dv = dv_from_decl (decl);
6112 var = shared_hash_find (set->vars, dv);
6113 if (var)
6115 found = false;
6116 for (i = 0; i < var->n_var_parts && !found; i++)
6117 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6118 nextp = nextp->next)
6119 if (rtx_equal_p (nextp->loc, src))
6121 set_src = nextp->set_src;
6122 found = true;
6128 return set_src;
6131 /* Compute the changes of variable locations in the basic block BB. */
6133 static bool
6134 compute_bb_dataflow (basic_block bb)
6136 unsigned int i;
6137 micro_operation *mo;
6138 bool changed;
6139 dataflow_set old_out;
6140 dataflow_set *in = &VTI (bb)->in;
6141 dataflow_set *out = &VTI (bb)->out;
6143 dataflow_set_init (&old_out);
6144 dataflow_set_copy (&old_out, out);
6145 dataflow_set_copy (out, in);
6147 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6149 rtx insn = mo->insn;
6151 switch (mo->type)
6153 case MO_CALL:
6154 dataflow_set_clear_at_call (out);
6155 break;
6157 case MO_USE:
6159 rtx loc = mo->u.loc;
6161 if (REG_P (loc))
6162 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6163 else if (MEM_P (loc))
6164 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6166 break;
6168 case MO_VAL_LOC:
6170 rtx loc = mo->u.loc;
6171 rtx val, vloc;
6172 tree var;
6174 if (GET_CODE (loc) == CONCAT)
6176 val = XEXP (loc, 0);
6177 vloc = XEXP (loc, 1);
6179 else
6181 val = NULL_RTX;
6182 vloc = loc;
6185 var = PAT_VAR_LOCATION_DECL (vloc);
6187 clobber_variable_part (out, NULL_RTX,
6188 dv_from_decl (var), 0, NULL_RTX);
6189 if (val)
6191 if (VAL_NEEDS_RESOLUTION (loc))
6192 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6193 set_variable_part (out, val, dv_from_decl (var), 0,
6194 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6195 INSERT);
6197 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6198 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6199 dv_from_decl (var), 0,
6200 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6201 INSERT);
6203 break;
6205 case MO_VAL_USE:
6207 rtx loc = mo->u.loc;
6208 rtx val, vloc, uloc;
6210 vloc = uloc = XEXP (loc, 1);
6211 val = XEXP (loc, 0);
6213 if (GET_CODE (val) == CONCAT)
6215 uloc = XEXP (val, 1);
6216 val = XEXP (val, 0);
6219 if (VAL_NEEDS_RESOLUTION (loc))
6220 val_resolve (out, val, vloc, insn);
6221 else
6222 val_store (out, val, uloc, insn, false);
6224 if (VAL_HOLDS_TRACK_EXPR (loc))
6226 if (GET_CODE (uloc) == REG)
6227 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6228 NULL);
6229 else if (GET_CODE (uloc) == MEM)
6230 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6231 NULL);
6234 break;
6236 case MO_VAL_SET:
6238 rtx loc = mo->u.loc;
6239 rtx val, vloc, uloc, reverse = NULL_RTX;
6241 vloc = loc;
6242 if (VAL_EXPR_HAS_REVERSE (loc))
6244 reverse = XEXP (loc, 1);
6245 vloc = XEXP (loc, 0);
6247 uloc = XEXP (vloc, 1);
6248 val = XEXP (vloc, 0);
6249 vloc = uloc;
6251 if (GET_CODE (val) == CONCAT)
6253 vloc = XEXP (val, 1);
6254 val = XEXP (val, 0);
6257 if (GET_CODE (vloc) == SET)
6259 rtx vsrc = SET_SRC (vloc);
6261 gcc_assert (val != vsrc);
6262 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6264 vloc = SET_DEST (vloc);
6266 if (VAL_NEEDS_RESOLUTION (loc))
6267 val_resolve (out, val, vsrc, insn);
6269 else if (VAL_NEEDS_RESOLUTION (loc))
6271 gcc_assert (GET_CODE (uloc) == SET
6272 && GET_CODE (SET_SRC (uloc)) == REG);
6273 val_resolve (out, val, SET_SRC (uloc), insn);
6276 if (VAL_HOLDS_TRACK_EXPR (loc))
6278 if (VAL_EXPR_IS_CLOBBERED (loc))
6280 if (REG_P (uloc))
6281 var_reg_delete (out, uloc, true);
6282 else if (MEM_P (uloc))
6283 var_mem_delete (out, uloc, true);
6285 else
6287 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6288 rtx set_src = NULL;
6289 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6291 if (GET_CODE (uloc) == SET)
6293 set_src = SET_SRC (uloc);
6294 uloc = SET_DEST (uloc);
6297 if (copied_p)
6299 if (flag_var_tracking_uninit)
6301 status = find_src_status (in, set_src);
6303 if (status == VAR_INIT_STATUS_UNKNOWN)
6304 status = find_src_status (out, set_src);
6307 set_src = find_src_set_src (in, set_src);
6310 if (REG_P (uloc))
6311 var_reg_delete_and_set (out, uloc, !copied_p,
6312 status, set_src);
6313 else if (MEM_P (uloc))
6314 var_mem_delete_and_set (out, uloc, !copied_p,
6315 status, set_src);
6318 else if (REG_P (uloc))
6319 var_regno_delete (out, REGNO (uloc));
6321 val_store (out, val, vloc, insn, true);
6323 if (reverse)
6324 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
6325 insn, false);
6327 break;
6329 case MO_SET:
6331 rtx loc = mo->u.loc;
6332 rtx set_src = NULL;
6334 if (GET_CODE (loc) == SET)
6336 set_src = SET_SRC (loc);
6337 loc = SET_DEST (loc);
6340 if (REG_P (loc))
6341 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6342 set_src);
6343 else if (MEM_P (loc))
6344 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6345 set_src);
6347 break;
6349 case MO_COPY:
6351 rtx loc = mo->u.loc;
6352 enum var_init_status src_status;
6353 rtx set_src = NULL;
6355 if (GET_CODE (loc) == SET)
6357 set_src = SET_SRC (loc);
6358 loc = SET_DEST (loc);
6361 if (! flag_var_tracking_uninit)
6362 src_status = VAR_INIT_STATUS_INITIALIZED;
6363 else
6365 src_status = find_src_status (in, set_src);
6367 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6368 src_status = find_src_status (out, set_src);
6371 set_src = find_src_set_src (in, set_src);
6373 if (REG_P (loc))
6374 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6375 else if (MEM_P (loc))
6376 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6378 break;
6380 case MO_USE_NO_VAR:
6382 rtx loc = mo->u.loc;
6384 if (REG_P (loc))
6385 var_reg_delete (out, loc, false);
6386 else if (MEM_P (loc))
6387 var_mem_delete (out, loc, false);
6389 break;
6391 case MO_CLOBBER:
6393 rtx loc = mo->u.loc;
6395 if (REG_P (loc))
6396 var_reg_delete (out, loc, true);
6397 else if (MEM_P (loc))
6398 var_mem_delete (out, loc, true);
6400 break;
6402 case MO_ADJUST:
6403 out->stack_adjust += mo->u.adjust;
6404 break;
6408 if (MAY_HAVE_DEBUG_INSNS)
6410 dataflow_set_equiv_regs (out);
6411 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6412 out);
6413 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6414 out);
6415 #if ENABLE_CHECKING
6416 htab_traverse (shared_hash_htab (out->vars),
6417 canonicalize_loc_order_check, out);
6418 #endif
6420 changed = dataflow_set_different (&old_out, out);
6421 dataflow_set_destroy (&old_out);
6422 return changed;
6425 /* Find the locations of variables in the whole function. */
6427 static bool
6428 vt_find_locations (void)
6430 fibheap_t worklist, pending, fibheap_swap;
6431 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6432 basic_block bb;
6433 edge e;
6434 int *bb_order;
6435 int *rc_order;
6436 int i;
6437 int htabsz = 0;
6438 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6439 bool success = true;
6441 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6442 /* Compute reverse completion order of depth first search of the CFG
6443 so that the data-flow runs faster. */
6444 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6445 bb_order = XNEWVEC (int, last_basic_block);
6446 pre_and_rev_post_order_compute (NULL, rc_order, false);
6447 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6448 bb_order[rc_order[i]] = i;
6449 free (rc_order);
6451 worklist = fibheap_new ();
6452 pending = fibheap_new ();
6453 visited = sbitmap_alloc (last_basic_block);
6454 in_worklist = sbitmap_alloc (last_basic_block);
6455 in_pending = sbitmap_alloc (last_basic_block);
6456 sbitmap_zero (in_worklist);
6458 FOR_EACH_BB (bb)
6459 fibheap_insert (pending, bb_order[bb->index], bb);
6460 sbitmap_ones (in_pending);
6462 while (success && !fibheap_empty (pending))
6464 fibheap_swap = pending;
6465 pending = worklist;
6466 worklist = fibheap_swap;
6467 sbitmap_swap = in_pending;
6468 in_pending = in_worklist;
6469 in_worklist = sbitmap_swap;
6471 sbitmap_zero (visited);
6473 while (!fibheap_empty (worklist))
6475 bb = (basic_block) fibheap_extract_min (worklist);
6476 RESET_BIT (in_worklist, bb->index);
6477 gcc_assert (!TEST_BIT (visited, bb->index));
6478 if (!TEST_BIT (visited, bb->index))
6480 bool changed;
6481 edge_iterator ei;
6482 int oldinsz, oldoutsz;
6484 SET_BIT (visited, bb->index);
6486 if (VTI (bb)->in.vars)
6488 htabsz
6489 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6490 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6491 oldinsz
6492 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6493 oldoutsz
6494 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6496 else
6497 oldinsz = oldoutsz = 0;
6499 if (MAY_HAVE_DEBUG_INSNS)
6501 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6502 bool first = true, adjust = false;
6504 /* Calculate the IN set as the intersection of
6505 predecessor OUT sets. */
6507 dataflow_set_clear (in);
6508 dst_can_be_shared = true;
6510 FOR_EACH_EDGE (e, ei, bb->preds)
6511 if (!VTI (e->src)->flooded)
6512 gcc_assert (bb_order[bb->index]
6513 <= bb_order[e->src->index]);
6514 else if (first)
6516 dataflow_set_copy (in, &VTI (e->src)->out);
6517 first_out = &VTI (e->src)->out;
6518 first = false;
6520 else
6522 dataflow_set_merge (in, &VTI (e->src)->out);
6523 adjust = true;
6526 if (adjust)
6528 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6529 #if ENABLE_CHECKING
6530 /* Merge and merge_adjust should keep entries in
6531 canonical order. */
6532 htab_traverse (shared_hash_htab (in->vars),
6533 canonicalize_loc_order_check,
6534 in);
6535 #endif
6536 if (dst_can_be_shared)
6538 shared_hash_destroy (in->vars);
6539 in->vars = shared_hash_copy (first_out->vars);
6543 VTI (bb)->flooded = true;
6545 else
6547 /* Calculate the IN set as union of predecessor OUT sets. */
6548 dataflow_set_clear (&VTI (bb)->in);
6549 FOR_EACH_EDGE (e, ei, bb->preds)
6550 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6553 changed = compute_bb_dataflow (bb);
6554 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6555 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6557 if (htabmax && htabsz > htabmax)
6559 if (MAY_HAVE_DEBUG_INSNS)
6560 inform (DECL_SOURCE_LOCATION (cfun->decl),
6561 "variable tracking size limit exceeded with "
6562 "-fvar-tracking-assignments, retrying without");
6563 else
6564 inform (DECL_SOURCE_LOCATION (cfun->decl),
6565 "variable tracking size limit exceeded");
6566 success = false;
6567 break;
6570 if (changed)
6572 FOR_EACH_EDGE (e, ei, bb->succs)
6574 if (e->dest == EXIT_BLOCK_PTR)
6575 continue;
6577 if (TEST_BIT (visited, e->dest->index))
6579 if (!TEST_BIT (in_pending, e->dest->index))
6581 /* Send E->DEST to next round. */
6582 SET_BIT (in_pending, e->dest->index);
6583 fibheap_insert (pending,
6584 bb_order[e->dest->index],
6585 e->dest);
6588 else if (!TEST_BIT (in_worklist, e->dest->index))
6590 /* Add E->DEST to current round. */
6591 SET_BIT (in_worklist, e->dest->index);
6592 fibheap_insert (worklist, bb_order[e->dest->index],
6593 e->dest);
6598 if (dump_file)
6599 fprintf (dump_file,
6600 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6601 bb->index,
6602 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6603 oldinsz,
6604 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6605 oldoutsz,
6606 (int)worklist->nodes, (int)pending->nodes, htabsz);
6608 if (dump_file && (dump_flags & TDF_DETAILS))
6610 fprintf (dump_file, "BB %i IN:\n", bb->index);
6611 dump_dataflow_set (&VTI (bb)->in);
6612 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6613 dump_dataflow_set (&VTI (bb)->out);
6619 if (success && MAY_HAVE_DEBUG_INSNS)
6620 FOR_EACH_BB (bb)
6621 gcc_assert (VTI (bb)->flooded);
6623 free (bb_order);
6624 fibheap_delete (worklist);
6625 fibheap_delete (pending);
6626 sbitmap_free (visited);
6627 sbitmap_free (in_worklist);
6628 sbitmap_free (in_pending);
6630 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6631 return success;
6634 /* Print the content of the LIST to dump file. */
6636 static void
6637 dump_attrs_list (attrs list)
6639 for (; list; list = list->next)
6641 if (dv_is_decl_p (list->dv))
6642 print_mem_expr (dump_file, dv_as_decl (list->dv));
6643 else
6644 print_rtl_single (dump_file, dv_as_value (list->dv));
6645 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6647 fprintf (dump_file, "\n");
6650 /* Print the information about variable *SLOT to dump file. */
6652 static int
6653 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6655 variable var = (variable) *slot;
6657 dump_var (var);
6659 /* Continue traversing the hash table. */
6660 return 1;
6663 /* Print the information about variable VAR to dump file. */
6665 static void
6666 dump_var (variable var)
6668 int i;
6669 location_chain node;
6671 if (dv_is_decl_p (var->dv))
6673 const_tree decl = dv_as_decl (var->dv);
6675 if (DECL_NAME (decl))
6677 fprintf (dump_file, " name: %s",
6678 IDENTIFIER_POINTER (DECL_NAME (decl)));
6679 if (dump_flags & TDF_UID)
6680 fprintf (dump_file, "D.%u", DECL_UID (decl));
6682 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6683 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6684 else
6685 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6686 fprintf (dump_file, "\n");
6688 else
6690 fputc (' ', dump_file);
6691 print_rtl_single (dump_file, dv_as_value (var->dv));
6694 for (i = 0; i < var->n_var_parts; i++)
6696 fprintf (dump_file, " offset %ld\n",
6697 (long) var->var_part[i].offset);
6698 for (node = var->var_part[i].loc_chain; node; node = node->next)
6700 fprintf (dump_file, " ");
6701 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6702 fprintf (dump_file, "[uninit]");
6703 print_rtl_single (dump_file, node->loc);
6708 /* Print the information about variables from hash table VARS to dump file. */
6710 static void
6711 dump_vars (htab_t vars)
6713 if (htab_elements (vars) > 0)
6715 fprintf (dump_file, "Variables:\n");
6716 htab_traverse (vars, dump_var_slot, NULL);
6720 /* Print the dataflow set SET to dump file. */
6722 static void
6723 dump_dataflow_set (dataflow_set *set)
6725 int i;
6727 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6728 set->stack_adjust);
6729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6731 if (set->regs[i])
6733 fprintf (dump_file, "Reg %d:", i);
6734 dump_attrs_list (set->regs[i]);
6737 dump_vars (shared_hash_htab (set->vars));
6738 fprintf (dump_file, "\n");
6741 /* Print the IN and OUT sets for each basic block to dump file. */
6743 static void
6744 dump_dataflow_sets (void)
6746 basic_block bb;
6748 FOR_EACH_BB (bb)
6750 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6751 fprintf (dump_file, "IN:\n");
6752 dump_dataflow_set (&VTI (bb)->in);
6753 fprintf (dump_file, "OUT:\n");
6754 dump_dataflow_set (&VTI (bb)->out);
6758 /* Add variable VAR to the hash table of changed variables and
6759 if it has no locations delete it from SET's hash table. */
6761 static void
6762 variable_was_changed (variable var, dataflow_set *set)
6764 hashval_t hash = dv_htab_hash (var->dv);
6766 if (emit_notes)
6768 void **slot;
6769 bool old_cur_loc_changed = false;
6771 /* Remember this decl or VALUE has been added to changed_variables. */
6772 set_dv_changed (var->dv, true);
6774 slot = htab_find_slot_with_hash (changed_variables,
6775 var->dv,
6776 hash, INSERT);
6778 if (*slot)
6780 variable old_var = (variable) *slot;
6781 gcc_assert (old_var->in_changed_variables);
6782 old_var->in_changed_variables = false;
6783 old_cur_loc_changed = old_var->cur_loc_changed;
6784 variable_htab_free (*slot);
6786 if (set && var->n_var_parts == 0)
6788 variable empty_var;
6790 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6791 empty_var->dv = var->dv;
6792 empty_var->refcount = 1;
6793 empty_var->n_var_parts = 0;
6794 empty_var->cur_loc_changed = true;
6795 empty_var->in_changed_variables = true;
6796 *slot = empty_var;
6797 goto drop_var;
6799 else
6801 var->refcount++;
6802 var->in_changed_variables = true;
6803 /* If within processing one uop a variable is deleted
6804 and then readded, we need to assume it has changed. */
6805 if (old_cur_loc_changed)
6806 var->cur_loc_changed = true;
6807 *slot = var;
6810 else
6812 gcc_assert (set);
6813 if (var->n_var_parts == 0)
6815 void **slot;
6817 drop_var:
6818 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6819 if (slot)
6821 if (shared_hash_shared (set->vars))
6822 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6823 NO_INSERT);
6824 htab_clear_slot (shared_hash_htab (set->vars), slot);
6830 /* Look for the index in VAR->var_part corresponding to OFFSET.
6831 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6832 referenced int will be set to the index that the part has or should
6833 have, if it should be inserted. */
6835 static inline int
6836 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6837 int *insertion_point)
6839 int pos, low, high;
6841 /* Find the location part. */
6842 low = 0;
6843 high = var->n_var_parts;
6844 while (low != high)
6846 pos = (low + high) / 2;
6847 if (var->var_part[pos].offset < offset)
6848 low = pos + 1;
6849 else
6850 high = pos;
6852 pos = low;
6854 if (insertion_point)
6855 *insertion_point = pos;
6857 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6858 return pos;
6860 return -1;
6863 static void **
6864 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6865 decl_or_value dv, HOST_WIDE_INT offset,
6866 enum var_init_status initialized, rtx set_src)
6868 int pos;
6869 location_chain node, next;
6870 location_chain *nextp;
6871 variable var;
6872 bool onepart = dv_onepart_p (dv);
6874 gcc_assert (offset == 0 || !onepart);
6875 gcc_assert (loc != dv_as_opaque (dv));
6877 var = (variable) *slot;
6879 if (! flag_var_tracking_uninit)
6880 initialized = VAR_INIT_STATUS_INITIALIZED;
6882 if (!var)
6884 /* Create new variable information. */
6885 var = (variable) pool_alloc (dv_pool (dv));
6886 var->dv = dv;
6887 var->refcount = 1;
6888 var->n_var_parts = 1;
6889 var->cur_loc_changed = false;
6890 var->in_changed_variables = false;
6891 var->var_part[0].offset = offset;
6892 var->var_part[0].loc_chain = NULL;
6893 var->var_part[0].cur_loc = NULL;
6894 *slot = var;
6895 pos = 0;
6896 nextp = &var->var_part[0].loc_chain;
6898 else if (onepart)
6900 int r = -1, c = 0;
6902 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6904 pos = 0;
6906 if (GET_CODE (loc) == VALUE)
6908 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6909 nextp = &node->next)
6910 if (GET_CODE (node->loc) == VALUE)
6912 if (node->loc == loc)
6914 r = 0;
6915 break;
6917 if (canon_value_cmp (node->loc, loc))
6918 c++;
6919 else
6921 r = 1;
6922 break;
6925 else if (REG_P (node->loc) || MEM_P (node->loc))
6926 c++;
6927 else
6929 r = 1;
6930 break;
6933 else if (REG_P (loc))
6935 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6936 nextp = &node->next)
6937 if (REG_P (node->loc))
6939 if (REGNO (node->loc) < REGNO (loc))
6940 c++;
6941 else
6943 if (REGNO (node->loc) == REGNO (loc))
6944 r = 0;
6945 else
6946 r = 1;
6947 break;
6950 else
6952 r = 1;
6953 break;
6956 else if (MEM_P (loc))
6958 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6959 nextp = &node->next)
6960 if (REG_P (node->loc))
6961 c++;
6962 else if (MEM_P (node->loc))
6964 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6965 break;
6966 else
6967 c++;
6969 else
6971 r = 1;
6972 break;
6975 else
6976 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6977 nextp = &node->next)
6978 if ((r = loc_cmp (node->loc, loc)) >= 0)
6979 break;
6980 else
6981 c++;
6983 if (r == 0)
6984 return slot;
6986 if (shared_var_p (var, set->vars))
6988 slot = unshare_variable (set, slot, var, initialized);
6989 var = (variable)*slot;
6990 for (nextp = &var->var_part[0].loc_chain; c;
6991 nextp = &(*nextp)->next)
6992 c--;
6993 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6996 else
6998 int inspos = 0;
7000 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7002 pos = find_variable_location_part (var, offset, &inspos);
7004 if (pos >= 0)
7006 node = var->var_part[pos].loc_chain;
7008 if (node
7009 && ((REG_P (node->loc) && REG_P (loc)
7010 && REGNO (node->loc) == REGNO (loc))
7011 || rtx_equal_p (node->loc, loc)))
7013 /* LOC is in the beginning of the chain so we have nothing
7014 to do. */
7015 if (node->init < initialized)
7016 node->init = initialized;
7017 if (set_src != NULL)
7018 node->set_src = set_src;
7020 return slot;
7022 else
7024 /* We have to make a copy of a shared variable. */
7025 if (shared_var_p (var, set->vars))
7027 slot = unshare_variable (set, slot, var, initialized);
7028 var = (variable)*slot;
7032 else
7034 /* We have not found the location part, new one will be created. */
7036 /* We have to make a copy of the shared variable. */
7037 if (shared_var_p (var, set->vars))
7039 slot = unshare_variable (set, slot, var, initialized);
7040 var = (variable)*slot;
7043 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7044 thus there are at most MAX_VAR_PARTS different offsets. */
7045 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7046 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
7048 /* We have to move the elements of array starting at index
7049 inspos to the next position. */
7050 for (pos = var->n_var_parts; pos > inspos; pos--)
7051 var->var_part[pos] = var->var_part[pos - 1];
7053 var->n_var_parts++;
7054 var->var_part[pos].offset = offset;
7055 var->var_part[pos].loc_chain = NULL;
7056 var->var_part[pos].cur_loc = NULL;
7059 /* Delete the location from the list. */
7060 nextp = &var->var_part[pos].loc_chain;
7061 for (node = var->var_part[pos].loc_chain; node; node = next)
7063 next = node->next;
7064 if ((REG_P (node->loc) && REG_P (loc)
7065 && REGNO (node->loc) == REGNO (loc))
7066 || rtx_equal_p (node->loc, loc))
7068 /* Save these values, to assign to the new node, before
7069 deleting this one. */
7070 if (node->init > initialized)
7071 initialized = node->init;
7072 if (node->set_src != NULL && set_src == NULL)
7073 set_src = node->set_src;
7074 if (var->var_part[pos].cur_loc == node->loc)
7076 var->var_part[pos].cur_loc = NULL;
7077 var->cur_loc_changed = true;
7079 pool_free (loc_chain_pool, node);
7080 *nextp = next;
7081 break;
7083 else
7084 nextp = &node->next;
7087 nextp = &var->var_part[pos].loc_chain;
7090 /* Add the location to the beginning. */
7091 node = (location_chain) pool_alloc (loc_chain_pool);
7092 node->loc = loc;
7093 node->init = initialized;
7094 node->set_src = set_src;
7095 node->next = *nextp;
7096 *nextp = node;
7098 if (onepart && emit_notes)
7099 add_value_chains (var->dv, loc);
7101 /* If no location was emitted do so. */
7102 if (var->var_part[pos].cur_loc == NULL)
7103 variable_was_changed (var, set);
7105 return slot;
7108 /* Set the part of variable's location in the dataflow set SET. The
7109 variable part is specified by variable's declaration in DV and
7110 offset OFFSET and the part's location by LOC. IOPT should be
7111 NO_INSERT if the variable is known to be in SET already and the
7112 variable hash table must not be resized, and INSERT otherwise. */
7114 static void
7115 set_variable_part (dataflow_set *set, rtx loc,
7116 decl_or_value dv, HOST_WIDE_INT offset,
7117 enum var_init_status initialized, rtx set_src,
7118 enum insert_option iopt)
7120 void **slot;
7122 if (iopt == NO_INSERT)
7123 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7124 else
7126 slot = shared_hash_find_slot (set->vars, dv);
7127 if (!slot)
7128 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7130 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7133 /* Remove all recorded register locations for the given variable part
7134 from dataflow set SET, except for those that are identical to loc.
7135 The variable part is specified by variable's declaration or value
7136 DV and offset OFFSET. */
7138 static void **
7139 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7140 HOST_WIDE_INT offset, rtx set_src)
7142 variable var = (variable) *slot;
7143 int pos = find_variable_location_part (var, offset, NULL);
7145 if (pos >= 0)
7147 location_chain node, next;
7149 /* Remove the register locations from the dataflow set. */
7150 next = var->var_part[pos].loc_chain;
7151 for (node = next; node; node = next)
7153 next = node->next;
7154 if (node->loc != loc
7155 && (!flag_var_tracking_uninit
7156 || !set_src
7157 || MEM_P (set_src)
7158 || !rtx_equal_p (set_src, node->set_src)))
7160 if (REG_P (node->loc))
7162 attrs anode, anext;
7163 attrs *anextp;
7165 /* Remove the variable part from the register's
7166 list, but preserve any other variable parts
7167 that might be regarded as live in that same
7168 register. */
7169 anextp = &set->regs[REGNO (node->loc)];
7170 for (anode = *anextp; anode; anode = anext)
7172 anext = anode->next;
7173 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7174 && anode->offset == offset)
7176 pool_free (attrs_pool, anode);
7177 *anextp = anext;
7179 else
7180 anextp = &anode->next;
7184 slot = delete_slot_part (set, node->loc, slot, offset);
7189 return slot;
7192 /* Remove all recorded register locations for the given variable part
7193 from dataflow set SET, except for those that are identical to loc.
7194 The variable part is specified by variable's declaration or value
7195 DV and offset OFFSET. */
7197 static void
7198 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7199 HOST_WIDE_INT offset, rtx set_src)
7201 void **slot;
7203 if (!dv_as_opaque (dv)
7204 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7205 return;
7207 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7208 if (!slot)
7209 return;
7211 clobber_slot_part (set, loc, slot, offset, set_src);
7214 /* Delete the part of variable's location from dataflow set SET. The
7215 variable part is specified by its SET->vars slot SLOT and offset
7216 OFFSET and the part's location by LOC. */
7218 static void **
7219 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7220 HOST_WIDE_INT offset)
7222 variable var = (variable) *slot;
7223 int pos = find_variable_location_part (var, offset, NULL);
7225 if (pos >= 0)
7227 location_chain node, next;
7228 location_chain *nextp;
7229 bool changed;
7231 if (shared_var_p (var, set->vars))
7233 /* If the variable contains the location part we have to
7234 make a copy of the variable. */
7235 for (node = var->var_part[pos].loc_chain; node;
7236 node = node->next)
7238 if ((REG_P (node->loc) && REG_P (loc)
7239 && REGNO (node->loc) == REGNO (loc))
7240 || rtx_equal_p (node->loc, loc))
7242 slot = unshare_variable (set, slot, var,
7243 VAR_INIT_STATUS_UNKNOWN);
7244 var = (variable)*slot;
7245 break;
7250 /* Delete the location part. */
7251 changed = false;
7252 nextp = &var->var_part[pos].loc_chain;
7253 for (node = *nextp; node; node = next)
7255 next = node->next;
7256 if ((REG_P (node->loc) && REG_P (loc)
7257 && REGNO (node->loc) == REGNO (loc))
7258 || rtx_equal_p (node->loc, loc))
7260 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
7261 remove_value_chains (var->dv, node->loc);
7262 /* If we have deleted the location which was last emitted
7263 we have to emit new location so add the variable to set
7264 of changed variables. */
7265 if (var->var_part[pos].cur_loc == node->loc)
7267 changed = true;
7268 var->var_part[pos].cur_loc = NULL;
7269 var->cur_loc_changed = true;
7271 pool_free (loc_chain_pool, node);
7272 *nextp = next;
7273 break;
7275 else
7276 nextp = &node->next;
7279 if (var->var_part[pos].loc_chain == NULL)
7281 changed = true;
7282 var->n_var_parts--;
7283 if (emit_notes)
7284 var->cur_loc_changed = true;
7285 while (pos < var->n_var_parts)
7287 var->var_part[pos] = var->var_part[pos + 1];
7288 pos++;
7291 if (changed)
7292 variable_was_changed (var, set);
7295 return slot;
7298 /* Delete the part of variable's location from dataflow set SET. The
7299 variable part is specified by variable's declaration or value DV
7300 and offset OFFSET and the part's location by LOC. */
7302 static void
7303 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7304 HOST_WIDE_INT offset)
7306 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7307 if (!slot)
7308 return;
7310 delete_slot_part (set, loc, slot, offset);
7313 /* Structure for passing some other parameters to function
7314 vt_expand_loc_callback. */
7315 struct expand_loc_callback_data
7317 /* The variables and values active at this point. */
7318 htab_t vars;
7320 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
7321 Non-NULL should be returned if vt_expand_loc would return
7322 non-NULL in that case, NULL otherwise. cur_loc_changed should be
7323 computed and cur_loc recomputed when possible (but just once
7324 per emit_notes_for_changes call). */
7325 bool dummy;
7327 /* True if expansion of subexpressions had to recompute some
7328 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
7329 whose cur_loc has been already recomputed during current
7330 emit_notes_for_changes call. */
7331 bool cur_loc_changed;
7333 /* True if cur_loc should be ignored and any possible location
7334 returned. */
7335 bool ignore_cur_loc;
7338 /* Callback for cselib_expand_value, that looks for expressions
7339 holding the value in the var-tracking hash tables. Return X for
7340 standard processing, anything else is to be used as-is. */
7342 static rtx
7343 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
7345 struct expand_loc_callback_data *elcd
7346 = (struct expand_loc_callback_data *) data;
7347 bool dummy = elcd->dummy;
7348 bool cur_loc_changed = elcd->cur_loc_changed;
7349 rtx cur_loc;
7350 decl_or_value dv;
7351 variable var;
7352 location_chain loc;
7353 rtx result, subreg, xret;
7355 switch (GET_CODE (x))
7357 case SUBREG:
7358 if (dummy)
7360 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
7361 max_depth - 1,
7362 vt_expand_loc_callback, data))
7363 return pc_rtx;
7364 else
7365 return NULL;
7368 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
7369 max_depth - 1,
7370 vt_expand_loc_callback, data);
7372 if (!subreg)
7373 return NULL;
7375 result = simplify_gen_subreg (GET_MODE (x), subreg,
7376 GET_MODE (SUBREG_REG (x)),
7377 SUBREG_BYTE (x));
7379 /* Invalid SUBREGs are ok in debug info. ??? We could try
7380 alternate expansions for the VALUE as well. */
7381 if (!result)
7382 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
7384 return result;
7386 case DEBUG_EXPR:
7387 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
7388 xret = NULL;
7389 break;
7391 case VALUE:
7392 dv = dv_from_value (x);
7393 xret = x;
7394 break;
7396 default:
7397 return x;
7400 if (VALUE_RECURSED_INTO (x))
7401 return NULL;
7403 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
7405 if (!var)
7407 if (dummy && dv_changed_p (dv))
7408 elcd->cur_loc_changed = true;
7409 return xret;
7412 if (var->n_var_parts == 0)
7414 if (dummy)
7415 elcd->cur_loc_changed = true;
7416 return xret;
7419 gcc_assert (var->n_var_parts == 1);
7421 VALUE_RECURSED_INTO (x) = true;
7422 result = NULL;
7424 if (var->var_part[0].cur_loc && !elcd->ignore_cur_loc)
7426 if (dummy)
7428 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7429 max_depth,
7430 vt_expand_loc_callback, data))
7431 result = pc_rtx;
7433 else
7434 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7435 max_depth,
7436 vt_expand_loc_callback, data);
7437 if (result)
7438 set_dv_changed (dv, false);
7439 cur_loc = var->var_part[0].cur_loc;
7441 else
7442 cur_loc = NULL_RTX;
7443 if (!result && (dv_changed_p (dv) || elcd->ignore_cur_loc))
7445 if (!elcd->ignore_cur_loc)
7446 set_dv_changed (dv, false);
7447 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
7448 if (loc->loc == cur_loc)
7449 continue;
7450 else if (dummy)
7452 elcd->cur_loc_changed = cur_loc_changed;
7453 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
7454 vt_expand_loc_callback,
7455 data))
7457 result = pc_rtx;
7458 break;
7461 else
7463 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
7464 vt_expand_loc_callback, data);
7465 if (result)
7466 break;
7468 if (dummy && (result || var->var_part[0].cur_loc))
7469 var->cur_loc_changed = true;
7470 if (!elcd->ignore_cur_loc)
7471 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
7473 if (dummy)
7475 if (var->cur_loc_changed)
7476 elcd->cur_loc_changed = true;
7477 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
7478 elcd->cur_loc_changed = cur_loc_changed;
7481 VALUE_RECURSED_INTO (x) = false;
7482 if (result)
7483 return result;
7484 else
7485 return xret;
7488 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7489 tables. */
7491 static rtx
7492 vt_expand_loc (rtx loc, htab_t vars, bool ignore_cur_loc)
7494 struct expand_loc_callback_data data;
7496 if (!MAY_HAVE_DEBUG_INSNS)
7497 return loc;
7499 data.vars = vars;
7500 data.dummy = false;
7501 data.cur_loc_changed = false;
7502 data.ignore_cur_loc = ignore_cur_loc;
7503 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7504 vt_expand_loc_callback, &data);
7506 if (loc && MEM_P (loc))
7507 loc = targetm.delegitimize_address (loc);
7508 return loc;
7511 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7512 would succeed or not, without actually allocating new rtxes. */
7514 static bool
7515 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7517 struct expand_loc_callback_data data;
7518 bool ret;
7520 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7521 data.vars = vars;
7522 data.dummy = true;
7523 data.cur_loc_changed = false;
7524 data.ignore_cur_loc = false;
7525 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7526 vt_expand_loc_callback, &data);
7527 *pcur_loc_changed = data.cur_loc_changed;
7528 return ret;
7531 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7532 additional parameters: WHERE specifies whether the note shall be emitted
7533 before or after instruction INSN. */
7535 static int
7536 emit_note_insn_var_location (void **varp, void *data)
7538 variable var = (variable) *varp;
7539 rtx insn = ((emit_note_data *)data)->insn;
7540 enum emit_note_where where = ((emit_note_data *)data)->where;
7541 htab_t vars = ((emit_note_data *)data)->vars;
7542 rtx note, note_vl;
7543 int i, j, n_var_parts;
7544 bool complete;
7545 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7546 HOST_WIDE_INT last_limit;
7547 tree type_size_unit;
7548 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7549 rtx loc[MAX_VAR_PARTS];
7550 tree decl;
7551 location_chain lc;
7553 if (dv_is_value_p (var->dv))
7554 goto value_or_debug_decl;
7556 decl = dv_as_decl (var->dv);
7558 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7559 goto value_or_debug_decl;
7561 complete = true;
7562 last_limit = 0;
7563 n_var_parts = 0;
7564 if (!MAY_HAVE_DEBUG_INSNS)
7566 for (i = 0; i < var->n_var_parts; i++)
7567 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7569 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7570 var->cur_loc_changed = true;
7572 if (var->n_var_parts == 0)
7573 var->cur_loc_changed = true;
7575 if (!var->cur_loc_changed)
7576 goto clear;
7577 for (i = 0; i < var->n_var_parts; i++)
7579 enum machine_mode mode, wider_mode;
7580 rtx loc2;
7582 if (last_limit < var->var_part[i].offset)
7584 complete = false;
7585 break;
7587 else if (last_limit > var->var_part[i].offset)
7588 continue;
7589 offsets[n_var_parts] = var->var_part[i].offset;
7590 if (!var->var_part[i].cur_loc)
7592 complete = false;
7593 continue;
7595 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars, false);
7596 if (!loc2)
7598 complete = false;
7599 continue;
7601 loc[n_var_parts] = loc2;
7602 mode = GET_MODE (var->var_part[i].cur_loc);
7603 if (mode == VOIDmode && dv_onepart_p (var->dv))
7604 mode = DECL_MODE (decl);
7605 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7606 if (var->var_part[i].cur_loc == lc->loc)
7608 initialized = lc->init;
7609 break;
7611 gcc_assert (lc);
7612 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7614 /* Attempt to merge adjacent registers or memory. */
7615 wider_mode = GET_MODE_WIDER_MODE (mode);
7616 for (j = i + 1; j < var->n_var_parts; j++)
7617 if (last_limit <= var->var_part[j].offset)
7618 break;
7619 if (j < var->n_var_parts
7620 && wider_mode != VOIDmode
7621 && var->var_part[j].cur_loc
7622 && mode == GET_MODE (var->var_part[j].cur_loc)
7623 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7624 && last_limit == var->var_part[j].offset
7625 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars, false))
7626 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7628 rtx new_loc = NULL;
7630 if (REG_P (loc[n_var_parts])
7631 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7632 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7633 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7634 == REGNO (loc2))
7636 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7637 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7638 mode, 0);
7639 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7640 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7641 if (new_loc)
7643 if (!REG_P (new_loc)
7644 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7645 new_loc = NULL;
7646 else
7647 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7650 else if (MEM_P (loc[n_var_parts])
7651 && GET_CODE (XEXP (loc2, 0)) == PLUS
7652 && REG_P (XEXP (XEXP (loc2, 0), 0))
7653 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7655 if ((REG_P (XEXP (loc[n_var_parts], 0))
7656 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7657 XEXP (XEXP (loc2, 0), 0))
7658 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7659 == GET_MODE_SIZE (mode))
7660 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7661 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7662 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7663 XEXP (XEXP (loc2, 0), 0))
7664 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7665 + GET_MODE_SIZE (mode)
7666 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7667 new_loc = adjust_address_nv (loc[n_var_parts],
7668 wider_mode, 0);
7671 if (new_loc)
7673 loc[n_var_parts] = new_loc;
7674 mode = wider_mode;
7675 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7676 i = j;
7679 ++n_var_parts;
7681 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7682 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7683 complete = false;
7685 if (! flag_var_tracking_uninit)
7686 initialized = VAR_INIT_STATUS_INITIALIZED;
7688 note_vl = NULL_RTX;
7689 if (!complete)
7690 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7691 (int) initialized);
7692 else if (n_var_parts == 1)
7694 rtx expr_list;
7696 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7697 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7698 else
7699 expr_list = loc[0];
7701 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7702 (int) initialized);
7704 else if (n_var_parts)
7706 rtx parallel;
7708 for (i = 0; i < n_var_parts; i++)
7709 loc[i]
7710 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7712 parallel = gen_rtx_PARALLEL (VOIDmode,
7713 gen_rtvec_v (n_var_parts, loc));
7714 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7715 parallel, (int) initialized);
7718 if (where != EMIT_NOTE_BEFORE_INSN)
7720 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7721 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7722 NOTE_DURING_CALL_P (note) = true;
7724 else
7726 /* Make sure that the call related notes come first. */
7727 while (NEXT_INSN (insn)
7728 && NOTE_P (insn)
7729 && NOTE_DURING_CALL_P (insn))
7730 insn = NEXT_INSN (insn);
7731 if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn))
7732 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7733 else
7734 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7736 NOTE_VAR_LOCATION (note) = note_vl;
7738 clear:
7739 set_dv_changed (var->dv, false);
7740 var->cur_loc_changed = false;
7741 gcc_assert (var->in_changed_variables);
7742 var->in_changed_variables = false;
7743 htab_clear_slot (changed_variables, varp);
7745 /* Continue traversing the hash table. */
7746 return 1;
7748 value_or_debug_decl:
7749 if (dv_changed_p (var->dv) && var->n_var_parts)
7751 location_chain lc;
7752 bool cur_loc_changed;
7754 if (var->var_part[0].cur_loc
7755 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7756 &cur_loc_changed))
7757 goto clear;
7758 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7759 if (lc->loc != var->var_part[0].cur_loc
7760 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7761 break;
7762 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7764 goto clear;
7767 DEF_VEC_P (variable);
7768 DEF_VEC_ALLOC_P (variable, heap);
7770 /* Stack of variable_def pointers that need processing with
7771 check_changed_vars_2. */
7773 static VEC (variable, heap) *changed_variables_stack;
7775 /* VALUEs with no variables that need set_dv_changed (val, false)
7776 called before check_changed_vars_3. */
7778 static VEC (rtx, heap) *changed_values_stack;
7780 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7782 static void
7783 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7785 value_chain vc
7786 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7788 if (vc == NULL)
7789 return;
7790 for (vc = vc->next; vc; vc = vc->next)
7791 if (!dv_changed_p (vc->dv))
7793 variable vcvar
7794 = (variable) htab_find_with_hash (htab, vc->dv,
7795 dv_htab_hash (vc->dv));
7796 if (vcvar)
7798 set_dv_changed (vc->dv, true);
7799 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7801 else if (dv_is_value_p (vc->dv))
7803 set_dv_changed (vc->dv, true);
7804 VEC_safe_push (rtx, heap, changed_values_stack,
7805 dv_as_value (vc->dv));
7806 check_changed_vars_0 (vc->dv, htab);
7811 /* Populate changed_variables_stack with variable_def pointers
7812 that need variable_was_changed called on them. */
7814 static int
7815 check_changed_vars_1 (void **slot, void *data)
7817 variable var = (variable) *slot;
7818 htab_t htab = (htab_t) data;
7820 if (dv_is_value_p (var->dv)
7821 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7822 check_changed_vars_0 (var->dv, htab);
7823 return 1;
7826 /* Add VAR to changed_variables and also for VALUEs add recursively
7827 all DVs that aren't in changed_variables yet but reference the
7828 VALUE from its loc_chain. */
7830 static void
7831 check_changed_vars_2 (variable var, htab_t htab)
7833 variable_was_changed (var, NULL);
7834 if (dv_is_value_p (var->dv)
7835 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7836 check_changed_vars_0 (var->dv, htab);
7839 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7840 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7841 it needs and are also in changed variables) and track whether
7842 cur_loc (or anything it uses to compute location) had to change
7843 during the current emit_notes_for_changes call. */
7845 static int
7846 check_changed_vars_3 (void **slot, void *data)
7848 variable var = (variable) *slot;
7849 htab_t vars = (htab_t) data;
7850 int i;
7851 location_chain lc;
7852 bool cur_loc_changed;
7854 if (dv_is_value_p (var->dv)
7855 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7856 return 1;
7858 for (i = 0; i < var->n_var_parts; i++)
7860 if (var->var_part[i].cur_loc
7861 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7862 &cur_loc_changed))
7864 if (cur_loc_changed)
7865 var->cur_loc_changed = true;
7866 continue;
7868 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7869 if (lc->loc != var->var_part[i].cur_loc
7870 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7871 break;
7872 if (lc || var->var_part[i].cur_loc)
7873 var->cur_loc_changed = true;
7874 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7876 if (var->n_var_parts == 0)
7877 var->cur_loc_changed = true;
7878 return 1;
7881 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7882 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7883 shall be emitted before of after instruction INSN. */
7885 static void
7886 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7887 shared_hash vars)
7889 emit_note_data data;
7890 htab_t htab = shared_hash_htab (vars);
7892 if (!htab_elements (changed_variables))
7893 return;
7895 if (MAY_HAVE_DEBUG_INSNS)
7897 /* Unfortunately this has to be done in two steps, because
7898 we can't traverse a hashtab into which we are inserting
7899 through variable_was_changed. */
7900 htab_traverse (changed_variables, check_changed_vars_1, htab);
7901 while (VEC_length (variable, changed_variables_stack) > 0)
7902 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7903 htab);
7904 while (VEC_length (rtx, changed_values_stack) > 0)
7905 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7906 false);
7907 htab_traverse (changed_variables, check_changed_vars_3, htab);
7910 data.insn = insn;
7911 data.where = where;
7912 data.vars = htab;
7914 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7917 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7918 same variable in hash table DATA or is not there at all. */
7920 static int
7921 emit_notes_for_differences_1 (void **slot, void *data)
7923 htab_t new_vars = (htab_t) data;
7924 variable old_var, new_var;
7926 old_var = (variable) *slot;
7927 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7928 dv_htab_hash (old_var->dv));
7930 if (!new_var)
7932 /* Variable has disappeared. */
7933 variable empty_var;
7935 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7936 empty_var->dv = old_var->dv;
7937 empty_var->refcount = 0;
7938 empty_var->n_var_parts = 0;
7939 empty_var->cur_loc_changed = false;
7940 empty_var->in_changed_variables = false;
7941 if (dv_onepart_p (old_var->dv))
7943 location_chain lc;
7945 gcc_assert (old_var->n_var_parts == 1);
7946 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7947 remove_value_chains (old_var->dv, lc->loc);
7949 variable_was_changed (empty_var, NULL);
7950 /* Continue traversing the hash table. */
7951 return 1;
7953 if (variable_different_p (old_var, new_var))
7955 if (dv_onepart_p (old_var->dv))
7957 location_chain lc1, lc2;
7959 gcc_assert (old_var->n_var_parts == 1
7960 && new_var->n_var_parts == 1);
7961 lc1 = old_var->var_part[0].loc_chain;
7962 lc2 = new_var->var_part[0].loc_chain;
7963 while (lc1
7964 && lc2
7965 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7966 || rtx_equal_p (lc1->loc, lc2->loc)))
7968 lc1 = lc1->next;
7969 lc2 = lc2->next;
7971 for (; lc2; lc2 = lc2->next)
7972 add_value_chains (old_var->dv, lc2->loc);
7973 for (; lc1; lc1 = lc1->next)
7974 remove_value_chains (old_var->dv, lc1->loc);
7976 variable_was_changed (new_var, NULL);
7978 /* Update cur_loc. */
7979 if (old_var != new_var)
7981 int i;
7982 for (i = 0; i < new_var->n_var_parts; i++)
7984 new_var->var_part[i].cur_loc = NULL;
7985 if (old_var->n_var_parts != new_var->n_var_parts
7986 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7987 new_var->cur_loc_changed = true;
7988 else if (old_var->var_part[i].cur_loc != NULL)
7990 location_chain lc;
7991 rtx cur_loc = old_var->var_part[i].cur_loc;
7993 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7994 if (lc->loc == cur_loc
7995 || rtx_equal_p (cur_loc, lc->loc))
7997 new_var->var_part[i].cur_loc = lc->loc;
7998 break;
8000 if (lc == NULL)
8001 new_var->cur_loc_changed = true;
8006 /* Continue traversing the hash table. */
8007 return 1;
8010 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8011 table DATA. */
8013 static int
8014 emit_notes_for_differences_2 (void **slot, void *data)
8016 htab_t old_vars = (htab_t) data;
8017 variable old_var, new_var;
8019 new_var = (variable) *slot;
8020 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8021 dv_htab_hash (new_var->dv));
8022 if (!old_var)
8024 int i;
8025 /* Variable has appeared. */
8026 if (dv_onepart_p (new_var->dv))
8028 location_chain lc;
8030 gcc_assert (new_var->n_var_parts == 1);
8031 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
8032 add_value_chains (new_var->dv, lc->loc);
8034 for (i = 0; i < new_var->n_var_parts; i++)
8035 new_var->var_part[i].cur_loc = NULL;
8036 variable_was_changed (new_var, NULL);
8039 /* Continue traversing the hash table. */
8040 return 1;
8043 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8044 NEW_SET. */
8046 static void
8047 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8048 dataflow_set *new_set)
8050 htab_traverse (shared_hash_htab (old_set->vars),
8051 emit_notes_for_differences_1,
8052 shared_hash_htab (new_set->vars));
8053 htab_traverse (shared_hash_htab (new_set->vars),
8054 emit_notes_for_differences_2,
8055 shared_hash_htab (old_set->vars));
8056 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8059 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8061 static rtx
8062 next_non_note_insn_var_location (rtx insn)
8064 while (insn)
8066 insn = NEXT_INSN (insn);
8067 if (insn == 0
8068 || !NOTE_P (insn)
8069 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8070 break;
8073 return insn;
8076 /* Emit the notes for changes of location parts in the basic block BB. */
8078 static void
8079 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8081 unsigned int i;
8082 micro_operation *mo;
8084 dataflow_set_clear (set);
8085 dataflow_set_copy (set, &VTI (bb)->in);
8087 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8089 rtx insn = mo->insn;
8090 rtx next_insn = next_non_note_insn_var_location (insn);
8092 switch (mo->type)
8094 case MO_CALL:
8095 dataflow_set_clear_at_call (set);
8096 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8098 rtx arguments = mo->u.loc, *p = &arguments, note;
8099 while (*p)
8101 XEXP (XEXP (*p, 0), 1)
8102 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8103 shared_hash_htab (set->vars), true);
8104 /* If expansion is successful, keep it in the list. */
8105 if (XEXP (XEXP (*p, 0), 1))
8106 p = &XEXP (*p, 1);
8107 /* Otherwise, if the following item is data_value for it,
8108 drop it too too. */
8109 else if (XEXP (*p, 1)
8110 && REG_P (XEXP (XEXP (*p, 0), 0))
8111 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8112 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8114 && REGNO (XEXP (XEXP (*p, 0), 0))
8115 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8116 0), 0)))
8117 *p = XEXP (XEXP (*p, 1), 1);
8118 /* Just drop this item. */
8119 else
8120 *p = XEXP (*p, 1);
8122 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8123 NOTE_VAR_LOCATION (note) = arguments;
8125 break;
8127 case MO_USE:
8129 rtx loc = mo->u.loc;
8131 if (REG_P (loc))
8132 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8133 else
8134 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8136 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8138 break;
8140 case MO_VAL_LOC:
8142 rtx loc = mo->u.loc;
8143 rtx val, vloc;
8144 tree var;
8146 if (GET_CODE (loc) == CONCAT)
8148 val = XEXP (loc, 0);
8149 vloc = XEXP (loc, 1);
8151 else
8153 val = NULL_RTX;
8154 vloc = loc;
8157 var = PAT_VAR_LOCATION_DECL (vloc);
8159 clobber_variable_part (set, NULL_RTX,
8160 dv_from_decl (var), 0, NULL_RTX);
8161 if (val)
8163 if (VAL_NEEDS_RESOLUTION (loc))
8164 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8165 set_variable_part (set, val, dv_from_decl (var), 0,
8166 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8167 INSERT);
8169 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8170 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8171 dv_from_decl (var), 0,
8172 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8173 INSERT);
8175 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8177 break;
8179 case MO_VAL_USE:
8181 rtx loc = mo->u.loc;
8182 rtx val, vloc, uloc;
8184 vloc = uloc = XEXP (loc, 1);
8185 val = XEXP (loc, 0);
8187 if (GET_CODE (val) == CONCAT)
8189 uloc = XEXP (val, 1);
8190 val = XEXP (val, 0);
8193 if (VAL_NEEDS_RESOLUTION (loc))
8194 val_resolve (set, val, vloc, insn);
8195 else
8196 val_store (set, val, uloc, insn, false);
8198 if (VAL_HOLDS_TRACK_EXPR (loc))
8200 if (GET_CODE (uloc) == REG)
8201 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8202 NULL);
8203 else if (GET_CODE (uloc) == MEM)
8204 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8205 NULL);
8208 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8210 break;
8212 case MO_VAL_SET:
8214 rtx loc = mo->u.loc;
8215 rtx val, vloc, uloc, reverse = NULL_RTX;
8217 vloc = loc;
8218 if (VAL_EXPR_HAS_REVERSE (loc))
8220 reverse = XEXP (loc, 1);
8221 vloc = XEXP (loc, 0);
8223 uloc = XEXP (vloc, 1);
8224 val = XEXP (vloc, 0);
8225 vloc = uloc;
8227 if (GET_CODE (val) == CONCAT)
8229 vloc = XEXP (val, 1);
8230 val = XEXP (val, 0);
8233 if (GET_CODE (vloc) == SET)
8235 rtx vsrc = SET_SRC (vloc);
8237 gcc_assert (val != vsrc);
8238 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8240 vloc = SET_DEST (vloc);
8242 if (VAL_NEEDS_RESOLUTION (loc))
8243 val_resolve (set, val, vsrc, insn);
8245 else if (VAL_NEEDS_RESOLUTION (loc))
8247 gcc_assert (GET_CODE (uloc) == SET
8248 && GET_CODE (SET_SRC (uloc)) == REG);
8249 val_resolve (set, val, SET_SRC (uloc), insn);
8252 if (VAL_HOLDS_TRACK_EXPR (loc))
8254 if (VAL_EXPR_IS_CLOBBERED (loc))
8256 if (REG_P (uloc))
8257 var_reg_delete (set, uloc, true);
8258 else if (MEM_P (uloc))
8259 var_mem_delete (set, uloc, true);
8261 else
8263 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8264 rtx set_src = NULL;
8265 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8267 if (GET_CODE (uloc) == SET)
8269 set_src = SET_SRC (uloc);
8270 uloc = SET_DEST (uloc);
8273 if (copied_p)
8275 status = find_src_status (set, set_src);
8277 set_src = find_src_set_src (set, set_src);
8280 if (REG_P (uloc))
8281 var_reg_delete_and_set (set, uloc, !copied_p,
8282 status, set_src);
8283 else if (MEM_P (uloc))
8284 var_mem_delete_and_set (set, uloc, !copied_p,
8285 status, set_src);
8288 else if (REG_P (uloc))
8289 var_regno_delete (set, REGNO (uloc));
8291 val_store (set, val, vloc, insn, true);
8293 if (reverse)
8294 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
8295 insn, false);
8297 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8298 set->vars);
8300 break;
8302 case MO_SET:
8304 rtx loc = mo->u.loc;
8305 rtx set_src = NULL;
8307 if (GET_CODE (loc) == SET)
8309 set_src = SET_SRC (loc);
8310 loc = SET_DEST (loc);
8313 if (REG_P (loc))
8314 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8315 set_src);
8316 else
8317 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8318 set_src);
8320 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8321 set->vars);
8323 break;
8325 case MO_COPY:
8327 rtx loc = mo->u.loc;
8328 enum var_init_status src_status;
8329 rtx set_src = NULL;
8331 if (GET_CODE (loc) == SET)
8333 set_src = SET_SRC (loc);
8334 loc = SET_DEST (loc);
8337 src_status = find_src_status (set, set_src);
8338 set_src = find_src_set_src (set, set_src);
8340 if (REG_P (loc))
8341 var_reg_delete_and_set (set, loc, false, src_status, set_src);
8342 else
8343 var_mem_delete_and_set (set, loc, false, src_status, set_src);
8345 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8346 set->vars);
8348 break;
8350 case MO_USE_NO_VAR:
8352 rtx loc = mo->u.loc;
8354 if (REG_P (loc))
8355 var_reg_delete (set, loc, false);
8356 else
8357 var_mem_delete (set, loc, false);
8359 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8361 break;
8363 case MO_CLOBBER:
8365 rtx loc = mo->u.loc;
8367 if (REG_P (loc))
8368 var_reg_delete (set, loc, true);
8369 else
8370 var_mem_delete (set, loc, true);
8372 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8373 set->vars);
8375 break;
8377 case MO_ADJUST:
8378 set->stack_adjust += mo->u.adjust;
8379 break;
8384 /* Emit notes for the whole function. */
8386 static void
8387 vt_emit_notes (void)
8389 basic_block bb;
8390 dataflow_set cur;
8392 gcc_assert (!htab_elements (changed_variables));
8394 /* Free memory occupied by the out hash tables, as they aren't used
8395 anymore. */
8396 FOR_EACH_BB (bb)
8397 dataflow_set_clear (&VTI (bb)->out);
8399 /* Enable emitting notes by functions (mainly by set_variable_part and
8400 delete_variable_part). */
8401 emit_notes = true;
8403 if (MAY_HAVE_DEBUG_INSNS)
8405 unsigned int i;
8406 rtx val;
8408 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8409 add_cselib_value_chains (dv_from_value (val));
8410 changed_variables_stack = VEC_alloc (variable, heap, 40);
8411 changed_values_stack = VEC_alloc (rtx, heap, 40);
8414 dataflow_set_init (&cur);
8416 FOR_EACH_BB (bb)
8418 /* Emit the notes for changes of variable locations between two
8419 subsequent basic blocks. */
8420 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
8422 /* Emit the notes for the changes in the basic block itself. */
8423 emit_notes_in_bb (bb, &cur);
8425 /* Free memory occupied by the in hash table, we won't need it
8426 again. */
8427 dataflow_set_clear (&VTI (bb)->in);
8429 #ifdef ENABLE_CHECKING
8430 htab_traverse (shared_hash_htab (cur.vars),
8431 emit_notes_for_differences_1,
8432 shared_hash_htab (empty_shared_hash));
8433 if (MAY_HAVE_DEBUG_INSNS)
8435 unsigned int i;
8436 rtx val;
8438 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8439 remove_cselib_value_chains (dv_from_value (val));
8440 gcc_assert (htab_elements (value_chains) == 0);
8442 #endif
8443 dataflow_set_destroy (&cur);
8445 if (MAY_HAVE_DEBUG_INSNS)
8447 VEC_free (variable, heap, changed_variables_stack);
8448 VEC_free (rtx, heap, changed_values_stack);
8451 emit_notes = false;
8454 /* If there is a declaration and offset associated with register/memory RTL
8455 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8457 static bool
8458 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8460 if (REG_P (rtl))
8462 if (REG_ATTRS (rtl))
8464 *declp = REG_EXPR (rtl);
8465 *offsetp = REG_OFFSET (rtl);
8466 return true;
8469 else if (MEM_P (rtl))
8471 if (MEM_ATTRS (rtl))
8473 *declp = MEM_EXPR (rtl);
8474 *offsetp = INT_MEM_OFFSET (rtl);
8475 return true;
8478 return false;
8481 /* Helper function for vt_add_function_parameter. RTL is
8482 the expression and VAL corresponding cselib_val pointer
8483 for which ENTRY_VALUE should be created. */
8485 static void
8486 create_entry_value (rtx rtl, cselib_val *val)
8488 cselib_val *val2;
8489 struct elt_loc_list *el;
8490 el = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el));
8491 el->next = val->locs;
8492 el->loc = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
8493 ENTRY_VALUE_EXP (el->loc) = rtl;
8494 el->setting_insn = get_insns ();
8495 val->locs = el;
8496 val2 = cselib_lookup_from_insn (el->loc, GET_MODE (rtl), true,
8497 VOIDmode, get_insns ());
8498 if (val2
8499 && val2 != val
8500 && val2->locs
8501 && rtx_equal_p (val2->locs->loc, el->loc))
8503 struct elt_loc_list *el2;
8505 preserve_value (val2);
8506 el2 = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el2));
8507 el2->next = val2->locs;
8508 el2->loc = val->val_rtx;
8509 el2->setting_insn = get_insns ();
8510 val2->locs = el2;
8514 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
8516 static void
8517 vt_add_function_parameter (tree parm)
8519 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8520 rtx incoming = DECL_INCOMING_RTL (parm);
8521 tree decl;
8522 enum machine_mode mode;
8523 HOST_WIDE_INT offset;
8524 dataflow_set *out;
8525 decl_or_value dv;
8527 if (TREE_CODE (parm) != PARM_DECL)
8528 return;
8530 if (!decl_rtl || !incoming)
8531 return;
8533 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8534 return;
8536 /* If there is a DRAP register, rewrite the incoming location of parameters
8537 passed on the stack into MEMs based on the argument pointer, as the DRAP
8538 register can be reused for other purposes and we do not track locations
8539 based on generic registers. But the prerequisite is that this argument
8540 pointer be also the virtual CFA pointer, see vt_initialize. */
8541 if (MEM_P (incoming)
8542 && stack_realign_drap
8543 && arg_pointer_rtx == cfa_base_rtx
8544 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
8545 || (GET_CODE (XEXP (incoming, 0)) == PLUS
8546 && XEXP (XEXP (incoming, 0), 0)
8547 == crtl->args.internal_arg_pointer
8548 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
8550 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
8551 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
8552 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
8553 incoming
8554 = replace_equiv_address_nv (incoming,
8555 plus_constant (arg_pointer_rtx, off));
8558 #ifdef HAVE_window_save
8559 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
8560 If the target machine has an explicit window save instruction, the
8561 actual entry value is the corresponding OUTGOING_REGNO instead. */
8562 if (REG_P (incoming)
8563 && HARD_REGISTER_P (incoming)
8564 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
8566 parm_reg_t *p
8567 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8568 p->incoming = incoming;
8569 incoming
8570 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
8571 OUTGOING_REGNO (REGNO (incoming)), 0);
8572 p->outgoing = incoming;
8574 else if (MEM_P (incoming)
8575 && REG_P (XEXP (incoming, 0))
8576 && HARD_REGISTER_P (XEXP (incoming, 0)))
8578 rtx reg = XEXP (incoming, 0);
8579 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
8581 parm_reg_t *p
8582 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8583 p->incoming = reg;
8584 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
8585 p->outgoing = reg;
8586 incoming = replace_equiv_address_nv (incoming, reg);
8589 #endif
8591 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
8593 if (REG_P (incoming) || MEM_P (incoming))
8595 /* This means argument is passed by invisible reference. */
8596 offset = 0;
8597 decl = parm;
8598 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
8600 else
8602 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
8603 return;
8604 offset += byte_lowpart_offset (GET_MODE (incoming),
8605 GET_MODE (decl_rtl));
8609 if (!decl)
8610 return;
8612 if (parm != decl)
8614 /* Assume that DECL_RTL was a pseudo that got spilled to
8615 memory. The spill slot sharing code will force the
8616 memory to reference spill_slot_decl (%sfp), so we don't
8617 match above. That's ok, the pseudo must have referenced
8618 the entire parameter, so just reset OFFSET. */
8619 gcc_assert (decl == get_spill_slot_decl (false));
8620 offset = 0;
8623 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8624 return;
8626 out = &VTI (ENTRY_BLOCK_PTR)->out;
8628 dv = dv_from_decl (parm);
8630 if (target_for_debug_bind (parm)
8631 /* We can't deal with these right now, because this kind of
8632 variable is single-part. ??? We could handle parallels
8633 that describe multiple locations for the same single
8634 value, but ATM we don't. */
8635 && GET_CODE (incoming) != PARALLEL)
8637 cselib_val *val;
8639 /* ??? We shouldn't ever hit this, but it may happen because
8640 arguments passed by invisible reference aren't dealt with
8641 above: incoming-rtl will have Pmode rather than the
8642 expected mode for the type. */
8643 if (offset)
8644 return;
8646 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
8647 VOIDmode, get_insns ());
8649 /* ??? Float-typed values in memory are not handled by
8650 cselib. */
8651 if (val)
8653 preserve_value (val);
8654 set_variable_part (out, val->val_rtx, dv, offset,
8655 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8656 dv = dv_from_value (val->val_rtx);
8660 if (REG_P (incoming))
8662 incoming = var_lowpart (mode, incoming);
8663 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8664 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8665 incoming);
8666 set_variable_part (out, incoming, dv, offset,
8667 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8668 if (dv_is_value_p (dv))
8670 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (dv));
8671 create_entry_value (incoming, val);
8672 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
8673 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
8675 enum machine_mode indmode
8676 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
8677 rtx mem = gen_rtx_MEM (indmode, incoming);
8678 val = cselib_lookup_from_insn (mem, indmode, true,
8679 VOIDmode, get_insns ());
8680 if (val)
8682 preserve_value (val);
8683 create_entry_value (mem, val);
8688 else if (MEM_P (incoming))
8690 incoming = var_lowpart (mode, incoming);
8691 set_variable_part (out, incoming, dv, offset,
8692 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8696 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
8698 static void
8699 vt_add_function_parameters (void)
8701 tree parm;
8703 for (parm = DECL_ARGUMENTS (current_function_decl);
8704 parm; parm = DECL_CHAIN (parm))
8705 vt_add_function_parameter (parm);
8707 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
8709 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
8711 if (TREE_CODE (vexpr) == INDIRECT_REF)
8712 vexpr = TREE_OPERAND (vexpr, 0);
8714 if (TREE_CODE (vexpr) == PARM_DECL
8715 && DECL_ARTIFICIAL (vexpr)
8716 && !DECL_IGNORED_P (vexpr)
8717 && DECL_NAMELESS (vexpr))
8718 vt_add_function_parameter (vexpr);
8722 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8724 static bool
8725 fp_setter (rtx insn)
8727 rtx pat = PATTERN (insn);
8728 if (RTX_FRAME_RELATED_P (insn))
8730 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8731 if (expr)
8732 pat = XEXP (expr, 0);
8734 if (GET_CODE (pat) == SET)
8735 return SET_DEST (pat) == hard_frame_pointer_rtx;
8736 else if (GET_CODE (pat) == PARALLEL)
8738 int i;
8739 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8740 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8741 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8742 return true;
8744 return false;
8747 /* Gather all registers used for passing arguments to other functions
8748 called from the current routine. */
8750 static void
8751 note_register_arguments (rtx insn)
8753 rtx link, x;
8755 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
8756 if (GET_CODE (XEXP (link, 0)) == USE)
8758 x = XEXP (XEXP (link, 0), 0);
8759 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
8760 SET_HARD_REG_BIT (argument_reg_set, REGNO (x));
8764 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8765 ensure it isn't flushed during cselib_reset_table.
8766 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8767 has been eliminated. */
8769 static void
8770 vt_init_cfa_base (void)
8772 cselib_val *val;
8774 #ifdef FRAME_POINTER_CFA_OFFSET
8775 cfa_base_rtx = frame_pointer_rtx;
8776 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
8777 #else
8778 cfa_base_rtx = arg_pointer_rtx;
8779 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
8780 #endif
8781 if (cfa_base_rtx == hard_frame_pointer_rtx
8782 || !fixed_regs[REGNO (cfa_base_rtx)])
8784 cfa_base_rtx = NULL_RTX;
8785 return;
8787 if (!MAY_HAVE_DEBUG_INSNS)
8788 return;
8790 /* Tell alias analysis that cfa_base_rtx should share
8791 find_base_term value with stack pointer or hard frame pointer. */
8792 if (!frame_pointer_needed)
8793 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
8794 else if (!crtl->stack_realign_tried)
8795 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
8797 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8798 VOIDmode, get_insns ());
8799 preserve_value (val);
8800 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
8801 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8802 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8803 0, NULL_RTX, INSERT);
8806 /* Allocate and initialize the data structures for variable tracking
8807 and parse the RTL to get the micro operations. */
8809 static bool
8810 vt_initialize (void)
8812 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8813 HOST_WIDE_INT fp_cfa_offset = -1;
8815 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8817 attrs_pool = create_alloc_pool ("attrs_def pool",
8818 sizeof (struct attrs_def), 1024);
8819 var_pool = create_alloc_pool ("variable_def pool",
8820 sizeof (struct variable_def)
8821 + (MAX_VAR_PARTS - 1)
8822 * sizeof (((variable)NULL)->var_part[0]), 64);
8823 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8824 sizeof (struct location_chain_def),
8825 1024);
8826 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8827 sizeof (struct shared_hash_def), 256);
8828 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8829 empty_shared_hash->refcount = 1;
8830 empty_shared_hash->htab
8831 = htab_create (1, variable_htab_hash, variable_htab_eq,
8832 variable_htab_free);
8833 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8834 variable_htab_free);
8835 if (MAY_HAVE_DEBUG_INSNS)
8837 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8838 sizeof (struct value_chain_def),
8839 1024);
8840 value_chains = htab_create (32, value_chain_htab_hash,
8841 value_chain_htab_eq, NULL);
8844 /* Init the IN and OUT sets. */
8845 FOR_ALL_BB (bb)
8847 VTI (bb)->visited = false;
8848 VTI (bb)->flooded = false;
8849 dataflow_set_init (&VTI (bb)->in);
8850 dataflow_set_init (&VTI (bb)->out);
8851 VTI (bb)->permp = NULL;
8854 if (MAY_HAVE_DEBUG_INSNS)
8856 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8857 scratch_regs = BITMAP_ALLOC (NULL);
8858 valvar_pool = create_alloc_pool ("small variable_def pool",
8859 sizeof (struct variable_def), 256);
8860 preserved_values = VEC_alloc (rtx, heap, 256);
8862 else
8864 scratch_regs = NULL;
8865 valvar_pool = NULL;
8868 CLEAR_HARD_REG_SET (argument_reg_set);
8870 /* In order to factor out the adjustments made to the stack pointer or to
8871 the hard frame pointer and thus be able to use DW_OP_fbreg operations
8872 instead of individual location lists, we're going to rewrite MEMs based
8873 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
8874 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
8875 resp. arg_pointer_rtx. We can do this either when there is no frame
8876 pointer in the function and stack adjustments are consistent for all
8877 basic blocks or when there is a frame pointer and no stack realignment.
8878 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
8879 has been eliminated. */
8880 if (!frame_pointer_needed)
8882 rtx reg, elim;
8884 if (!vt_stack_adjustments ())
8885 return false;
8887 #ifdef FRAME_POINTER_CFA_OFFSET
8888 reg = frame_pointer_rtx;
8889 #else
8890 reg = arg_pointer_rtx;
8891 #endif
8892 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8893 if (elim != reg)
8895 if (GET_CODE (elim) == PLUS)
8896 elim = XEXP (elim, 0);
8897 if (elim == stack_pointer_rtx)
8898 vt_init_cfa_base ();
8901 else if (!crtl->stack_realign_tried)
8903 rtx reg, elim;
8905 #ifdef FRAME_POINTER_CFA_OFFSET
8906 reg = frame_pointer_rtx;
8907 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8908 #else
8909 reg = arg_pointer_rtx;
8910 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8911 #endif
8912 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8913 if (elim != reg)
8915 if (GET_CODE (elim) == PLUS)
8917 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8918 elim = XEXP (elim, 0);
8920 if (elim != hard_frame_pointer_rtx)
8921 fp_cfa_offset = -1;
8923 else
8924 fp_cfa_offset = -1;
8927 /* If the stack is realigned and a DRAP register is used, we're going to
8928 rewrite MEMs based on it representing incoming locations of parameters
8929 passed on the stack into MEMs based on the argument pointer. Although
8930 we aren't going to rewrite other MEMs, we still need to initialize the
8931 virtual CFA pointer in order to ensure that the argument pointer will
8932 be seen as a constant throughout the function.
8934 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
8935 else if (stack_realign_drap)
8937 rtx reg, elim;
8939 #ifdef FRAME_POINTER_CFA_OFFSET
8940 reg = frame_pointer_rtx;
8941 #else
8942 reg = arg_pointer_rtx;
8943 #endif
8944 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8945 if (elim != reg)
8947 if (GET_CODE (elim) == PLUS)
8948 elim = XEXP (elim, 0);
8949 if (elim == hard_frame_pointer_rtx)
8950 vt_init_cfa_base ();
8954 if (frame_pointer_needed)
8956 rtx insn;
8957 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8958 if (CALL_P (insn))
8959 note_register_arguments (insn);
8962 hard_frame_pointer_adjustment = -1;
8964 vt_add_function_parameters ();
8966 FOR_EACH_BB (bb)
8968 rtx insn;
8969 HOST_WIDE_INT pre, post = 0;
8970 basic_block first_bb, last_bb;
8972 if (MAY_HAVE_DEBUG_INSNS)
8974 cselib_record_sets_hook = add_with_sets;
8975 if (dump_file && (dump_flags & TDF_DETAILS))
8976 fprintf (dump_file, "first value: %i\n",
8977 cselib_get_next_uid ());
8980 first_bb = bb;
8981 for (;;)
8983 edge e;
8984 if (bb->next_bb == EXIT_BLOCK_PTR
8985 || ! single_pred_p (bb->next_bb))
8986 break;
8987 e = find_edge (bb, bb->next_bb);
8988 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
8989 break;
8990 bb = bb->next_bb;
8992 last_bb = bb;
8994 /* Add the micro-operations to the vector. */
8995 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
8997 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
8998 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
8999 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9000 insn = NEXT_INSN (insn))
9002 if (INSN_P (insn))
9004 if (!frame_pointer_needed)
9006 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9007 if (pre)
9009 micro_operation mo;
9010 mo.type = MO_ADJUST;
9011 mo.u.adjust = pre;
9012 mo.insn = insn;
9013 if (dump_file && (dump_flags & TDF_DETAILS))
9014 log_op_type (PATTERN (insn), bb, insn,
9015 MO_ADJUST, dump_file);
9016 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9017 &mo);
9018 VTI (bb)->out.stack_adjust += pre;
9022 cselib_hook_called = false;
9023 adjust_insn (bb, insn);
9024 if (MAY_HAVE_DEBUG_INSNS)
9026 if (CALL_P (insn))
9027 prepare_call_arguments (bb, insn);
9028 cselib_process_insn (insn);
9029 if (dump_file && (dump_flags & TDF_DETAILS))
9031 print_rtl_single (dump_file, insn);
9032 dump_cselib_table (dump_file);
9035 if (!cselib_hook_called)
9036 add_with_sets (insn, 0, 0);
9037 cancel_changes (0);
9039 if (!frame_pointer_needed && post)
9041 micro_operation mo;
9042 mo.type = MO_ADJUST;
9043 mo.u.adjust = post;
9044 mo.insn = insn;
9045 if (dump_file && (dump_flags & TDF_DETAILS))
9046 log_op_type (PATTERN (insn), bb, insn,
9047 MO_ADJUST, dump_file);
9048 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9049 &mo);
9050 VTI (bb)->out.stack_adjust += post;
9053 if (bb == prologue_bb
9054 && fp_cfa_offset != -1
9055 && hard_frame_pointer_adjustment == -1
9056 && RTX_FRAME_RELATED_P (insn)
9057 && fp_setter (insn))
9059 vt_init_cfa_base ();
9060 hard_frame_pointer_adjustment = fp_cfa_offset;
9064 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9067 bb = last_bb;
9069 if (MAY_HAVE_DEBUG_INSNS)
9071 cselib_preserve_only_values ();
9072 cselib_reset_table (cselib_get_next_uid ());
9073 cselib_record_sets_hook = NULL;
9077 hard_frame_pointer_adjustment = -1;
9078 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9079 cfa_base_rtx = NULL_RTX;
9080 return true;
9083 /* Get rid of all debug insns from the insn stream. */
9085 static void
9086 delete_debug_insns (void)
9088 basic_block bb;
9089 rtx insn, next;
9091 if (!MAY_HAVE_DEBUG_INSNS)
9092 return;
9094 FOR_EACH_BB (bb)
9096 FOR_BB_INSNS_SAFE (bb, insn, next)
9097 if (DEBUG_INSN_P (insn))
9098 delete_insn (insn);
9102 /* Run a fast, BB-local only version of var tracking, to take care of
9103 information that we don't do global analysis on, such that not all
9104 information is lost. If SKIPPED holds, we're skipping the global
9105 pass entirely, so we should try to use information it would have
9106 handled as well.. */
9108 static void
9109 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9111 /* ??? Just skip it all for now. */
9112 delete_debug_insns ();
9115 /* Free the data structures needed for variable tracking. */
9117 static void
9118 vt_finalize (void)
9120 basic_block bb;
9122 FOR_EACH_BB (bb)
9124 VEC_free (micro_operation, heap, VTI (bb)->mos);
9127 FOR_ALL_BB (bb)
9129 dataflow_set_destroy (&VTI (bb)->in);
9130 dataflow_set_destroy (&VTI (bb)->out);
9131 if (VTI (bb)->permp)
9133 dataflow_set_destroy (VTI (bb)->permp);
9134 XDELETE (VTI (bb)->permp);
9137 free_aux_for_blocks ();
9138 htab_delete (empty_shared_hash->htab);
9139 htab_delete (changed_variables);
9140 free_alloc_pool (attrs_pool);
9141 free_alloc_pool (var_pool);
9142 free_alloc_pool (loc_chain_pool);
9143 free_alloc_pool (shared_hash_pool);
9145 if (MAY_HAVE_DEBUG_INSNS)
9147 htab_delete (value_chains);
9148 free_alloc_pool (value_chain_pool);
9149 free_alloc_pool (valvar_pool);
9150 VEC_free (rtx, heap, preserved_values);
9151 cselib_finish ();
9152 BITMAP_FREE (scratch_regs);
9153 scratch_regs = NULL;
9156 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9158 if (vui_vec)
9159 XDELETEVEC (vui_vec);
9160 vui_vec = NULL;
9161 vui_allocated = 0;
9164 /* The entry point to variable tracking pass. */
9166 static inline unsigned int
9167 variable_tracking_main_1 (void)
9169 bool success;
9171 if (flag_var_tracking_assignments < 0)
9173 delete_debug_insns ();
9174 return 0;
9177 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9179 vt_debug_insns_local (true);
9180 return 0;
9183 mark_dfs_back_edges ();
9184 if (!vt_initialize ())
9186 vt_finalize ();
9187 vt_debug_insns_local (true);
9188 return 0;
9191 success = vt_find_locations ();
9193 if (!success && flag_var_tracking_assignments > 0)
9195 vt_finalize ();
9197 delete_debug_insns ();
9199 /* This is later restored by our caller. */
9200 flag_var_tracking_assignments = 0;
9202 success = vt_initialize ();
9203 gcc_assert (success);
9205 success = vt_find_locations ();
9208 if (!success)
9210 vt_finalize ();
9211 vt_debug_insns_local (false);
9212 return 0;
9215 if (dump_file && (dump_flags & TDF_DETAILS))
9217 dump_dataflow_sets ();
9218 dump_flow_info (dump_file, dump_flags);
9221 timevar_push (TV_VAR_TRACKING_EMIT);
9222 vt_emit_notes ();
9223 timevar_pop (TV_VAR_TRACKING_EMIT);
9225 vt_finalize ();
9226 vt_debug_insns_local (false);
9227 return 0;
9230 unsigned int
9231 variable_tracking_main (void)
9233 unsigned int ret;
9234 int save = flag_var_tracking_assignments;
9236 ret = variable_tracking_main_1 ();
9238 flag_var_tracking_assignments = save;
9240 return ret;
9243 static bool
9244 gate_handle_var_tracking (void)
9246 return (flag_var_tracking && !targetm.delay_vartrack);
9251 struct rtl_opt_pass pass_variable_tracking =
9254 RTL_PASS,
9255 "vartrack", /* name */
9256 gate_handle_var_tracking, /* gate */
9257 variable_tracking_main, /* execute */
9258 NULL, /* sub */
9259 NULL, /* next */
9260 0, /* static_pass_number */
9261 TV_VAR_TRACKING, /* tv_id */
9262 0, /* properties_required */
9263 0, /* properties_provided */
9264 0, /* properties_destroyed */
9265 0, /* todo_flags_start */
9266 TODO_verify_rtl_sharing /* todo_flags_finish */