/* gcc/var-tracking.c  */
1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
25 these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
34 operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < clobber < set < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
54 register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
60 register in CODE:
62 if (cond)
63 set A;
64 else
65 set B;
66 CODE;
67 if (cond)
68 use A;
69 else
70 use B;
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
89 #include "config.h"
90 #include "system.h"
91 #include "coretypes.h"
92 #include "tm.h"
93 #include "rtl.h"
94 #include "tree.h"
95 #include "tm_p.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
98 #include "flags.h"
99 #include "output.h"
100 #include "insn-config.h"
101 #include "reload.h"
102 #include "sbitmap.h"
103 #include "alloc-pool.h"
104 #include "fibheap.h"
105 #include "hashtab.h"
106 #include "regs.h"
107 #include "expr.h"
108 #include "timevar.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
111 #include "cselib.h"
112 #include "target.h"
113 #include "params.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
117 #include "recog.h"
118 #include "tm_p.h"
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
/* Pre-C11 static-assertion idiom: the array size is -1 (a compile
   error) when the condition is false, 1 when it holds.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};
/* Printable names for the micro operation types, indexed in the same
   order as enum micro_operation_type; used only in dump output.  */
static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;

DEF_VEC_O(micro_operation);
DEF_VEC_ALLOC_O(micro_operation,heap);
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  htab_t vars;
} emit_note_data;
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be first unshared before modified (copy-on-write).  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  htab_t htab;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that is being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  Attached to the block via its aux
   field (see the VTI accessor macro below).  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  VEC(micro_operation, heap) *mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;
} *location_chain;
/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  /* The offset in the variable.  */
  HOST_WIDE_INT offset;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* True if this variable changed (any of its) cur_loc fields
     during the current emit_notes_for_changes resp.
     emit_notes_for_differences call.  */
  bool cur_loc_changed;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  Trailing array: the struct is allocated with
     room for up to MAX_VAR_PARTS elements (see var_pool/valvar_pool).  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Structure for chaining backlinks from referenced VALUEs to
   DVs that are referencing them.  */
typedef struct value_chain_def
{
  /* Next value_chain entry.  */
  struct value_chain_def *next;

  /* The declaration of the variable, or an RTL value
     being handled like a declaration, whose var_parts[0].loc_chain
     references the VALUE owning this value_chain.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;
} *value_chain;
typedef const struct value_chain_def *const_value_chain;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct value_chain_def.  */
static alloc_pool value_chain_pool;

/* Changed variables, notes will be emitted for them.  */
static htab_t changed_variables;

/* Links from VALUEs to DVs referencing them in their current loc_chains.  */
static htab_t value_chains;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

/* One windowed parameter register: its register in the function body
   (outgoing) and the register it is mapped to at function entry
   (incoming).  */
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

DEF_VEC_O(parm_reg_t);
DEF_VEC_ALLOC_O(parm_reg_t, gc);

/* Vector of windowed parameter registers, if any.  */
static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */

/* Stack-adjustment scanning.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);
static void note_register_arguments (rtx);
static hashval_t variable_htab_hash (const void *);
static int variable_htab_eq (const void *, const void *);
static void variable_htab_free (void *);

/* Per-register attribute lists.  */
static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static void **unshare_variable (dataflow_set *set, void **slot, variable var,
				enum var_init_status);
static void vars_copy (htab_t, htab_t);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

/* Dataflow set manipulation.  */
static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

/* Scanning of micro operations.  */
static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static int add_uses (rtx *, void *);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

/* Dumping.  */
static void dump_attrs_list (attrs);
static int dump_var_slot (void **, void *);
static void dump_var (variable);
static void dump_vars (htab_t);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

/* Note emission.  */
static void variable_was_changed (variable, dataflow_set *);
static void **set_slot_part (dataflow_set *, rtx, void **,
			     decl_or_value, HOST_WIDE_INT,
			     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static void **clobber_slot_part (dataflow_set *, rtx,
				 void **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static int emit_note_insn_var_location (void **, void *);
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
static int emit_notes_for_differences_1 (void **, void *);
static int emit_notes_for_differences_2 (void **, void *);
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

/* Pass entry points.  */
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  Note that *PRE and
   *POST are accumulated into, not assigned — callers zero them first
   (see insn_stack_adjust_offset_pre_post).  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      /* The sign convention here matches stack_adjust_offset: a
	 decrease of sp counts as a positive adjustment.  */
      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
    }
  else if (MEM_P (dest))
    {
      /* (set (mem (pre_dec (reg sp))) (foo)) */
      src = XEXP (dest, 0);
      code = GET_CODE (src);

      switch (code)
	{
	case PRE_MODIFY:
	case POST_MODIFY:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      rtx val = XEXP (XEXP (src, 1), 1);
	      /* We handle only adjustments by constant amount.  */
	      gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
			  CONST_INT_P (val));

	      if (code == PRE_MODIFY)
		*pre -= INTVAL (val);
	      else
		*post -= INTVAL (val);
	      break;
	    }
	  return;

	case PRE_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_DEC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post += GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case PRE_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *pre -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	case POST_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      *post -= GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return;

	default:
	  return;
	}
    }
}
595 /* Given an INSN, calculate the amount of stack adjustment it contains
596 PRE- and POST-modifying stack pointer. */
598 static void
599 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
600 HOST_WIDE_INT *post)
602 rtx pattern;
604 *pre = 0;
605 *post = 0;
607 pattern = PATTERN (insn);
608 if (RTX_FRAME_RELATED_P (insn))
610 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
611 if (expr)
612 pattern = XEXP (expr, 0);
615 if (GET_CODE (pattern) == SET)
616 stack_adjust_offset_pre_post (pattern, pre, post);
617 else if (GET_CODE (pattern) == PARALLEL
618 || GET_CODE (pattern) == SEQUENCE)
620 int i;
622 /* There may be stack adjustments inside compound insns. Search
623 for them. */
624 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
625 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
626 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  Uses an
   explicit stack of edge iterators rather than recursion.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR)->visited = true;
  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  /* Accumulate the per-insn adjustments through the block to
	     produce its OUT adjustment.  */
	  if (dest != EXIT_BLOCK_PTR)
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      {
		if (INSN_P (insn))
		  {
		    insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		    offset += pre + post;
		  }
		if (CALL_P (insn))
		  note_register_arguments (insn);
	      }

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* Check whether the adjustments on the edges are the same.  */
	  if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it and offset for it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;
719 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
720 or hard_frame_pointer_rtx. */
722 static inline rtx
723 compute_cfa_pointer (HOST_WIDE_INT adjustment)
725 return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  /* True while processing the store (SET_DEST) side of a set; REG
     replacements are suppressed outside of MEM addresses then.  */
  bool store;
  /* Mode of the enclosing MEM being processed, or VOIDmode outside
     of a MEM address.  */
  enum machine_mode mem_mode;
  /* Stack adjustment to use when rewriting stack_pointer_rtx.  */
  HOST_WIDE_INT stack_adjust;
  /* EXPR_LIST of SETs accumulated for stripped auto-inc/dec/modify
     side effects, to be re-added to the insn by the caller.  */
  rtx side_effects;
};
/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
   transformation of wider mode arithmetics to narrower mode,
   -1 if it is suitable and subexpressions shouldn't be
   traversed and 0 if it is suitable and subexpressions should
   be traversed.  Called through for_each_rtx.  DATA is the SUBREG
   whose narrower mode is the transformation target.  */

static int
use_narrower_mode_test (rtx *loc, void *data)
{
  rtx subreg = (rtx) data;

  if (CONSTANT_P (*loc))
    return -1;
  switch (GET_CODE (*loc))
    {
    case REG:
      /* Reject a REG that cselib already knows in the wide mode, or
	 one for which the lowpart SUBREG would be invalid.  */
      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	return 1;
      if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
			    *loc, subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (*loc))))
	return 1;
      return -1;
    case PLUS:
    case MINUS:
    case MULT:
      return 0;
    case ASHIFT:
      /* Only the shifted operand may be narrowed; the shift count is
	 checked recursively but kept as-is by use_narrower_mode.  */
      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
	return 1;
      else
	return -1;
    default:
      return 1;
    }
}
/* Transform X into narrower mode MODE from wider mode WMODE.  Only
   codes accepted by use_narrower_mode_test may appear in X; anything
   else is a caller bug (gcc_unreachable).  */

static rtx
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      /* The shift count operand keeps its original mode.  */
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
    default:
      gcc_unreachable ();
    }
}
/* Helper function for adjusting used MEMs.  Callback for
   simplify_replace_fn_rtx: rewrites sp/fp references to CFA-based
   addresses, strips auto-inc/dec/modify addressing (recording the
   side effect in AMD->side_effects), and narrows wide-mode SUBREG
   arithmetic where possible.  Returns the replacement rtx, or
   NULL_RTX to let the caller recurse into subexpressions.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  enum machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
         on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      /* Recurse into the address with store=false and mem_mode set to
	 this MEM's mode, restoring both afterwards.  */
      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      /* For a pre-modification the replacement address is the already
	 adjusted one; compute it here, then share the rest of the
	 handling with the post-modification cases.  */
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   GEN_INT (GET_CODE (loc) == PRE_INC
				    ? GET_MODE_SIZE (amd->mem_mode)
				    : -GET_MODE_SIZE (amd->mem_mode)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      /* If we fell through, ADDR != LOC already holds the PLUS; for a
	 post-modification the replacement address is the unmodified
	 base register.  */
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      /* Record the stripped increment/decrement as an explicit SET so
	 the caller can re-attach it to the insn.  */
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  GEN_INT ((GET_CODE (loc) == PRE_INC
				    || GET_CODE (loc) == POST_INC)
				   ? GET_MODE_SIZE (amd->mem_mode)
				   : -GET_MODE_SIZE (amd->mem_mode)));
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      /* The replacement address of a PRE_MODIFY is its modified form.  */
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0),
							XEXP (loc, 1)),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      /* Try to rewrite (subreg (op wide ...)) as (op narrow ...) when
	 every leaf passes use_narrower_mode_test.  */
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	  && GET_MODE_SIZE (GET_MODE (tem))
	     < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
948 /* Helper function for replacement of uses. */
950 static void
951 adjust_mem_uses (rtx *x, void *data)
953 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
954 if (new_x != *x)
955 validate_change (NULL_RTX, x, new_x, true);
958 /* Helper function for replacement of stores. */
960 static void
961 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
963 if (MEM_P (loc))
965 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
966 adjust_mems, data);
967 if (new_dest != SET_DEST (expr))
969 rtx xexpr = CONST_CAST_RTX (expr);
970 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  All changes are queued through
   validate_change (in_group = 1); the caller is responsible for
   applying or cancelling them.  */

static void
adjust_insn (basic_block bb, rtx insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.
     Replace the whole pattern with parallel SET/CLOBBER pairs mapping
     each windowed parameter register.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}
      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  /* adjust_mems state: no enclosing MEM yet, stack offset relative to
     this block's exit, no queued side effects.  */
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL_RTX;

  /* First rewrite store destinations, then uses.  */
  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      /* Walk the remaining SETs of the asm; re-share the operand
	 vectors of set0 into any SET whose vectors went stale.  */
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		/* Copy the rtx shallowly so validate_change can swap in
		   the shared vectors without mutating the original.  */
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  /* If adjust_mems stripped any auto-modify addresses, append the
     stripped side effects as extra SETs in a PARALLEL.  */
  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      /* Count the queued side effects (an EXPR_LIST).  */
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
1106 /* Return true if a decl_or_value DV is a DECL or NULL. */
1107 static inline bool
1108 dv_is_decl_p (decl_or_value dv)
1110 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1113 /* Return true if a decl_or_value is a VALUE rtl. */
1114 static inline bool
1115 dv_is_value_p (decl_or_value dv)
1117 return dv && !dv_is_decl_p (dv);
1120 /* Return the decl in the decl_or_value. */
1121 static inline tree
1122 dv_as_decl (decl_or_value dv)
1124 gcc_checking_assert (dv_is_decl_p (dv));
1125 return (tree) dv;
1128 /* Return the value in the decl_or_value. */
1129 static inline rtx
1130 dv_as_value (decl_or_value dv)
1132 gcc_checking_assert (dv_is_value_p (dv));
1133 return (rtx)dv;
1136 /* Return the opaque pointer in the decl_or_value. */
1137 static inline void *
1138 dv_as_opaque (decl_or_value dv)
1140 return dv;
1143 /* Return true if a decl_or_value must not have more than one variable
1144 part. */
1145 static inline bool
1146 dv_onepart_p (decl_or_value dv)
1148 tree decl;
1150 if (!MAY_HAVE_DEBUG_INSNS)
1151 return false;
1153 if (dv_is_value_p (dv))
1154 return true;
1156 decl = dv_as_decl (dv);
1158 if (!decl)
1159 return true;
1161 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1162 return true;
1164 return (target_for_debug_bind (decl) != NULL_TREE);
1167 /* Return the variable pool to be used for dv, depending on whether it
1168 can have multiple parts or not. */
1169 static inline alloc_pool
1170 dv_pool (decl_or_value dv)
1172 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1175 /* Build a decl_or_value out of a decl. */
1176 static inline decl_or_value
1177 dv_from_decl (tree decl)
1179 decl_or_value dv;
1180 dv = decl;
1181 gcc_checking_assert (dv_is_decl_p (dv));
1182 return dv;
1185 /* Build a decl_or_value out of a value. */
1186 static inline decl_or_value
1187 dv_from_value (rtx value)
1189 decl_or_value dv;
1190 dv = value;
1191 gcc_checking_assert (dv_is_value_p (dv));
1192 return dv;
1195 extern void debug_dv (decl_or_value dv);
1197 DEBUG_FUNCTION void
1198 debug_dv (decl_or_value dv)
1200 if (dv_is_value_p (dv))
1201 debug_rtx (dv_as_value (dv));
1202 else
1203 debug_generic_stmt (dv_as_decl (dv));
1206 typedef unsigned int dvuid;
1208 /* Return the uid of DV. */
1210 static inline dvuid
1211 dv_uid (decl_or_value dv)
1213 if (dv_is_value_p (dv))
1214 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1215 else
1216 return DECL_UID (dv_as_decl (dv));
1219 /* Compute the hash from the uid. */
1221 static inline hashval_t
1222 dv_uid2hash (dvuid uid)
1224 return uid;
1227 /* The hash function for a mask table in a shared_htab chain. */
1229 static inline hashval_t
1230 dv_htab_hash (decl_or_value dv)
1232 return dv_uid2hash (dv_uid (dv));
1235 /* The hash function for variable_htab, computes the hash value
1236 from the declaration of variable X. */
1238 static hashval_t
1239 variable_htab_hash (const void *x)
1241 const_variable const v = (const_variable) x;
1243 return dv_htab_hash (v->dv);
1246 /* Compare the declaration of variable X with declaration Y. */
1248 static int
1249 variable_htab_eq (const void *x, const void *y)
1251 const_variable const v = (const_variable) x;
1252 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1254 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1257 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1259 static void
1260 variable_htab_free (void *elem)
1262 int i;
1263 variable var = (variable) elem;
1264 location_chain node, next;
1266 gcc_checking_assert (var->refcount > 0);
1268 var->refcount--;
1269 if (var->refcount > 0)
1270 return;
1272 for (i = 0; i < var->n_var_parts; i++)
1274 for (node = var->var_part[i].loc_chain; node; node = next)
1276 next = node->next;
1277 pool_free (loc_chain_pool, node);
1279 var->var_part[i].loc_chain = NULL;
1281 pool_free (dv_pool (var->dv), var);
1284 /* The hash function for value_chains htab, computes the hash value
1285 from the VALUE. */
1287 static hashval_t
1288 value_chain_htab_hash (const void *x)
1290 const_value_chain const v = (const_value_chain) x;
1292 return dv_htab_hash (v->dv);
1295 /* Compare the VALUE X with VALUE Y. */
1297 static int
1298 value_chain_htab_eq (const void *x, const void *y)
1300 const_value_chain const v = (const_value_chain) x;
1301 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1303 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1306 /* Initialize the set (array) SET of attrs to empty lists. */
1308 static void
1309 init_attrs_list_set (attrs *set)
1311 int i;
1313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1314 set[i] = NULL;
1317 /* Make the list *LISTP empty. */
1319 static void
1320 attrs_list_clear (attrs *listp)
1322 attrs list, next;
1324 for (list = *listp; list; list = next)
1326 next = list->next;
1327 pool_free (attrs_pool, list);
1329 *listp = NULL;
1332 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1334 static attrs
1335 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1337 for (; list; list = list->next)
1338 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1339 return list;
1340 return NULL;
1343 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1345 static void
1346 attrs_list_insert (attrs *listp, decl_or_value dv,
1347 HOST_WIDE_INT offset, rtx loc)
1349 attrs list;
1351 list = (attrs) pool_alloc (attrs_pool);
1352 list->loc = loc;
1353 list->dv = dv;
1354 list->offset = offset;
1355 list->next = *listp;
1356 *listp = list;
1359 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1361 static void
1362 attrs_list_copy (attrs *dstp, attrs src)
1364 attrs n;
1366 attrs_list_clear (dstp);
1367 for (; src; src = src->next)
1369 n = (attrs) pool_alloc (attrs_pool);
1370 n->loc = src->loc;
1371 n->dv = src->dv;
1372 n->offset = src->offset;
1373 n->next = *dstp;
1374 *dstp = n;
1378 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1380 static void
1381 attrs_list_union (attrs *dstp, attrs src)
1383 for (; src; src = src->next)
1385 if (!attrs_list_member (*dstp, src->dv, src->offset))
1386 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1390 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1391 *DSTP. */
1393 static void
1394 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1396 gcc_assert (!*dstp);
1397 for (; src; src = src->next)
1399 if (!dv_onepart_p (src->dv))
1400 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1402 for (src = src2; src; src = src->next)
1404 if (!dv_onepart_p (src->dv)
1405 && !attrs_list_member (*dstp, src->dv, src->offset))
1406 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1410 /* Shared hashtable support. */
1412 /* Return true if VARS is shared. */
1414 static inline bool
1415 shared_hash_shared (shared_hash vars)
1417 return vars->refcount > 1;
1420 /* Return the hash table for VARS. */
1422 static inline htab_t
1423 shared_hash_htab (shared_hash vars)
1425 return vars->htab;
1428 /* Return true if VAR is shared, or maybe because VARS is shared. */
1430 static inline bool
1431 shared_var_p (variable var, shared_hash vars)
1433 /* Don't count an entry in the changed_variables table as a duplicate. */
1434 return ((var->refcount > 1 + (int) var->in_changed_variables)
1435 || shared_hash_shared (vars));
1438 /* Copy variables into a new hash table. */
1440 static shared_hash
1441 shared_hash_unshare (shared_hash vars)
1443 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1444 gcc_assert (vars->refcount > 1);
1445 new_vars->refcount = 1;
1446 new_vars->htab
1447 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1448 variable_htab_eq, variable_htab_free);
1449 vars_copy (new_vars->htab, vars->htab);
1450 vars->refcount--;
1451 return new_vars;
1454 /* Increment reference counter on VARS and return it. */
1456 static inline shared_hash
1457 shared_hash_copy (shared_hash vars)
1459 vars->refcount++;
1460 return vars;
1463 /* Decrement reference counter and destroy hash table if not shared
1464 anymore. */
1466 static void
1467 shared_hash_destroy (shared_hash vars)
1469 gcc_checking_assert (vars->refcount > 0);
1470 if (--vars->refcount == 0)
1472 htab_delete (vars->htab);
1473 pool_free (shared_hash_pool, vars);
1477 /* Unshare *PVARS if shared and return slot for DV. If INS is
1478 INSERT, insert it if not already present. */
1480 static inline void **
1481 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1482 hashval_t dvhash, enum insert_option ins)
1484 if (shared_hash_shared (*pvars))
1485 *pvars = shared_hash_unshare (*pvars);
1486 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1489 static inline void **
1490 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1491 enum insert_option ins)
1493 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1496 /* Return slot for DV, if it is already present in the hash table.
1497 If it is not present, insert it only VARS is not shared, otherwise
1498 return NULL. */
1500 static inline void **
1501 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1503 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1504 shared_hash_shared (vars)
1505 ? NO_INSERT : INSERT);
1508 static inline void **
1509 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1511 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1514 /* Return slot for DV only if it is already present in the hash table. */
1516 static inline void **
1517 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1518 hashval_t dvhash)
1520 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1521 NO_INSERT);
1524 static inline void **
1525 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1527 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1530 /* Return variable for DV or NULL if not already present in the hash
1531 table. */
1533 static inline variable
1534 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1536 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1539 static inline variable
1540 shared_hash_find (shared_hash vars, decl_or_value dv)
1542 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1545 /* Return true if TVAL is better than CVAL as a canonival value. We
1546 choose lowest-numbered VALUEs, using the RTX address as a
1547 tie-breaker. The idea is to arrange them into a star topology,
1548 such that all of them are at most one step away from the canonical
1549 value, and the canonical value has backlinks to all of them, in
1550 addition to all the actual locations. We don't enforce this
1551 topology throughout the entire dataflow analysis, though.
1554 static inline bool
1555 canon_value_cmp (rtx tval, rtx cval)
1557 return !cval
1558 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1561 static bool dst_can_be_shared;
/* Return a copy of a variable VAR and insert it to dataflow set SET.
   SLOT is VAR's slot in SET's hash table; the returned slot holds the
   copy (the slot may move if SET->vars itself had to be unshared).
   INITIALIZED is the minimum init status for the copied location
   chain nodes.  One reference is moved from VAR to the copy.  */

static void **
unshare_variable (dataflow_set *set, void **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  new_var = (variable) pool_alloc (dv_pool (var->dv));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  /* The copy takes over the pending cur_loc recomputation flag.  */
  new_var->cur_loc_changed = var->cur_loc_changed;
  var->cur_loc_changed = false;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  /* Deep-copy each variable part's location chain.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      new_var->var_part[i].offset = var->var_part[i].offset;
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  /* Keep the stronger of the node's status and INITIALIZED.  */
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  /* MEM set_srcs are not propagated into the copy.  */
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  /* SLOT may be stale if SET->vars has to be unshared or when SET is
     being traversed through a different hash table; refetch it.  */
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;
  /* If VAR is queued in changed_variables, replace it there by the
     copy so notes are emitted for the surviving object.  */
  if (var->in_changed_variables)
    {
      void **cslot
	= htab_find_slot_with_hash (changed_variables, var->dv,
				    dv_htab_hash (var->dv), NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
1634 /* Copy all variables from hash table SRC to hash table DST. */
1636 static void
1637 vars_copy (htab_t dst, htab_t src)
1639 htab_iterator hi;
1640 variable var;
1642 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1644 void **dstp;
1645 var->refcount++;
1646 dstp = htab_find_slot_with_hash (dst, var->dv,
1647 dv_htab_hash (var->dv),
1648 INSERT);
1649 *dstp = var;
1653 /* Map a decl to its main debug decl. */
1655 static inline tree
1656 var_debug_decl (tree decl)
1658 if (decl && DECL_P (decl)
1659 && DECL_DEBUG_EXPR_IS_FROM (decl))
1661 tree debugdecl = DECL_DEBUG_EXPR (decl);
1662 if (debugdecl && DECL_P (debugdecl))
1663 decl = debugdecl;
1666 return decl;
1669 /* Set the register LOC to contain DV, OFFSET. */
1671 static void
1672 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1673 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1674 enum insert_option iopt)
1676 attrs node;
1677 bool decl_p = dv_is_decl_p (dv);
1679 if (decl_p)
1680 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1682 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1683 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1684 && node->offset == offset)
1685 break;
1686 if (!node)
1687 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1688 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1691 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1693 static void
1694 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1695 rtx set_src)
1697 tree decl = REG_EXPR (loc);
1698 HOST_WIDE_INT offset = REG_OFFSET (loc);
1700 var_reg_decl_set (set, loc, initialized,
1701 dv_from_decl (decl), offset, set_src, INSERT);
1704 static enum var_init_status
1705 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1707 variable var;
1708 int i;
1709 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1711 if (! flag_var_tracking_uninit)
1712 return VAR_INIT_STATUS_INITIALIZED;
1714 var = shared_hash_find (set->vars, dv);
1715 if (var)
1717 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1719 location_chain nextp;
1720 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1721 if (rtx_equal_p (nextp->loc, loc))
1723 ret_val = nextp->init;
1724 break;
1729 return ret_val;
/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */

static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);
  attrs node, next;
  attrs *nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  /* Walk the register's attrs list; unlink and delete every entry
     describing a different decl/offset, keep (and refresh) the
     matching one.  NEXTP tracks the link to rewrite on removal.  */
  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */

static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs *nextp = &set->regs[REGNO (loc)];
  attrs node, next;

  if (clobber)
    {
      tree decl = REG_EXPR (loc);
      HOST_WIDE_INT offset = REG_OFFSET (loc);

      decl = var_debug_decl (decl);

      /* Kill all other live copies of the clobbered part.  */
      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  /* Walk the register's attrs list, unlinking entries as they are
     deleted; NEXTP tracks the link to rewrite on removal.  */
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}
1809 /* Delete content of register with number REGNO in dataflow set SET. */
1811 static void
1812 var_regno_delete (dataflow_set *set, int regno)
1814 attrs *reg = &set->regs[regno];
1815 attrs node, next;
1817 for (node = *reg; node; node = next)
1819 next = node->next;
1820 delete_variable_part (set, node->loc, node->dv, node->offset);
1821 pool_free (attrs_pool, node);
1823 *reg = NULL;
1826 /* Set the location of DV, OFFSET as the MEM LOC. */
1828 static void
1829 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1830 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1831 enum insert_option iopt)
1833 if (dv_is_decl_p (dv))
1834 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1836 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1839 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1840 SET to LOC.
1841 Adjust the address first if it is stack pointer based. */
1843 static void
1844 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1845 rtx set_src)
1847 tree decl = MEM_EXPR (loc);
1848 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1850 var_mem_decl_set (set, loc, initialized,
1851 dv_from_decl (decl), offset, set_src, INSERT);
1854 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1855 dataflow set SET to LOC. If MODIFY is true, any other live copies
1856 of the same variable part are also deleted from the dataflow set,
1857 otherwise the variable part is assumed to be copied from another
1858 location holding the same part.
1859 Adjust the address first if it is stack pointer based. */
1861 static void
1862 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1863 enum var_init_status initialized, rtx set_src)
1865 tree decl = MEM_EXPR (loc);
1866 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1868 decl = var_debug_decl (decl);
1870 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1871 initialized = get_init_value (set, loc, dv_from_decl (decl));
1873 if (modify)
1874 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1875 var_mem_set (set, loc, initialized, set_src);
1878 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1879 true, also delete any other live copies of the same variable part.
1880 Adjust the address first if it is stack pointer based. */
1882 static void
1883 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1885 tree decl = MEM_EXPR (loc);
1886 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1888 decl = var_debug_decl (decl);
1889 if (clobber)
1890 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1891 delete_variable_part (set, loc, dv_from_decl (decl), offset);
/* Bind a value to a location it was just stored in.  If MODIFIED
   holds, assume the location was modified, detaching it from any
   values bound to it.  VAL must be a preserved cselib VALUE.  */

static void
val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
{
  cselib_val *v = CSELIB_VAL_PTR (val);

  gcc_assert (cselib_preserved_value_p (v));

  /* Trace the binding and VAL's known cselib locations.  */
  if (dump_file)
    {
      fprintf (dump_file, "%i: ", INSN_UID (insn));
      print_inline_rtx (dump_file, val, 0);
      fprintf (dump_file, " stored in ");
      print_inline_rtx (dump_file, loc, 0);
      if (v->locs)
	{
	  struct elt_loc_list *l;

	  for (l = v->locs; l; l = l->next)
	    {
	      fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
	      print_inline_rtx (dump_file, l->loc, 0);
	    }
	}
      fprintf (dump_file, "\n");
    }

  /* Record the binding, dispatching on the kind of location.  */
  if (REG_P (loc))
    {
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
		      dv_from_value (val), 0, NULL_RTX, INSERT);
  else
    set_variable_part (set, loc, dv_from_value (val), 0,
		       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
}
/* Reset this node, detaching all its equivalences.  The remaining
   equivalent VALUEs are re-linked through a newly chosen canonical
   value so no information is lost.  */

static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable var = shared_hash_find (set->vars, dv) ;
  location_chain node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  /* VALUEs are onepart, so there is a single variable part.  */
  gcc_assert (var->n_var_parts == 1);

  /* Pick the best (lowest-uid) VALUE in the chain as the new
     canonical value.  */
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (cval);

      /* Keep the remaining values connected, accumulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);

  /* ??? Should we make sure there aren't other available values or
     variables whose values involve this one other than by
     equivalence?  E.g., at the very least we should reset MEMs, those
     shouldn't be too hard to find cselib-looking up the value as an
     address, then locating the resulting value in our own hash
     table.  */
}
/* Find the values in a given location LOC and map VAL to another
   value, if it is unique, or add the location as one holding the
   value.  INSN is the insn being processed, or NULL at a block
   head.  */

static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
{
  decl_or_value dv = dv_from_value (val);

  /* Trace the resolution in detailed dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (dump_file, "%i: ", INSN_UID (insn));
      else
	fprintf (dump_file, "head: ");
      print_inline_rtx (dump_file, val, 0);
      fputs (" is at ", dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc ('\n', dump_file);
    }

  /* Detach VAL's existing equivalences before re-binding it.  */
  val_reset (set, dv);

  if (REG_P (loc))
    {
      attrs node, found = NULL;

      /* Every same-mode VALUE already held in this register becomes
	 an equivalence of VAL (and vice versa).  */
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	       we just started sharing the location lists?  Maybe a
	       circular list ending at the value itself or some
	       such.  */
	    set_variable_part (set, dv_as_value (node->dv),
			       dv_from_value (val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (!found)
	var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    /* ??? Merge equivalent MEMs.  */
    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
		      dv_from_value (val), 0, NULL_RTX, INSERT);
  else
    /* ??? Merge equivalent expressions.  */
    set_variable_part (set, loc, dv_from_value (val), 0,
		       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
}
2069 /* Initialize dataflow set SET to be empty.
2070 VARS_SIZE is the initial size of hash table VARS. */
2072 static void
2073 dataflow_set_init (dataflow_set *set)
2075 init_attrs_list_set (set->regs);
2076 set->vars = shared_hash_copy (empty_shared_hash);
2077 set->stack_adjust = 0;
2078 set->traversed_vars = NULL;
2081 /* Delete the contents of dataflow set SET. */
2083 static void
2084 dataflow_set_clear (dataflow_set *set)
2086 int i;
2088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2089 attrs_list_clear (&set->regs[i]);
2091 shared_hash_destroy (set->vars);
2092 set->vars = shared_hash_copy (empty_shared_hash);
2095 /* Copy the contents of dataflow set SRC to DST. */
2097 static void
2098 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2100 int i;
2102 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2103 attrs_list_copy (&dst->regs[i], src->regs[i]);
2105 shared_hash_destroy (dst->vars);
2106 dst->vars = shared_hash_copy (src->vars);
2107 dst->stack_adjust = src->stack_adjust;
2110 /* Information for merging lists of locations for a given offset of variable.
2112 struct variable_union_info
2114 /* Node of the location chain. */
2115 location_chain lc;
2117 /* The sum of positions in the input chains. */
2118 int pos;
2120 /* The position in the chain of DST dataflow set. */
2121 int pos_dst;
2124 /* Buffer for location list sorting and its allocated size. */
2125 static struct variable_union_info *vui_vec;
2126 static int vui_allocated;
2128 /* Compare function for qsort, order the structures by POS element. */
2130 static int
2131 variable_union_info_cmp_pos (const void *n1, const void *n2)
2133 const struct variable_union_info *const i1 =
2134 (const struct variable_union_info *) n1;
2135 const struct variable_union_info *const i2 =
2136 ( const struct variable_union_info *) n2;
2138 if (i1->pos != i2->pos)
2139 return i1->pos - i2->pos;
2141 return (i1->pos_dst - i2->pos_dst);
2144 /* Compute union of location parts of variable *SLOT and the same variable
2145 from hash table DATA. Compute "sorted" union of the location chains
2146 for common offsets, i.e. the locations of a variable part are sorted by
2147 a priority where the priority is the sum of the positions in the 2 chains
2148 (if a location is only in one list the position in the second list is
2149 defined to be larger than the length of the chains).
2150 When we are updating the location parts the newest location is in the
2151 beginning of the chain, so when we do the described "sorted" union
2152 we keep the newest locations in the beginning. */
2154 static int
2155 variable_union (variable src, dataflow_set *set)
2157 variable dst;
2158 void **dstp;
2159 int i, j, k;
2161 dstp = shared_hash_find_slot (set->vars, src->dv);
/* SRC's dv is not yet in SET: insert SRC itself (sharing it via a
   refcount bump) instead of building a merged copy.  */
2162 if (!dstp || !*dstp)
2164 src->refcount++;
2166 dst_can_be_shared = false;
2167 if (!dstp)
2168 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2170 *dstp = src;
2172 /* Continue traversing the hash table. */
2173 return 1;
2175 else
2176 dst = (variable) *dstp;
2178 gcc_assert (src->n_var_parts);
2180 /* We can combine one-part variables very efficiently, because their
2181 entries are in canonical order. */
2182 if (dv_onepart_p (src->dv))
2184 location_chain *nodep, dnode, snode;
2186 gcc_assert (src->n_var_parts == 1
2187 && dst->n_var_parts == 1);
2189 snode = src->var_part[0].loc_chain;
2190 gcc_assert (snode);
/* Both chains are sorted by loc_cmp, so this is an ordinary sorted
   merge; DST is unshared lazily, the first time we must insert.  */
2192 restart_onepart_unshared:
2193 nodep = &dst->var_part[0].loc_chain;
2194 dnode = *nodep;
2195 gcc_assert (dnode);
2197 while (snode)
2199 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2201 if (r > 0)
2203 location_chain nnode;
2205 if (shared_var_p (dst, set->vars))
2207 dstp = unshare_variable (set, dstp, dst,
2208 VAR_INIT_STATUS_INITIALIZED);
2209 dst = (variable)*dstp;
2210 goto restart_onepart_unshared;
2213 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2214 nnode->loc = snode->loc;
2215 nnode->init = snode->init;
2216 if (!snode->set_src || MEM_P (snode->set_src))
2217 nnode->set_src = NULL;
2218 else
2219 nnode->set_src = snode->set_src;
2220 nnode->next = dnode;
2221 dnode = nnode;
2223 else if (r == 0)
2224 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2226 if (r >= 0)
2227 snode = snode->next;
2229 nodep = &dnode->next;
2230 dnode = *nodep;
2233 return 1;
2236 /* Count the number of location parts, result is K. */
2237 for (i = 0, j = 0, k = 0;
2238 i < src->n_var_parts && j < dst->n_var_parts; k++)
2240 if (src->var_part[i].offset == dst->var_part[j].offset)
2242 i++;
2243 j++;
2245 else if (src->var_part[i].offset < dst->var_part[j].offset)
2246 i++;
2247 else
2248 j++;
2250 k += src->n_var_parts - i;
2251 k += dst->n_var_parts - j;
2253 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2254 thus there are at most MAX_VAR_PARTS different offsets. */
2255 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2257 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2259 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2260 dst = (variable)*dstp;
/* Merge the parts in place, walking both part arrays from the highest
   offset downwards so already-merged entries are never overwritten.  */
2263 i = src->n_var_parts - 1;
2264 j = dst->n_var_parts - 1;
2265 dst->n_var_parts = k;
2267 for (k--; k >= 0; k--)
2269 location_chain node, node2;
2271 if (i >= 0 && j >= 0
2272 && src->var_part[i].offset == dst->var_part[j].offset)
2274 /* Compute the "sorted" union of the chains, i.e. the locations which
2275 are in both chains go first, they are sorted by the sum of
2276 positions in the chains. */
2277 int dst_l, src_l;
2278 int ii, jj, n;
2279 struct variable_union_info *vui;
2281 /* If DST is shared compare the location chains.
2282 If they are different we will modify the chain in DST with
2283 high probability so make a copy of DST. */
2284 if (shared_var_p (dst, set->vars))
2286 for (node = src->var_part[i].loc_chain,
2287 node2 = dst->var_part[j].loc_chain; node && node2;
2288 node = node->next, node2 = node2->next)
2290 if (!((REG_P (node2->loc)
2291 && REG_P (node->loc)
2292 && REGNO (node2->loc) == REGNO (node->loc))
2293 || rtx_equal_p (node2->loc, node->loc)))
2295 if (node2->init < node->init)
2296 node2->init = node->init;
2297 break;
2300 if (node || node2)
2302 dstp = unshare_variable (set, dstp, dst,
2303 VAR_INIT_STATUS_UNKNOWN);
2304 dst = (variable)*dstp;
2308 src_l = 0;
2309 for (node = src->var_part[i].loc_chain; node; node = node->next)
2310 src_l++;
2311 dst_l = 0;
2312 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2313 dst_l++;
2315 if (dst_l == 1)
2317 /* The most common case, much simpler, no qsort is needed. */
2318 location_chain dstnode = dst->var_part[j].loc_chain;
2319 dst->var_part[k].loc_chain = dstnode;
2320 dst->var_part[k].offset = dst->var_part[j].offset;
2321 node2 = dstnode;
2322 for (node = src->var_part[i].loc_chain; node; node = node->next)
2323 if (!((REG_P (dstnode->loc)
2324 && REG_P (node->loc)
2325 && REGNO (dstnode->loc) == REGNO (node->loc))
2326 || rtx_equal_p (dstnode->loc, node->loc)))
2328 location_chain new_node;
2330 /* Copy the location from SRC. */
2331 new_node = (location_chain) pool_alloc (loc_chain_pool);
2332 new_node->loc = node->loc;
2333 new_node->init = node->init;
2334 if (!node->set_src || MEM_P (node->set_src))
2335 new_node->set_src = NULL;
2336 else
2337 new_node->set_src = node->set_src;
2338 node2->next = new_node;
2339 node2 = new_node;
2341 node2->next = NULL;
2343 else
2345 if (src_l + dst_l > vui_allocated)
2347 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2348 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2349 vui_allocated);
2351 vui = vui_vec;
2353 /* Fill in the locations from DST. */
2354 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2355 node = node->next, jj++)
2357 vui[jj].lc = node;
2358 vui[jj].pos_dst = jj;
2360 /* Pos plus value larger than a sum of 2 valid positions. */
2361 vui[jj].pos = jj + src_l + dst_l;
2364 /* Fill in the locations from SRC. */
2365 n = dst_l;
2366 for (node = src->var_part[i].loc_chain, ii = 0; node;
2367 node = node->next, ii++)
2369 /* Find location from NODE. */
2370 for (jj = 0; jj < dst_l; jj++)
2372 if ((REG_P (vui[jj].lc->loc)
2373 && REG_P (node->loc)
2374 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2375 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2377 vui[jj].pos = jj + ii;
2378 break;
2381 if (jj >= dst_l) /* The location has not been found. */
2383 location_chain new_node;
2385 /* Copy the location from SRC. */
2386 new_node = (location_chain) pool_alloc (loc_chain_pool);
2387 new_node->loc = node->loc;
2388 new_node->init = node->init;
2389 if (!node->set_src || MEM_P (node->set_src))
2390 new_node->set_src = NULL;
2391 else
2392 new_node->set_src = node->set_src;
2393 vui[n].lc = new_node;
2394 vui[n].pos_dst = src_l + dst_l;
2395 vui[n].pos = ii + src_l + dst_l;
2396 n++;
2400 if (dst_l == 2)
2402 /* Special case still very common case. For dst_l == 2
2403 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2404 vui[i].pos == i + src_l + dst_l. */
2405 if (vui[0].pos > vui[1].pos)
2407 /* Order should be 1, 0, 2... */
2408 dst->var_part[k].loc_chain = vui[1].lc;
2409 vui[1].lc->next = vui[0].lc;
2410 if (n >= 3)
2412 vui[0].lc->next = vui[2].lc;
2413 vui[n - 1].lc->next = NULL;
2415 else
2416 vui[0].lc->next = NULL;
2417 ii = 3;
2419 else
2421 dst->var_part[k].loc_chain = vui[0].lc;
2422 if (n >= 3 && vui[2].pos < vui[1].pos)
2424 /* Order should be 0, 2, 1, 3... */
2425 vui[0].lc->next = vui[2].lc;
2426 vui[2].lc->next = vui[1].lc;
2427 if (n >= 4)
2429 vui[1].lc->next = vui[3].lc;
2430 vui[n - 1].lc->next = NULL;
2432 else
2433 vui[1].lc->next = NULL;
2434 ii = 4;
2436 else
2438 /* Order should be 0, 1, 2... */
2439 ii = 1;
2440 vui[n - 1].lc->next = NULL;
2443 for (; ii < n; ii++)
2444 vui[ii - 1].lc->next = vui[ii].lc;
2446 else
2448 qsort (vui, n, sizeof (struct variable_union_info),
2449 variable_union_info_cmp_pos);
2451 /* Reconnect the nodes in sorted order. */
2452 for (ii = 1; ii < n; ii++)
2453 vui[ii - 1].lc->next = vui[ii].lc;
2454 vui[n - 1].lc->next = NULL;
2455 dst->var_part[k].loc_chain = vui[0].lc;
2458 dst->var_part[k].offset = dst->var_part[j].offset;
2460 i--;
2461 j--;
2463 else if ((i >= 0 && j >= 0
2464 && src->var_part[i].offset < dst->var_part[j].offset)
2465 || i < 0)
2467 dst->var_part[k] = dst->var_part[j];
2468 j--;
2470 else if ((i >= 0 && j >= 0
2471 && src->var_part[i].offset > dst->var_part[j].offset)
2472 || j < 0)
2474 location_chain *nextp;
2476 /* Copy the chain from SRC. */
2477 nextp = &dst->var_part[k].loc_chain;
2478 for (node = src->var_part[i].loc_chain; node; node = node->next)
2480 location_chain new_lc;
2482 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2483 new_lc->next = NULL;
2484 new_lc->init = node->init;
2485 if (!node->set_src || MEM_P (node->set_src))
2486 new_lc->set_src = NULL;
2487 else
2488 new_lc->set_src = node->set_src;
2489 new_lc->loc = node->loc;
2491 *nextp = new_lc;
2492 nextp = &new_lc->next;
2495 dst->var_part[k].offset = src->var_part[i].offset;
2496 i--;
2498 dst->var_part[k].cur_loc = NULL;
/* Propagate the stronger initialization status for locations present
   in both SRC's and DST's chains at the same part index.  */
2501 if (flag_var_tracking_uninit)
2502 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2504 location_chain node, node2;
2505 for (node = src->var_part[i].loc_chain; node; node = node->next)
2506 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2507 if (rtx_equal_p (node->loc, node2->loc))
2509 if (node->init > node2->init)
2510 node2->init = node->init;
2514 /* Continue traversing the hash table. */
2515 return 1;
2518 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2520 static void
2521 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2523 int i;
/* Union the per-register attribute lists first.  */
2525 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2526 attrs_list_union (&dst->regs[i], src->regs[i]);
/* If DST has no variables yet, take SRC's variable table wholesale
   (shared_hash_copy presumably shares it by refcount — cheaper than
   unioning variable by variable).  */
2528 if (dst->vars == empty_shared_hash)
2530 shared_hash_destroy (dst->vars);
2531 dst->vars = shared_hash_copy (src->vars);
2533 else
2535 htab_iterator hi;
2536 variable var;
2538 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2539 variable_union (var, dst);
2543 /* Whether the value is currently being expanded. */
/* Stored in the rtx "used" flag of a VALUE or DEBUG_EXPR.  */
2544 #define VALUE_RECURSED_INTO(x) \
2545 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2546 /* Whether the value is in changed_variables hash table. */
/* Stored in the rtx "frame_related" flag of a VALUE.  */
2547 #define VALUE_CHANGED(x) \
2548 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2549 /* Whether the decl is in changed_variables hash table. */
/* Reuses the tree's TREE_VISITED bit for decls.  */
2550 #define DECL_CHANGED(x) TREE_VISITED (x)
2552 /* Record that DV has been added into resp. removed from changed_variables
2553 hashtable. */
2555 static inline void
2556 set_dv_changed (decl_or_value dv, bool newv)
2558 if (dv_is_value_p (dv))
2559 VALUE_CHANGED (dv_as_value (dv)) = newv;
2560 else
2561 DECL_CHANGED (dv_as_decl (dv)) = newv;
2564 /* Return true if DV is present in changed_variables hash table. */
2566 static inline bool
2567 dv_changed_p (decl_or_value dv)
2569 return (dv_is_value_p (dv)
2570 ? VALUE_CHANGED (dv_as_value (dv))
2571 : DECL_CHANGED (dv_as_decl (dv)));
2574 /* Return a location list node whose loc is rtx_equal to LOC, in the
2575 location list of a one-part variable or value VAR, or in that of
2576 any values recursively mentioned in the location lists. VARS must
2577 be in star-canonical form. */
2579 static location_chain
2580 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2582 location_chain node;
2583 enum rtx_code loc_code;
2585 if (!var)
2586 return NULL;
2588 gcc_checking_assert (dv_onepart_p (var->dv));
2590 if (!var->n_var_parts)
2591 return NULL;
2593 gcc_checking_assert (var->var_part[0].offset == 0);
2594 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2596 loc_code = GET_CODE (loc);
2597 for (node = var->var_part[0].loc_chain; node; node = node->next)
2599 decl_or_value dv;
2600 variable rvar;
/* Fast path: rtxes with different codes can never be rtx_equal, and
   only VALUE nodes are worth recursing into.  */
2602 if (GET_CODE (node->loc) != loc_code)
2604 if (GET_CODE (node->loc) != VALUE)
2605 continue;
2607 else if (loc == node->loc)
2608 return node;
2609 else if (loc_code != VALUE)
2611 if (rtx_equal_p (loc, node->loc))
2612 return node;
2613 continue;
2616 /* Since we're in star-canonical form, we don't need to visit
2617 non-canonical nodes: one-part variables and non-canonical
2618 values would only point back to the canonical node. */
2619 if (dv_is_value_p (var->dv)
2620 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2622 /* Skip all subsequent VALUEs. */
2623 while (node->next && GET_CODE (node->next->loc) == VALUE)
2625 node = node->next;
2626 gcc_checking_assert (!canon_value_cmp (node->loc,
2627 dv_as_value (var->dv)));
2628 if (loc == node->loc)
2629 return node;
2631 continue;
/* Here NODE->loc is a more-canonical VALUE; in star-canonical form it
   must be the sole, leading entry of the chain — recurse into it.  */
2634 gcc_checking_assert (node == var->var_part[0].loc_chain);
2635 gcc_checking_assert (!node->next);
2637 dv = dv_from_value (node->loc);
2638 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2639 return find_loc_in_1pdv (loc, rvar, vars);
2642 return NULL;
2645 /* Hash table iteration argument passed to variable_merge. */
/* Bundles the three dataflow sets involved in a merge so a single
   pointer can be threaded through the hash-table traversal callbacks.  */
2646 struct dfset_merge
2648 /* The set in which the merge is to be inserted. */
2649 dataflow_set *dst;
2650 /* The set that we're iterating in. */
2651 dataflow_set *cur;
2652 /* The set that may contain the other dv we are to merge with. */
2653 dataflow_set *src;
2654 /* Number of onepart dvs in src. */
2655 int src_onepart_cnt;
2658 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2659 loc_cmp order, and it is maintained as such. */
2661 static void
2662 insert_into_intersection (location_chain *nodep, rtx loc,
2663 enum var_init_status status)
2665 location_chain node;
2666 int r;
2668 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2669 if ((r = loc_cmp (node->loc, loc)) == 0)
2671 node->init = MIN (node->init, status);
2672 return;
2674 else if (r > 0)
2675 break;
2677 node = (location_chain) pool_alloc (loc_chain_pool);
2679 node->loc = loc;
2680 node->set_src = NULL;
2681 node->init = status;
2682 node->next = *nodep;
2683 *nodep = node;
2686 /* Insert in DEST the intersection the locations present in both
2687 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2688 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2689 DSM->dst. */
2691 static void
2692 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2693 location_chain s1node, variable s2var)
2695 dataflow_set *s1set = dsm->cur;
2696 dataflow_set *s2set = dsm->src;
2697 location_chain found;
2699 if (s2var)
2701 location_chain s2node;
2703 gcc_checking_assert (dv_onepart_p (s2var->dv));
/* Fast path: while the two chains are pointer-identical node for
   node, intersect them directly without any searching.  */
2705 if (s2var->n_var_parts)
2707 gcc_checking_assert (s2var->var_part[0].offset == 0);
2708 s2node = s2var->var_part[0].loc_chain;
2710 for (; s1node && s2node;
2711 s1node = s1node->next, s2node = s2node->next)
2712 if (s1node->loc != s2node->loc)
2713 break;
2714 else if (s1node->loc == val)
2715 continue;
2716 else
2717 insert_into_intersection (dest, s1node->loc,
2718 MIN (s1node->init, s2node->init));
/* Slow path for the remaining S1 nodes: look each one up in S2VAR
   (and transitively in values it mentions).  */
2722 for (; s1node; s1node = s1node->next)
2724 if (s1node->loc == val)
2725 continue;
2727 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2728 shared_hash_htab (s2set->vars))))
2730 insert_into_intersection (dest, s1node->loc,
2731 MIN (s1node->init, found->init));
2732 continue;
/* If S1's location is itself a VALUE, intersect that value's own
   chain too; VALUE_RECURSED_INTO guards against cycles.  */
2735 if (GET_CODE (s1node->loc) == VALUE
2736 && !VALUE_RECURSED_INTO (s1node->loc))
2738 decl_or_value dv = dv_from_value (s1node->loc);
2739 variable svar = shared_hash_find (s1set->vars, dv);
2740 if (svar)
2742 if (svar->n_var_parts == 1)
2744 VALUE_RECURSED_INTO (s1node->loc) = true;
2745 intersect_loc_chains (val, dest, dsm,
2746 svar->var_part[0].loc_chain,
2747 s2var);
2748 VALUE_RECURSED_INTO (s1node->loc) = false;
2753 /* ??? if the location is equivalent to any location in src,
2754 searched recursively
2756 add to dst the values needed to represent the equivalence
2758 telling whether locations S is equivalent to another dv's
2759 location list:
2761 for each location D in the list
2763 if S and D satisfy rtx_equal_p, then it is present
2765 else if D is a value, recurse without cycles
2767 else if S and D have the same CODE and MODE
2769 for each operand oS and the corresponding oD
2771 if oS and oD are not equivalent, then S an D are not equivalent
2773 else if they are RTX vectors
2775 if any vector oS element is not equivalent to its respective oD,
2776 then S and D are not equivalent
2784 /* Return -1 if X should be before Y in a location list for a 1-part
2785 variable, 1 if Y should be before X, and 0 if they're equivalent
2786 and should not appear in the list. */
/* Overall ordering established below: REGs sort first, then MEMs, then
   VALUEs, then all other rtxes by code and then operand-by-operand.  */
2788 static int
2789 loc_cmp (rtx x, rtx y)
2791 int i, j, r;
2792 RTX_CODE code = GET_CODE (x);
2793 const char *fmt;
2795 if (x == y)
2796 return 0;
2798 if (REG_P (x))
2800 if (!REG_P (y))
2801 return -1;
2802 gcc_assert (GET_MODE (x) == GET_MODE (y));
2803 if (REGNO (x) == REGNO (y))
2804 return 0;
2805 else if (REGNO (x) < REGNO (y))
2806 return -1;
2807 else
2808 return 1;
2811 if (REG_P (y))
2812 return 1;
2814 if (MEM_P (x))
2816 if (!MEM_P (y))
2817 return -1;
2818 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* MEMs are ordered by their address expressions.  */
2819 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2822 if (MEM_P (y))
2823 return 1;
2825 if (GET_CODE (x) == VALUE)
2827 if (GET_CODE (y) != VALUE)
2828 return -1;
2829 /* Don't assert the modes are the same, that is true only
2830 when not recursing. (subreg:QI (value:SI 1:1) 0)
2831 and (subreg:QI (value:DI 2:2) 0) can be compared,
2832 even when the modes are different. */
2833 if (canon_value_cmp (x, y))
2834 return -1;
2835 else
2836 return 1;
2839 if (GET_CODE (y) == VALUE)
2840 return 1;
2842 if (GET_CODE (x) == GET_CODE (y))
2843 /* Compare operands below. */;
2844 else if (GET_CODE (x) < GET_CODE (y))
2845 return -1;
2846 else
2847 return 1;
2849 gcc_assert (GET_MODE (x) == GET_MODE (y));
2851 if (GET_CODE (x) == DEBUG_EXPR)
2853 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2854 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2855 return -1;
2856 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2857 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2858 return 1;
/* Same code and mode: compare operand-by-operand according to the
   rtx format string, recursing into sub-rtxes and vectors.  */
2861 fmt = GET_RTX_FORMAT (code);
2862 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2863 switch (fmt[i])
2865 case 'w':
2866 if (XWINT (x, i) == XWINT (y, i))
2867 break;
2868 else if (XWINT (x, i) < XWINT (y, i))
2869 return -1;
2870 else
2871 return 1;
2873 case 'n':
2874 case 'i':
2875 if (XINT (x, i) == XINT (y, i))
2876 break;
2877 else if (XINT (x, i) < XINT (y, i))
2878 return -1;
2879 else
2880 return 1;
2882 case 'V':
2883 case 'E':
2884 /* Compare the vector length first. */
2885 if (XVECLEN (x, i) == XVECLEN (y, i))
2886 /* Compare the vectors elements. */;
2887 else if (XVECLEN (x, i) < XVECLEN (y, i))
2888 return -1;
2889 else
2890 return 1;
2892 for (j = 0; j < XVECLEN (x, i); j++)
2893 if ((r = loc_cmp (XVECEXP (x, i, j),
2894 XVECEXP (y, i, j))))
2895 return r;
2896 break;
2898 case 'e':
2899 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2900 return r;
2901 break;
2903 case 'S':
2904 case 's':
2905 if (XSTR (x, i) == XSTR (y, i))
2906 break;
2907 if (!XSTR (x, i))
2908 return -1;
2909 if (!XSTR (y, i))
2910 return 1;
2911 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2912 break;
2913 else if (r < 0)
2914 return -1;
2915 else
2916 return 1;
2918 case 'u':
2919 /* These are just backpointers, so they don't matter. */
2920 break;
2922 case '0':
2923 case 't':
2924 break;
2926 /* It is believed that rtx's at this level will never
2927 contain anything but integers and other rtx's,
2928 except for within LABEL_REFs and SYMBOL_REFs. */
2929 default:
2930 gcc_unreachable ();
2933 return 0;
2936 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2937 from VALUE to DVP. */
/* for_each_rtx callback; DVP is the referencing decl_or_value passed
   as an opaque pointer.  Always returns 0 so the walk continues.  */
2939 static int
2940 add_value_chain (rtx *loc, void *dvp)
2942 decl_or_value dv, ldv;
2943 value_chain vc, nvc;
2944 void **slot;
2946 if (GET_CODE (*loc) == VALUE)
2947 ldv = dv_from_value (*loc);
2948 else if (GET_CODE (*loc) == DEBUG_EXPR)
2949 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2950 else
2951 return 0;
/* Self-references need no backlink.  */
2953 if (dv_as_opaque (ldv) == dvp)
2954 return 0;
2956 dv = (decl_or_value) dvp;
2957 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2958 INSERT);
/* The first chain element in a slot is a sentinel holding LDV itself;
   actual referrers hang off its NEXT field.  */
2959 if (!*slot)
2961 vc = (value_chain) pool_alloc (value_chain_pool);
2962 vc->dv = ldv;
2963 vc->next = NULL;
2964 vc->refcount = 0;
2965 *slot = (void *) vc;
2967 else
2969 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2970 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2971 break;
2972 if (vc)
/* DV already chained: just bump its reference count.  */
2974 vc->refcount++;
2975 return 0;
/* Not found: link a new entry for DV right after the sentinel.  */
2978 vc = (value_chain) *slot;
2979 nvc = (value_chain) pool_alloc (value_chain_pool);
2980 nvc->dv = dv;
2981 nvc->next = vc->next;
2982 nvc->refcount = 1;
2983 vc->next = nvc;
2984 return 0;
2987 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2988 from those VALUEs to DVP. */
2990 static void
2991 add_value_chains (decl_or_value dv, rtx loc)
2993 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2995 add_value_chain (&loc, dv_as_opaque (dv));
2996 return;
2998 if (REG_P (loc))
2999 return;
3000 if (MEM_P (loc))
3001 loc = XEXP (loc, 0);
3002 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
3005 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
3006 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
3007 that is something we never can express in .debug_info and can prevent
3008 reverse ops from being used. */
3010 static void
3011 add_cselib_value_chains (decl_or_value dv)
3013 struct elt_loc_list **l;
/* Walk the locs list through a pointer-to-pointer so ASM_OPERANDS
   entries can be unlinked in place while iterating.  */
3015 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
3016 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
3017 *l = (*l)->next;
3018 else
3020 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
3021 l = &(*l)->next;
3025 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
3026 from VALUE to DVP. */
/* for_each_rtx callback mirroring add_value_chain; an entry for DVP
   must exist (gcc_unreachable otherwise).  Always returns 0.  */
3028 static int
3029 remove_value_chain (rtx *loc, void *dvp)
3031 decl_or_value dv, ldv;
3032 value_chain vc;
3033 void **slot;
3035 if (GET_CODE (*loc) == VALUE)
3036 ldv = dv_from_value (*loc);
3037 else if (GET_CODE (*loc) == DEBUG_EXPR)
3038 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
3039 else
3040 return 0;
3042 if (dv_as_opaque (ldv) == dvp)
3043 return 0;
3045 dv = (decl_or_value) dvp;
3046 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
3047 NO_INSERT);
3048 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
3049 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
3051 value_chain dvc = vc->next;
3052 gcc_assert (dvc->refcount > 0);
3053 if (--dvc->refcount == 0)
3055 vc->next = dvc->next;
3056 pool_free (value_chain_pool, dvc);
/* If only the sentinel head remains, drop the whole slot.  */
3057 if (vc->next == NULL && vc == (value_chain) *slot)
3059 pool_free (value_chain_pool, vc);
3060 htab_clear_slot (value_chains, slot);
3063 return 0;
3065 gcc_unreachable ();
3068 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3069 from those VALUEs to DVP. */
3071 static void
3072 remove_value_chains (decl_or_value dv, rtx loc)
3074 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
3076 remove_value_chain (&loc, dv_as_opaque (dv));
3077 return;
3079 if (REG_P (loc))
3080 return;
3081 if (MEM_P (loc))
3082 loc = XEXP (loc, 0);
3083 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
3086 #if ENABLE_CHECKING
3087 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
3088 VALUEs to DV. */
/* Checking-only counterpart of add_cselib_value_chains; no list
   editing here, just backlink removal.  */
3090 static void
3091 remove_cselib_value_chains (decl_or_value dv)
3093 struct elt_loc_list *l;
3095 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
3096 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
3099 /* Check the order of entries in one-part variables. */
/* htab_traverse callback (checking builds only): asserts that every
   one-part variable's location chain is strictly loc_cmp-sorted.
   Always returns 1 to continue traversal.  */
3101 static int
3102 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3104 variable var = (variable) *slot;
3105 decl_or_value dv = var->dv;
3106 location_chain node, next;
3108 #ifdef ENABLE_RTL_CHECKING
3109 int i;
3110 for (i = 0; i < var->n_var_parts; i++)
3111 gcc_assert (var->var_part[0].cur_loc == NULL);
3112 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
3113 #endif
3115 if (!dv_onepart_p (dv))
3116 return 1;
3118 gcc_assert (var->n_var_parts == 1);
3119 node = var->var_part[0].loc_chain;
3120 gcc_assert (node);
3122 while ((next = node->next))
3124 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3125 node = next;
3128 return 1;
3130 #endif
3132 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3133 more likely to be chosen as canonical for an equivalence set.
3134 Ensure less likely values can reach more likely neighbors, making
3135 the connections bidirectional. */
/* htab_traverse callback over SET->vars; DATA is the dataflow_set.
   Always returns 1 to continue traversal.  */
3137 static int
3138 canonicalize_values_mark (void **slot, void *data)
3140 dataflow_set *set = (dataflow_set *)data;
3141 variable var = (variable) *slot;
3142 decl_or_value dv = var->dv;
3143 rtx val;
3144 location_chain node;
3146 if (!dv_is_value_p (dv))
3147 return 1;
3149 gcc_checking_assert (var->n_var_parts == 1);
3151 val = dv_as_value (dv);
3153 for (node = var->var_part[0].loc_chain; node; node = node->next)
3154 if (GET_CODE (node->loc) == VALUE)
3156 if (canon_value_cmp (node->loc, val))
3157 VALUE_RECURSED_INTO (val) = true;
3158 else
/* NODE->loc is less canonical than VAL: add the reverse edge so
   NODE->loc's chain also mentions VAL, then mark it for a revisit.  */
3160 decl_or_value odv = dv_from_value (node->loc);
3161 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3163 set_slot_part (set, val, oslot, odv, 0,
3164 node->init, NULL_RTX);
3166 VALUE_RECURSED_INTO (node->loc) = true;
3170 return 1;
3173 /* Remove redundant entries from equivalence lists in onepart
3174 variables, canonicalizing equivalence sets into star shapes. */
/* htab_traverse callback; DATA is the dataflow_set.  Iterates to a
   local fixed point via the restart/restart_with_cval labels.  Always
   returns 1 to continue traversal.  */
3176 static int
3177 canonicalize_values_star (void **slot, void *data)
3179 dataflow_set *set = (dataflow_set *)data;
3180 variable var = (variable) *slot;
3181 decl_or_value dv = var->dv;
3182 location_chain node;
3183 decl_or_value cdv;
3184 rtx val, cval;
3185 void **cslot;
3186 bool has_value;
3187 bool has_marks;
3189 if (!dv_onepart_p (dv))
3190 return 1;
3192 gcc_checking_assert (var->n_var_parts == 1);
3194 if (dv_is_value_p (dv))
3196 cval = dv_as_value (dv);
3197 if (!VALUE_RECURSED_INTO (cval))
3198 return 1;
3199 VALUE_RECURSED_INTO (cval) = false;
3201 else
3202 cval = NULL_RTX;
3204 restart:
3205 val = cval;
3206 has_value = false;
3207 has_marks = false;
3209 gcc_assert (var->n_var_parts == 1);
/* Find the most canonical VALUE reachable from VAR's chain.  */
3211 for (node = var->var_part[0].loc_chain; node; node = node->next)
3212 if (GET_CODE (node->loc) == VALUE)
3214 has_value = true;
3215 if (VALUE_RECURSED_INTO (node->loc))
3216 has_marks = true;
3217 if (canon_value_cmp (node->loc, cval))
3218 cval = node->loc;
3221 if (!has_value)
3222 return 1;
/* VAR itself is already the most canonical member of its set.  */
3224 if (cval == val)
3226 if (!has_marks || dv_is_decl_p (dv))
3227 return 1;
3229 /* Keep it marked so that we revisit it, either after visiting a
3230 child node, or after visiting a new parent that might be
3231 found out. */
3232 VALUE_RECURSED_INTO (val) = true;
3234 for (node = var->var_part[0].loc_chain; node; node = node->next)
3235 if (GET_CODE (node->loc) == VALUE
3236 && VALUE_RECURSED_INTO (node->loc))
3238 cval = node->loc;
3239 restart_with_cval:
3240 VALUE_RECURSED_INTO (cval) = false;
3241 dv = dv_from_value (cval);
3242 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3243 if (!slot)
3245 gcc_assert (dv_is_decl_p (var->dv));
3246 /* The canonical value was reset and dropped.
3247 Remove it. */
3248 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3249 return 1;
3251 var = (variable)*slot;
3252 gcc_assert (dv_is_value_p (var->dv));
3253 if (var->n_var_parts == 0)
3254 return 1;
3255 gcc_assert (var->n_var_parts == 1);
3256 goto restart;
3259 VALUE_RECURSED_INTO (val) = false;
3261 return 1;
3264 /* Push values to the canonical one. */
3265 cdv = dv_from_value (cval);
3266 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
/* Move every non-canonical location of VAR onto CVAL's chain, and make
   non-canonical VALUEs (and registers) point at CVAL instead.  */
3268 for (node = var->var_part[0].loc_chain; node; node = node->next)
3269 if (node->loc != cval)
3271 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3272 node->init, NULL_RTX);
3273 if (GET_CODE (node->loc) == VALUE)
3275 decl_or_value ndv = dv_from_value (node->loc);
3277 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3278 NO_INSERT);
3280 if (canon_value_cmp (node->loc, val))
3282 /* If it could have been a local minimum, it's not any more,
3283 since it's now neighbor to cval, so it may have to push
3284 to it. Conversely, if it wouldn't have prevailed over
3285 val, then whatever mark it has is fine: if it was to
3286 push, it will now push to a more canonical node, but if
3287 it wasn't, then it has already pushed any values it might
3288 have to. */
3289 VALUE_RECURSED_INTO (node->loc) = true;
3290 /* Make sure we visit node->loc by ensuring we cval is
3291 visited too. */
3292 VALUE_RECURSED_INTO (cval) = true;
3294 else if (!VALUE_RECURSED_INTO (node->loc))
3295 /* If we have no need to "recurse" into this node, it's
3296 already "canonicalized", so drop the link to the old
3297 parent. */
3298 clobber_variable_part (set, cval, ndv, 0, NULL);
3300 else if (GET_CODE (node->loc) == REG)
3302 attrs list = set->regs[REGNO (node->loc)], *listp;
3304 /* Change an existing attribute referring to dv so that it
3305 refers to cdv, removing any duplicate this might
3306 introduce, and checking that no previous duplicates
3307 existed, all in a single pass. */
3309 while (list)
3311 if (list->offset == 0
3312 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3313 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3314 break;
3316 list = list->next;
3319 gcc_assert (list);
3320 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3322 list->dv = cdv;
3323 for (listp = &list->next; (list = *listp); listp = &list->next)
3325 if (list->offset)
3326 continue;
3328 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3330 *listp = list->next;
3331 pool_free (attrs_pool, list);
3332 list = *listp;
3333 break;
3336 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3339 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3341 for (listp = &list->next; (list = *listp); listp = &list->next)
3343 if (list->offset)
3344 continue;
3346 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3348 *listp = list->next;
3349 pool_free (attrs_pool, list);
3350 list = *listp;
3351 break;
3354 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3357 else
3358 gcc_unreachable ();
3360 #if ENABLE_CHECKING
3361 while (list)
3363 if (list->offset == 0
3364 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3365 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3366 gcc_unreachable ();
3368 list = list->next;
3370 #endif
3374 if (val)
3375 set_slot_part (set, val, cslot, cdv, 0,
3376 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
/* Reduce VAR's own chain to the single canonical entry CVAL.  */
3378 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3380 /* Variable may have been unshared. */
3381 var = (variable)*slot;
3382 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3383 && var->var_part[0].loc_chain->next == NULL);
3385 if (VALUE_RECURSED_INTO (cval))
3386 goto restart_with_cval;
3388 return 1;
3391 /* Bind one-part variables to the canonical value in an equivalence
3392 set. Not doing this causes dataflow convergence failure in rare
3393 circumstances, see PR42873. Unfortunately we can't do this
3394 efficiently as part of canonicalize_values_star, since we may not
3395 have determined or even seen the canonical value of a set when we
3396 get to a variable that references another member of the set. */
/* htab_traverse callback; DATA is the dataflow_set.  Always returns 1
   to continue traversal.  */
3398 static int
3399 canonicalize_vars_star (void **slot, void *data)
3401 dataflow_set *set = (dataflow_set *)data;
3402 variable var = (variable) *slot;
3403 decl_or_value dv = var->dv;
3404 location_chain node;
3405 rtx cval;
3406 decl_or_value cdv;
3407 void **cslot;
3408 variable cvar;
3409 location_chain cnode;
/* Only one-part decls (not values) with a single VALUE location are
   candidates for rebinding here.  */
3411 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3412 return 1;
3414 gcc_assert (var->n_var_parts == 1);
3416 node = var->var_part[0].loc_chain;
3418 if (GET_CODE (node->loc) != VALUE)
3419 return 1;
3421 gcc_assert (!node->next);
3422 cval = node->loc;
3424 /* Push values to the canonical one. */
3425 cdv = dv_from_value (cval);
3426 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3427 if (!cslot)
3428 return 1;
3429 cvar = (variable)*cslot;
3430 gcc_assert (cvar->n_var_parts == 1);
3432 cnode = cvar->var_part[0].loc_chain;
3434 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3435 that are not "more canonical" than it. */
3436 if (GET_CODE (cnode->loc) != VALUE
3437 || !canon_value_cmp (cnode->loc, cval))
3438 return 1;
3440 /* CVAL was found to be non-canonical. Change the variable to point
3441 to the canonical VALUE. */
3442 gcc_assert (!cnode->next)
3443 cval = cnode->loc;
3445 slot = set_slot_part (set, cval, slot, dv, 0,
3446 node->init, node->set_src);
3447 clobber_slot_part (set, cval, slot, 0, node->set_src);
3449 return 1;
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
   corresponding entry in DSM->src.  Multi-part variables are combined
   with variable_union, whereas onepart dvs are combined with
   intersection.  Returns 1 so a traversal driving it continues.  */

static int
variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
{
  dataflow_set *dst = dsm->dst;
  void **dstslot;
  variable s2var, dvar = NULL;
  decl_or_value dv = s1var->dv;
  bool onepart = dv_onepart_p (dv);
  rtx val;
  hashval_t dvhash;
  location_chain node, *nodep;

  /* If the incoming onepart variable has an empty location list, then
     the intersection will be just as empty.  For other variables,
     it's always union.  */
  gcc_checking_assert (s1var->n_var_parts
		       && s1var->var_part[0].loc_chain);

  if (!onepart)
    return variable_union (s1var, dst);

  gcc_checking_assert (s1var->n_var_parts == 1
		       && s1var->var_part[0].offset == 0);

  dvhash = dv_htab_hash (dv);
  if (dv_is_value_p (dv))
    val = dv_as_value (dv);
  else
    val = NULL;

  /* Intersection with an absent entry is empty.  */
  s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
  if (!s2var)
    {
      dst_can_be_shared = false;
      return 1;
    }

  dsm->src_onepart_cnt--;
  gcc_assert (s2var->var_part[0].loc_chain
	      && s2var->n_var_parts == 1
	      && s2var->var_part[0].offset == 0);

  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
  if (dstslot)
    {
      dvar = (variable)*dstslot;
      gcc_assert (dvar->refcount == 1
		  && dvar->n_var_parts == 1
		  && dvar->var_part[0].offset == 0);
      nodep = &dvar->var_part[0].loc_chain;
    }
  else
    {
      /* No DST entry yet: intersect into a local chain first.  */
      nodep = &node;
      node = NULL;
    }

  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
    {
      /* Both inputs agree; share S2VAR directly into DST.  */
      dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
						 dvhash, INSERT);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else
    {
      dst_can_be_shared = false;

      intersect_loc_chains (val, nodep, dsm,
			    s1var->var_part[0].loc_chain, s2var);

      if (!dstslot)
	{
	  if (node)
	    {
	      /* Non-empty intersection: build a fresh DST variable
		 holding the intersected chain.  */
	      dvar = (variable) pool_alloc (dv_pool (dv));
	      dvar->dv = dv;
	      dvar->refcount = 1;
	      dvar->n_var_parts = 1;
	      dvar->cur_loc_changed = false;
	      dvar->in_changed_variables = false;
	      dvar->var_part[0].offset = 0;
	      dvar->var_part[0].loc_chain = node;
	      dvar->var_part[0].cur_loc = NULL;

	      dstslot
		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
						   INSERT);
	      gcc_assert (!*dstslot);
	      *dstslot = dvar;
	    }
	  else
	    /* Empty intersection: nothing to record.  */
	    return 1;
	}
    }

  /* Walk the leading REG locations of the merged chain, registering
     them in DST's register attribute lists or redirecting them to the
     VALUE already associated with the same register/mode.  */
  nodep = &dvar->var_part[0].loc_chain;
  while ((node = *nodep))
    {
      location_chain *nextp = &node->next;

      if (GET_CODE (node->loc) == REG)
	{
	  attrs list;

	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
		&& dv_is_value_p (list->dv))
	      break;

	  if (!list)
	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
			       dv, 0, node->loc);
	  /* If this value became canonical for another value that had
	     this register, we want to leave it alone.  */
	  else if (dv_as_value (list->dv) != val)
	    {
	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
				       dstslot, dv, 0,
				       node->init, NULL_RTX);
	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);

	      /* Since nextp points into the removed node, we can't
		 use it.  The pointer to the next node moved to nodep.
		 However, if the variable we're walking is unshared
		 during our walk, we'll keep walking the location list
		 of the previously-shared variable, in which case the
		 node won't have been removed, and we'll want to skip
		 it.  That's why we test *nodep here.  */
	      if (*nodep != node)
		nextp = nodep;
	    }
	}
      else
	/* Canonicalization puts registers first, so we don't have to
	   walk it all.  */
	break;
      nodep = nextp;
    }

  /* The slot contents may have been replaced above; resynchronize.  */
  if (dvar != (variable)*dstslot)
    dvar = (variable)*dstslot;
  nodep = &dvar->var_part[0].loc_chain;

  if (val)
    {
      /* Mark all referenced nodes for canonicalization, and make sure
	 we have mutual equivalence links.  */
      VALUE_RECURSED_INTO (val) = true;
      for (node = *nodep; node; node = node->next)
	if (GET_CODE (node->loc) == VALUE)
	  {
	    VALUE_RECURSED_INTO (node->loc) = true;
	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
			       node->init, NULL, INSERT);
	  }

      dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
      gcc_assert (*dstslot == dvar);
      canonicalize_values_star (dstslot, dst);
      gcc_checking_assert (dstslot
			   == shared_hash_find_slot_noinsert_1 (dst->vars,
								dv, dvhash));
      dvar = (variable)*dstslot;
    }
  else
    {
      bool has_value = false, has_other = false;

      /* If we have one value and anything else, we're going to
	 canonicalize this, so make sure all values have an entry in
	 the table and are marked for canonicalization.  */
      for (node = *nodep; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    {
	      /* If this was marked during register canonicalization,
		 we know we have to canonicalize values.  */
	      if (has_value)
		has_other = true;
	      has_value = true;
	      if (has_other)
		break;
	    }
	  else
	    {
	      has_other = true;
	      if (has_value)
		break;
	    }
	}

      if (has_value && has_other)
	{
	  for (node = *nodep; node; node = node->next)
	    if (GET_CODE (node->loc) == VALUE)
	      {
		decl_or_value dv = dv_from_value (node->loc);
		void **slot = NULL;

		if (shared_hash_shared (dst->vars))
		  slot = shared_hash_find_slot_noinsert (dst->vars, dv);
		if (!slot)
		  slot = shared_hash_find_slot_unshare (&dst->vars, dv,
							INSERT);
		if (!*slot)
		  {
		    /* Give the VALUE an (empty) table entry so the
		       canonicalizer can attach equivalences to it.  */
		    variable var = (variable) pool_alloc (dv_pool (dv));
		    var->dv = dv;
		    var->refcount = 1;
		    var->n_var_parts = 1;
		    var->cur_loc_changed = false;
		    var->in_changed_variables = false;
		    var->var_part[0].offset = 0;
		    var->var_part[0].loc_chain = NULL;
		    var->var_part[0].cur_loc = NULL;
		    *slot = var;
		  }

		VALUE_RECURSED_INTO (node->loc) = true;
	      }

	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
	  gcc_assert (*dstslot == dvar);
	  canonicalize_values_star (dstslot, dst);
	  gcc_checking_assert (dstslot
			       == shared_hash_find_slot_noinsert_1 (dst->vars,
								    dv, dvhash));
	  dvar = (variable)*dstslot;
	}
    }

  /* If the result turned out identical to one of the inputs, share
     that input instead of keeping a private copy.  */
  if (!onepart_variable_different_p (dvar, s2var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s1var;
      dvar->refcount++;
      dst_can_be_shared = false;
    }
  else
    dst_can_be_shared = false;

  return 1;
}
3710 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3711 multi-part variable. Unions of multi-part variables and
3712 intersections of one-part ones will be handled in
3713 variable_merge_over_cur(). */
3715 static int
3716 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3718 dataflow_set *dst = dsm->dst;
3719 decl_or_value dv = s2var->dv;
3720 bool onepart = dv_onepart_p (dv);
3722 if (!onepart)
3724 void **dstp = shared_hash_find_slot (dst->vars, dv);
3725 *dstp = s2var;
3726 s2var->refcount++;
3727 return 1;
3730 dsm->src_onepart_cnt++;
3731 return 1;
/* Combine dataflow set information from SRC2 into DST.  DST's
   previous contents are moved into a local copy (CUR) and merged back
   as the other operand, so on return DST holds the merge of its old
   contents with SRC2.  */

static void
dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
{
  dataflow_set cur = *dst;      /* Snapshot of DST's old contents.  */
  dataflow_set *src1 = &cur;
  struct dfset_merge dsm;
  int i;
  size_t src1_elems, src2_elems;
  htab_iterator hi;
  variable var;

  src1_elems = htab_elements (shared_hash_htab (src1->vars));
  src2_elems = htab_elements (shared_hash_htab (src2->vars));
  /* Re-initialize DST with a fresh, unshared hash table sized for the
     larger input.  */
  dataflow_set_init (dst);
  dst->stack_adjust = cur.stack_adjust;
  shared_hash_destroy (dst->vars);
  dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
  dst->vars->refcount = 1;
  dst->vars->htab
    = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
		   variable_htab_eq, variable_htab_free);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);

  dsm.dst = dst;
  dsm.src = src2;
  dsm.cur = src1;
  dsm.src_onepart_cnt = 0;

  /* First copy multi-part variables from SRC2, then merge (union or
     intersect) everything from the old DST contents.  */
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
    variable_merge_over_src (var, &dsm);
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
    variable_merge_over_cur (var, &dsm);

  /* Any one-part entry of SRC2 not matched in CUR means the result
     cannot share SRC2's table.  */
  if (dsm.src_onepart_cnt)
    dst_can_be_shared = false;

  dataflow_set_destroy (src1);
}
/* Mark register equivalences: for every hard register with multiple
   attribute entries, pick the most canonical VALUE per machine mode
   and link the other entries to it, then star-canonicalize the
   affected table entries.  */

static void
dataflow_set_equiv_regs (dataflow_set *set)
{
  int i;
  attrs list, *listp;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* canon[M] is the most canonical VALUE seen in mode M for this
	 register, or NULL.  */
      rtx canon[NUM_MACHINE_MODES];

      /* If the list is empty or one entry, no need to canonicalize
	 anything.  */
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
	continue;

      memset (canon, 0, sizeof (canon));

      /* Pass 1: find the canonical VALUE for each mode.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_is_value_p (list->dv))
	  {
	    rtx val = dv_as_value (list->dv);
	    rtx *cvalp = &canon[(int)GET_MODE (val)];
	    rtx cval = *cvalp;

	    if (canon_value_cmp (val, cval))
	      *cvalp = val;
	  }

      /* Pass 2: record mutual equivalences between each entry and the
	 canonical VALUE of its mode, marking both sides for the later
	 canonicalization pass.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);

		if (val == cval)
		  continue;

		VALUE_RECURSED_INTO (val) = true;
		set_variable_part (set, val, dv_from_value (cval), 0,
				   VAR_INIT_STATUS_INITIALIZED,
				   NULL, NO_INSERT);
	      }

	    VALUE_RECURSED_INTO (cval) = true;
	    set_variable_part (set, cval, list->dv, 0,
			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
	  }

      /* Pass 3: star-canonicalize every marked entry.  The iteration
	 re-checks *listp because canonicalize_values_star may remove
	 the current attribute node.  */
      for (listp = &set->regs[i]; (list = *listp);
	   listp = list ? &list->next : listp)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];
	    void **slot;

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);
		if (!VALUE_RECURSED_INTO (val))
		  continue;
	      }

	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
	    canonicalize_values_star (slot, set);
	    /* If the node was removed under us, restart the listp
	       update from the same position.  */
	    if (*listp != list)
	      list = NULL;
	  }
    }
}
3859 /* Remove any redundant values in the location list of VAR, which must
3860 be unshared and 1-part. */
3862 static void
3863 remove_duplicate_values (variable var)
3865 location_chain node, *nodep;
3867 gcc_assert (dv_onepart_p (var->dv));
3868 gcc_assert (var->n_var_parts == 1);
3869 gcc_assert (var->refcount == 1);
3871 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3873 if (GET_CODE (node->loc) == VALUE)
3875 if (VALUE_RECURSED_INTO (node->loc))
3877 /* Remove duplicate value node. */
3878 *nodep = node->next;
3879 pool_free (loc_chain_pool, node);
3880 continue;
3882 else
3883 VALUE_RECURSED_INTO (node->loc) = true;
3885 nodep = &node->next;
3888 for (node = var->var_part[0].loc_chain; node; node = node->next)
3889 if (GET_CODE (node->loc) == VALUE)
3891 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3892 VALUE_RECURSED_INTO (node->loc) = false;
/* Hash table iteration argument passed to variable_post_merge.  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  (*permp is allocated lazily by
     variable_post_merge_new_vals when first needed.)  */
  dataflow_set **permp;
};
/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.  htab_traverse
   callback over SET's variable table; always returns 1.  */

static int
variable_post_merge_new_vals (void **slot, void *info)
{
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
  dataflow_set *set = dfpm->set;
  variable var = (variable)*slot;
  location_chain node;

  if (!dv_onepart_p (var->dv) || !var->n_var_parts)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  if (dv_is_decl_p (var->dv))
    {
      bool check_dupes = false;

    restart:
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
	  else if (GET_CODE (node->loc) == REG)
	    {
	      attrs att, *attp, *curp = NULL;

	      /* We are about to rewrite NODE->loc in place; make sure
		 we own the variable before mutating it.  */
	      if (var->refcount != 1)
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_INITIALIZED);
		  var = (variable)*slot;
		  goto restart;
		}

	      /* Look for a VALUE already holding this register/mode;
		 remember the attribute for our own decl along the way.  */
	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
		   attp = &att->next)
		if (att->offset == 0
		    && GET_MODE (att->loc) == GET_MODE (node->loc))
		  {
		    if (dv_is_value_p (att->dv))
		      {
			rtx cval = dv_as_value (att->dv);
			node->loc = cval;
			check_dupes = true;
			break;
		      }
		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
		      curp = attp;
		  }

	      if (!curp)
		{
		  /* The loop above stopped before reaching the decl's
		     own attribute; continue the scan just for it.  */
		  curp = attp;
		  while (*curp)
		    if ((*curp)->offset == 0
			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
		      break;
		    else
		      curp = &(*curp)->next;
		  gcc_assert (*curp);
		}

	      if (!att)
		{
		  /* No VALUE for this register yet: reuse one from the
		     permanent set, or create a fresh cselib value.  */
		  decl_or_value cdv;
		  rtx cval;

		  if (!*dfpm->permp)
		    {
		      *dfpm->permp = XNEW (dataflow_set);
		      dataflow_set_init (*dfpm->permp);
		    }

		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
		       att; att = att->next)
		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
		      {
			gcc_assert (att->offset == 0
				    && dv_is_value_p (att->dv));
			val_reset (set, att->dv);
			break;
		      }

		  if (att)
		    {
		      cdv = att->dv;
		      cval = dv_as_value (cdv);
		    }
		  else
		    {
		      /* Create a unique value to hold this register,
			 that ought to be found and reused in
			 subsequent rounds.  */
		      cselib_val *v;
		      gcc_assert (!cselib_lookup (node->loc,
						  GET_MODE (node->loc), 0,
						  VOIDmode));
		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
					 VOIDmode);
		      cselib_preserve_value (v);
		      cselib_invalidate_rtx (node->loc);
		      cval = v->val_rtx;
		      cdv = dv_from_value (cval);
		      if (dump_file)
			fprintf (dump_file,
				 "Created new value %u:%u for reg %i\n",
				 v->uid, v->hash, REGNO (node->loc));
		    }

		  var_reg_decl_set (*dfpm->permp, node->loc,
				    VAR_INIT_STATUS_INITIALIZED,
				    cdv, 0, NULL, INSERT);

		  node->loc = cval;
		  check_dupes = true;
		}

	      /* Remove attribute referring to the decl, which now
		 uses the value for the register, already existing or
		 to be added when we bring perm in.  */
	      att = *curp;
	      *curp = att->next;
	      pool_free (attrs_pool, att);
	    }
	}

      /* Rewriting registers to VALUEs may have introduced duplicate
	 VALUE nodes; clean them up.  */
      if (check_dupes)
	remove_duplicate_values (var);
    }

  return 1;
}
/* Reset values in the permanent set that are not associated with the
   chosen expression.  htab_traverse callback over the permanent set's
   variable table; always returns 1.  */

static int
variable_post_merge_perm_vals (void **pslot, void *info)
{
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
  dataflow_set *set = dfpm->set;
  variable pvar = (variable)*pslot, var;
  location_chain pnode;
  decl_or_value dv;
  attrs att;

  /* Permanent-set entries are VALUEs whose single location is the
     register they were created for.  */
  gcc_assert (dv_is_value_p (pvar->dv)
	      && pvar->n_var_parts == 1);
  pnode = pvar->var_part[0].loc_chain;
  gcc_assert (pnode
	      && !pnode->next
	      && REG_P (pnode->loc));

  dv = pvar->dv;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      /* Although variable_post_merge_new_vals may have made decls
	 non-star-canonical, values that pre-existed in canonical form
	 remain canonical, and newly-created values reference a single
	 REG, so they are canonical as well.  Since VAR has the
	 location list for a VALUE, using find_loc_in_1pdv for it is
	 fine, since VALUEs don't map back to DECLs.  */
      if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
	return 1;
      val_reset (set, dv);
    }

  /* Find a VALUE attribute already covering this register/mode.  */
  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
    if (att->offset == 0
	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
	&& dv_is_value_p (att->dv))
      break;

  /* If there is a value associated with this register already, create
     an equivalence.  */
  if (att && dv_as_value (att->dv) != dv_as_value (dv))
    {
      rtx cval = dv_as_value (att->dv);
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
			 NULL, INSERT);
    }
  else if (!att)
    {
      /* No value yet: register this one and union in its location.  */
      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
			 dv, 0, pnode->loc);
      variable_union (pvar, set);
    }

  return 1;
}
/* Post-merge fixup for SET: create values for incoming one-part
   expressions, bring in equivalences from the permanent set *PERMP
   (if any), and star-canonicalize values and decl variables.  */

static void
dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
{
  struct dfset_post_merge dfpm;

  dfpm.set = set;
  dfpm.permp = permp;

  htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
		 &dfpm);
  if (*permp)
    htab_traverse (shared_hash_htab ((*permp)->vars),
		   variable_post_merge_perm_vals, &dfpm);
  /* Canonicalize values first, then the decl variables that point at
     them; the order matters.  */
  htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
  htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  The
   VALUE_RECURSED_INTO flag guards against cycles during recursion.  */

static location_chain
find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
{
  location_chain node;
  decl_or_value dv;
  variable var;
  location_chain where = NULL;

  if (!val)
    return NULL;

  gcc_assert (GET_CODE (val) == VALUE
	      && !VALUE_RECURSED_INTO (val));

  dv = dv_from_value (val);
  var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));

  if (!var)
    return NULL;

  gcc_assert (dv_onepart_p (var->dv));

  if (!var->n_var_parts)
    return NULL;

  gcc_assert (var->var_part[0].offset == 0);

  /* Mark VAL so recursive calls below don't revisit it.  */
  VALUE_RECURSED_INTO (val) = true;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (MEM_P (node->loc)
	&& MEM_EXPR (node->loc) == expr
	&& INT_MEM_OFFSET (node->loc) == 0)
      {
	where = node;
	break;
      }
    else if (GET_CODE (node->loc) == VALUE
	     && !VALUE_RECURSED_INTO (node->loc)
	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
      break;

  VALUE_RECURSED_INTO (val) = false;

  return where;
}
4176 /* Return TRUE if the value of MEM may vary across a call. */
4178 static bool
4179 mem_dies_at_call (rtx mem)
4181 tree expr = MEM_EXPR (mem);
4182 tree decl;
4184 if (!expr)
4185 return true;
4187 decl = get_base_address (expr);
4189 if (!decl)
4190 return true;
4192 if (!DECL_P (decl))
4193 return true;
4195 return (may_be_aliased (decl)
4196 || (!TREE_READONLY (decl) && is_global_var (decl)));
/* Remove all MEMs from the location list of a hash table entry for a
   one-part variable, except those whose MEM attributes map back to
   the variable itself, directly or within a VALUE.  htab_traverse
   callback; always returns 1.  */

static int
dataflow_set_preserve_mem_locs (void **slot, void *data)
{
  dataflow_set *set = (dataflow_set *) data;
  variable var = (variable) *slot;

  if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
    {
      tree decl = dv_as_decl (var->dv);
      location_chain loc, *locp;
      bool changed = false;

      if (!var->n_var_parts)
	return 1;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Pre-scan a shared variable: only unshare it if something
	     below would actually change it.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    {
	      /* We want to remove dying MEMs that don't refer to DECL.  */
	      if (GET_CODE (loc->loc) == MEM
		  && (MEM_EXPR (loc->loc) != decl
		      || INT_MEM_OFFSET (loc->loc) != 0)
		  && !mem_dies_at_call (loc->loc))
		break;
	      /* We want to move here MEMs that do refer to DECL.  */
	      else if (GET_CODE (loc->loc) == VALUE
		       && find_mem_expr_in_1pdv (decl, loc->loc,
						 shared_hash_htab (set->vars)))
		break;
	    }

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = (variable)*slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  rtx old_loc = loc->loc;
	  if (GET_CODE (old_loc) == VALUE)
	    {
	      location_chain mem_node
		= find_mem_expr_in_1pdv (decl, loc->loc,
					 shared_hash_htab (set->vars));

	      /* ??? This picks up only one out of multiple MEMs that
		 refer to the same variable.  Do we ever need to be
		 concerned about dealing with more than one, or, given
		 that they should all map to the same variable
		 location, their addresses will have been merged and
		 they will be regarded as equivalent?  */
	      if (mem_node)
		{
		  loc->loc = mem_node->loc;
		  loc->set_src = mem_node->set_src;
		  loc->init = MIN (loc->init, mem_node->init);
		}
	    }

	  /* Keep this node if it isn't a dying MEM foreign to DECL.  */
	  if (GET_CODE (loc->loc) != MEM
	      || (MEM_EXPR (loc->loc) == decl
		  && INT_MEM_OFFSET (loc->loc) == 0)
	      || !mem_dies_at_call (loc->loc))
	    {
	      if (old_loc != loc->loc && emit_notes)
		{
		  if (old_loc == var->var_part[0].cur_loc)
		    {
		      changed = true;
		      var->var_part[0].cur_loc = NULL;
		      var->cur_loc_changed = true;
		    }
		  add_value_chains (var->dv, loc->loc);
		  remove_value_chains (var->dv, old_loc);
		}
	      locp = &loc->next;
	      continue;
	    }

	  /* Otherwise drop the node.  */
	  if (emit_notes)
	    {
	      remove_value_chains (var->dv, old_loc);
	      if (old_loc == var->var_part[0].cur_loc)
		{
		  changed = true;
		  var->var_part[0].cur_loc = NULL;
		  var->cur_loc_changed = true;
		}
	    }
	  *locp = loc->next;
	  pool_free (loc_chain_pool, loc);
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
/* Remove all MEMs from the location list of a hash table entry for a
   value whose contents may die across a call.  htab_traverse
   callback; always returns 1.  */

static int
dataflow_set_remove_mem_locs (void **slot, void *data)
{
  dataflow_set *set = (dataflow_set *) data;
  variable var = (variable) *slot;

  if (dv_is_value_p (var->dv))
    {
      location_chain loc, *locp;
      bool changed = false;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Only unshare the variable if there is a dying MEM to
	     remove.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (GET_CODE (loc->loc) == MEM
		&& mem_dies_at_call (loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = (variable)*slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (GET_CODE (loc->loc) != MEM
	      || !mem_dies_at_call (loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  if (emit_notes)
	    remove_value_chains (var->dv, loc->loc);
	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (var->var_part[0].cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      var->cur_loc_changed = true;
	    }
	  pool_free (loc_chain_pool, loc);
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
4383 /* Remove all variable-location information about call-clobbered
4384 registers, as well as associations between MEMs and VALUEs. */
4386 static void
4387 dataflow_set_clear_at_call (dataflow_set *set)
4389 int r;
4391 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4392 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4393 var_regno_delete (set, r);
4395 if (MAY_HAVE_DEBUG_INSNS)
4397 set->traversed_vars = set->vars;
4398 htab_traverse (shared_hash_htab (set->vars),
4399 dataflow_set_preserve_mem_locs, set);
4400 set->traversed_vars = set->vars;
4401 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4402 set);
4403 set->traversed_vars = NULL;
4407 static bool
4408 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4410 location_chain lc1, lc2;
4412 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4414 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4416 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4418 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4419 break;
4421 if (rtx_equal_p (lc1->loc, lc2->loc))
4422 break;
4424 if (!lc2)
4425 return true;
4427 return false;
4430 /* Return true if one-part variables VAR1 and VAR2 are different.
4431 They must be in canonical order. */
4433 static bool
4434 onepart_variable_different_p (variable var1, variable var2)
4436 location_chain lc1, lc2;
4438 if (var1 == var2)
4439 return false;
4441 gcc_assert (var1->n_var_parts == 1
4442 && var2->n_var_parts == 1);
4444 lc1 = var1->var_part[0].loc_chain;
4445 lc2 = var2->var_part[0].loc_chain;
4447 gcc_assert (lc1 && lc2);
4449 while (lc1 && lc2)
4451 if (loc_cmp (lc1->loc, lc2->loc))
4452 return true;
4453 lc1 = lc1->next;
4454 lc2 = lc2->next;
4457 return lc1 != lc2;
4460 /* Return true if variables VAR1 and VAR2 are different. */
4462 static bool
4463 variable_different_p (variable var1, variable var2)
4465 int i;
4467 if (var1 == var2)
4468 return false;
4470 if (var1->n_var_parts != var2->n_var_parts)
4471 return true;
4473 for (i = 0; i < var1->n_var_parts; i++)
4475 if (var1->var_part[i].offset != var2->var_part[i].offset)
4476 return true;
4477 /* One-part values have locations in a canonical order. */
4478 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4480 gcc_assert (var1->n_var_parts == 1
4481 && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4482 return onepart_variable_different_p (var1, var2);
4484 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4485 return true;
4486 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4487 return true;
4489 return false;
4492 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4494 static bool
4495 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4497 htab_iterator hi;
4498 variable var1;
4500 if (old_set->vars == new_set->vars)
4501 return false;
4503 if (htab_elements (shared_hash_htab (old_set->vars))
4504 != htab_elements (shared_hash_htab (new_set->vars)))
4505 return true;
4507 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4509 htab_t htab = shared_hash_htab (new_set->vars);
4510 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4511 dv_htab_hash (var1->dv));
4512 if (!var2)
4514 if (dump_file && (dump_flags & TDF_DETAILS))
4516 fprintf (dump_file, "dataflow difference found: removal of:\n");
4517 dump_var (var1);
4519 return true;
4522 if (variable_different_p (var1, var2))
4524 if (dump_file && (dump_flags & TDF_DETAILS))
4526 fprintf (dump_file, "dataflow difference found: "
4527 "old and new follow:\n");
4528 dump_var (var1);
4529 dump_var (var2);
4531 return true;
4535 /* No need to traverse the second hashtab, if both have the same number
4536 of elements and the second one had all entries found in the first one,
4537 then it can't have any extra entries. */
4538 return false;
4541 /* Free the contents of dataflow set SET. */
4543 static void
4544 dataflow_set_destroy (dataflow_set *set)
4546 int i;
4548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4549 attrs_list_clear (&set->regs[i]);
4551 shared_hash_destroy (set->vars);
4552 set->vars = NULL;
4555 /* Return true if RTL X contains a SYMBOL_REF. */
4557 static bool
4558 contains_symbol_ref (rtx x)
4560 const char *fmt;
4561 RTX_CODE code;
4562 int i;
4564 if (!x)
4565 return false;
4567 code = GET_CODE (x);
4568 if (code == SYMBOL_REF)
4569 return true;
4571 fmt = GET_RTX_FORMAT (code);
4572 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4574 if (fmt[i] == 'e')
4576 if (contains_symbol_ref (XEXP (x, i)))
4577 return true;
4579 else if (fmt[i] == 'E')
4581 int j;
4582 for (j = 0; j < XVECLEN (x, i); j++)
4583 if (contains_symbol_ref (XVECEXP (x, i, j)))
4584 return true;
4588 return false;
/* Shall EXPR be tracked?  If NEED_RTL, EXPR must additionally have a
   name and RTL assigned.  Returns nonzero when tracking is desired;
   as a side effect clears DECL_CHANGED on EXPR and its real decl.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  /* Debug-expr decls are tracked whenever they have RTL.  */
  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (realdecl == NULL_TREE)
	realdecl = expr;
      else if (!DECL_P (realdecl))
	{
	  if (handled_component_p (realdecl))
	    {
	      /* A component reference is only trackable when it names
		 a small, fixed-size, non-static piece of a decl.  */
	      HOST_WIDE_INT bitsize, bitpos, maxsize;
	      tree innerdecl
		= get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
					   &maxsize);
	      if (!DECL_P (innerdecl)
		  || DECL_IGNORED_P (innerdecl)
		  || TREE_STATIC (innerdecl)
		  || bitsize <= 0
		  || bitpos + bitsize > 256
		  || bitsize != maxsize)
		return 0;
	      else
		realdecl = expr;
	    }
	  else
	    return 0;
	}
    }

  /* Do not track EXPR if REALDECL should be ignored for debugging
     purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if (GET_MODE (decl_rtl) == BLKmode
	  || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
	return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
	  && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
	return 0;
    }

  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}
4687 /* Determine whether a given LOC refers to the same variable part as
4688 EXPR+OFFSET. */
4690 static bool
4691 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4693 tree expr2;
4694 HOST_WIDE_INT offset2;
4696 if (! DECL_P (expr))
4697 return false;
4699 if (REG_P (loc))
4701 expr2 = REG_EXPR (loc);
4702 offset2 = REG_OFFSET (loc);
4704 else if (MEM_P (loc))
4706 expr2 = MEM_EXPR (loc);
4707 offset2 = INT_MEM_OFFSET (loc);
4709 else
4710 return false;
4712 if (! expr2 || ! DECL_P (expr2))
4713 return false;
4715 expr = var_debug_decl (expr);
4716 expr2 = var_debug_decl (expr2);
4718 return (expr == expr2 && offset == offset2);
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
	     enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  enum machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      enum machine_mode pseudo_mode;

      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
	{
	  /* Narrow to the original pseudo's mode, adjusting the
	     offset to the lowpart's position within LOC.  */
	  offset += byte_lowpart_offset (pseudo_mode, mode);
	  mode = pseudo_mode;
	}
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
       || (store_reg_p
	   && !COMPLEX_MODE_P (DECL_MODE (expr))
	   && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
      && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* Offsets outside the representable variable-part range cannot be
     tracked.  */
  if (offset < 0 || offset >= MAX_VAR_PARTS)
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = offset;
  return true;
}
4782 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4783 want to track. When returning nonnull, make sure that the attributes
4784 on the returned value are updated. */
4786 static rtx
4787 var_lowpart (enum machine_mode mode, rtx loc)
4789 unsigned int offset, reg_offset, regno;
4791 if (!REG_P (loc) && !MEM_P (loc))
4792 return NULL;
4794 if (GET_MODE (loc) == mode)
4795 return loc;
4797 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4799 if (MEM_P (loc))
4800 return adjust_address_nv (loc, mode, offset);
4802 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4803 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4804 reg_offset, mode);
4805 return gen_rtx_REG_offset (loc, mode, regno, offset);
4808 /* Carry information about uses and stores while walking rtx. */
4810 struct count_use_info
4812 /* The insn where the RTX is. */
4813 rtx insn;
4815 /* The basic block where insn is. */
4816 basic_block bb;
4818 /* The array of n_sets sets in the insn, as determined by cselib. */
4819 struct cselib_set *sets;
4820 int n_sets;
4822 /* True if we're counting stores, false otherwise. */
4823 bool store_p;
4826 /* Find a VALUE corresponding to X. */
4828 static inline cselib_val *
4829 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4831 int i;
4833 if (cui->sets)
4835 /* This is called after uses are set up and before stores are
4836 processed by cselib, so it's safe to look up srcs, but not
4837 dsts. So we look up expressions that appear in srcs or in
4838 dest expressions, but we search the sets array for dests of
4839 stores. */
4840 if (cui->store_p)
4842 /* Some targets represent memset and memcpy patterns
4843 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4844 (set (mem:BLK ...) (const_int ...)) or
4845 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4846 in that case, otherwise we end up with mode mismatches. */
4847 if (mode == BLKmode && MEM_P (x))
4848 return NULL;
4849 for (i = 0; i < cui->n_sets; i++)
4850 if (cui->sets[i].dest == x)
4851 return cui->sets[i].src_elt;
4853 else
4854 return cselib_lookup (x, mode, 0, VOIDmode);
4857 return NULL;
4860 /* Helper function to get mode of MEM's address. */
4862 static inline enum machine_mode
4863 get_address_mode (rtx mem)
4865 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4866 if (mode != VOIDmode)
4867 return mode;
4868 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4871 /* Replace all registers and addresses in an expression with VALUE
4872 expressions that map back to them, unless the expression is a
4873 register. If no mapping is or can be performed, returns NULL. */
4875 static rtx
4876 replace_expr_with_values (rtx loc)
4878 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
4879 return NULL;
4880 else if (MEM_P (loc))
4882 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4883 get_address_mode (loc), 0,
4884 GET_MODE (loc));
4885 if (addr)
4886 return replace_equiv_address_nv (loc, addr->val_rtx);
4887 else
4888 return NULL;
4890 else
4891 return cselib_subst_to_values (loc, VOIDmode);
4894 /* Determine what kind of micro operation to choose for a USE. Return
4895 MO_CLOBBER if no micro operation is to be generated. */
4897 static enum micro_operation_type
4898 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4900 tree expr;
4902 if (cui && cui->sets)
4904 if (GET_CODE (loc) == VAR_LOCATION)
4906 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4908 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4909 if (! VAR_LOC_UNKNOWN_P (ploc))
4911 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
4912 VOIDmode);
4914 /* ??? flag_float_store and volatile mems are never
4915 given values, but we could in theory use them for
4916 locations. */
4917 gcc_assert (val || 1);
4919 return MO_VAL_LOC;
4921 else
4922 return MO_CLOBBER;
4925 if (REG_P (loc) || MEM_P (loc))
4927 if (modep)
4928 *modep = GET_MODE (loc);
4929 if (cui->store_p)
4931 if (REG_P (loc)
4932 || (find_use_val (loc, GET_MODE (loc), cui)
4933 && cselib_lookup (XEXP (loc, 0),
4934 get_address_mode (loc), 0,
4935 GET_MODE (loc))))
4936 return MO_VAL_SET;
4938 else
4940 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4942 if (val && !cselib_preserved_value_p (val))
4943 return MO_VAL_USE;
4948 if (REG_P (loc))
4950 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4952 if (loc == cfa_base_rtx)
4953 return MO_CLOBBER;
4954 expr = REG_EXPR (loc);
4956 if (!expr)
4957 return MO_USE_NO_VAR;
4958 else if (target_for_debug_bind (var_debug_decl (expr)))
4959 return MO_CLOBBER;
4960 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4961 false, modep, NULL))
4962 return MO_USE;
4963 else
4964 return MO_USE_NO_VAR;
4966 else if (MEM_P (loc))
4968 expr = MEM_EXPR (loc);
4970 if (!expr)
4971 return MO_CLOBBER;
4972 else if (target_for_debug_bind (var_debug_decl (expr)))
4973 return MO_CLOBBER;
4974 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4975 false, modep, NULL))
4976 return MO_USE;
4977 else
4978 return MO_CLOBBER;
4981 return MO_CLOBBER;
4984 /* Log to OUT information about micro-operation MOPT involving X in
4985 INSN of BB. */
4987 static inline void
4988 log_op_type (rtx x, basic_block bb, rtx insn,
4989 enum micro_operation_type mopt, FILE *out)
4991 fprintf (out, "bb %i op %i insn %i %s ",
4992 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4993 INSN_UID (insn), micro_operation_type_name[mopt]);
4994 print_inline_rtx (out, x, 2);
4995 fputc ('\n', out);
4998 /* Tell whether the CONCAT used to holds a VALUE and its location
4999 needs value resolution, i.e., an attempt of mapping the location
5000 back to other incoming values. */
5001 #define VAL_NEEDS_RESOLUTION(x) \
5002 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5003 /* Whether the location in the CONCAT is a tracked expression, that
5004 should also be handled like a MO_USE. */
5005 #define VAL_HOLDS_TRACK_EXPR(x) \
5006 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5007 /* Whether the location in the CONCAT should be handled like a MO_COPY
5008 as well. */
5009 #define VAL_EXPR_IS_COPIED(x) \
5010 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5011 /* Whether the location in the CONCAT should be handled like a
5012 MO_CLOBBER as well. */
5013 #define VAL_EXPR_IS_CLOBBERED(x) \
5014 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5015 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
5016 a reverse operation that should be handled afterwards. */
5017 #define VAL_EXPR_HAS_REVERSE(x) \
5018 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
5020 /* All preserved VALUEs. */
5021 static VEC (rtx, heap) *preserved_values;
5023 /* Registers used in the current function for passing parameters. */
5024 static HARD_REG_SET argument_reg_set;
5026 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5028 static void
5029 preserve_value (cselib_val *val)
5031 cselib_preserve_value (val);
5032 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5035 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5036 any rtxes not suitable for CONST use not replaced by VALUEs
5037 are discovered. */
5039 static int
5040 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5042 if (*x == NULL_RTX)
5043 return 0;
5045 switch (GET_CODE (*x))
5047 case REG:
5048 case DEBUG_EXPR:
5049 case PC:
5050 case SCRATCH:
5051 case CC0:
5052 case ASM_INPUT:
5053 case ASM_OPERANDS:
5054 return 1;
5055 case MEM:
5056 return !MEM_READONLY_P (*x);
5057 default:
5058 return 0;
5062 /* Add uses (register and memory references) LOC which will be tracked
5063 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5065 static int
5066 add_uses (rtx *ploc, void *data)
5068 rtx loc = *ploc;
5069 enum machine_mode mode = VOIDmode;
5070 struct count_use_info *cui = (struct count_use_info *)data;
5071 enum micro_operation_type type = use_type (loc, cui, &mode);
5073 if (type != MO_CLOBBER)
5075 basic_block bb = cui->bb;
5076 micro_operation mo;
5078 mo.type = type;
5079 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5080 mo.insn = cui->insn;
5082 if (type == MO_VAL_LOC)
5084 rtx oloc = loc;
5085 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5086 cselib_val *val;
5088 gcc_assert (cui->sets);
5090 if (MEM_P (vloc)
5091 && !REG_P (XEXP (vloc, 0))
5092 && !MEM_P (XEXP (vloc, 0))
5093 && GET_CODE (XEXP (vloc, 0)) != ENTRY_VALUE
5094 && (GET_CODE (XEXP (vloc, 0)) != PLUS
5095 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
5096 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
5098 rtx mloc = vloc;
5099 enum machine_mode address_mode = get_address_mode (mloc);
5100 cselib_val *val
5101 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5102 GET_MODE (mloc));
5104 if (val && !cselib_preserved_value_p (val))
5106 micro_operation moa;
5107 preserve_value (val);
5108 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5109 GET_MODE (mloc));
5110 moa.type = MO_VAL_USE;
5111 moa.insn = cui->insn;
5112 moa.u.loc = gen_rtx_CONCAT (address_mode,
5113 val->val_rtx, mloc);
5114 if (dump_file && (dump_flags & TDF_DETAILS))
5115 log_op_type (moa.u.loc, cui->bb, cui->insn,
5116 moa.type, dump_file);
5117 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5121 if (CONSTANT_P (vloc)
5122 && (GET_CODE (vloc) != CONST
5123 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5124 /* For constants don't look up any value. */;
5125 else if (!VAR_LOC_UNKNOWN_P (vloc)
5126 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5128 enum machine_mode mode2;
5129 enum micro_operation_type type2;
5130 rtx nloc = replace_expr_with_values (vloc);
5132 if (nloc)
5134 oloc = shallow_copy_rtx (oloc);
5135 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5138 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5140 type2 = use_type (vloc, 0, &mode2);
5142 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5143 || type2 == MO_CLOBBER);
5145 if (type2 == MO_CLOBBER
5146 && !cselib_preserved_value_p (val))
5148 VAL_NEEDS_RESOLUTION (oloc) = 1;
5149 preserve_value (val);
5152 else if (!VAR_LOC_UNKNOWN_P (vloc))
5154 oloc = shallow_copy_rtx (oloc);
5155 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5158 mo.u.loc = oloc;
5160 else if (type == MO_VAL_USE)
5162 enum machine_mode mode2 = VOIDmode;
5163 enum micro_operation_type type2;
5164 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5165 rtx vloc, oloc = loc, nloc;
5167 gcc_assert (cui->sets);
5169 if (MEM_P (oloc)
5170 && !REG_P (XEXP (oloc, 0))
5171 && !MEM_P (XEXP (oloc, 0))
5172 && GET_CODE (XEXP (oloc, 0)) != ENTRY_VALUE
5173 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5174 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5175 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5177 rtx mloc = oloc;
5178 enum machine_mode address_mode = get_address_mode (mloc);
5179 cselib_val *val
5180 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5181 GET_MODE (mloc));
5183 if (val && !cselib_preserved_value_p (val))
5185 micro_operation moa;
5186 preserve_value (val);
5187 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5188 GET_MODE (mloc));
5189 moa.type = MO_VAL_USE;
5190 moa.insn = cui->insn;
5191 moa.u.loc = gen_rtx_CONCAT (address_mode,
5192 val->val_rtx, mloc);
5193 if (dump_file && (dump_flags & TDF_DETAILS))
5194 log_op_type (moa.u.loc, cui->bb, cui->insn,
5195 moa.type, dump_file);
5196 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5200 type2 = use_type (loc, 0, &mode2);
5202 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5203 || type2 == MO_CLOBBER);
5205 if (type2 == MO_USE)
5206 vloc = var_lowpart (mode2, loc);
5207 else
5208 vloc = oloc;
5210 /* The loc of a MO_VAL_USE may have two forms:
5212 (concat val src): val is at src, a value-based
5213 representation.
5215 (concat (concat val use) src): same as above, with use as
5216 the MO_USE tracked value, if it differs from src.
5220 nloc = replace_expr_with_values (loc);
5221 if (!nloc)
5222 nloc = oloc;
5224 if (vloc != nloc)
5225 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5226 else
5227 oloc = val->val_rtx;
5229 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5231 if (type2 == MO_USE)
5232 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5233 if (!cselib_preserved_value_p (val))
5235 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5236 preserve_value (val);
5239 else
5240 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5242 if (dump_file && (dump_flags & TDF_DETAILS))
5243 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5244 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5247 return 0;
5250 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5252 static void
5253 add_uses_1 (rtx *x, void *cui)
5255 for_each_rtx (x, add_uses, cui);
5258 #define EXPR_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5260 /* Attempt to reverse the EXPR operation in the debug info. Say for
5261 reg1 = reg2 + 6 even when reg2 is no longer live we
5262 can express its value as VAL - 6. */
5264 static rtx
5265 reverse_op (rtx val, const_rtx expr)
5267 rtx src, arg, ret;
5268 cselib_val *v;
5269 enum rtx_code code;
5271 if (GET_CODE (expr) != SET)
5272 return NULL_RTX;
5274 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5275 return NULL_RTX;
5277 src = SET_SRC (expr);
5278 switch (GET_CODE (src))
5280 case PLUS:
5281 case MINUS:
5282 case XOR:
5283 case NOT:
5284 case NEG:
5285 if (!REG_P (XEXP (src, 0)))
5286 return NULL_RTX;
5287 break;
5288 case SIGN_EXTEND:
5289 case ZERO_EXTEND:
5290 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5291 return NULL_RTX;
5292 break;
5293 default:
5294 return NULL_RTX;
5297 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5298 return NULL_RTX;
5300 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5301 if (!v || !cselib_preserved_value_p (v))
5302 return NULL_RTX;
5304 switch (GET_CODE (src))
5306 case NOT:
5307 case NEG:
5308 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5309 return NULL_RTX;
5310 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5311 break;
5312 case SIGN_EXTEND:
5313 case ZERO_EXTEND:
5314 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5315 break;
5316 case XOR:
5317 code = XOR;
5318 goto binary;
5319 case PLUS:
5320 code = MINUS;
5321 goto binary;
5322 case MINUS:
5323 code = PLUS;
5324 goto binary;
5325 binary:
5326 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5327 return NULL_RTX;
5328 arg = XEXP (src, 1);
5329 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5331 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5332 if (arg == NULL_RTX)
5333 return NULL_RTX;
5334 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5335 return NULL_RTX;
5337 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5338 if (ret == val)
5339 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5340 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5341 breaks a lot of routines during var-tracking. */
5342 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5343 break;
5344 default:
5345 gcc_unreachable ();
5348 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5351 /* Add stores (register and memory references) LOC which will be tracked
5352 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5353 CUIP->insn is instruction which the LOC is part of. */
5355 static void
5356 add_stores (rtx loc, const_rtx expr, void *cuip)
5358 enum machine_mode mode = VOIDmode, mode2;
5359 struct count_use_info *cui = (struct count_use_info *)cuip;
5360 basic_block bb = cui->bb;
5361 micro_operation mo;
5362 rtx oloc = loc, nloc, src = NULL;
5363 enum micro_operation_type type = use_type (loc, cui, &mode);
5364 bool track_p = false;
5365 cselib_val *v;
5366 bool resolve, preserve;
5367 rtx reverse;
5369 if (type == MO_CLOBBER)
5370 return;
5372 mode2 = mode;
5374 if (REG_P (loc))
5376 gcc_assert (loc != cfa_base_rtx);
5377 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5378 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5379 || GET_CODE (expr) == CLOBBER)
5381 mo.type = MO_CLOBBER;
5382 mo.u.loc = loc;
5383 if (GET_CODE (expr) == SET
5384 && SET_DEST (expr) == loc
5385 && REGNO (loc) < FIRST_PSEUDO_REGISTER
5386 && TEST_HARD_REG_BIT (argument_reg_set, REGNO (loc))
5387 && find_use_val (loc, mode, cui)
5388 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5390 gcc_checking_assert (type == MO_VAL_SET);
5391 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5394 else
5396 if (GET_CODE (expr) == SET
5397 && SET_DEST (expr) == loc
5398 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5399 src = var_lowpart (mode2, SET_SRC (expr));
5400 loc = var_lowpart (mode2, loc);
5402 if (src == NULL)
5404 mo.type = MO_SET;
5405 mo.u.loc = loc;
5407 else
5409 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5410 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5411 mo.type = MO_COPY;
5412 else
5413 mo.type = MO_SET;
5414 mo.u.loc = xexpr;
5417 mo.insn = cui->insn;
5419 else if (MEM_P (loc)
5420 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5421 || cui->sets))
5423 if (MEM_P (loc) && type == MO_VAL_SET
5424 && !REG_P (XEXP (loc, 0))
5425 && !MEM_P (XEXP (loc, 0))
5426 && GET_CODE (XEXP (loc, 0)) != ENTRY_VALUE
5427 && (GET_CODE (XEXP (loc, 0)) != PLUS
5428 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5429 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5431 rtx mloc = loc;
5432 enum machine_mode address_mode = get_address_mode (mloc);
5433 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5434 address_mode, 0,
5435 GET_MODE (mloc));
5437 if (val && !cselib_preserved_value_p (val))
5439 preserve_value (val);
5440 mo.type = MO_VAL_USE;
5441 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5442 GET_MODE (mloc));
5443 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5444 mo.insn = cui->insn;
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5446 log_op_type (mo.u.loc, cui->bb, cui->insn,
5447 mo.type, dump_file);
5448 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5452 if (GET_CODE (expr) == CLOBBER || !track_p)
5454 mo.type = MO_CLOBBER;
5455 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5457 else
5459 if (GET_CODE (expr) == SET
5460 && SET_DEST (expr) == loc
5461 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5462 src = var_lowpart (mode2, SET_SRC (expr));
5463 loc = var_lowpart (mode2, loc);
5465 if (src == NULL)
5467 mo.type = MO_SET;
5468 mo.u.loc = loc;
5470 else
5472 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5473 if (same_variable_part_p (SET_SRC (xexpr),
5474 MEM_EXPR (loc),
5475 INT_MEM_OFFSET (loc)))
5476 mo.type = MO_COPY;
5477 else
5478 mo.type = MO_SET;
5479 mo.u.loc = xexpr;
5482 mo.insn = cui->insn;
5484 else
5485 return;
5487 if (type != MO_VAL_SET)
5488 goto log_and_return;
5490 v = find_use_val (oloc, mode, cui);
5492 if (!v)
5493 goto log_and_return;
5495 resolve = preserve = !cselib_preserved_value_p (v);
5497 nloc = replace_expr_with_values (oloc);
5498 if (nloc)
5499 oloc = nloc;
5501 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5503 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5505 gcc_assert (oval != v);
5506 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5508 if (!cselib_preserved_value_p (oval))
5510 micro_operation moa;
5512 preserve_value (oval);
5514 moa.type = MO_VAL_USE;
5515 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5516 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5517 moa.insn = cui->insn;
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5520 log_op_type (moa.u.loc, cui->bb, cui->insn,
5521 moa.type, dump_file);
5522 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5525 resolve = false;
5527 else if (resolve && GET_CODE (mo.u.loc) == SET)
5529 nloc = replace_expr_with_values (SET_SRC (expr));
5531 /* Avoid the mode mismatch between oexpr and expr. */
5532 if (!nloc && mode != mode2)
5534 nloc = SET_SRC (expr);
5535 gcc_assert (oloc == SET_DEST (expr));
5538 if (nloc)
5539 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5540 else
5542 if (oloc == SET_DEST (mo.u.loc))
5543 /* No point in duplicating. */
5544 oloc = mo.u.loc;
5545 if (!REG_P (SET_SRC (mo.u.loc)))
5546 resolve = false;
5549 else if (!resolve)
5551 if (GET_CODE (mo.u.loc) == SET
5552 && oloc == SET_DEST (mo.u.loc))
5553 /* No point in duplicating. */
5554 oloc = mo.u.loc;
5556 else
5557 resolve = false;
5559 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5561 if (mo.u.loc != oloc)
5562 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5564 /* The loc of a MO_VAL_SET may have various forms:
5566 (concat val dst): dst now holds val
5568 (concat val (set dst src)): dst now holds val, copied from src
5570 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5571 after replacing mems and non-top-level regs with values.
5573 (concat (concat val dstv) (set dst src)): dst now holds val,
5574 copied from src. dstv is a value-based representation of dst, if
5575 it differs from dst. If resolution is needed, src is a REG, and
5576 its mode is the same as that of val.
5578 (concat (concat val (set dstv srcv)) (set dst src)): src
5579 copied to dst, holding val. dstv and srcv are value-based
5580 representations of dst and src, respectively.
5584 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5586 reverse = reverse_op (v->val_rtx, expr);
5587 if (reverse)
5589 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5590 VAL_EXPR_HAS_REVERSE (loc) = 1;
5594 mo.u.loc = loc;
5596 if (track_p)
5597 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5598 if (preserve)
5600 VAL_NEEDS_RESOLUTION (loc) = resolve;
5601 preserve_value (v);
5603 if (mo.type == MO_CLOBBER)
5604 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5605 if (mo.type == MO_COPY)
5606 VAL_EXPR_IS_COPIED (loc) = 1;
5608 mo.type = MO_VAL_SET;
5610 log_and_return:
5611 if (dump_file && (dump_flags & TDF_DETAILS))
5612 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5613 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5616 /* Arguments to the call. */
5617 static rtx call_arguments;
5619 /* Compute call_arguments. */
5621 static void
5622 prepare_call_arguments (basic_block bb, rtx insn)
5624 rtx link, x;
5625 rtx prev, cur, next;
5626 rtx call = PATTERN (insn);
5627 rtx this_arg = NULL_RTX;
5628 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5629 tree obj_type_ref = NULL_TREE;
5630 CUMULATIVE_ARGS args_so_far_v;
5631 cumulative_args_t args_so_far;
5633 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5634 args_so_far = pack_cumulative_args (&args_so_far_v);
5635 if (GET_CODE (call) == PARALLEL)
5636 call = XVECEXP (call, 0, 0);
5637 if (GET_CODE (call) == SET)
5638 call = SET_SRC (call);
5639 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5641 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5643 rtx symbol = XEXP (XEXP (call, 0), 0);
5644 if (SYMBOL_REF_DECL (symbol))
5645 fndecl = SYMBOL_REF_DECL (symbol);
5647 if (fndecl == NULL_TREE)
5648 fndecl = MEM_EXPR (XEXP (call, 0));
5649 if (fndecl
5650 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5651 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5652 fndecl = NULL_TREE;
5653 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5654 type = TREE_TYPE (fndecl);
5655 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5657 if (TREE_CODE (fndecl) == INDIRECT_REF
5658 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5659 obj_type_ref = TREE_OPERAND (fndecl, 0);
5660 fndecl = NULL_TREE;
5662 if (type)
5664 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5665 t = TREE_CHAIN (t))
5666 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5667 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5668 break;
5669 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5670 type = NULL;
5671 else
5673 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5674 link = CALL_INSN_FUNCTION_USAGE (insn);
5675 #ifndef PCC_STATIC_STRUCT_RETURN
5676 if (aggregate_value_p (TREE_TYPE (type), type)
5677 && targetm.calls.struct_value_rtx (type, 0) == 0)
5679 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5680 enum machine_mode mode = TYPE_MODE (struct_addr);
5681 rtx reg;
5682 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5683 nargs + 1);
5684 reg = targetm.calls.function_arg (args_so_far, mode,
5685 struct_addr, true);
5686 targetm.calls.function_arg_advance (args_so_far, mode,
5687 struct_addr, true);
5688 if (reg == NULL_RTX)
5690 for (; link; link = XEXP (link, 1))
5691 if (GET_CODE (XEXP (link, 0)) == USE
5692 && MEM_P (XEXP (XEXP (link, 0), 0)))
5694 link = XEXP (link, 1);
5695 break;
5699 else
5700 #endif
5701 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5702 nargs);
5703 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5705 enum machine_mode mode;
5706 t = TYPE_ARG_TYPES (type);
5707 mode = TYPE_MODE (TREE_VALUE (t));
5708 this_arg = targetm.calls.function_arg (args_so_far, mode,
5709 TREE_VALUE (t), true);
5710 if (this_arg && !REG_P (this_arg))
5711 this_arg = NULL_RTX;
5712 else if (this_arg == NULL_RTX)
5714 for (; link; link = XEXP (link, 1))
5715 if (GET_CODE (XEXP (link, 0)) == USE
5716 && MEM_P (XEXP (XEXP (link, 0), 0)))
5718 this_arg = XEXP (XEXP (link, 0), 0);
5719 break;
5726 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5728 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5729 if (GET_CODE (XEXP (link, 0)) == USE)
5731 rtx item = NULL_RTX;
5732 x = XEXP (XEXP (link, 0), 0);
5733 if (GET_MODE (link) == VOIDmode
5734 || GET_MODE (link) == BLKmode
5735 || (GET_MODE (link) != GET_MODE (x)
5736 && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
5737 || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
5738 /* Can't do anything for these, if the original type mode
5739 isn't known or can't be converted. */;
5740 else if (REG_P (x))
5742 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5743 if (val && cselib_preserved_value_p (val))
5744 item = val->val_rtx;
5745 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5747 enum machine_mode mode = GET_MODE (x);
5749 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5750 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5752 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5754 if (reg == NULL_RTX || !REG_P (reg))
5755 continue;
5756 val = cselib_lookup (reg, mode, 0, VOIDmode);
5757 if (val && cselib_preserved_value_p (val))
5759 item = val->val_rtx;
5760 break;
5765 else if (MEM_P (x))
5767 rtx mem = x;
5768 cselib_val *val;
5770 if (!frame_pointer_needed)
5772 struct adjust_mem_data amd;
5773 amd.mem_mode = VOIDmode;
5774 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5775 amd.side_effects = NULL_RTX;
5776 amd.store = true;
5777 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5778 &amd);
5779 gcc_assert (amd.side_effects == NULL_RTX);
5781 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5782 if (val && cselib_preserved_value_p (val))
5783 item = val->val_rtx;
5784 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
5786 /* For non-integer stack argument see also if they weren't
5787 initialized by integers. */
5788 enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
5789 if (imode != GET_MODE (mem) && imode != BLKmode)
5791 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
5792 imode, 0, VOIDmode);
5793 if (val && cselib_preserved_value_p (val))
5794 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
5795 imode);
5799 if (item)
5801 rtx x2 = x;
5802 if (GET_MODE (item) != GET_MODE (link))
5803 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
5804 if (GET_MODE (x2) != GET_MODE (link))
5805 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
5806 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
5807 call_arguments
5808 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5810 if (t && t != void_list_node)
5812 tree argtype = TREE_VALUE (t);
5813 enum machine_mode mode = TYPE_MODE (argtype);
5814 rtx reg;
5815 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5817 argtype = build_pointer_type (argtype);
5818 mode = TYPE_MODE (argtype);
5820 reg = targetm.calls.function_arg (args_so_far, mode,
5821 argtype, true);
5822 if (TREE_CODE (argtype) == REFERENCE_TYPE
5823 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5824 && reg
5825 && REG_P (reg)
5826 && GET_MODE (reg) == mode
5827 && GET_MODE_CLASS (mode) == MODE_INT
5828 && REG_P (x)
5829 && REGNO (x) == REGNO (reg)
5830 && GET_MODE (x) == mode
5831 && item)
5833 enum machine_mode indmode
5834 = TYPE_MODE (TREE_TYPE (argtype));
5835 rtx mem = gen_rtx_MEM (indmode, x);
5836 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5837 if (val && cselib_preserved_value_p (val))
5839 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5840 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5841 call_arguments);
5843 else
5845 struct elt_loc_list *l;
5846 tree initial;
5848 /* Try harder, when passing address of a constant
5849 pool integer it can be easily read back. */
5850 item = XEXP (item, 1);
5851 if (GET_CODE (item) == SUBREG)
5852 item = SUBREG_REG (item);
5853 gcc_assert (GET_CODE (item) == VALUE);
5854 val = CSELIB_VAL_PTR (item);
5855 for (l = val->locs; l; l = l->next)
5856 if (GET_CODE (l->loc) == SYMBOL_REF
5857 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5858 && SYMBOL_REF_DECL (l->loc)
5859 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5861 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
5862 if (host_integerp (initial, 0))
5864 item = GEN_INT (tree_low_cst (initial, 0));
5865 item = gen_rtx_CONCAT (indmode, mem, item);
5866 call_arguments
5867 = gen_rtx_EXPR_LIST (VOIDmode, item,
5868 call_arguments);
5870 break;
5874 targetm.calls.function_arg_advance (args_so_far, mode,
5875 argtype, true);
5876 t = TREE_CHAIN (t);
5880 /* Add debug arguments. */
5881 if (fndecl
5882 && TREE_CODE (fndecl) == FUNCTION_DECL
5883 && DECL_HAS_DEBUG_ARGS_P (fndecl))
5885 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
5886 if (debug_args)
5888 unsigned int ix;
5889 tree param;
5890 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
5892 rtx item;
5893 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
5894 enum machine_mode mode = DECL_MODE (dtemp);
5895 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
5896 item = gen_rtx_CONCAT (mode, item, DECL_RTL (dtemp));
5897 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5898 call_arguments);
5903 /* Reverse call_arguments chain. */
5904 prev = NULL_RTX;
5905 for (cur = call_arguments; cur; cur = next)
5907 next = XEXP (cur, 1);
5908 XEXP (cur, 1) = prev;
5909 prev = cur;
5911 call_arguments = prev;
5913 x = PATTERN (insn);
5914 if (GET_CODE (x) == PARALLEL)
5915 x = XVECEXP (x, 0, 0);
5916 if (GET_CODE (x) == SET)
5917 x = SET_SRC (x);
5918 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
5920 x = XEXP (XEXP (x, 0), 0);
5921 if (GET_CODE (x) == SYMBOL_REF)
5922 /* Don't record anything. */;
5923 else if (CONSTANT_P (x))
5925 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
5926 pc_rtx, x);
5927 call_arguments
5928 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5930 else
5932 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5933 if (val && cselib_preserved_value_p (val))
5935 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
5936 call_arguments
5937 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5941 if (this_arg)
5943 enum machine_mode mode
5944 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
5945 rtx clobbered = gen_rtx_MEM (mode, this_arg);
5946 HOST_WIDE_INT token
5947 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
5948 if (token)
5949 clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
5950 clobbered = gen_rtx_MEM (mode, clobbered);
5951 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
5952 call_arguments
5953 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5957 /* Callback for cselib_record_sets_hook, that records as micro
5958 operations uses and stores in an insn after cselib_record_sets has
5959 analyzed the sets in an insn, but before it modifies the stored
5960 values in the internal tables, unless cselib_record_sets doesn't
5961 call it directly (perhaps because we're not doing cselib in the
5962 first place, in which case sets and n_sets will be 0). */
5964 static void
5965 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5967 basic_block bb = BLOCK_FOR_INSN (insn);
5968 int n1, n2;
5969 struct count_use_info cui;
5970 micro_operation *mos;
5972 cselib_hook_called = true;
5974 cui.insn = insn;
5975 cui.bb = bb;
5976 cui.sets = sets;
5977 cui.n_sets = n_sets;
5979 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5980 cui.store_p = false;
5981 note_uses (&PATTERN (insn), add_uses_1, &cui);
5982 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5983 mos = VEC_address (micro_operation, VTI (bb)->mos);
5985 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5986 MO_VAL_LOC last. */
5987 while (n1 < n2)
5989 while (n1 < n2 && mos[n1].type == MO_USE)
5990 n1++;
5991 while (n1 < n2 && mos[n2].type != MO_USE)
5992 n2--;
5993 if (n1 < n2)
5995 micro_operation sw;
5997 sw = mos[n1];
5998 mos[n1] = mos[n2];
5999 mos[n2] = sw;
6003 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6004 while (n1 < n2)
6006 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6007 n1++;
6008 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6009 n2--;
6010 if (n1 < n2)
6012 micro_operation sw;
6014 sw = mos[n1];
6015 mos[n1] = mos[n2];
6016 mos[n2] = sw;
6020 if (CALL_P (insn))
6022 micro_operation mo;
6024 mo.type = MO_CALL;
6025 mo.insn = insn;
6026 mo.u.loc = call_arguments;
6027 call_arguments = NULL_RTX;
6029 if (dump_file && (dump_flags & TDF_DETAILS))
6030 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6031 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6034 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6035 /* This will record NEXT_INSN (insn), such that we can
6036 insert notes before it without worrying about any
6037 notes that MO_USEs might emit after the insn. */
6038 cui.store_p = true;
6039 note_stores (PATTERN (insn), add_stores, &cui);
6040 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6041 mos = VEC_address (micro_operation, VTI (bb)->mos);
6043 /* Order the MO_VAL_USEs first (note_stores does nothing
6044 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6045 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6046 while (n1 < n2)
6048 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6049 n1++;
6050 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6051 n2--;
6052 if (n1 < n2)
6054 micro_operation sw;
6056 sw = mos[n1];
6057 mos[n1] = mos[n2];
6058 mos[n2] = sw;
6062 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6063 while (n1 < n2)
6065 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6066 n1++;
6067 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6068 n2--;
6069 if (n1 < n2)
6071 micro_operation sw;
6073 sw = mos[n1];
6074 mos[n1] = mos[n2];
6075 mos[n2] = sw;
6080 static enum var_init_status
6081 find_src_status (dataflow_set *in, rtx src)
6083 tree decl = NULL_TREE;
6084 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6086 if (! flag_var_tracking_uninit)
6087 status = VAR_INIT_STATUS_INITIALIZED;
6089 if (src && REG_P (src))
6090 decl = var_debug_decl (REG_EXPR (src));
6091 else if (src && MEM_P (src))
6092 decl = var_debug_decl (MEM_EXPR (src));
6094 if (src && decl)
6095 status = get_init_value (in, src, dv_from_decl (decl));
6097 return status;
6100 /* SRC is the source of an assignment. Use SET to try to find what
6101 was ultimately assigned to SRC. Return that value if known,
6102 otherwise return SRC itself. */
6104 static rtx
6105 find_src_set_src (dataflow_set *set, rtx src)
6107 tree decl = NULL_TREE; /* The variable being copied around. */
6108 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6109 variable var;
6110 location_chain nextp;
6111 int i;
6112 bool found;
6114 if (src && REG_P (src))
6115 decl = var_debug_decl (REG_EXPR (src));
6116 else if (src && MEM_P (src))
6117 decl = var_debug_decl (MEM_EXPR (src));
6119 if (src && decl)
6121 decl_or_value dv = dv_from_decl (decl);
6123 var = shared_hash_find (set->vars, dv);
6124 if (var)
6126 found = false;
6127 for (i = 0; i < var->n_var_parts && !found; i++)
6128 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6129 nextp = nextp->next)
6130 if (rtx_equal_p (nextp->loc, src))
6132 set_src = nextp->set_src;
6133 found = true;
6139 return set_src;
6142 /* Compute the changes of variable locations in the basic block BB. */
6144 static bool
6145 compute_bb_dataflow (basic_block bb)
6147 unsigned int i;
6148 micro_operation *mo;
6149 bool changed;
6150 dataflow_set old_out;
6151 dataflow_set *in = &VTI (bb)->in;
6152 dataflow_set *out = &VTI (bb)->out;
6154 dataflow_set_init (&old_out);
6155 dataflow_set_copy (&old_out, out);
6156 dataflow_set_copy (out, in);
6158 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6160 rtx insn = mo->insn;
6162 switch (mo->type)
6164 case MO_CALL:
6165 dataflow_set_clear_at_call (out);
6166 break;
6168 case MO_USE:
6170 rtx loc = mo->u.loc;
6172 if (REG_P (loc))
6173 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6174 else if (MEM_P (loc))
6175 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6177 break;
6179 case MO_VAL_LOC:
6181 rtx loc = mo->u.loc;
6182 rtx val, vloc;
6183 tree var;
6185 if (GET_CODE (loc) == CONCAT)
6187 val = XEXP (loc, 0);
6188 vloc = XEXP (loc, 1);
6190 else
6192 val = NULL_RTX;
6193 vloc = loc;
6196 var = PAT_VAR_LOCATION_DECL (vloc);
6198 clobber_variable_part (out, NULL_RTX,
6199 dv_from_decl (var), 0, NULL_RTX);
6200 if (val)
6202 if (VAL_NEEDS_RESOLUTION (loc))
6203 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6204 set_variable_part (out, val, dv_from_decl (var), 0,
6205 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6206 INSERT);
6208 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6209 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6210 dv_from_decl (var), 0,
6211 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6212 INSERT);
6214 break;
6216 case MO_VAL_USE:
6218 rtx loc = mo->u.loc;
6219 rtx val, vloc, uloc;
6221 vloc = uloc = XEXP (loc, 1);
6222 val = XEXP (loc, 0);
6224 if (GET_CODE (val) == CONCAT)
6226 uloc = XEXP (val, 1);
6227 val = XEXP (val, 0);
6230 if (VAL_NEEDS_RESOLUTION (loc))
6231 val_resolve (out, val, vloc, insn);
6232 else
6233 val_store (out, val, uloc, insn, false);
6235 if (VAL_HOLDS_TRACK_EXPR (loc))
6237 if (GET_CODE (uloc) == REG)
6238 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6239 NULL);
6240 else if (GET_CODE (uloc) == MEM)
6241 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6242 NULL);
6245 break;
6247 case MO_VAL_SET:
6249 rtx loc = mo->u.loc;
6250 rtx val, vloc, uloc, reverse = NULL_RTX;
6252 vloc = loc;
6253 if (VAL_EXPR_HAS_REVERSE (loc))
6255 reverse = XEXP (loc, 1);
6256 vloc = XEXP (loc, 0);
6258 uloc = XEXP (vloc, 1);
6259 val = XEXP (vloc, 0);
6260 vloc = uloc;
6262 if (GET_CODE (val) == CONCAT)
6264 vloc = XEXP (val, 1);
6265 val = XEXP (val, 0);
6268 if (GET_CODE (vloc) == SET)
6270 rtx vsrc = SET_SRC (vloc);
6272 gcc_assert (val != vsrc);
6273 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6275 vloc = SET_DEST (vloc);
6277 if (VAL_NEEDS_RESOLUTION (loc))
6278 val_resolve (out, val, vsrc, insn);
6280 else if (VAL_NEEDS_RESOLUTION (loc))
6282 gcc_assert (GET_CODE (uloc) == SET
6283 && GET_CODE (SET_SRC (uloc)) == REG);
6284 val_resolve (out, val, SET_SRC (uloc), insn);
6287 if (VAL_HOLDS_TRACK_EXPR (loc))
6289 if (VAL_EXPR_IS_CLOBBERED (loc))
6291 if (REG_P (uloc))
6292 var_reg_delete (out, uloc, true);
6293 else if (MEM_P (uloc))
6294 var_mem_delete (out, uloc, true);
6296 else
6298 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6299 rtx set_src = NULL;
6300 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6302 if (GET_CODE (uloc) == SET)
6304 set_src = SET_SRC (uloc);
6305 uloc = SET_DEST (uloc);
6308 if (copied_p)
6310 if (flag_var_tracking_uninit)
6312 status = find_src_status (in, set_src);
6314 if (status == VAR_INIT_STATUS_UNKNOWN)
6315 status = find_src_status (out, set_src);
6318 set_src = find_src_set_src (in, set_src);
6321 if (REG_P (uloc))
6322 var_reg_delete_and_set (out, uloc, !copied_p,
6323 status, set_src);
6324 else if (MEM_P (uloc))
6325 var_mem_delete_and_set (out, uloc, !copied_p,
6326 status, set_src);
6329 else if (REG_P (uloc))
6330 var_regno_delete (out, REGNO (uloc));
6332 val_store (out, val, vloc, insn, true);
6334 if (reverse)
6335 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
6336 insn, false);
6338 break;
6340 case MO_SET:
6342 rtx loc = mo->u.loc;
6343 rtx set_src = NULL;
6345 if (GET_CODE (loc) == SET)
6347 set_src = SET_SRC (loc);
6348 loc = SET_DEST (loc);
6351 if (REG_P (loc))
6352 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6353 set_src);
6354 else if (MEM_P (loc))
6355 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6356 set_src);
6358 break;
6360 case MO_COPY:
6362 rtx loc = mo->u.loc;
6363 enum var_init_status src_status;
6364 rtx set_src = NULL;
6366 if (GET_CODE (loc) == SET)
6368 set_src = SET_SRC (loc);
6369 loc = SET_DEST (loc);
6372 if (! flag_var_tracking_uninit)
6373 src_status = VAR_INIT_STATUS_INITIALIZED;
6374 else
6376 src_status = find_src_status (in, set_src);
6378 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6379 src_status = find_src_status (out, set_src);
6382 set_src = find_src_set_src (in, set_src);
6384 if (REG_P (loc))
6385 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6386 else if (MEM_P (loc))
6387 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6389 break;
6391 case MO_USE_NO_VAR:
6393 rtx loc = mo->u.loc;
6395 if (REG_P (loc))
6396 var_reg_delete (out, loc, false);
6397 else if (MEM_P (loc))
6398 var_mem_delete (out, loc, false);
6400 break;
6402 case MO_CLOBBER:
6404 rtx loc = mo->u.loc;
6406 if (REG_P (loc))
6407 var_reg_delete (out, loc, true);
6408 else if (MEM_P (loc))
6409 var_mem_delete (out, loc, true);
6411 break;
6413 case MO_ADJUST:
6414 out->stack_adjust += mo->u.adjust;
6415 break;
6419 if (MAY_HAVE_DEBUG_INSNS)
6421 dataflow_set_equiv_regs (out);
6422 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6423 out);
6424 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6425 out);
6426 #if ENABLE_CHECKING
6427 htab_traverse (shared_hash_htab (out->vars),
6428 canonicalize_loc_order_check, out);
6429 #endif
6431 changed = dataflow_set_different (&old_out, out);
6432 dataflow_set_destroy (&old_out);
6433 return changed;
6436 /* Find the locations of variables in the whole function. */
6438 static bool
6439 vt_find_locations (void)
6441 fibheap_t worklist, pending, fibheap_swap;
6442 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6443 basic_block bb;
6444 edge e;
6445 int *bb_order;
6446 int *rc_order;
6447 int i;
6448 int htabsz = 0;
6449 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6450 bool success = true;
6452 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6453 /* Compute reverse completion order of depth first search of the CFG
6454 so that the data-flow runs faster. */
6455 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6456 bb_order = XNEWVEC (int, last_basic_block);
6457 pre_and_rev_post_order_compute (NULL, rc_order, false);
6458 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6459 bb_order[rc_order[i]] = i;
6460 free (rc_order);
6462 worklist = fibheap_new ();
6463 pending = fibheap_new ();
6464 visited = sbitmap_alloc (last_basic_block);
6465 in_worklist = sbitmap_alloc (last_basic_block);
6466 in_pending = sbitmap_alloc (last_basic_block);
6467 sbitmap_zero (in_worklist);
6469 FOR_EACH_BB (bb)
6470 fibheap_insert (pending, bb_order[bb->index], bb);
6471 sbitmap_ones (in_pending);
6473 while (success && !fibheap_empty (pending))
6475 fibheap_swap = pending;
6476 pending = worklist;
6477 worklist = fibheap_swap;
6478 sbitmap_swap = in_pending;
6479 in_pending = in_worklist;
6480 in_worklist = sbitmap_swap;
6482 sbitmap_zero (visited);
6484 while (!fibheap_empty (worklist))
6486 bb = (basic_block) fibheap_extract_min (worklist);
6487 RESET_BIT (in_worklist, bb->index);
6488 gcc_assert (!TEST_BIT (visited, bb->index));
6489 if (!TEST_BIT (visited, bb->index))
6491 bool changed;
6492 edge_iterator ei;
6493 int oldinsz, oldoutsz;
6495 SET_BIT (visited, bb->index);
6497 if (VTI (bb)->in.vars)
6499 htabsz
6500 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6501 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6502 oldinsz
6503 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6504 oldoutsz
6505 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6507 else
6508 oldinsz = oldoutsz = 0;
6510 if (MAY_HAVE_DEBUG_INSNS)
6512 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6513 bool first = true, adjust = false;
6515 /* Calculate the IN set as the intersection of
6516 predecessor OUT sets. */
6518 dataflow_set_clear (in);
6519 dst_can_be_shared = true;
6521 FOR_EACH_EDGE (e, ei, bb->preds)
6522 if (!VTI (e->src)->flooded)
6523 gcc_assert (bb_order[bb->index]
6524 <= bb_order[e->src->index]);
6525 else if (first)
6527 dataflow_set_copy (in, &VTI (e->src)->out);
6528 first_out = &VTI (e->src)->out;
6529 first = false;
6531 else
6533 dataflow_set_merge (in, &VTI (e->src)->out);
6534 adjust = true;
6537 if (adjust)
6539 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6540 #if ENABLE_CHECKING
6541 /* Merge and merge_adjust should keep entries in
6542 canonical order. */
6543 htab_traverse (shared_hash_htab (in->vars),
6544 canonicalize_loc_order_check,
6545 in);
6546 #endif
6547 if (dst_can_be_shared)
6549 shared_hash_destroy (in->vars);
6550 in->vars = shared_hash_copy (first_out->vars);
6554 VTI (bb)->flooded = true;
6556 else
6558 /* Calculate the IN set as union of predecessor OUT sets. */
6559 dataflow_set_clear (&VTI (bb)->in);
6560 FOR_EACH_EDGE (e, ei, bb->preds)
6561 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6564 changed = compute_bb_dataflow (bb);
6565 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6566 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6568 if (htabmax && htabsz > htabmax)
6570 if (MAY_HAVE_DEBUG_INSNS)
6571 inform (DECL_SOURCE_LOCATION (cfun->decl),
6572 "variable tracking size limit exceeded with "
6573 "-fvar-tracking-assignments, retrying without");
6574 else
6575 inform (DECL_SOURCE_LOCATION (cfun->decl),
6576 "variable tracking size limit exceeded");
6577 success = false;
6578 break;
6581 if (changed)
6583 FOR_EACH_EDGE (e, ei, bb->succs)
6585 if (e->dest == EXIT_BLOCK_PTR)
6586 continue;
6588 if (TEST_BIT (visited, e->dest->index))
6590 if (!TEST_BIT (in_pending, e->dest->index))
6592 /* Send E->DEST to next round. */
6593 SET_BIT (in_pending, e->dest->index);
6594 fibheap_insert (pending,
6595 bb_order[e->dest->index],
6596 e->dest);
6599 else if (!TEST_BIT (in_worklist, e->dest->index))
6601 /* Add E->DEST to current round. */
6602 SET_BIT (in_worklist, e->dest->index);
6603 fibheap_insert (worklist, bb_order[e->dest->index],
6604 e->dest);
6609 if (dump_file)
6610 fprintf (dump_file,
6611 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6612 bb->index,
6613 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6614 oldinsz,
6615 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6616 oldoutsz,
6617 (int)worklist->nodes, (int)pending->nodes, htabsz);
6619 if (dump_file && (dump_flags & TDF_DETAILS))
6621 fprintf (dump_file, "BB %i IN:\n", bb->index);
6622 dump_dataflow_set (&VTI (bb)->in);
6623 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6624 dump_dataflow_set (&VTI (bb)->out);
6630 if (success && MAY_HAVE_DEBUG_INSNS)
6631 FOR_EACH_BB (bb)
6632 gcc_assert (VTI (bb)->flooded);
6634 free (bb_order);
6635 fibheap_delete (worklist);
6636 fibheap_delete (pending);
6637 sbitmap_free (visited);
6638 sbitmap_free (in_worklist);
6639 sbitmap_free (in_pending);
6641 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6642 return success;
6645 /* Print the content of the LIST to dump file. */
6647 static void
6648 dump_attrs_list (attrs list)
6650 for (; list; list = list->next)
6652 if (dv_is_decl_p (list->dv))
6653 print_mem_expr (dump_file, dv_as_decl (list->dv));
6654 else
6655 print_rtl_single (dump_file, dv_as_value (list->dv));
6656 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6658 fprintf (dump_file, "\n");
6661 /* Print the information about variable *SLOT to dump file. */
6663 static int
6664 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6666 variable var = (variable) *slot;
6668 dump_var (var);
6670 /* Continue traversing the hash table. */
6671 return 1;
6674 /* Print the information about variable VAR to dump file. */
6676 static void
6677 dump_var (variable var)
6679 int i;
6680 location_chain node;
6682 if (dv_is_decl_p (var->dv))
6684 const_tree decl = dv_as_decl (var->dv);
6686 if (DECL_NAME (decl))
6688 fprintf (dump_file, " name: %s",
6689 IDENTIFIER_POINTER (DECL_NAME (decl)));
6690 if (dump_flags & TDF_UID)
6691 fprintf (dump_file, "D.%u", DECL_UID (decl));
6693 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6694 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6695 else
6696 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6697 fprintf (dump_file, "\n");
6699 else
6701 fputc (' ', dump_file);
6702 print_rtl_single (dump_file, dv_as_value (var->dv));
6705 for (i = 0; i < var->n_var_parts; i++)
6707 fprintf (dump_file, " offset %ld\n",
6708 (long) var->var_part[i].offset);
6709 for (node = var->var_part[i].loc_chain; node; node = node->next)
6711 fprintf (dump_file, " ");
6712 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6713 fprintf (dump_file, "[uninit]");
6714 print_rtl_single (dump_file, node->loc);
6719 /* Print the information about variables from hash table VARS to dump file. */
6721 static void
6722 dump_vars (htab_t vars)
6724 if (htab_elements (vars) > 0)
6726 fprintf (dump_file, "Variables:\n");
6727 htab_traverse (vars, dump_var_slot, NULL);
6731 /* Print the dataflow set SET to dump file. */
6733 static void
6734 dump_dataflow_set (dataflow_set *set)
6736 int i;
6738 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6739 set->stack_adjust);
6740 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6742 if (set->regs[i])
6744 fprintf (dump_file, "Reg %d:", i);
6745 dump_attrs_list (set->regs[i]);
6748 dump_vars (shared_hash_htab (set->vars));
6749 fprintf (dump_file, "\n");
6752 /* Print the IN and OUT sets for each basic block to dump file. */
6754 static void
6755 dump_dataflow_sets (void)
6757 basic_block bb;
6759 FOR_EACH_BB (bb)
6761 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6762 fprintf (dump_file, "IN:\n");
6763 dump_dataflow_set (&VTI (bb)->in);
6764 fprintf (dump_file, "OUT:\n");
6765 dump_dataflow_set (&VTI (bb)->out);
6769 /* Add variable VAR to the hash table of changed variables and
6770 if it has no locations delete it from SET's hash table. */
6772 static void
6773 variable_was_changed (variable var, dataflow_set *set)
6775 hashval_t hash = dv_htab_hash (var->dv);
6777 if (emit_notes)
6779 void **slot;
6780 bool old_cur_loc_changed = false;
6782 /* Remember this decl or VALUE has been added to changed_variables. */
6783 set_dv_changed (var->dv, true);
6785 slot = htab_find_slot_with_hash (changed_variables,
6786 var->dv,
6787 hash, INSERT);
6789 if (*slot)
6791 variable old_var = (variable) *slot;
6792 gcc_assert (old_var->in_changed_variables);
6793 old_var->in_changed_variables = false;
6794 old_cur_loc_changed = old_var->cur_loc_changed;
6795 variable_htab_free (*slot);
6797 if (set && var->n_var_parts == 0)
6799 variable empty_var;
6801 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6802 empty_var->dv = var->dv;
6803 empty_var->refcount = 1;
6804 empty_var->n_var_parts = 0;
6805 empty_var->cur_loc_changed = true;
6806 empty_var->in_changed_variables = true;
6807 *slot = empty_var;
6808 goto drop_var;
6810 else
6812 var->refcount++;
6813 var->in_changed_variables = true;
6814 /* If within processing one uop a variable is deleted
6815 and then readded, we need to assume it has changed. */
6816 if (old_cur_loc_changed)
6817 var->cur_loc_changed = true;
6818 *slot = var;
6821 else
6823 gcc_assert (set);
6824 if (var->n_var_parts == 0)
6826 void **slot;
6828 drop_var:
6829 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6830 if (slot)
6832 if (shared_hash_shared (set->vars))
6833 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6834 NO_INSERT);
6835 htab_clear_slot (shared_hash_htab (set->vars), slot);
6841 /* Look for the index in VAR->var_part corresponding to OFFSET.
6842 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6843 referenced int will be set to the index that the part has or should
6844 have, if it should be inserted. */
6846 static inline int
6847 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6848 int *insertion_point)
6850 int pos, low, high;
6852 /* Find the location part. */
6853 low = 0;
6854 high = var->n_var_parts;
6855 while (low != high)
6857 pos = (low + high) / 2;
6858 if (var->var_part[pos].offset < offset)
6859 low = pos + 1;
6860 else
6861 high = pos;
6863 pos = low;
6865 if (insertion_point)
6866 *insertion_point = pos;
6868 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6869 return pos;
6871 return -1;
6874 static void **
6875 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6876 decl_or_value dv, HOST_WIDE_INT offset,
6877 enum var_init_status initialized, rtx set_src)
6879 int pos;
6880 location_chain node, next;
6881 location_chain *nextp;
6882 variable var;
6883 bool onepart = dv_onepart_p (dv);
6885 gcc_assert (offset == 0 || !onepart);
6886 gcc_assert (loc != dv_as_opaque (dv));
6888 var = (variable) *slot;
6890 if (! flag_var_tracking_uninit)
6891 initialized = VAR_INIT_STATUS_INITIALIZED;
6893 if (!var)
6895 /* Create new variable information. */
6896 var = (variable) pool_alloc (dv_pool (dv));
6897 var->dv = dv;
6898 var->refcount = 1;
6899 var->n_var_parts = 1;
6900 var->cur_loc_changed = false;
6901 var->in_changed_variables = false;
6902 var->var_part[0].offset = offset;
6903 var->var_part[0].loc_chain = NULL;
6904 var->var_part[0].cur_loc = NULL;
6905 *slot = var;
6906 pos = 0;
6907 nextp = &var->var_part[0].loc_chain;
6909 else if (onepart)
6911 int r = -1, c = 0;
6913 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6915 pos = 0;
6917 if (GET_CODE (loc) == VALUE)
6919 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6920 nextp = &node->next)
6921 if (GET_CODE (node->loc) == VALUE)
6923 if (node->loc == loc)
6925 r = 0;
6926 break;
6928 if (canon_value_cmp (node->loc, loc))
6929 c++;
6930 else
6932 r = 1;
6933 break;
6936 else if (REG_P (node->loc) || MEM_P (node->loc))
6937 c++;
6938 else
6940 r = 1;
6941 break;
6944 else if (REG_P (loc))
6946 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6947 nextp = &node->next)
6948 if (REG_P (node->loc))
6950 if (REGNO (node->loc) < REGNO (loc))
6951 c++;
6952 else
6954 if (REGNO (node->loc) == REGNO (loc))
6955 r = 0;
6956 else
6957 r = 1;
6958 break;
6961 else
6963 r = 1;
6964 break;
6967 else if (MEM_P (loc))
6969 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6970 nextp = &node->next)
6971 if (REG_P (node->loc))
6972 c++;
6973 else if (MEM_P (node->loc))
6975 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6976 break;
6977 else
6978 c++;
6980 else
6982 r = 1;
6983 break;
6986 else
6987 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6988 nextp = &node->next)
6989 if ((r = loc_cmp (node->loc, loc)) >= 0)
6990 break;
6991 else
6992 c++;
6994 if (r == 0)
6995 return slot;
6997 if (shared_var_p (var, set->vars))
6999 slot = unshare_variable (set, slot, var, initialized);
7000 var = (variable)*slot;
7001 for (nextp = &var->var_part[0].loc_chain; c;
7002 nextp = &(*nextp)->next)
7003 c--;
7004 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7007 else
7009 int inspos = 0;
7011 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7013 pos = find_variable_location_part (var, offset, &inspos);
7015 if (pos >= 0)
7017 node = var->var_part[pos].loc_chain;
7019 if (node
7020 && ((REG_P (node->loc) && REG_P (loc)
7021 && REGNO (node->loc) == REGNO (loc))
7022 || rtx_equal_p (node->loc, loc)))
7024 /* LOC is in the beginning of the chain so we have nothing
7025 to do. */
7026 if (node->init < initialized)
7027 node->init = initialized;
7028 if (set_src != NULL)
7029 node->set_src = set_src;
7031 return slot;
7033 else
7035 /* We have to make a copy of a shared variable. */
7036 if (shared_var_p (var, set->vars))
7038 slot = unshare_variable (set, slot, var, initialized);
7039 var = (variable)*slot;
7043 else
7045 /* We have not found the location part, new one will be created. */
7047 /* We have to make a copy of the shared variable. */
7048 if (shared_var_p (var, set->vars))
7050 slot = unshare_variable (set, slot, var, initialized);
7051 var = (variable)*slot;
7054 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7055 thus there are at most MAX_VAR_PARTS different offsets. */
7056 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7057 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
7059 /* We have to move the elements of array starting at index
7060 inspos to the next position. */
7061 for (pos = var->n_var_parts; pos > inspos; pos--)
7062 var->var_part[pos] = var->var_part[pos - 1];
7064 var->n_var_parts++;
7065 var->var_part[pos].offset = offset;
7066 var->var_part[pos].loc_chain = NULL;
7067 var->var_part[pos].cur_loc = NULL;
7070 /* Delete the location from the list. */
7071 nextp = &var->var_part[pos].loc_chain;
7072 for (node = var->var_part[pos].loc_chain; node; node = next)
7074 next = node->next;
7075 if ((REG_P (node->loc) && REG_P (loc)
7076 && REGNO (node->loc) == REGNO (loc))
7077 || rtx_equal_p (node->loc, loc))
7079 /* Save these values, to assign to the new node, before
7080 deleting this one. */
7081 if (node->init > initialized)
7082 initialized = node->init;
7083 if (node->set_src != NULL && set_src == NULL)
7084 set_src = node->set_src;
7085 if (var->var_part[pos].cur_loc == node->loc)
7087 var->var_part[pos].cur_loc = NULL;
7088 var->cur_loc_changed = true;
7090 pool_free (loc_chain_pool, node);
7091 *nextp = next;
7092 break;
7094 else
7095 nextp = &node->next;
7098 nextp = &var->var_part[pos].loc_chain;
7101 /* Add the location to the beginning. */
7102 node = (location_chain) pool_alloc (loc_chain_pool);
7103 node->loc = loc;
7104 node->init = initialized;
7105 node->set_src = set_src;
7106 node->next = *nextp;
7107 *nextp = node;
7109 if (onepart && emit_notes)
7110 add_value_chains (var->dv, loc);
7112 /* If no location was emitted do so. */
7113 if (var->var_part[pos].cur_loc == NULL)
7114 variable_was_changed (var, set);
7116 return slot;
7119 /* Set the part of variable's location in the dataflow set SET. The
7120 variable part is specified by variable's declaration in DV and
7121 offset OFFSET and the part's location by LOC. IOPT should be
7122 NO_INSERT if the variable is known to be in SET already and the
7123 variable hash table must not be resized, and INSERT otherwise. */
7125 static void
7126 set_variable_part (dataflow_set *set, rtx loc,
7127 decl_or_value dv, HOST_WIDE_INT offset,
7128 enum var_init_status initialized, rtx set_src,
7129 enum insert_option iopt)
7131 void **slot;
7133 if (iopt == NO_INSERT)
7134 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7135 else
7137 slot = shared_hash_find_slot (set->vars, dv);
7138 if (!slot)
7139 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7141 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7144 /* Remove all recorded register locations for the given variable part
7145 from dataflow set SET, except for those that are identical to loc.
7146 The variable part is specified by variable's declaration or value
7147 DV and offset OFFSET. */
7149 static void **
7150 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7151 HOST_WIDE_INT offset, rtx set_src)
7153 variable var = (variable) *slot;
7154 int pos = find_variable_location_part (var, offset, NULL);
7156 if (pos >= 0)
7158 location_chain node, next;
7160 /* Remove the register locations from the dataflow set. */
7161 next = var->var_part[pos].loc_chain;
7162 for (node = next; node; node = next)
7164 next = node->next;
7165 if (node->loc != loc
7166 && (!flag_var_tracking_uninit
7167 || !set_src
7168 || MEM_P (set_src)
7169 || !rtx_equal_p (set_src, node->set_src)))
7171 if (REG_P (node->loc))
7173 attrs anode, anext;
7174 attrs *anextp;
7176 /* Remove the variable part from the register's
7177 list, but preserve any other variable parts
7178 that might be regarded as live in that same
7179 register. */
7180 anextp = &set->regs[REGNO (node->loc)];
7181 for (anode = *anextp; anode; anode = anext)
7183 anext = anode->next;
7184 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7185 && anode->offset == offset)
7187 pool_free (attrs_pool, anode);
7188 *anextp = anext;
7190 else
7191 anextp = &anode->next;
7195 slot = delete_slot_part (set, node->loc, slot, offset);
7200 return slot;
7203 /* Remove all recorded register locations for the given variable part
7204 from dataflow set SET, except for those that are identical to loc.
7205 The variable part is specified by variable's declaration or value
7206 DV and offset OFFSET. */
7208 static void
7209 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7210 HOST_WIDE_INT offset, rtx set_src)
7212 void **slot;
7214 if (!dv_as_opaque (dv)
7215 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7216 return;
7218 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7219 if (!slot)
7220 return;
7222 clobber_slot_part (set, loc, slot, offset, set_src);
7225 /* Delete the part of variable's location from dataflow set SET. The
7226 variable part is specified by its SET->vars slot SLOT and offset
7227 OFFSET and the part's location by LOC. */
/* Returns SLOT, which may have moved if the variable had to be
   unshared (copy-on-write) before modification.
   NOTE(review): this chunk is a rendered blob — the numeric prefixes
   are the original file's line numbers and brace-only lines were
   dropped by the renderer; the logic below reads across those gaps.  */
7229 static void **
7230 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7231 HOST_WIDE_INT offset)
7233 variable var = (variable) *slot;
7234 int pos = find_variable_location_part (var, offset, NULL);
7236 if (pos >= 0)
7238 location_chain node, next;
7239 location_chain *nextp;
7240 bool changed;
/* If VAR is shared between dataflow sets, unshare it before mutating,
   but only when it actually contains a location matching LOC.  */
7242 if (shared_var_p (var, set->vars))
7244 /* If the variable contains the location part we have to
7245 make a copy of the variable. */
7246 for (node = var->var_part[pos].loc_chain; node;
7247 node = node->next)
7249 if ((REG_P (node->loc) && REG_P (loc)
7250 && REGNO (node->loc) == REGNO (loc))
7251 || rtx_equal_p (node->loc, loc))
7253 slot = unshare_variable (set, slot, var,
7254 VAR_INIT_STATUS_UNKNOWN)
7255 var = (variable)*slot;
7256 break;
7261 /* Delete the location part. */
7262 changed = false;
7263 nextp = &var->var_part[pos].loc_chain;
/* Walk the location chain through a pointer-to-pointer so the matched
   node can be unlinked in place; at most one node is removed.  */
7264 for (node = *nextp; node; node = next)
7266 next = node->next;
7267 if ((REG_P (node->loc) && REG_P (loc)
7268 && REGNO (node->loc) == REGNO (loc))
7269 || rtx_equal_p (node->loc, loc))
7271 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
7272 remove_value_chains (var->dv, node->loc)
7273 /* If we have deleted the location which was last emitted
7274 we have to emit new location so add the variable to set
7275 of changed variables. */
7276 if (var->var_part[pos].cur_loc == node->loc)
7278 changed = true;
7279 var->var_part[pos].cur_loc = NULL;
7280 var->cur_loc_changed = true;
7282 pool_free (loc_chain_pool, node);
7283 *nextp = next;
7284 break;
7286 else
7287 nextp = &node->next;
/* If the chain became empty, drop the whole variable part and shift
   the following parts down to keep the array dense.  */
7290 if (var->var_part[pos].loc_chain == NULL)
7292 changed = true;
7293 var->n_var_parts--;
7294 if (emit_notes)
7295 var->cur_loc_changed = true;
7296 while (pos < var->n_var_parts)
7298 var->var_part[pos] = var->var_part[pos + 1];
7299 pos++;
7302 if (changed)
7303 variable_was_changed (var, set);
7306 return slot;
7309 /* Delete the part of variable's location from dataflow set SET. The
7310 variable part is specified by variable's declaration or value DV
7311 and offset OFFSET and the part's location by LOC. */
7313 static void
7314 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7315 HOST_WIDE_INT offset)
7317 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7318 if (!slot)
7319 return;
7321 delete_slot_part (set, loc, slot, offset);
7324 /* Structure for passing some other parameters to function
7325 vt_expand_loc_callback. */
/* Acts as the closure for cselib expansion callbacks: carries the
   active hash table plus the flags controlling dummy (no-allocation)
   expansion and cur_loc handling.  */
7326 struct expand_loc_callback_data
7328 /* The variables and values active at this point. */
7329 htab_t vars;
7331 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
7332 Non-NULL should be returned if vt_expand_loc would return
7333 non-NULL in that case, NULL otherwise. cur_loc_changed should be
7334 computed and cur_loc recomputed when possible (but just once
7335 per emit_notes_for_changes call). */
7336 bool dummy;
7338 /* True if expansion of subexpressions had to recompute some
7339 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
7340 whose cur_loc has been already recomputed during current
7341 emit_notes_for_changes call. */
7342 bool cur_loc_changed;
7344 /* True if cur_loc should be ignored and any possible location
7345 returned. */
7346 bool ignore_cur_loc;
7349 /* Callback for cselib_expand_value, that looks for expressions
7350 holding the value in the var-tracking hash tables. Return X for
7351 standard processing, anything else is to be used as-is. */
/* DATA is a struct expand_loc_callback_data (see above).  In "dummy"
   mode no new rtl is allocated and pc_rtx is used as the non-NULL
   success marker.  */
7353 static rtx
7354 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
7356 struct expand_loc_callback_data *elcd
7357 = (struct expand_loc_callback_data *) data;
7358 bool dummy = elcd->dummy;
7359 bool cur_loc_changed = elcd->cur_loc_changed;
7360 rtx cur_loc;
7361 decl_or_value dv;
7362 variable var;
7363 location_chain loc;
7364 rtx result, subreg, xret;
/* SUBREGs are expanded inline; DEBUG_EXPRs and VALUEs fall through to
   the hash-table lookup below; everything else gets standard
   processing (return X).  */
7366 switch (GET_CODE (x))
7368 case SUBREG:
7369 if (dummy)
7371 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
7372 max_depth - 1,
7373 vt_expand_loc_callback, data))
7374 return pc_rtx;
7375 else
7376 return NULL;
7379 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
7380 max_depth - 1,
7381 vt_expand_loc_callback, data);
7383 if (!subreg)
7384 return NULL;
7386 result = simplify_gen_subreg (GET_MODE (x), subreg,
7387 GET_MODE (SUBREG_REG (x)),
7388 SUBREG_BYTE (x));
7390 /* Invalid SUBREGs are ok in debug info. ??? We could try
7391 alternate expansions for the VALUE as well. */
7392 if (!result)
7393 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
7395 return result;
7397 case DEBUG_EXPR:
7398 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
7399 xret = NULL;
7400 break;
7402 case VALUE:
7403 dv = dv_from_value (x);
7404 xret = x;
7405 break;
7407 default:
7408 return x;
/* Break cycles: X is already being expanded further up the stack.  */
7411 if (VALUE_RECURSED_INTO (x))
7412 return NULL;
7414 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
7416 if (!var)
7418 if (dummy && dv_changed_p (dv))
7419 elcd->cur_loc_changed = true;
7420 return xret;
7423 if (var->n_var_parts == 0)
7425 if (dummy)
7426 elcd->cur_loc_changed = true;
7427 return xret;
7430 gcc_assert (var->n_var_parts == 1);
/* Mark X while expanding its locations so the recursion check above
   cuts cycles; cleared again before returning.  */
7432 VALUE_RECURSED_INTO (x) = true;
7433 result = NULL;
/* First try the cached cur_loc (unless told to ignore it)...  */
7435 if (var->var_part[0].cur_loc && !elcd->ignore_cur_loc)
7437 if (dummy)
7439 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7440 max_depth,
7441 vt_expand_loc_callback, data))
7442 result = pc_rtx;
7444 else
7445 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7446 max_depth,
7447 vt_expand_loc_callback, data);
7448 if (result)
7449 set_dv_changed (dv, false);
7450 cur_loc = var->var_part[0].cur_loc;
7452 else
7453 cur_loc = NULL_RTX;
/* ...then fall back to scanning the whole location chain, skipping
   the cur_loc that already failed.  */
7454 if (!result && (dv_changed_p (dv) || elcd->ignore_cur_loc))
7456 if (!elcd->ignore_cur_loc)
7457 set_dv_changed (dv, false);
7458 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
7459 if (loc->loc == cur_loc)
7460 continue;
7461 else if (dummy)
7463 elcd->cur_loc_changed = cur_loc_changed;
7464 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
7465 vt_expand_loc_callback,
7466 data))
7468 result = pc_rtx;
7469 break;
7472 else
7474 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
7475 vt_expand_loc_callback, data);
7476 if (result)
7477 break;
7479 if (dummy && (result || var->var_part[0].cur_loc))
7480 var->cur_loc_changed = true;
7481 if (!elcd->ignore_cur_loc)
7482 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
7484 if (dummy)
7486 if (var->cur_loc_changed)
7487 elcd->cur_loc_changed = true;
7488 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
7489 elcd->cur_loc_changed = cur_loc_changed;
7492 VALUE_RECURSED_INTO (x) = false;
7493 if (result)
7494 return result;
7495 else
7496 return xret;
7499 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7500 tables. */
7502 static rtx
7503 vt_expand_loc (rtx loc, htab_t vars, bool ignore_cur_loc)
7505 struct expand_loc_callback_data data;
7507 if (!MAY_HAVE_DEBUG_INSNS)
7508 return loc;
7510 data.vars = vars;
7511 data.dummy = false;
7512 data.cur_loc_changed = false;
7513 data.ignore_cur_loc = ignore_cur_loc;
7514 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7515 vt_expand_loc_callback, &data);
7517 if (loc && MEM_P (loc))
7518 loc = targetm.delegitimize_address (loc);
7519 return loc;
7522 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7523 would succeed or not, without actually allocating new rtxes. */
7525 static bool
7526 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7528 struct expand_loc_callback_data data;
7529 bool ret;
7531 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7532 data.vars = vars;
7533 data.dummy = true;
7534 data.cur_loc_changed = false;
7535 data.ignore_cur_loc = false;
7536 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7537 vt_expand_loc_callback, &data);
7538 *pcur_loc_changed = data.cur_loc_changed;
7539 return ret;
7542 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7543 additional parameters: WHERE specifies whether the note shall be emitted
7544 before or after instruction INSN. */
/* Hashtab traversal callback over changed_variables; always returns 1
   to continue the traversal and clears the processed slot.  */
7546 static int
7547 emit_note_insn_var_location (void **varp, void *data)
7549 variable var = (variable) *varp;
7550 rtx insn = ((emit_note_data *)data)->insn;
7551 enum emit_note_where where = ((emit_note_data *)data)->where;
7552 htab_t vars = ((emit_note_data *)data)->vars;
7553 rtx note, note_vl;
7554 int i, j, n_var_parts;
7555 bool complete;
7556 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7557 HOST_WIDE_INT last_limit;
7558 tree type_size_unit;
7559 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7560 rtx loc[MAX_VAR_PARTS];
7561 tree decl;
7562 location_chain lc;
/* VALUEs and DEBUG_EXPR_DECLs never get notes themselves; they only
   need their cur_loc refreshed (handled at value_or_debug_decl).  */
7564 if (dv_is_value_p (var->dv))
7565 goto value_or_debug_decl;
7567 decl = dv_as_decl (var->dv);
7569 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7570 goto value_or_debug_decl;
7572 complete = true;
7573 last_limit = 0;
7574 n_var_parts = 0;
7575 if (!MAY_HAVE_DEBUG_INSNS)
7577 for (i = 0; i < var->n_var_parts; i++)
7578 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7580 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7581 var->cur_loc_changed = true;
7583 if (var->n_var_parts == 0)
7584 var->cur_loc_changed = true;
7586 if (!var->cur_loc_changed)
7587 goto clear;
/* Collect the expanded locations of all contiguous parts into LOC[]
   and OFFSETS[]; any gap or failed expansion makes the location
   description incomplete.  */
7588 for (i = 0; i < var->n_var_parts; i++)
7590 enum machine_mode mode, wider_mode;
7591 rtx loc2;
7593 if (last_limit < var->var_part[i].offset)
7595 complete = false;
7596 break;
7598 else if (last_limit > var->var_part[i].offset)
7599 continue;
7600 offsets[n_var_parts] = var->var_part[i].offset;
7601 if (!var->var_part[i].cur_loc)
7603 complete = false;
7604 continue;
7606 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars, false);
7607 if (!loc2)
7609 complete = false;
7610 continue;
7612 loc[n_var_parts] = loc2;
7613 mode = GET_MODE (var->var_part[i].cur_loc);
7614 if (mode == VOIDmode && dv_onepart_p (var->dv))
7615 mode = DECL_MODE (decl);
7616 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7617 if (var->var_part[i].cur_loc == lc->loc)
7619 initialized = lc->init;
7620 break;
7622 gcc_assert (lc);
7623 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7625 /* Attempt to merge adjacent registers or memory. */
7626 wider_mode = GET_MODE_WIDER_MODE (mode);
7627 for (j = i + 1; j < var->n_var_parts; j++)
7628 if (last_limit <= var->var_part[j].offset)
7629 break;
7630 if (j < var->n_var_parts
7631 && wider_mode != VOIDmode
7632 && var->var_part[j].cur_loc
7633 && mode == GET_MODE (var->var_part[j].cur_loc)
7634 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7635 && last_limit == var->var_part[j].offset
7636 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars, false))
7637 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7639 rtx new_loc = NULL;
/* Two consecutive hard registers can be merged into one wider
   register when they together form exactly the wider-mode regs.  */
7641 if (REG_P (loc[n_var_parts])
7642 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7643 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7644 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7645 == REGNO (loc2))
7647 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7648 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7649 mode, 0);
7650 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7651 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7652 if (new_loc)
7654 if (!REG_P (new_loc)
7655 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7656 new_loc = NULL;
7657 else
7658 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
/* Likewise two MEMs at consecutive addresses off the same base.  */
7661 else if (MEM_P (loc[n_var_parts])
7662 && GET_CODE (XEXP (loc2, 0)) == PLUS
7663 && REG_P (XEXP (XEXP (loc2, 0), 0))
7664 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7666 if ((REG_P (XEXP (loc[n_var_parts], 0))
7667 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7668 XEXP (XEXP (loc2, 0), 0))
7669 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7670 == GET_MODE_SIZE (mode))
7671 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7672 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7673 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7674 XEXP (XEXP (loc2, 0), 0))
7675 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7676 + GET_MODE_SIZE (mode)
7677 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7678 new_loc = adjust_address_nv (loc[n_var_parts],
7679 wider_mode, 0);
7682 if (new_loc)
7684 loc[n_var_parts] = new_loc;
7685 mode = wider_mode;
7686 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7687 i = j;
7690 ++n_var_parts;
7692 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7693 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7694 complete = false;
7696 if (! flag_var_tracking_uninit)
7697 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Build the VAR_LOCATION payload: NULL_RTX for an incomplete
   description, a single location/EXPR_LIST, or a PARALLEL of parts.  */
7699 note_vl = NULL_RTX;
7700 if (!complete)
7701 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7702 (int) initialized);
7703 else if (n_var_parts == 1)
7705 rtx expr_list;
7707 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7708 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7709 else
7710 expr_list = loc[0];
7712 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7713 (int) initialized);
7715 else if (n_var_parts)
7717 rtx parallel;
7719 for (i = 0; i < n_var_parts; i++)
7720 loc[i]
7721 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7723 parallel = gen_rtx_PARALLEL (VOIDmode,
7724 gen_rtvec_v (n_var_parts, loc));
7725 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7726 parallel, (int) initialized);
7729 if (where != EMIT_NOTE_BEFORE_INSN)
7731 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7732 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7733 NOTE_DURING_CALL_P (note) = true;
7735 else
7737 /* Make sure that the call related notes come first. */
7738 while (NEXT_INSN (insn)
7739 && NOTE_P (insn)
7740 && NOTE_DURING_CALL_P (insn))
7741 insn = NEXT_INSN (insn);
7742 if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn))
7743 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7744 else
7745 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7747 NOTE_VAR_LOCATION (note) = note_vl;
7749 clear:
7750 set_dv_changed (var->dv, false);
7751 var->cur_loc_changed = false;
7752 gcc_assert (var->in_changed_variables);
7753 var->in_changed_variables = false;
7754 htab_clear_slot (changed_variables, varp);
7756 /* Continue traversing the hash table. */
7757 return 1;
/* For VALUEs/DEBUG_EXPR_DECLs only refresh cur_loc to the first
   expandable location; no note is emitted.  */
7759 value_or_debug_decl:
7760 if (dv_changed_p (var->dv) && var->n_var_parts)
7762 location_chain lc;
7763 bool cur_loc_changed;
7765 if (var->var_part[0].cur_loc
7766 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7767 &cur_loc_changed))
7768 goto clear;
7769 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7770 if (lc->loc != var->var_part[0].cur_loc
7771 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7772 break;
7773 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7775 goto clear;
/* Instantiate VEC operations for pointers to variable_def,
   heap-allocated.  */
7778 DEF_VEC_P (variable);
7779 DEF_VEC_ALLOC_P (variable, heap);
7781 /* Stack of variable_def pointers that need processing with
7782 check_changed_vars_2. */
7784 static VEC (variable, heap) *changed_variables_stack;
7786 /* VALUEs with no variables that need set_dv_changed (val, false)
7787 called before check_changed_vars_3. */
7789 static VEC (rtx, heap) *changed_values_stack;
7791 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
/* Walk the value chain recorded for DV and mark every dependent DV
   that is not yet flagged as changed, pushing variables found in HTAB
   onto changed_variables_stack and bare VALUEs onto
   changed_values_stack.  Recurses on bare VALUEs so transitive
   dependencies are covered.  */
7793 static void
7794 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7796 value_chain vc
7797 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7799 if (vc == NULL)
7800 return;
/* The first node is the DV itself; its dependents start at vc->next.  */
7801 for (vc = vc->next; vc; vc = vc->next)
7802 if (!dv_changed_p (vc->dv))
7804 variable vcvar
7805 = (variable) htab_find_with_hash (htab, vc->dv,
7806 dv_htab_hash (vc->dv));
7807 if (vcvar)
7809 set_dv_changed (vc->dv, true);
7810 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7812 else if (dv_is_value_p (vc->dv))
7814 set_dv_changed (vc->dv, true);
7815 VEC_safe_push (rtx, heap, changed_values_stack,
7816 dv_as_value (vc->dv));
7817 check_changed_vars_0 (vc->dv, htab);
7822 /* Populate changed_variables_stack with variable_def pointers
7823 that need variable_was_changed called on them. */
7825 static int
7826 check_changed_vars_1 (void **slot, void *data)
7828 variable var = (variable) *slot;
7829 htab_t htab = (htab_t) data;
7831 if (dv_is_value_p (var->dv)
7832 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7833 check_changed_vars_0 (var->dv, htab);
7834 return 1;
7837 /* Add VAR to changed_variables and also for VALUEs add recursively
7838 all DVs that aren't in changed_variables yet but reference the
7839 VALUE from its loc_chain. */
7841 static void
7842 check_changed_vars_2 (variable var, htab_t htab)
7844 variable_was_changed (var, NULL);
7845 if (dv_is_value_p (var->dv)
7846 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7847 check_changed_vars_0 (var->dv, htab);
7850 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7851 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7852 it needs and are also in changed variables) and track whether
7853 cur_loc (or anything it uses to compute location) had to change
7854 during the current emit_notes_for_changes call. */
/* Hashtab traversal callback over changed_variables; DATA is the vars
   hash table.  Always returns 1 to continue the traversal.  */
7856 static int
7857 check_changed_vars_3 (void **slot, void *data)
7859 variable var = (variable) *slot;
7860 htab_t vars = (htab_t) data;
7861 int i;
7862 location_chain lc;
7863 bool cur_loc_changed;
7865 if (dv_is_value_p (var->dv)
7866 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7867 return 1;
7869 for (i = 0; i < var->n_var_parts; i++)
/* If the cached cur_loc still expands, keep it; otherwise pick the
   first expandable location from the chain (or none).  */
7871 if (var->var_part[i].cur_loc
7872 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7873 &cur_loc_changed))
7875 if (cur_loc_changed)
7876 var->cur_loc_changed = true;
7877 continue;
7879 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7880 if (lc->loc != var->var_part[i].cur_loc
7881 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7882 break;
7883 if (lc || var->var_part[i].cur_loc)
7884 var->cur_loc_changed = true;
7885 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7887 if (var->n_var_parts == 0)
7888 var->cur_loc_changed = true;
7889 return 1;
7892 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7893 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7894 shall be emitted before or after instruction INSN. */
7896 static void
7897 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7898 shared_hash vars)
7900 emit_note_data data;
7901 htab_t htab = shared_hash_htab (vars);
7903 if (!htab_elements (changed_variables))
7904 return;
7906 if (MAY_HAVE_DEBUG_INSNS)
7908 /* Unfortunately this has to be done in two steps, because
7909 we can't traverse a hashtab into which we are inserting
7910 through variable_was_changed. */
7911 htab_traverse (changed_variables, check_changed_vars_1, htab);
7912 while (VEC_length (variable, changed_variables_stack) > 0)
7913 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7914 htab);
/* Reset the changed flag on bare VALUEs queued above so
   check_changed_vars_3 sees a clean state.  */
7915 while (VEC_length (rtx, changed_values_stack) > 0)
7916 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7917 false);
7918 htab_traverse (changed_variables, check_changed_vars_3, htab);
7921 data.insn = insn;
7922 data.where = where;
7923 data.vars = htab;
/* emit_note_insn_var_location clears each slot as it goes, emptying
   changed_variables.  */
7925 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7928 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7929 same variable in hash table DATA or is not there at all. */
/* Hashtab traversal callback over the OLD set; DATA is the NEW set's
   hash table.  Always returns 1 to continue the traversal.  */
7931 static int
7932 emit_notes_for_differences_1 (void **slot, void *data)
7934 htab_t new_vars = (htab_t) data;
7935 variable old_var, new_var;
7937 old_var = (variable) *slot;
7938 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7939 dv_htab_hash (old_var->dv));
7941 if (!new_var)
7943 /* Variable has disappeared. */
/* Queue an empty stand-in so a note reporting the loss is emitted.  */
7944 variable empty_var;
7946 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7947 empty_var->dv = old_var->dv;
7948 empty_var->refcount = 0;
7949 empty_var->n_var_parts = 0;
7950 empty_var->cur_loc_changed = false;
7951 empty_var->in_changed_variables = false;
7952 if (dv_onepart_p (old_var->dv))
7954 location_chain lc;
7956 gcc_assert (old_var->n_var_parts == 1);
7957 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7958 remove_value_chains (old_var->dv, lc->loc);
7960 variable_was_changed (empty_var, NULL);
7961 /* Continue traversing the hash table. */
7962 return 1;
7964 if (variable_different_p (old_var, new_var))
7966 if (dv_onepart_p (old_var->dv))
7968 location_chain lc1, lc2;
7970 gcc_assert (old_var->n_var_parts == 1
7971 && new_var->n_var_parts == 1);
/* Skip the common prefix of the two chains, then register the
   value chains that appeared and unregister those that went away.  */
7972 lc1 = old_var->var_part[0].loc_chain;
7973 lc2 = new_var->var_part[0].loc_chain;
7974 while (lc1
7975 && lc2
7976 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7977 || rtx_equal_p (lc1->loc, lc2->loc)))
7979 lc1 = lc1->next;
7980 lc2 = lc2->next;
7982 for (; lc2; lc2 = lc2->next)
7983 add_value_chains (old_var->dv, lc2->loc);
7984 for (; lc1; lc1 = lc1->next)
7985 remove_value_chains (old_var->dv, lc1->loc);
7987 variable_was_changed (new_var, NULL);
7989 /* Update cur_loc. */
7990 if (old_var != new_var)
7992 int i;
7993 for (i = 0; i < new_var->n_var_parts; i++)
7995 new_var->var_part[i].cur_loc = NULL;
7996 if (old_var->n_var_parts != new_var->n_var_parts
7997 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7998 new_var->cur_loc_changed = true;
7999 else if (old_var->var_part[i].cur_loc != NULL)
8001 location_chain lc;
8002 rtx cur_loc = old_var->var_part[i].cur_loc;
/* Carry the old cur_loc over to the new variable if an
   equivalent location is still in its chain.  */
8004 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
8005 if (lc->loc == cur_loc
8006 || rtx_equal_p (cur_loc, lc->loc))
8008 new_var->var_part[i].cur_loc = lc->loc;
8009 break;
8011 if (lc == NULL)
8012 new_var->cur_loc_changed = true;
8017 /* Continue traversing the hash table. */
8018 return 1;
8021 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
8022 table DATA. */
8024 static int
8025 emit_notes_for_differences_2 (void **slot, void *data)
8027 htab_t old_vars = (htab_t) data;
8028 variable old_var, new_var;
8030 new_var = (variable) *slot;
8031 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8032 dv_htab_hash (new_var->dv));
8033 if (!old_var)
8035 int i;
8036 /* Variable has appeared. */
8037 if (dv_onepart_p (new_var->dv))
8039 location_chain lc;
8041 gcc_assert (new_var->n_var_parts == 1);
8042 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
8043 add_value_chains (new_var->dv, lc->loc);
8045 for (i = 0; i < new_var->n_var_parts; i++)
8046 new_var->var_part[i].cur_loc = NULL;
8047 variable_was_changed (new_var, NULL);
8050 /* Continue traversing the hash table. */
8051 return 1;
8054 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8055 NEW_SET. */
8057 static void
8058 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8059 dataflow_set *new_set)
8061 htab_traverse (shared_hash_htab (old_set->vars),
8062 emit_notes_for_differences_1,
8063 shared_hash_htab (new_set->vars));
8064 htab_traverse (shared_hash_htab (new_set->vars),
8065 emit_notes_for_differences_2,
8066 shared_hash_htab (old_set->vars));
8067 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8070 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8072 static rtx
8073 next_non_note_insn_var_location (rtx insn)
8075 while (insn)
8077 insn = NEXT_INSN (insn);
8078 if (insn == 0
8079 || !NOTE_P (insn)
8080 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8081 break;
8084 return insn;
8087 /* Emit the notes for changes of location parts in the basic block BB. */
/* Replays the recorded micro operations of BB against SET (starting
   from VTI (bb)->in) and emits location notes after each change.  */
8089 static void
8090 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8092 unsigned int i;
8093 micro_operation *mo;
8095 dataflow_set_clear (set);
8096 dataflow_set_copy (set, &VTI (bb)->in);
8098 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8100 rtx insn = mo->insn;
8101 rtx next_insn = next_non_note_insn_var_location (insn);
8103 switch (mo->type)
8105 case MO_CALL:
8106 dataflow_set_clear_at_call (set);
8107 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
/* Expand the recorded argument locations and emit a
   NOTE_INSN_CALL_ARG_LOCATION with the survivors.  */
8109 rtx arguments = mo->u.loc, *p = &arguments, note;
8110 while (*p)
8112 XEXP (XEXP (*p, 0), 1)
8113 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8114 shared_hash_htab (set->vars), true);
8115 /* If expansion is successful, keep it in the list. */
8116 if (XEXP (XEXP (*p, 0), 1))
8117 p = &XEXP (*p, 1);
8118 /* Otherwise, if the following item is data_value for it,
8119 drop it too. */
8120 else if (XEXP (*p, 1)
8121 && REG_P (XEXP (XEXP (*p, 0), 0))
8122 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8123 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8125 && REGNO (XEXP (XEXP (*p, 0), 0))
8126 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8127 0), 0)))
8128 *p = XEXP (XEXP (*p, 1), 1);
8129 /* Just drop this item. */
8130 else
8131 *p = XEXP (*p, 1);
8133 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8134 NOTE_VAR_LOCATION (note) = arguments;
8136 break;
8138 case MO_USE:
8140 rtx loc = mo->u.loc;
8142 if (REG_P (loc))
8143 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8144 else
8145 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8147 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8149 break;
8151 case MO_VAL_LOC:
8153 rtx loc = mo->u.loc;
8154 rtx val, vloc;
8155 tree var;
/* LOC is either (concat VAL VLOC) or a bare VAR_LOCATION.  */
8157 if (GET_CODE (loc) == CONCAT)
8159 val = XEXP (loc, 0);
8160 vloc = XEXP (loc, 1);
8162 else
8164 val = NULL_RTX;
8165 vloc = loc;
8168 var = PAT_VAR_LOCATION_DECL (vloc);
8170 clobber_variable_part (set, NULL_RTX,
8171 dv_from_decl (var), 0, NULL_RTX);
8172 if (val)
8174 if (VAL_NEEDS_RESOLUTION (loc))
8175 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8176 set_variable_part (set, val, dv_from_decl (var), 0,
8177 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8178 INSERT);
8180 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8181 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8182 dv_from_decl (var), 0,
8183 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8184 INSERT);
8186 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8188 break;
8190 case MO_VAL_USE:
8192 rtx loc = mo->u.loc;
8193 rtx val, vloc, uloc;
8195 vloc = uloc = XEXP (loc, 1);
8196 val = XEXP (loc, 0);
8198 if (GET_CODE (val) == CONCAT)
8200 uloc = XEXP (val, 1);
8201 val = XEXP (val, 0);
8204 if (VAL_NEEDS_RESOLUTION (loc))
8205 val_resolve (set, val, vloc, insn);
8206 else
8207 val_store (set, val, uloc, insn, false);
8209 if (VAL_HOLDS_TRACK_EXPR (loc))
8211 if (GET_CODE (uloc) == REG)
8212 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8213 NULL);
8214 else if (GET_CODE (uloc) == MEM)
8215 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8216 NULL);
8219 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8221 break;
8223 case MO_VAL_SET:
8225 rtx loc = mo->u.loc;
8226 rtx val, vloc, uloc, reverse = NULL_RTX;
/* Peel the optional reverse operation and the CONCAT wrappers to
   recover VAL, the underlying location ULOC and VLOC.  */
8228 vloc = loc;
8229 if (VAL_EXPR_HAS_REVERSE (loc))
8231 reverse = XEXP (loc, 1);
8232 vloc = XEXP (loc, 0);
8234 uloc = XEXP (vloc, 1);
8235 val = XEXP (vloc, 0);
8236 vloc = uloc;
8238 if (GET_CODE (val) == CONCAT)
8240 vloc = XEXP (val, 1);
8241 val = XEXP (val, 0);
8244 if (GET_CODE (vloc) == SET)
8246 rtx vsrc = SET_SRC (vloc);
8248 gcc_assert (val != vsrc);
8249 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8251 vloc = SET_DEST (vloc);
8253 if (VAL_NEEDS_RESOLUTION (loc))
8254 val_resolve (set, val, vsrc, insn);
8256 else if (VAL_NEEDS_RESOLUTION (loc))
8258 gcc_assert (GET_CODE (uloc) == SET
8259 && GET_CODE (SET_SRC (uloc)) == REG);
8260 val_resolve (set, val, SET_SRC (uloc), insn);
8263 if (VAL_HOLDS_TRACK_EXPR (loc))
8265 if (VAL_EXPR_IS_CLOBBERED (loc))
8267 if (REG_P (uloc))
8268 var_reg_delete (set, uloc, true);
8269 else if (MEM_P (uloc))
8270 var_mem_delete (set, uloc, true);
8272 else
8274 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8275 rtx set_src = NULL;
8276 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8278 if (GET_CODE (uloc) == SET)
8280 set_src = SET_SRC (uloc);
8281 uloc = SET_DEST (uloc);
8284 if (copied_p)
8286 status = find_src_status (set, set_src);
8288 set_src = find_src_set_src (set, set_src);
8291 if (REG_P (uloc))
8292 var_reg_delete_and_set (set, uloc, !copied_p,
8293 status, set_src);
8294 else if (MEM_P (uloc))
8295 var_mem_delete_and_set (set, uloc, !copied_p,
8296 status, set_src);
8299 else if (REG_P (uloc))
8300 var_regno_delete (set, REGNO (uloc));
8302 val_store (set, val, vloc, insn, true);
8304 if (reverse)
8305 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
8306 insn, false);
8308 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8309 set->vars);
8311 break;
8313 case MO_SET:
8315 rtx loc = mo->u.loc;
8316 rtx set_src = NULL;
8318 if (GET_CODE (loc) == SET)
8320 set_src = SET_SRC (loc);
8321 loc = SET_DEST (loc);
8324 if (REG_P (loc))
8325 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8326 set_src);
8327 else
8328 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8329 set_src);
8331 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8332 set->vars);
8334 break;
8336 case MO_COPY:
8338 rtx loc = mo->u.loc;
8339 enum var_init_status src_status;
8340 rtx set_src = NULL;
8342 if (GET_CODE (loc) == SET)
8344 set_src = SET_SRC (loc);
8345 loc = SET_DEST (loc);
8348 src_status = find_src_status (set, set_src);
8349 set_src = find_src_set_src (set, set_src);
8351 if (REG_P (loc))
8352 var_reg_delete_and_set (set, loc, false, src_status, set_src);
8353 else
8354 var_mem_delete_and_set (set, loc, false, src_status, set_src);
8356 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8357 set->vars);
8359 break;
8361 case MO_USE_NO_VAR:
8363 rtx loc = mo->u.loc;
8365 if (REG_P (loc))
8366 var_reg_delete (set, loc, false);
8367 else
8368 var_mem_delete (set, loc, false);
8370 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8372 break;
8374 case MO_CLOBBER:
8376 rtx loc = mo->u.loc;
8378 if (REG_P (loc))
8379 var_reg_delete (set, loc, true);
8380 else
8381 var_mem_delete (set, loc, true);
8383 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8384 set->vars);
8386 break;
8388 case MO_ADJUST:
8389 set->stack_adjust += mo->u.adjust;
8390 break;
8395 /* Emit notes for the whole function. */
/* Drives note emission: frees the per-block out sets, seeds value
   chains, then walks all basic blocks emitting inter-block and
   intra-block location notes.  */
8397 static void
8398 vt_emit_notes (void)
8400 basic_block bb;
8401 dataflow_set cur;
8403 gcc_assert (!htab_elements (changed_variables));
8405 /* Free memory occupied by the out hash tables, as they aren't used
8406 anymore. */
8407 FOR_EACH_BB (bb)
8408 dataflow_set_clear (&VTI (bb)->out);
8410 /* Enable emitting notes by functions (mainly by set_variable_part and
8411 delete_variable_part). */
8412 emit_notes = true;
8414 if (MAY_HAVE_DEBUG_INSNS)
8416 unsigned int i;
8417 rtx val;
8419 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8420 add_cselib_value_chains (dv_from_value (val));
8421 changed_variables_stack = VEC_alloc (variable, heap, 40);
8422 changed_values_stack = VEC_alloc (rtx, heap, 40);
/* CUR starts empty, so the first emit_notes_for_differences reports
   everything live at the start of the first block.  */
8425 dataflow_set_init (&cur);
8427 FOR_EACH_BB (bb)
8429 /* Emit the notes for changes of variable locations between two
8430 subsequent basic blocks. */
8431 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
8433 /* Emit the notes for the changes in the basic block itself. */
8434 emit_notes_in_bb (bb, &cur);
8436 /* Free memory occupied by the in hash table, we won't need it
8437 again. */
8438 dataflow_set_clear (&VTI (bb)->in);
/* Sanity check: after the last block CUR must differ from empty only
   if the traversal produced no pending changes.  */
8440 #ifdef ENABLE_CHECKING
8441 htab_traverse (shared_hash_htab (cur.vars),
8442 emit_notes_for_differences_1,
8443 shared_hash_htab (empty_shared_hash));
8444 if (MAY_HAVE_DEBUG_INSNS)
8446 unsigned int i;
8447 rtx val;
8449 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8450 remove_cselib_value_chains (dv_from_value (val));
8451 gcc_assert (htab_elements (value_chains) == 0);
8453 #endif
8454 dataflow_set_destroy (&cur);
8456 if (MAY_HAVE_DEBUG_INSNS)
8458 VEC_free (variable, heap, changed_variables_stack);
8459 VEC_free (rtx, heap, changed_values_stack);
8462 emit_notes = false;
8465 /* If there is a declaration and offset associated with register/memory RTL
8466 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8468 static bool
8469 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8471 if (REG_P (rtl))
8473 if (REG_ATTRS (rtl))
8475 *declp = REG_EXPR (rtl);
8476 *offsetp = REG_OFFSET (rtl);
8477 return true;
8480 else if (MEM_P (rtl))
8482 if (MEM_ATTRS (rtl))
8484 *declp = MEM_EXPR (rtl);
8485 *offsetp = INT_MEM_OFFSET (rtl);
8486 return true;
8489 return false;
8492 /* Helper function for vt_add_function_parameter. RTL is
8493 the expression and VAL corresponding cselib_val pointer
8494 for which ENTRY_VALUE should be created. */
8496 static void
8497 create_entry_value (rtx rtl, cselib_val *val)
8499 cselib_val *val2;
8500 struct elt_loc_list *el;
/* Build an ENTRY_VALUE rtx wrapping RTL and prepend it to VAL's
   location list, stamped with the first insn of the function.
   The list cell is GC-allocated because it lives in cselib's
   GC-managed structures.  */
8501 el = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el));
8502 el->loc = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
8503 ENTRY_VALUE_EXP (el->loc) = rtl;
/* Look the ENTRY_VALUE up in cselib so an equivalent VALUE (if any)
   can be cross-linked below.  */
8504 val2 = cselib_lookup_from_insn (el->loc, GET_MODE (rtl), true,
8505 VOIDmode, get_insns ());
8506 el->next = val->locs;
8507 el->setting_insn = get_insns ();
8508 val->locs = el;
/* If cselib already had a distinct VALUE whose first location is the
   same ENTRY_VALUE, preserve it and record VAL as one of its
   locations too, making the equivalence visible both ways.  */
8509 if (val2
8510 && val2 != val
8511 && val2->locs
8512 && rtx_equal_p (val2->locs->loc, el->loc))
8514 struct elt_loc_list *el2;
8516 preserve_value (val2);
8517 el2 = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el2));
8518 el2->next = val2->locs;
8519 el2->loc = val->val_rtx;
8520 el2->setting_insn = get_insns ();
8521 val2->locs = el2;
8525 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
8527 static void
8528 vt_add_function_parameter (tree parm)
8530 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8531 rtx incoming = DECL_INCOMING_RTL (parm);
8532 tree decl;
8533 enum machine_mode mode;
8534 HOST_WIDE_INT offset;
8535 dataflow_set *out;
8536 decl_or_value dv;
/* Ignore anything that is not a genuine parameter with both a home
   (DECL_RTL) and an incoming location of trackable (non-BLK) mode.  */
8538 if (TREE_CODE (parm) != PARM_DECL)
8539 return;
8541 if (!decl_rtl || !incoming)
8542 return;
8544 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8545 return;
8547 /* If there is a DRAP register, rewrite the incoming location of parameters
8548 passed on the stack into MEMs based on the argument pointer, as the DRAP
8549 register can be reused for other purposes and we do not track locations
8550 based on generic registers. But the prerequisite is that this argument
8551 pointer be also the virtual CFA pointer, see vt_initialize. */
8552 if (MEM_P (incoming)
8553 && stack_realign_drap
8554 && arg_pointer_rtx == cfa_base_rtx
8555 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
8556 || (GET_CODE (XEXP (incoming, 0)) == PLUS
8557 && XEXP (XEXP (incoming, 0), 0)
8558 == crtl->args.internal_arg_pointer
8559 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
8561 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
8562 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
8563 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
8564 incoming
8565 = replace_equiv_address_nv (incoming,
8566 plus_constant (arg_pointer_rtx, off));
8569 #ifdef HAVE_window_save
8570 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
8571 If the target machine has an explicit window save instruction, the
8572 actual entry value is the corresponding OUTGOING_REGNO instead. */
8573 if (REG_P (incoming)
8574 && HARD_REGISTER_P (incoming)
8575 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
/* Record the incoming/outgoing pair so later passes can translate
   between the two register windows.  */
8577 parm_reg_t *p
8578 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8579 p->incoming = incoming;
8580 incoming
8581 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
8582 OUTGOING_REGNO (REGNO (incoming)), 0);
8583 p->outgoing = incoming;
/* Same translation for a parameter passed in memory whose address is
   a windowed hard register.  */
8585 else if (MEM_P (incoming)
8586 && REG_P (XEXP (incoming, 0))
8587 && HARD_REGISTER_P (XEXP (incoming, 0)))
8589 rtx reg = XEXP (incoming, 0);
8590 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
8592 parm_reg_t *p
8593 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8594 p->incoming = reg;
8595 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
8596 p->outgoing = reg;
8597 incoming = replace_equiv_address_nv (incoming, reg);
8600 #endif
/* Find the declaration the incoming RTL really refers to; fall back
   to DECL_RTL for parameters passed by invisible reference.  */
8602 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
8604 if (REG_P (incoming) || MEM_P (incoming))
8606 /* This means argument is passed by invisible reference. */
8607 offset = 0;
8608 decl = parm;
8609 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
8611 else
8613 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
8614 return;
8615 offset += byte_lowpart_offset (GET_MODE (incoming),
8616 GET_MODE (decl_rtl));
8620 if (!decl)
8621 return;
8623 if (parm != decl)
8625 /* Assume that DECL_RTL was a pseudo that got spilled to
8626 memory. The spill slot sharing code will force the
8627 memory to reference spill_slot_decl (%sfp), so we don't
8628 match above. That's ok, the pseudo must have referenced
8629 the entire parameter, so just reset OFFSET. */
8630 gcc_assert (decl == get_spill_slot_decl (false));
8631 offset = 0;
8634 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8635 return;
8637 out = &VTI (ENTRY_BLOCK_PTR)->out;
8639 dv = dv_from_decl (parm);
8641 if (target_for_debug_bind (parm)
8642 /* We can't deal with these right now, because this kind of
8643 variable is single-part. ??? We could handle parallels
8644 that describe multiple locations for the same single
8645 value, but ATM we don't. */
8646 && GET_CODE (incoming) != PARALLEL)
8648 cselib_val *val;
8650 /* ??? We shouldn't ever hit this, but it may happen because
8651 arguments passed by invisible reference aren't dealt with
8652 above: incoming-rtl will have Pmode rather than the
8653 expected mode for the type. */
8654 if (offset)
8655 return;
/* Bind the parameter to a cselib VALUE so debug-bind tracking can
   follow it; from here on DV refers to the VALUE, not the decl.  */
8657 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
8658 VOIDmode, get_insns ());
8660 /* ??? Float-typed values in memory are not handled by
8661 cselib. */
8662 if (val)
8664 preserve_value (val);
8665 set_variable_part (out, val->val_rtx, dv, offset,
8666 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8667 dv = dv_from_value (val->val_rtx);
/* Finally record the incoming location itself in the ENTRY block's
   OUT set, as a register or as a memory location.  */
8671 if (REG_P (incoming))
8673 incoming = var_lowpart (mode, incoming);
8674 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8675 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8676 incoming);
8677 set_variable_part (out, incoming, dv, offset,
8678 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8679 if (dv_is_value_p (dv))
8681 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (dv));
8682 create_entry_value (incoming, val);
/* For a reference parameter to an integral type, also create an
   ENTRY_VALUE for the pointed-to object at function entry.  */
8683 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
8684 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
8686 enum machine_mode indmode
8687 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
8688 rtx mem = gen_rtx_MEM (indmode, incoming);
8689 val = cselib_lookup_from_insn (mem, indmode, true,
8690 VOIDmode, get_insns ());
8691 if (val)
8693 preserve_value (val);
8694 create_entry_value (mem, val);
8699 else if (MEM_P (incoming))
8701 incoming = var_lowpart (mode, incoming);
8702 set_variable_part (out, incoming, dv, offset,
8703 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8707 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
8709 static void
8710 vt_add_function_parameters (void)
8712 tree parm;
8714 for (parm = DECL_ARGUMENTS (current_function_decl);
8715 parm; parm = DECL_CHAIN (parm))
8716 vt_add_function_parameter (parm);
8718 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
8720 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
8722 if (TREE_CODE (vexpr) == INDIRECT_REF)
8723 vexpr = TREE_OPERAND (vexpr, 0);
8725 if (TREE_CODE (vexpr) == PARM_DECL
8726 && DECL_ARTIFICIAL (vexpr)
8727 && !DECL_IGNORED_P (vexpr)
8728 && DECL_NAMELESS (vexpr))
8729 vt_add_function_parameter (vexpr);
8733 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8735 static bool
8736 fp_setter (rtx insn)
8738 rtx pat = PATTERN (insn);
8739 if (RTX_FRAME_RELATED_P (insn))
8741 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8742 if (expr)
8743 pat = XEXP (expr, 0);
8745 if (GET_CODE (pat) == SET)
8746 return SET_DEST (pat) == hard_frame_pointer_rtx;
8747 else if (GET_CODE (pat) == PARALLEL)
8749 int i;
8750 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8751 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8752 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8753 return true;
8755 return false;
8758 /* Gather all registers used for passing arguments to other functions
8759 called from the current routine. */
8761 static void
8762 note_register_arguments (rtx insn)
8764 rtx link, x;
8766 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
8767 if (GET_CODE (XEXP (link, 0)) == USE)
8769 x = XEXP (XEXP (link, 0), 0);
8770 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
8771 SET_HARD_REG_BIT (argument_reg_set, REGNO (x));
8775 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8776 ensure it isn't flushed during cselib_reset_table.
8777 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8778 has been eliminated. */
8780 static void
8781 vt_init_cfa_base (void)
8783 cselib_val *val;
/* The CFA base is the virtual frame pointer where the target defines
   FRAME_POINTER_CFA_OFFSET, otherwise the virtual argument pointer;
   the offset translates it back to the CFA.  */
8785 #ifdef FRAME_POINTER_CFA_OFFSET
8786 cfa_base_rtx = frame_pointer_rtx;
8787 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
8788 #else
8789 cfa_base_rtx = arg_pointer_rtx;
8790 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
8791 #endif
/* Give up if the chosen base is the hard frame pointer itself or a
   non-fixed register — neither stays constant across the function.  */
8792 if (cfa_base_rtx == hard_frame_pointer_rtx
8793 || !fixed_regs[REGNO (cfa_base_rtx)])
8795 cfa_base_rtx = NULL_RTX;
8796 return;
8798 if (!MAY_HAVE_DEBUG_INSNS)
8799 return;
8801 /* Tell alias analysis that cfa_base_rtx should share
8802 find_base_term value with stack pointer or hard frame pointer. */
8803 if (!frame_pointer_needed)
8804 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx)
8805 else if (!crtl->stack_realign_tried)
8806 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
/* Create a cselib VALUE for the CFA base, preserve it so it survives
   table resets, and seed the ENTRY block's OUT set with it.  */
8808 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8809 VOIDmode, get_insns ());
8810 preserve_value (val);
8811 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
8812 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8813 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8814 0, NULL_RTX, INSERT);
8817 /* Allocate and initialize the data structures for variable tracking
8818 and parse the RTL to get the micro operations. */
8820 static bool
8821 vt_initialize (void)
8823 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8824 HOST_WIDE_INT fp_cfa_offset = -1;
/* Per-block aux data, allocation pools and hash tables used by the
   whole pass; freed again in vt_finalize.  */
8826 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8828 attrs_pool = create_alloc_pool ("attrs_def pool",
8829 sizeof (struct attrs_def), 1024);
8830 var_pool = create_alloc_pool ("variable_def pool",
8831 sizeof (struct variable_def)
8832 + (MAX_VAR_PARTS - 1)
8833 * sizeof (((variable)NULL)->var_part[0]), 64);
8834 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8835 sizeof (struct location_chain_def),
8836 1024);
8837 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8838 sizeof (struct shared_hash_def), 256);
8839 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8840 empty_shared_hash->refcount = 1;
8841 empty_shared_hash->htab
8842 = htab_create (1, variable_htab_hash, variable_htab_eq,
8843 variable_htab_free);
8844 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8845 variable_htab_free);
8846 if (MAY_HAVE_DEBUG_INSNS)
8848 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8849 sizeof (struct value_chain_def),
8850 1024);
8851 value_chains = htab_create (32, value_chain_htab_hash,
8852 value_chain_htab_eq, NULL);
8855 /* Init the IN and OUT sets. */
8856 FOR_ALL_BB (bb)
8858 VTI (bb)->visited = false;
8859 VTI (bb)->flooded = false;
8860 dataflow_set_init (&VTI (bb)->in);
8861 dataflow_set_init (&VTI (bb)->out);
8862 VTI (bb)->permp = NULL;
/* cselib and VALUE-tracking state is only needed when debug insns
   may be present (-fvar-tracking-assignments).  */
8865 if (MAY_HAVE_DEBUG_INSNS)
8867 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8868 scratch_regs = BITMAP_ALLOC (NULL);
8869 valvar_pool = create_alloc_pool ("small variable_def pool",
8870 sizeof (struct variable_def), 256);
8871 preserved_values = VEC_alloc (rtx, heap, 256);
8873 else
8875 scratch_regs = NULL;
8876 valvar_pool = NULL;
8879 CLEAR_HARD_REG_SET (argument_reg_set);
8881 /* In order to factor out the adjustments made to the stack pointer or to
8882 the hard frame pointer and thus be able to use DW_OP_fbreg operations
8883 instead of individual location lists, we're going to rewrite MEMs based
8884 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
8885 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
8886 resp. arg_pointer_rtx. We can do this either when there is no frame
8887 pointer in the function and stack adjustments are consistent for all
8888 basic blocks or when there is a frame pointer and no stack realignment.
8889 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
8890 has been eliminated. */
8891 if (!frame_pointer_needed)
8893 rtx reg, elim;
/* Frameless case: stack adjustments must be consistent, and the
   virtual CFA pointer must have been eliminated to the stack
   pointer, before we can base locations on the CFA.  */
8895 if (!vt_stack_adjustments ())
8896 return false;
8898 #ifdef FRAME_POINTER_CFA_OFFSET
8899 reg = frame_pointer_rtx;
8900 #else
8901 reg = arg_pointer_rtx;
8902 #endif
8903 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8904 if (elim != reg)
8906 if (GET_CODE (elim) == PLUS)
8907 elim = XEXP (elim, 0);
8908 if (elim == stack_pointer_rtx)
8909 vt_init_cfa_base ();
/* Frame pointer present, no realignment: compute the constant offset
   of the hard frame pointer from the CFA, or -1 if unusable.  */
8912 else if (!crtl->stack_realign_tried)
8914 rtx reg, elim;
8916 #ifdef FRAME_POINTER_CFA_OFFSET
8917 reg = frame_pointer_rtx;
8918 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8919 #else
8920 reg = arg_pointer_rtx;
8921 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8922 #endif
8923 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8924 if (elim != reg)
8926 if (GET_CODE (elim) == PLUS)
8928 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8929 elim = XEXP (elim, 0);
8931 if (elim != hard_frame_pointer_rtx)
8932 fp_cfa_offset = -1;
8934 else
8935 fp_cfa_offset = -1;
8938 /* If the stack is realigned and a DRAP register is used, we're going to
8939 rewrite MEMs based on it representing incoming locations of parameters
8940 passed on the stack into MEMs based on the argument pointer. Although
8941 we aren't going to rewrite other MEMs, we still need to initialize the
8942 virtual CFA pointer in order to ensure that the argument pointer will
8943 be seen as a constant throughout the function.
8945 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
8946 else if (stack_realign_drap)
8948 rtx reg, elim;
8950 #ifdef FRAME_POINTER_CFA_OFFSET
8951 reg = frame_pointer_rtx;
8952 #else
8953 reg = arg_pointer_rtx;
8954 #endif
8955 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8956 if (elim != reg)
8958 if (GET_CODE (elim) == PLUS)
8959 elim = XEXP (elim, 0);
8960 if (elim == hard_frame_pointer_rtx)
8961 vt_init_cfa_base ();
/* With a frame pointer, record which hard registers carry outgoing
   call arguments anywhere in the function.  */
8965 if (frame_pointer_needed)
8967 rtx insn;
8968 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8969 if (CALL_P (insn))
8970 note_register_arguments (insn);
8973 hard_frame_pointer_adjustment = -1;
8975 vt_add_function_parameters ();
/* Main scan: walk maximal chains of fallthru-connected blocks,
   collecting the micro operations for each insn.  */
8977 FOR_EACH_BB (bb)
8979 rtx insn;
8980 HOST_WIDE_INT pre, post = 0;
8981 basic_block first_bb, last_bb;
8983 if (MAY_HAVE_DEBUG_INSNS)
8985 cselib_record_sets_hook = add_with_sets;
8986 if (dump_file && (dump_flags & TDF_DETAILS))
8987 fprintf (dump_file, "first value: %i\n",
8988 cselib_get_next_uid ());
/* Extend [first_bb, last_bb] while each next block is a unique
   fallthru successor, so cselib state can flow across them.  */
8991 first_bb = bb;
8992 for (;;)
8994 edge e;
8995 if (bb->next_bb == EXIT_BLOCK_PTR
8996 || ! single_pred_p (bb->next_bb))
8997 break;
8998 e = find_edge (bb, bb->next_bb);
8999 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
9000 break;
9001 bb = bb->next_bb;
9003 last_bb = bb;
9005 /* Add the micro-operations to the vector. */
9006 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
/* OUT.stack_adjust was precomputed by vt_stack_adjustments; replay
   the adjustments insn by insn and assert we end up at the same
   value (checked at the bottom of this loop).  */
9008 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
9009 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
9010 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
9011 insn = NEXT_INSN (insn))
9013 if (INSN_P (insn))
9015 if (!frame_pointer_needed)
9017 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
9018 if (pre)
9020 micro_operation mo;
9021 mo.type = MO_ADJUST;
9022 mo.u.adjust = pre;
9023 mo.insn = insn;
9024 if (dump_file && (dump_flags & TDF_DETAILS))
9025 log_op_type (PATTERN (insn), bb, insn,
9026 MO_ADJUST, dump_file);
9027 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9028 &mo);
9029 VTI (bb)->out.stack_adjust += pre;
/* Process the insn through cselib (when enabled); add_with_sets is
   normally invoked via the hook, otherwise call it directly.  */
9033 cselib_hook_called = false;
9034 adjust_insn (bb, insn);
9035 if (MAY_HAVE_DEBUG_INSNS)
9037 if (CALL_P (insn))
9038 prepare_call_arguments (bb, insn);
9039 cselib_process_insn (insn);
9040 if (dump_file && (dump_flags & TDF_DETAILS))
9042 print_rtl_single (dump_file, insn);
9043 dump_cselib_table (dump_file);
9046 if (!cselib_hook_called)
9047 add_with_sets (insn, 0, 0);
9048 cancel_changes (0);
9050 if (!frame_pointer_needed && post)
9052 micro_operation mo;
9053 mo.type = MO_ADJUST;
9054 mo.u.adjust = post;
9055 mo.insn = insn;
9056 if (dump_file && (dump_flags & TDF_DETAILS))
9057 log_op_type (PATTERN (insn), bb, insn,
9058 MO_ADJUST, dump_file);
9059 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9060 &mo);
9061 VTI (bb)->out.stack_adjust += post;
/* Once the prologue insn that sets up the frame pointer is seen,
   switch MEM rewriting over to the CFA base.  */
9064 if (bb == prologue_bb
9065 && fp_cfa_offset != -1
9066 && hard_frame_pointer_adjustment == -1
9067 && RTX_FRAME_RELATED_P (insn)
9068 && fp_setter (insn))
9070 vt_init_cfa_base ();
9071 hard_frame_pointer_adjustment = fp_cfa_offset;
9075 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9078 bb = last_bb;
9080 if (MAY_HAVE_DEBUG_INSNS)
9082 cselib_preserve_only_values ();
9083 cselib_reset_table (cselib_get_next_uid ());
9084 cselib_record_sets_hook = NULL;
9088 hard_frame_pointer_adjustment = -1;
9089 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9090 cfa_base_rtx = NULL_RTX;
9091 return true;
9094 /* Get rid of all debug insns from the insn stream. */
9096 static void
9097 delete_debug_insns (void)
9099 basic_block bb;
9100 rtx insn, next;
9102 if (!MAY_HAVE_DEBUG_INSNS)
9103 return;
9105 FOR_EACH_BB (bb)
9107 FOR_BB_INSNS_SAFE (bb, insn, next)
9108 if (DEBUG_INSN_P (insn))
9109 delete_insn (insn);
9113 /* Run a fast, BB-local only version of var tracking, to take care of
9114 information that we don't do global analysis on, such that not all
9115 information is lost. If SKIPPED holds, we're skipping the global
9116 pass entirely, so we should try to use information it would have
9117 handled as well.. */
9119 static void
9120 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9122 /* ??? Just skip it all for now. */
9123 delete_debug_insns ();
9126 /* Free the data structures needed for variable tracking. */
9128 static void
9129 vt_finalize (void)
9131 basic_block bb;
/* Release the per-block micro-operation vectors first, then the
   per-block dataflow sets (including ENTRY/EXIT via FOR_ALL_BB).  */
9133 FOR_EACH_BB (bb)
9135 VEC_free (micro_operation, heap, VTI (bb)->mos);
9138 FOR_ALL_BB (bb)
9140 dataflow_set_destroy (&VTI (bb)->in);
9141 dataflow_set_destroy (&VTI (bb)->out);
9142 if (VTI (bb)->permp)
9144 dataflow_set_destroy (VTI (bb)->permp);
9145 XDELETE (VTI (bb)->permp);
/* Tear down the aux data, hash tables and pools allocated by
   vt_initialize, in the reverse area of their creation.  */
9148 free_aux_for_blocks ();
9149 htab_delete (empty_shared_hash->htab);
9150 htab_delete (changed_variables);
9151 free_alloc_pool (attrs_pool);
9152 free_alloc_pool (var_pool);
9153 free_alloc_pool (loc_chain_pool);
9154 free_alloc_pool (shared_hash_pool);
/* Debug-insn-only state: value chains, cselib and scratch bitmap.  */
9156 if (MAY_HAVE_DEBUG_INSNS)
9158 htab_delete (value_chains);
9159 free_alloc_pool (value_chain_pool);
9160 free_alloc_pool (valvar_pool);
9161 VEC_free (rtx, heap, preserved_values);
9162 cselib_finish ();
9163 BITMAP_FREE (scratch_regs);
9164 scratch_regs = NULL;
9167 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9169 if (vui_vec)
9170 XDELETEVEC (vui_vec);
9171 vui_vec = NULL;
9172 vui_allocated = 0;
9175 /* The entry point to variable tracking pass. */
9177 static inline unsigned int
9178 variable_tracking_main_1 (void)
9180 bool success;
/* A negative flag value means debug insns were created only for
   selective-scheduling purposes; just drop them and do nothing.  */
9182 if (flag_var_tracking_assignments < 0)
9184 delete_debug_insns ();
9185 return 0;
/* Bail out of the global analysis on very large, densely connected
   CFGs; fall back to the (currently stubbed) local-only pass.  */
9188 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9190 vt_debug_insns_local (true);
9191 return 0;
9194 mark_dfs_back_edges ();
9195 if (!vt_initialize ())
9197 vt_finalize ();
9198 vt_debug_insns_local (true);
9199 return 0;
9202 success = vt_find_locations ();
/* If the dataflow analysis failed to converge with assignment
   tracking enabled, retry once without debug insns; our caller
   restores flag_var_tracking_assignments afterwards.  */
9204 if (!success && flag_var_tracking_assignments > 0)
9206 vt_finalize ();
9208 delete_debug_insns ();
9210 /* This is later restored by our caller. */
9211 flag_var_tracking_assignments = 0;
9213 success = vt_initialize ();
9214 gcc_assert (success);
9216 success = vt_find_locations ();
9219 if (!success)
9221 vt_finalize ();
9222 vt_debug_insns_local (false);
9223 return 0;
9226 if (dump_file && (dump_flags & TDF_DETAILS))
9228 dump_dataflow_sets ();
9229 dump_flow_info (dump_file, dump_flags);
/* Note emission is timed separately from the analysis.  */
9232 timevar_push (TV_VAR_TRACKING_EMIT);
9233 vt_emit_notes ();
9234 timevar_pop (TV_VAR_TRACKING_EMIT);
9236 vt_finalize ();
9237 vt_debug_insns_local (false);
9238 return 0;
9241 unsigned int
9242 variable_tracking_main (void)
9244 unsigned int ret;
9245 int save = flag_var_tracking_assignments;
9247 ret = variable_tracking_main_1 ();
9249 flag_var_tracking_assignments = save;
9251 return ret;
9254 static bool
9255 gate_handle_var_tracking (void)
9257 return (flag_var_tracking && !targetm.delay_vartrack);
9262 struct rtl_opt_pass pass_variable_tracking =
9265 RTL_PASS,
9266 "vartrack", /* name */
9267 gate_handle_var_tracking, /* gate */
9268 variable_tracking_main, /* execute */
9269 NULL, /* sub */
9270 NULL, /* next */
9271 0, /* static_pass_number */
9272 TV_VAR_TRACKING, /* tv_id */
9273 0, /* properties_required */
9274 0, /* properties_provided */
9275 0, /* properties_destroyed */
9276 0, /* todo_flags_start */
9277 TODO_verify_rtl_sharing /* todo_flags_finish */