/* Shared code for before and after reload gcse implementations.
   Copyright (C) 1997-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.

   It is expected that more hunks of gcse.c and postreload-gcse.c should
   migrate into this file.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "gcse-common.h"
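
/* For reference while reading this file: the record types used below are
   defined in gcse-common.h.  Roughly (the header is authoritative; the
   sketch here is reconstructed from the uses in this file):

     struct modify_pair
     {
       rtx dest;        (a MEM written by the insn)
       rtx dest_addr;   (the canonicalized address of DEST)
     };

     struct gcse_note_stores_info
     {
       rtx_insn *insn;
       vec<modify_pair> *canon_mem_list;
     };
*/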
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */
void
canon_list_insert (rtx dest, const_rtx x ATTRIBUTE_UNUSED, void *data)
{
  rtx dest_addr;
  int bb;
  modify_pair pair;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;
  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  rtx_insn *insn = ((struct gcse_note_stores_info *)data)->insn;
  bb = BLOCK_FOR_INSN (insn)->index;

  pair.dest = dest;
  pair.dest_addr = dest_addr;
  vec<modify_pair> *canon_mem_list
    = ((struct gcse_note_stores_info *)data)->canon_mem_list;
  canon_mem_list[bb].safe_push (pair);
}
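
/* Usage note (illustrative): canon_list_insert is a note_stores callback,
   so DEST is the location being stored to, X is the enclosing SET/CLOBBER
   (unused here), and DATA is the caller-supplied pointer, which
   record_last_mem_set_info_common below passes as a gcse_note_stores_info:

     note_stores (PATTERN (insn), canon_list_insert, (void *) &data);
*/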
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */
void
record_last_mem_set_info_common (rtx_insn *insn,
				 vec<rtx_insn *> *modify_mem_list,
				 vec<modify_pair> *canon_modify_mem_list,
				 bitmap modify_mem_list_set,
				 bitmap blocks_with_calls)
{
  int bb;

  bb = BLOCK_FOR_INSN (insn)->index;
  modify_mem_list[bb].safe_push (insn);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    bitmap_set_bit (blocks_with_calls, bb);
  else
    {
      struct gcse_note_stores_info data;
      data.insn = insn;
      data.canon_mem_list = canon_modify_mem_list;
      note_stores (PATTERN (insn), canon_list_insert, (void *) &data);
    }
}
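
/* Illustrative call site only; the table names below are hypothetical
   stand-ins for the per-basic-block tables kept by passes such as gcse.c
   and postreload-gcse.c, which call this once for every insn that may
   modify memory:

     if (INSN_P (insn))
       record_last_mem_set_info_common (insn, modify_mem_list,
					canon_modify_mem_list,
					modify_mem_list_set,
					blocks_with_calls);
*/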
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, reset the INDX bit in BMAP.

   BLOCKS_WITH_CALLS indicates which blocks contain CALL_INSNs which kill
   memory.

   MODIFY_MEM_LIST_SET indicates which blocks have memory stores which might
   kill a particular memory location.

   CANON_MODIFY_MEM_LIST is the canonicalized list of memory locations modified
   for each block.  */
void
compute_transp (const_rtx x, int indx, sbitmap *bmap,
		bitmap blocks_with_calls,
		bitmap modify_mem_list_set,
		vec<modify_pair> *canon_modify_mem_list)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	df_ref def;
	for (def = DF_REG_DEF_CHAIN (REGNO (x));
	     def;
	     def = DF_REF_NEXT_REG (def))
	  bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
      }

      return;

    case MEM:
      if (! MEM_READONLY_P (x))
	{
	  bitmap_iterator bi;
	  unsigned bb_index;
	  rtx x_addr;

	  x_addr = get_addr (XEXP (x, 0));
	  x_addr = canon_rtx (x_addr);
	  /* First handle all the blocks with calls.  We don't need to
	     do any list walking for them.  */
	  EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
	    {
	      bitmap_clear_bit (bmap[bb_index], indx);
	    }
	  /* Now iterate over the blocks which have memory modifications
	     but which do not have any calls.  */
	  EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
					  blocks_with_calls,
					  0, bb_index, bi)
	    {
	      vec<modify_pair> list
		= canon_modify_mem_list[bb_index];
	      modify_pair *pair;
	      unsigned ix;

	      FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
		{
		  rtx dest = pair->dest;
		  rtx dest_addr = pair->dest_addr;

		  if (canon_true_dependence (dest, GET_MODE (dest),
					     dest_addr, x, x_addr))
		    {
		      bitmap_clear_bit (bmap[bb_index], indx);
		      break;
		    }
		}
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
    }
}
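
/* Illustrative usage sketch only; the names (transp, n_exprs, EXPR) mirror
   the gcse.c usage but are hypothetical here.  A client pass builds its
   transparency matrix by starting with every bit set and letting
   compute_transp clear the bit of each block that may modify the
   expression:

     sbitmap *transp
       = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
     bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
     for each candidate expression EXPR:
       compute_transp (EXPR->expr, EXPR->bitmap_index, transp,
		       blocks_with_calls, modify_mem_list_set,
		       canon_modify_mem_list);
*/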