/* Shared code for before and after reload gcse implementations.
   Copyright (C) 1997-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.

   It is expected that more hunks of gcse.c and postreload-gcse.c should
   migrate into this file.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "vec.h"
#include "basic-block.h"
#include "df.h"
#include "gcse-common.h"

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */
void
canon_list_insert (rtx dest, const_rtx x ATTRIBUTE_UNUSED, void *data)
{
  rtx dest_addr;
  rtx_insn *insn;
  int bb;
  modify_pair pair;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = ((struct gcse_note_stores_info *) data)->insn;
  bb = BLOCK_FOR_INSN (insn)->index;

  pair.dest = dest;
  pair.dest_addr = dest_addr;
  vec<modify_pair> *canon_mem_list
    = ((struct gcse_note_stores_info *) data)->canon_mem_list;
  canon_mem_list[bb].safe_push (pair);
}

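/* For reference, a sketch of the two record types threaded through the DATA
   pointer above; they are declared in gcse-common.h, which remains the
   authoritative definition:

     struct modify_pair
     {
       rtx dest;          (a MEM that is stored to)
       rtx dest_addr;     (the canonicalized address of that MEM)
     };

     struct gcse_note_stores_info
     {
       rtx_insn *insn;
       vec<modify_pair> *canon_mem_list;
     };

   canon_list_insert pushes one modify_pair per recorded store, which is why
   consumers such as compute_transp below read DEST and DEST_ADDR back as a
   pair.  */
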
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */
void
record_last_mem_set_info_common (rtx_insn *insn,
				 vec<rtx_insn *> *modify_mem_list,
				 vec<modify_pair> *canon_modify_mem_list,
				 bitmap modify_mem_list_set,
				 bitmap blocks_with_calls)
{
  int bb;

  bb = BLOCK_FOR_INSN (insn)->index;
  modify_mem_list[bb].safe_push (insn);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    bitmap_set_bit (blocks_with_calls, bb);
  else
    {
      struct gcse_note_stores_info data;
      data.insn = insn;
      data.canon_mem_list = canon_modify_mem_list;
      note_stores (PATTERN (insn), canon_list_insert, (void *) &data);
    }
}

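/* A minimal sketch of how a client pass might wire this up, modeled on the
   record_last_mem_set_info wrappers in gcse.c and postreload-gcse.c; the
   per-pass variable names below are illustrative, not defined in this file:

     static void
     record_last_mem_set_info (rtx_insn *insn)
     {
       if (!flag_gcse_lm)
	 return;

       record_last_mem_set_info_common (insn, modify_mem_list,
					canon_modify_mem_list,
					modify_mem_list_set,
					blocks_with_calls);
     }

   The two vectors are indexed by basic block number, so a pass is expected
   to allocate one element per basic block before scanning insns.  */
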
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, reset the INDX bit in BMAP.

   BLOCKS_WITH_CALLS indicates which blocks contain CALL_INSNs which kill
   memory.

   MODIFY_MEM_LIST_SET indicates which blocks have memory stores which might
   kill a particular memory location.

   CANON_MODIFY_MEM_LIST is the canonicalized list of memory locations modified
   for each block.  */
void
compute_transp (const_rtx x, int indx, sbitmap *bmap,
		bitmap blocks_with_calls,
		bitmap modify_mem_list_set,
		vec<modify_pair> *canon_modify_mem_list)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	df_ref def;
	for (def = DF_REG_DEF_CHAIN (REGNO (x));
	     def;
	     def = DF_REF_NEXT_REG (def))
	  bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
      }
      return;

    case MEM:
      if (! MEM_READONLY_P (x))
	{
	  bitmap_iterator bi;
	  unsigned bb_index;
	  rtx x_addr;

	  x_addr = get_addr (XEXP (x, 0));
	  x_addr = canon_rtx (x_addr);

	  /* First handle all the blocks with calls.  We don't need to
	     do any list walking for them.  */
	  EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
	    bitmap_clear_bit (bmap[bb_index], indx);

	  /* Now iterate over the blocks which have memory modifications
	     but which do not have any calls.  */
	  EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
					  blocks_with_calls,
					  0, bb_index, bi)
	    {
	      vec<modify_pair> list = canon_modify_mem_list[bb_index];
	      modify_pair *pair;
	      unsigned ix;

	      FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
		{
		  rtx dest = pair->dest;
		  rtx dest_addr = pair->dest_addr;

		  if (canon_true_dependence (dest, GET_MODE (dest),
					     dest_addr, x, x_addr))
		    {
		      bitmap_clear_bit (bmap[bb_index], indx);
		      break;
		    }
		}
	    }
	}

      /* Continue with the address of the MEM itself.  */
      x = XEXP (x, 0);
      goto repeat;

    default:
      break;
    }
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
    }
}

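/* A sketch of a typical call site, modeled on compute_local_properties in
   gcse.c; the expression hash table walk shown here is illustrative, not
   part of this file.  For each expression EXPR with index INDX, the pass
   starts with every bit of the transparency bitmap set and lets this
   function clear the blocks that kill EXPR:

     compute_transp (expr->expr, indx, transp,
		     blocks_with_calls, modify_mem_list_set,
		     canon_modify_mem_list);

   Afterward, bit INDX of TRANSP[BB] remains set exactly for those blocks
   that do not modify any operand of EXPR.  */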