gcc/gcse-common.c

/* Shared code for before and after reload gcse implementations.
   Copyright (C) 1997-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.

   It is expected that more hunks of gcse.c and postreload-gcse.c should
   migrate into this file.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "gcse-common.h"

/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

void
canon_list_insert (rtx dest, const_rtx x ATTRIBUTE_UNUSED, void *data)
{
  rtx dest_addr;
  int bb;
  modify_pair pair;
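
  /* Look through SUBREG, ZERO_EXTRACT and STRICT_LOW_PART wrappers to
     find the underlying destination of the store.  */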
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  rtx_insn *insn = ((struct gcse_note_stores_info *) data)->insn;
  bb = BLOCK_FOR_INSN (insn)->index;

  pair.dest = dest;
  pair.dest_addr = dest_addr;
  vec<modify_pair> *canon_mem_list
    = ((struct gcse_note_stores_info *) data)->canon_mem_list;
  canon_mem_list[bb].safe_push (pair);
}

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

void
record_last_mem_set_info_common (rtx_insn *insn,
                                 vec<rtx_insn *> *modify_mem_list,
                                 vec<modify_pair> *canon_modify_mem_list,
                                 bitmap modify_mem_list_set,
                                 bitmap blocks_with_calls)
{
  int bb;

  bb = BLOCK_FOR_INSN (insn)->index;
  modify_mem_list[bb].safe_push (insn);
  bitmap_set_bit (modify_mem_list_set, bb);
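
  /* A CALL_INSN is assumed to clobber all of memory, so just note which
     block it is in; for any other insn also record the canonicalized
     destinations of its stores so that compute_transp can check them
     against a particular memory reference later.  */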
  if (CALL_P (insn))
    bitmap_set_bit (blocks_with_calls, bb);
  else
    {
      struct gcse_note_stores_info data;
      data.insn = insn;
      data.canon_mem_list = canon_modify_mem_list;
      note_stores (PATTERN (insn), canon_list_insert, (void *) &data);
    }
}

/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, reset the INDX bit in BMAP.

   BLOCKS_WITH_CALLS indicates which blocks contain CALL_INSNs which kill
   memory.

   MODIFY_MEM_LIST_SET indicates which blocks have memory stores which might
   kill a particular memory location.

   CANON_MODIFY_MEM_LIST is the canonicalized list of memory locations modified
   for each block.  */

void
compute_transp (const_rtx x, int indx, sbitmap *bmap,
                bitmap blocks_with_calls,
                bitmap modify_mem_list_set,
                vec<modify_pair> *canon_modify_mem_list)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
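    /* A register is opaque in every block that contains a definition of
       it: walk the DF def chain and clear the bit for each such block.  */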
    case REG:
      {
        df_ref def;
        for (def = DF_REG_DEF_CHAIN (REGNO (x));
             def;
             def = DF_REF_NEXT_REG (def))
          bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
      }
      return;

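    /* A MEM is killed in every block that contains a call (calls are
       assumed to clobber all of memory) and in every block with a
       recorded store that may alias it; read-only MEMs are never killed
       by stores.  The MEM's address is checked separately below.  */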
    case MEM:
      if (! MEM_READONLY_P (x))
        {
          bitmap_iterator bi;
          unsigned bb_index;
          rtx x_addr;

          x_addr = get_addr (XEXP (x, 0));
          x_addr = canon_rtx (x_addr);

          /* First handle all the blocks with calls.  We don't need to
             do any list walking for them.  */
          EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
            bitmap_clear_bit (bmap[bb_index], indx);

          /* Now iterate over the blocks which have memory modifications
             but which do not have any calls.  */
          EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
                                          blocks_with_calls,
                                          0, bb_index, bi)
            {
              vec<modify_pair> list
                = canon_modify_mem_list[bb_index];
              modify_pair *pair;
              unsigned ix;

              FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
                {
                  rtx dest = pair->dest;
                  rtx dest_addr = pair->dest_addr;

                  if (canon_true_dependence (dest, GET_MODE (dest),
                                             dest_addr, x, x_addr))
                    {
                      bitmap_clear_bit (bmap[bb_index], indx);
                      break;
                    }
                }
            }
        }

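      /* Transparency of the MEM also depends on its address, so iterate
         on the address expression as well.  */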
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

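  /* For any other rtx, X is transparent only if all of its operands are,
     so recurse on each sub-expression.  */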
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          compute_transp (XEXP (x, i), indx, bmap, blocks_with_calls,
                          modify_mem_list_set, canon_modify_mem_list);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          compute_transp (XVECEXP (x, i, j), indx, bmap, blocks_with_calls,
                          modify_mem_list_set, canon_modify_mem_list);
    }
}