PR rtl-optimization/82913
[official-gcc.git] / gcc / rtl-chkp.c
blob 64194ca6112a222a2a4ca007274703b258667ee0
/* RTL manipulation functions exported by Pointer Bounds Checker.
   Copyright (C) 2014-2017 Free Software Foundation, Inc.
   Contributed by Ilya Enkovich (ilya.enkovich@intel.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "expr.h"
#include "rtl-chkp.h"
#include "tree-chkp.h"
/* Map of tree nodes to associated bounds rtxes; maintained by
   chkp_set_rtl_bounds and queried by chkp_get_rtl_bounds.  */
static hash_map<tree, rtx> *chkp_rtx_bounds_map;

/* Get bounds rtx associated with NODE via
   chkp_set_rtl_bounds call.  */
rtx
chkp_get_rtl_bounds (tree node)
{
  rtx *slot;

  if (!chkp_rtx_bounds_map)
    return NULL_RTX;

  slot = chkp_rtx_bounds_map->get (node);
  return slot ? *slot : NULL_RTX;
}

/* Associate bounds rtx VAL with NODE.  */
void
chkp_set_rtl_bounds (tree node, rtx val)
{
  if (!chkp_rtx_bounds_map)
    chkp_rtx_bounds_map = new hash_map<tree, rtx>;

  chkp_rtx_bounds_map->put (node, val);
}

/* Reset all bounds stored via chkp_set_rtl_bounds.  */
void
chkp_reset_rtl_bounds ()
{
  if (!chkp_rtx_bounds_map)
    return;

  delete chkp_rtx_bounds_map;
  chkp_rtx_bounds_map = NULL;
}
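/* A minimal usage sketch (illustrative only, not code from this file),
   assuming DECL is a tree node and BND a bounds rtx computed elsewhere:

     chkp_set_rtl_bounds (DECL, BND);       remember bounds for DECL
     bnd = chkp_get_rtl_bounds (DECL);      returns BND (or NULL_RTX)
     chkp_reset_rtl_bounds ();              drop all cached associations  */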
/* Split SLOT identifying slot for function value or
   argument into two parts SLOT_VAL and SLOT_BND.
   The first is the slot for the regular value and the
   other is for bounds.  */
void
chkp_split_slot (rtx slot, rtx *slot_val, rtx *slot_bnd)
{
  int i;
  int val_num = 0;
  int bnd_num = 0;
  rtx *val_tmps;
  rtx *bnd_tmps;

  *slot_bnd = 0;

  if (!slot
      || GET_CODE (slot) != PARALLEL)
    {
      *slot_val = slot;
      return;
    }

  val_tmps = XALLOCAVEC (rtx, XVECLEN (slot, 0));
  bnd_tmps = XALLOCAVEC (rtx, XVECLEN (slot, 0));

  for (i = 0; i < XVECLEN (slot, 0); i++)
    {
      rtx elem = XVECEXP (slot, 0, i);
      rtx reg = GET_CODE (elem) == EXPR_LIST ? XEXP (elem, 0) : elem;

      if (!reg)
	continue;

      /* Elements in a pointer bounds mode (and constant bounds) go to
	 the bounds part; everything else belongs to the value part.  */
      if (POINTER_BOUNDS_MODE_P (GET_MODE (reg)) || CONST_INT_P (reg))
	bnd_tmps[bnd_num++] = elem;
      else
	val_tmps[val_num++] = elem;
    }

  gcc_assert (val_num);

  if (!bnd_num)
    {
      *slot_val = slot;
      return;
    }

  if ((GET_CODE (val_tmps[0]) == EXPR_LIST) || (val_num > 1))
    *slot_val = gen_rtx_PARALLEL (GET_MODE (slot),
				  gen_rtvec_v (val_num, val_tmps));
  else
    *slot_val = val_tmps[0];

  if ((GET_CODE (bnd_tmps[0]) == EXPR_LIST) || (bnd_num > 1))
    *slot_bnd = gen_rtx_PARALLEL (VOIDmode,
				  gen_rtvec_v (bnd_num, bnd_tmps));
  else
    *slot_bnd = bnd_tmps[0];
}
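/* For illustration (x86/MPX-style RTL, not taken from this file): a value
   returned together with its bounds might be described by

     (parallel [(expr_list (reg:DI ax) (const_int 0))
		(expr_list (reg:BND64 bnd0) (const_int 0))])

   for which chkp_split_slot sets
     SLOT_VAL = (parallel [(expr_list (reg:DI ax) (const_int 0))])
     SLOT_BND = (parallel [(expr_list (reg:BND64 bnd0) (const_int 0))]).  */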
/* Join VAL and BND, previously split from a single slot for a
   function value or argument, back into one rtx and return it.  */
rtx
chkp_join_splitted_slot (rtx val, rtx bnd)
{
  rtx res;
  int i, n = 0;

  if (!bnd)
    return val;

  /* Count the elements of the resulting PARALLEL.  */
  if (GET_CODE (val) == PARALLEL)
    n += XVECLEN (val, 0);
  else
    n++;

  if (GET_CODE (bnd) == PARALLEL)
    n += XVECLEN (bnd, 0);
  else
    n++;

  res = gen_rtx_PARALLEL (GET_MODE (val), rtvec_alloc (n));

  n = 0;

  /* Copy value elements first, then bounds elements.  */
  if (GET_CODE (val) == PARALLEL)
    for (i = 0; i < XVECLEN (val, 0); i++)
      XVECEXP (res, 0, n++) = XVECEXP (val, 0, i);
  else
    XVECEXP (res, 0, n++) = val;

  if (GET_CODE (bnd) == PARALLEL)
    for (i = 0; i < XVECLEN (bnd, 0); i++)
      XVECEXP (res, 0, n++) = XVECEXP (bnd, 0, i);
  else
    XVECEXP (res, 0, n++) = bnd;

  return res;
}
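/* Note that, for the example above, chkp_join_splitted_slot applied to
   SLOT_VAL and SLOT_BND rebuilds the combined two-element PARALLEL, so
   for such slots the two functions are inverses of each other.  */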
/* If PAR is a PARALLEL holding registers then transform
   it into a PARALLEL holding EXPR_LISTs of those regs
   and a zero constant (similar to how a function value
   returned in multiple registers is represented).  */
void
chkp_put_regs_to_expr_list (rtx par)
{
  int n;

  if (GET_CODE (par) != PARALLEL
      || GET_CODE (XVECEXP (par, 0, 0)) == EXPR_LIST)
    return;

  for (n = 0; n < XVECLEN (par, 0); n++)
    XVECEXP (par, 0, n) = gen_rtx_EXPR_LIST (VOIDmode,
					     XVECEXP (par, 0, n),
					     const0_rtx);
}
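/* For example (with an illustrative register choice):

     (parallel [(reg:BND64 bnd0) (reg:BND64 bnd1)])

   is transformed in place into

     (parallel [(expr_list (reg:BND64 bnd0) (const_int 0))
		(expr_list (reg:BND64 bnd1) (const_int 0))]).  */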
/* Search rtx PAR describing a function return value for the
   item holding the value at offset OFFS and return it.
   Return NULL if the item is not found.  */
rtx
chkp_get_value_with_offs (rtx par, rtx offs)
{
  int n;

  gcc_assert (GET_CODE (par) == PARALLEL);

  for (n = 0; n < XVECLEN (par, 0); n++)
    {
      rtx par_offs = XEXP (XVECEXP (par, 0, n), 1);

      if (INTVAL (offs) == INTVAL (par_offs))
	return XEXP (XVECEXP (par, 0, n), 0);
    }

  return NULL;
}
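/* For example (illustrative), given

     PAR = (parallel [(expr_list (reg:DI ax) (const_int 0))
		      (expr_list (reg:DI dx) (const_int 8))])

   and OFFS = (const_int 8), the function returns (reg:DI dx).  */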
/* Emit instructions to store BOUNDS for pointer VALUE
   stored in MEM.
   The function is used by expand to pass bounds for args
   passed on the stack.  */
void
chkp_emit_bounds_store (rtx bounds, rtx value, rtx mem)
{
  gcc_assert (MEM_P (mem));

  if (REG_P (bounds) || CONST_INT_P (bounds))
    {
      rtx ptr;

      if (REG_P (value))
	ptr = value;
      else
	{
	  rtx slot = adjust_address (value, Pmode, 0);
	  ptr = gen_reg_rtx (Pmode);
	  emit_move_insn (ptr, slot);
	}

      /* A constant here identifies a special bounds slot; the actual
	 bounds have to be loaded via the target hook first.  */
      if (CONST_INT_P (bounds))
	bounds = targetm.calls.load_bounds_for_arg (value, ptr, bounds);

      targetm.calls.store_bounds_for_arg (ptr, mem,
					  bounds, NULL);
    }
  else
    {
      int i;

      gcc_assert (GET_CODE (bounds) == PARALLEL);
      gcc_assert (GET_CODE (value) == PARALLEL || MEM_P (value) || REG_P (value));

      /* Store bounds element by element, matching each bounds entry
	 with the pointer located at its offset in VALUE.  */
      for (i = 0; i < XVECLEN (bounds, 0); i++)
	{
	  rtx reg = XEXP (XVECEXP (bounds, 0, i), 0);
	  rtx offs = XEXP (XVECEXP (bounds, 0, i), 1);
	  rtx slot = adjust_address (mem, Pmode, INTVAL (offs));
	  rtx ptr;

	  if (GET_CODE (value) == PARALLEL)
	    ptr = chkp_get_value_with_offs (value, offs);
	  else if (MEM_P (value))
	    {
	      rtx tmp = adjust_address (value, Pmode, INTVAL (offs));
	      ptr = gen_reg_rtx (Pmode);
	      emit_move_insn (ptr, tmp);
	    }
	  else
	    ptr = gen_rtx_SUBREG (Pmode, value, INTVAL (offs));

	  targetm.calls.store_bounds_for_arg (ptr, slot, reg, NULL);
	}
    }
}
/* Emit code to copy bounds for structure VALUE of type TYPE
   copied to SLOT.  */
void
chkp_copy_bounds_for_stack_parm (rtx slot, rtx value, tree type)
{
  bitmap have_bound;
  bitmap_iterator bi;
  unsigned i;
  rtx tmp = NULL, bnd;

  gcc_assert (TYPE_SIZE (type));
  gcc_assert (MEM_P (value));
  gcc_assert (MEM_P (slot));
  gcc_assert (RECORD_OR_UNION_TYPE_P (type));

  bitmap_obstack_initialize (NULL);
  have_bound = BITMAP_ALLOC (NULL);
  chkp_find_bound_slots (type, have_bound);

  /* Each bit in HAVE_BOUND corresponds to a pointer-sized slot of the
     structure; copy bounds for every pointer found there.  */
  EXECUTE_IF_SET_IN_BITMAP (have_bound, 0, i, bi)
    {
      rtx ptr = adjust_address (value, Pmode, i * POINTER_SIZE / 8);
      rtx to = adjust_address (slot, Pmode, i * POINTER_SIZE / 8);

      if (!tmp)
	tmp = gen_reg_rtx (Pmode);

      emit_move_insn (tmp, ptr);
      bnd = targetm.calls.load_bounds_for_arg (ptr, tmp, NULL);
      targetm.calls.store_bounds_for_arg (tmp, to, bnd, NULL);
    }

  BITMAP_FREE (have_bound);
  bitmap_obstack_release (NULL);
}
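/* Illustrative example (assuming chkp_find_bound_slots marks the
   pointer-sized slots of TYPE that hold pointers): for a 64-bit target
   and struct S { char *p; long n; char *q; }, bits 0 and 2 would be set
   in HAVE_BOUND, so bounds are loaded for the pointers at byte offsets
   0 and 16 of VALUE and stored for the matching offsets in SLOT.  */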