[smatch.git] / ssa.c
// SPDX-License-Identifier: MIT
//
// SSA conversion
// Copyright (C) 2005 Luc Van Oostenryck
//

#include <assert.h>
#include "ssa.h"
#include "lib.h"
#include "sset.h"
#include "dominate.h"
#include "flowgraph.h"
#include "linearize.h"
#include "flow.h"		// for convert_load_instruction()

// Is it possible and desirable for this to be promoted to a pseudo?
static inline bool is_promotable(struct symbol *type)
{
	struct symbol *member;
	int bf_seen;
	int nbr;

	if (type->type == SYM_NODE)
		type = type->ctype.base_type;
	switch (type->type) {
	case SYM_ENUM:
	case SYM_BITFIELD:
	case SYM_PTR:
	case SYM_RESTRICT:	// OK, always integer types
		return 1;
	case SYM_STRUCT:
		// we allow a single scalar field
		// but a run of bitfields counts as one
		nbr = 0;
		bf_seen = 0;
		FOR_EACH_PTR(type->symbol_list, member) {
			if (is_bitfield_type(member)) {
				if (bf_seen)
					continue;
				bf_seen = 1;
			} else {
				bf_seen = 0;
			}
			if (!is_scalar_type(member))
				return 0;
			if (nbr++)
				return 0;
		} END_FOR_EACH_PTR(member);
		if (bf_seen && (type->bit_size > long_ctype.bit_size))
			return 0;
		return 1;
	case SYM_UNION:
		// FIXME: should be like struct but has problems
		//	  when used with/for type coercion
		// -----> OK if only same-sized integral types
		FOR_EACH_PTR(type->symbol_list, member) {
			if (member->bit_size != type->bit_size)
				return 0;
			if (!is_integral_type(member))
				return 0;
		} END_FOR_EACH_PTR(member);
		return 1;
	default:
		break;
	}
	if (type->ctype.base_type == &int_type)
		return 1;
	if (type->ctype.base_type == &fp_type)
		return 1;
	return 0;
}

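// Does instruction 'a' come before instruction 'b'?
// Both instructions must belong to the same basic block.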
static bool insn_before(struct instruction *a, struct instruction *b)
{
	struct basic_block *bb = a->bb;
	struct instruction *insn;

	assert(b->bb == bb);
	FOR_EACH_PTR(bb->insns, insn) {
		if (insn == a)
			return true;
		if (insn == b)
			return false;
	} END_FOR_EACH_PTR(insn);
	assert(0);
}

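// Erase a store that is no longer needed: drop the uses of its
// operands and detach it from its basic block.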
static void kill_store(struct instruction *insn)
{
	remove_use(&insn->src);
	remove_use(&insn->target);
	insn->bb = NULL;
}

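// All loads & stores of this variable happen in a single basic block:
// walk the block in order, kill each store after remembering the value
// it stored and rewrite each load to the last such value (or to an
// undef if no store has been seen yet).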
static void rewrite_local_var(struct basic_block *bb, pseudo_t addr, int nbr_stores, int nbr_uses)
{
	struct instruction *insn;
	pseudo_t val = NULL;

	if (!bb)
		return;

	FOR_EACH_PTR(bb->insns, insn) {
		if (!insn->bb || insn->src != addr)
			continue;
		switch (insn->opcode) {
		case OP_LOAD:
			if (!val)
				val = undef_pseudo();
			convert_load_instruction(insn, val);
			break;
		case OP_STORE:
			val = insn->target;
			// can't use kill_instruction() unless
			// we add a fake user to val
			kill_store(insn);
			break;
		}
	} END_FOR_EACH_PTR(insn);
}

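// The variable has exactly one store: every load dominated by that
// store can directly reuse the stored value. Returns true when no
// other user is left and the store itself could be removed.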
static bool rewrite_single_store(struct instruction *store)
{
	pseudo_t addr = store->src;
	struct pseudo_user *pu;

	FOR_EACH_PTR(addr->users, pu) {
		struct instruction *insn = pu->insn;

		if (insn->opcode != OP_LOAD)
			continue;

		// Let's try to replace the value of the load
		// by the value from the store. This is only valid
		// if the store dominates the load.

		if (insn->bb == store->bb) {
			// the load and the store are in the same BB
			// we can convert if the load is after the store.
			if (!insn_before(store, insn))
				continue;
		} else if (!domtree_dominates(store->bb, insn->bb)) {
			// we can't convert this load
			continue;
		}

		// OK, we can rewrite this load

		// undefs ?

		convert_load_instruction(insn, store->target);
	} END_FOR_EACH_PTR(pu);

	// are there still some unconverted loads?
	if (pseudo_user_list_size(addr->users) > 1)
		return false;

	kill_store(store);
	return true;
}

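// set of BBs (keyed by bb->nr) already added to the 'alpha' worklist,
// reset for each variable being converted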
static struct sset *processed;

// we would like to know:
//	are there one or more stores?
//	are all loads & stores local/done in a single block?
static void ssa_convert_one_var(struct entrypoint *ep, struct symbol *var)
{
	struct basic_block_list *alpha = NULL;
	struct basic_block_list *idf = NULL;
	struct basic_block *samebb = NULL;
	struct instruction *store = NULL;
	struct basic_block *bb;
	struct pseudo_user *pu;
	unsigned long mod = var->ctype.modifiers;
	bool local = true;
	int nbr_stores = 0;
	int nbr_uses = 0;
	pseudo_t addr;

	/* Never used as a symbol? */
	addr = var->pseudo;
	if (!addr)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (mod & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod &= (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	if (!is_promotable(var))
		return;

	// 1) insert in the worklist all BBs that may modify var
	sset_reset(processed);
	FOR_EACH_PTR(addr->users, pu) {
		struct instruction *insn = pu->insn;
		struct basic_block *bb = insn->bb;

		switch (insn->opcode) {
		case OP_STORE:
			nbr_stores++;
			store = insn;
			if (!sset_testset(processed, bb->nr))
				add_bb(&alpha, bb);
			/* fall through */
		case OP_LOAD:
			if (local) {
				if (!samebb)
					samebb = bb;
				else if (samebb != bb)
					local = false;
			}
			nbr_uses++;
			break;
		case OP_SYMADDR:
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		default:
			warning(var->pos, "symbol '%s' pseudo used in unexpected way",
				show_ident(var->ident));
		}
	} END_FOR_EACH_PTR(pu);

	if (nbr_stores == 1) {
		if (rewrite_single_store(store))
			return;
	}

	// if all uses are local to a single block
	// they can easily be rewritten and don't need phi-nodes
	// FIXME: could be done for extended BBs too
	if (local) {
		rewrite_local_var(samebb, addr, nbr_stores, nbr_uses);
		return;
	}

	idf_compute(ep, &idf, alpha);
	FOR_EACH_PTR(idf, bb) {
		struct instruction *node = insert_phi_node(bb, var);
		node->phi_var = var->pseudo;
	} END_FOR_EACH_PTR(bb);
	var->torename = 1;

external_visibility:
	if (mod & (MOD_NONLOCAL | MOD_STATIC))
		return;
	kill_dead_stores(ep, addr, !mod);
}

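// Look up the current value of 'var', starting at 'bb' and walking up
// the dominator tree; returns an undef if no binding is found.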
static pseudo_t lookup_var(struct basic_block *bb, struct symbol *var)
{
	do {
		pseudo_t val = phi_map_lookup(bb->phi_map, var);
		if (val)
			return val;
	} while ((bb = bb->idom));
	return undef_pseudo();
}

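// phis_all:  all phi-nodes seen while renaming the instructions
// phis_used: the subset of phi-nodes whose value is actually needed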
static struct instruction_list *phis_all;
static struct instruction_list *phis_used;

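// Rename a single instruction:
// - a store to a to-be-renamed symbol records its value in the
//   block's phi_map and the store is killed
// - a load from such a symbol is replaced by the value reaching it
// - a phi-node records its own target as the variable's current value
//   and is queued in phis_all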
static void ssa_rename_insn(struct basic_block *bb, struct instruction *insn)
{
	struct symbol *var;
	pseudo_t addr;
	pseudo_t val;

	switch (insn->opcode) {
	case OP_STORE:
		addr = insn->src;
		if (addr->type != PSEUDO_SYM)
			break;
		var = addr->sym;
		if (!var || !var->torename)
			break;
		phi_map_update(&bb->phi_map, var, insn->target);
		kill_store(insn);
		break;
	case OP_LOAD:
		addr = insn->src;
		if (addr->type != PSEUDO_SYM)
			break;
		var = addr->sym;
		if (!var || !var->torename)
			break;
		val = lookup_var(bb, var);
		convert_load_instruction(insn, val);
		break;
	case OP_PHI:
		var = insn->type;
		if (!var || !var->torename)
			break;
		phi_map_update(&bb->phi_map, var, insn->target);
		add_instruction(&phis_all, insn);
		break;
	}
}

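// Rename all still-live instructions of every basic block
// (dead instructions have insn->bb cleared and are skipped).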
static void ssa_rename_insns(struct entrypoint *ep)
{
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *insn;
		FOR_EACH_PTR(bb->insns, insn) {
			if (!insn->bb)
				continue;
			ssa_rename_insn(bb, insn);
		} END_FOR_EACH_PTR(insn);
	} END_FOR_EACH_PTR(bb);
}

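// If 'val' is itself defined by a phi-node, that phi-node is needed
// too: mark it as used and queue it so its arguments get filled in.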
static void mark_phi_used(pseudo_t val)
{
	struct instruction *node;

	if (val->type != PSEUDO_REG)
		return;
	node = val->def;
	if (node->opcode != OP_PHI)
		return;
	if (node->used)
		return;
	node->used = 1;
	add_instruction(&phis_used, node);
}

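// Fill in the arguments of an inserted phi-node: for each parent of
// its basic block, look up the value reaching the end of that parent,
// add a phi-source for it just before the parent's terminator and
// link it into the phi-node's argument list.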
static void ssa_rename_phi(struct instruction *insn)
{
	struct basic_block *par;
	struct symbol *var;

	if (!insn->phi_var)
		return;
	var = insn->phi_var->sym;
	if (!var->torename)
		return;
	FOR_EACH_PTR(insn->bb->parents, par) {
		struct instruction *term = delete_last_instruction(&par->insns);
		pseudo_t val = lookup_var(par, var);
		pseudo_t phi = alloc_phi(par, val, var);
		phi->ident = var->ident;
		add_instruction(&par->insns, term);
		use_pseudo(insn, phi, add_pseudo(&insn->phi_list, phi));
		mark_phi_used(val);
	} END_FOR_EACH_PTR(par);
}

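// Process the inserted phi-nodes: only those whose target has users
// (directly, or indirectly through another needed phi-node) get their
// arguments filled in; mark_phi_used() grows the worklist as needed.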
static void ssa_rename_phis(struct entrypoint *ep)
{
	struct instruction *phi;

	phis_used = NULL;
	FOR_EACH_PTR(phis_all, phi) {
		if (has_users(phi->target)) {
			phi->used = 1;
			add_instruction(&phis_used, phi);
		}
	} END_FOR_EACH_PTR(phi);

	FOR_EACH_PTR(phis_used, phi) {
		if (!phi->bb)
			continue;
		ssa_rename_phi(phi);
	} END_FOR_EACH_PTR(phi);
}

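// Entry point of the SSA conversion: try to promote each memory
// access to a pseudo, insert the needed phi-nodes on the iterated
// dominance frontier and rename the corresponding loads & stores.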
void ssa_convert(struct entrypoint *ep)
{
	struct basic_block *bb;
	pseudo_t pseudo;
	int first, last;

	// calculate the range of the BBs' numbers
	first = ep->entry->bb->nr;
	last = first;
	FOR_EACH_PTR(ep->bbs, bb) {
		int nr = bb->nr;
		if (nr > last)
			last = nr;
	} END_FOR_EACH_PTR(bb);

	processed = sset_init(first, last);

	// try to promote memory accesses to pseudos
	FOR_EACH_PTR(ep->accesses, pseudo) {
		ssa_convert_one_var(ep, pseudo->sym);
	} END_FOR_EACH_PTR(pseudo);

	// rename the converted accesses
	phis_all = phis_used = NULL;
	ssa_rename_insns(ep);
	ssa_rename_phis(ep);
}