/*
** Memory access optimizations.
** AA: Alias Analysis using high-level semantic disambiguation.
** FWD: Load Forwarding (L2L) + Store Forwarding (S2L).
** DSE: Dead-Store Elimination.
** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
*/

#define lj_opt_mem_c
#define LUA_CORE

#include "lj_obj.h"

#if LJ_HASJIT

#include "lj_tab.h"
#include "lj_ir.h"
#include "lj_jit.h"
#include "lj_iropt.h"

/* Some local macros to save typing. Undef'd at the end. */
#define IR(ref) (&J->cur.ir[(ref)])
#define fins (&J->fold.ins)
#define fleft (&J->fold.left)
#define fright (&J->fold.right)

/*
** Caveat #1: return value is not always a TRef -- only use with tref_ref().
** Caveat #2: FWD relies on active CSE for xREF operands -- see lj_opt_fold().
*/

/* Return values from alias analysis. */
typedef enum {
  ALIAS_NO,   /* The two refs CANNOT alias (exact). */
  ALIAS_MAY,  /* The two refs MAY alias (inexact). */
  ALIAS_MUST  /* The two refs MUST alias (exact). */
} AliasRet;

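/* Informal summary of how these results are used below: ALIAS_NO lets a
** search continue past an unrelated access, ALIAS_MUST enables exact
** store-to-load forwarding or dead-store elimination, and ALIAS_MAY
** conservatively limits the search or forces an emit/reload.
*/
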
/* -- ALOAD/HLOAD forwarding and ASTORE/HSTORE elimination ---------------- */

/* Simplified escape analysis: check for intervening stores. */
static AliasRet aa_escape(jit_State *J, IRIns *ir, IRIns *stop)
{
  IRRef ref = (IRRef)(ir - J->cur.ir);  /* The ref that might be stored. */
  for (ir++; ir < stop; ir++)
    if (ir->op2 == ref &&
        (ir->o == IR_ASTORE || ir->o == IR_HSTORE ||
         ir->o == IR_USTORE || ir->o == IR_FSTORE))
      return ALIAS_MAY;  /* Reference was stored and might alias. */
  return ALIAS_NO;  /* Reference was not stored. */
}

/* Alias analysis for two different table references. */
static AliasRet aa_table(jit_State *J, IRRef ta, IRRef tb)
{
  IRIns *taba = IR(ta), *tabb = IR(tb);
  int newa, newb;
  lua_assert(ta != tb);
  lua_assert(irt_istab(taba->t) && irt_istab(tabb->t));
  /* Disambiguate new allocations. */
  newa = (taba->o == IR_TNEW || taba->o == IR_TDUP);
  newb = (tabb->o == IR_TNEW || tabb->o == IR_TDUP);
  if (newa && newb)
    return ALIAS_NO;  /* Two different allocations never alias. */
  if (newb) {  /* At least one allocation? */
    IRIns *tmp = taba; taba = tabb; tabb = tmp;
  } else if (!newa) {
    return ALIAS_MAY;  /* Anything else: we just don't know. */
  }
  return aa_escape(J, taba, tabb);
}

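/* Informal example: for Lua code like 'local t1, t2 = {}, {}' both tables
** come from IR_TNEW, so accesses to t1 and t2 get ALIAS_NO. If only one side
** is a fresh allocation, aa_escape() checks whether its reference was stored
** anywhere in between; an unstored new table cannot be reached through the
** other reference.
*/
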
/* Alias analysis for array and hash access using key-based disambiguation. */
static AliasRet aa_ahref(jit_State *J, IRIns *refa, IRIns *refb)
{
  IRRef ka = refa->op2;
  IRRef kb = refb->op2;
  IRIns *keya, *keyb;
  IRRef ta, tb;
  if (refa == refb)
    return ALIAS_MUST;  /* Shortcut for same refs. */
  keya = IR(ka);
  if (keya->o == IR_KSLOT) { ka = keya->op1; keya = IR(ka); }
  keyb = IR(kb);
  if (keyb->o == IR_KSLOT) { kb = keyb->op1; keyb = IR(kb); }
  ta = (refa->o==IR_HREFK || refa->o==IR_AREF) ? IR(refa->op1)->op1 : refa->op1;
  tb = (refb->o==IR_HREFK || refb->o==IR_AREF) ? IR(refb->op1)->op1 : refb->op1;
  if (ka == kb) {
    /* Same key. Check for same table with different ref (NEWREF vs. HREF). */
    if (ta == tb)
      return ALIAS_MUST;  /* Same key, same table. */
    else
      return aa_table(J, ta, tb);  /* Same key, possibly different table. */
  }
  if (irref_isk(ka) && irref_isk(kb))
    return ALIAS_NO;  /* Different constant keys. */
  if (refa->o == IR_AREF) {
    /* Disambiguate array references based on index arithmetic. */
    int32_t ofsa = 0, ofsb = 0;
    IRRef basea = ka, baseb = kb;
    lua_assert(refb->o == IR_AREF);
    /* Gather base and offset from t[base] or t[base+-ofs]. */
    if (keya->o == IR_ADD && irref_isk(keya->op2)) {
      basea = keya->op1;
      ofsa = IR(keya->op2)->i;
      if (basea == kb && ofsa != 0)
        return ALIAS_NO;  /* t[base+-ofs] vs. t[base]. */
    }
    if (keyb->o == IR_ADD && irref_isk(keyb->op2)) {
      baseb = keyb->op1;
      ofsb = IR(keyb->op2)->i;
      if (ka == baseb && ofsb != 0)
        return ALIAS_NO;  /* t[base] vs. t[base+-ofs]. */
    }
    if (basea == baseb && ofsa != ofsb)
      return ALIAS_NO;  /* t[base+-o1] vs. t[base+-o2] and o1 != o2. */
  } else {
    /* Disambiguate hash references based on the type of their keys. */
    lua_assert((refa->o==IR_HREF || refa->o==IR_HREFK || refa->o==IR_NEWREF) &&
               (refb->o==IR_HREF || refb->o==IR_HREFK || refb->o==IR_NEWREF));
    if (!irt_sametype(keya->t, keyb->t))
      return ALIAS_NO;  /* Different key types. */
  }
  if (ta == tb)
    return ALIAS_MAY;  /* Same table, cannot disambiguate keys. */
  else
    return aa_table(J, ta, tb);  /* Try to disambiguate tables. */
}

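/* Informal examples of the key-based disambiguation above:
**   t[1] vs. t[2]     -> different constant keys, ALIAS_NO.
**   t[i] vs. t[i+1]   -> same index base, non-zero constant offset, ALIAS_NO.
**   t.foo vs. t[1.5]  -> different key types in the hash part, ALIAS_NO.
**   t[i] vs. t[j]     -> same table, unknown keys, ALIAS_MAY.
*/
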
/* Array and hash load forwarding. */
static TRef fwd_ahload(jit_State *J, IRRef xref)
{
  IRIns *xr = IR(xref);
  IRRef lim = xref;  /* Search limit. */
  IRRef ref;

  /* Search for conflicting stores. */
  ref = J->chain[fins->o+IRDELTA_L2S];
  while (ref > xref) {
    IRIns *store = IR(ref);
    switch (aa_ahref(J, xr, IR(store->op1))) {
    case ALIAS_NO:   break;  /* Continue searching. */
    case ALIAS_MAY:  lim = ref; goto cselim;  /* Limit search for load. */
    case ALIAS_MUST: return store->op2;  /* Store forwarding. */
    }
    ref = store->prev;
  }

  /* No conflicting store (yet): const-fold loads from allocations. */
  {
    IRIns *ir = (xr->o == IR_HREFK || xr->o == IR_AREF) ? IR(xr->op1) : xr;
    IRRef tab = ir->op1;
    ir = IR(tab);
    if (ir->o == IR_TNEW || (ir->o == IR_TDUP && irref_isk(xr->op2))) {
      /* A NEWREF with a number key may end up pointing to the array part.
      ** But it's referenced from HSTORE and not found in the ASTORE chain.
      ** Or a NEWREF may rehash the table and move unrelated number keys.
      ** For now simply consider this a conflict without forwarding anything.
      */
      if (xr->o == IR_AREF) {
        IRRef ref2 = J->chain[IR_NEWREF];
        while (ref2 > tab) {
          IRIns *newref = IR(ref2);
          if (irt_isnum(IR(newref->op2)->t))
            goto cselim;
          ref2 = newref->prev;
        }
      } else {
        IRIns *key = IR(xr->op2);
        if (key->o == IR_KSLOT) key = IR(key->op1);
        if (irt_isnum(key->t) && J->chain[IR_NEWREF] > tab)
          goto cselim;
      }
      /* NEWREF inhibits CSE for HREF, and dependent FLOADs from HREFK/AREF.
      ** But the above search for conflicting stores was limited by xref.
      ** So continue searching, limited by the TNEW/TDUP. Store forwarding
      ** is ok, too. A conflict does NOT limit the search for a matching load.
      */
      while (ref > tab) {
        IRIns *store = IR(ref);
        switch (aa_ahref(J, xr, IR(store->op1))) {
        case ALIAS_NO:   break;  /* Continue searching. */
        case ALIAS_MAY:  goto cselim;  /* Conflicting store. */
        case ALIAS_MUST: return store->op2;  /* Store forwarding. */
        }
        ref = store->prev;
      }
      if (ir->o == IR_TNEW && !irt_isnil(fins->t))
        return 0;  /* Type instability in loop-carried dependency. */
      if (irt_ispri(fins->t)) {
        return TREF_PRI(irt_type(fins->t));
      } else if (irt_isnum(fins->t) || (LJ_DUALNUM && irt_isint(fins->t)) ||
                 irt_isstr(fins->t)) {
        TValue keyv;
        cTValue *tv;
        IRIns *key = IR(xr->op2);
        if (key->o == IR_KSLOT) key = IR(key->op1);
        lj_ir_kvalue(J->L, &keyv, key);
        tv = lj_tab_get(J->L, ir_ktab(IR(ir->op1)), &keyv);
        if (itype2irt(tv) != irt_type(fins->t))
          return 0;  /* Type instability in loop-carried dependency. */
        if (irt_isnum(fins->t))
          return lj_ir_knum_u64(J, tv->u64);
        else if (LJ_DUALNUM && irt_isint(fins->t))
          return lj_ir_kint(J, intV(tv));
        else
          return lj_ir_kstr(J, strV(tv));
      }
      /* Otherwise: don't intern as a constant. */
    }
  }

cselim:
  /* Try to find a matching load. Below the conflicting store, if any. */
  ref = J->chain[fins->o];
  while (ref > lim) {
    IRIns *load = IR(ref);
    if (load->op1 == xref)
      return ref;  /* Load forwarding. */
    ref = load->prev;
  }
  return 0;  /* Conflict or no match. */
}

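/* Two effects of the search above, sketched informally:
**   t[k] = x; y = t[k]         -- ALIAS_MUST store: y reuses x directly
**                                 (store-to-load forwarding).
**   local t = {1,2,3}; y = t[2] -- load from a TDUP template with a constant
**                                 key and no conflicting store: folded to
**                                 the constant taken from the template table.
*/
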
/* Reassociate ALOAD across PHIs to handle t[i-1] forwarding case. */
static TRef fwd_aload_reassoc(jit_State *J)
{
  IRIns *irx = IR(fins->op1);
  IRIns *key = IR(irx->op2);
  if (key->o == IR_ADD && irref_isk(key->op2)) {
    IRIns *add2 = IR(key->op1);
    if (add2->o == IR_ADD && irref_isk(add2->op2) &&
        IR(key->op2)->i == -IR(add2->op2)->i) {
      IRRef ref = J->chain[IR_AREF];
      IRRef lim = add2->op1;
      if (irx->op1 > lim) lim = irx->op1;
      while (ref > lim) {
        IRIns *ir = IR(ref);
        if (ir->op1 == irx->op1 && ir->op2 == add2->op1)
          return fwd_ahload(J, ref);
        ref = ir->prev;
      }
    }
  }
  return 0;
}

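/* Sketch of the case handled here: around a LOOP the index is loop-carried,
** so the load of t[i-1] in the next iteration shows up as an index of the
** form (i+1)-1. The ADD/ADD pair with cancelling constants is detected above
** and the forwarding is retried with the existing AREF(t, i), i.e. the slot
** stored in the previous iteration.
*/
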
/* ALOAD forwarding. */
TRef LJ_FASTCALL lj_opt_fwd_aload(jit_State *J)
{
  IRRef ref;
  if ((ref = fwd_ahload(J, fins->op1)) ||
      (ref = fwd_aload_reassoc(J)))
    return ref;
  return EMITFOLD;
}

/* HLOAD forwarding. */
TRef LJ_FASTCALL lj_opt_fwd_hload(jit_State *J)
{
  IRRef ref = fwd_ahload(J, fins->op1);
  if (ref)
    return ref;
  return EMITFOLD;
}

/* HREFK forwarding. */
TRef LJ_FASTCALL lj_opt_fwd_hrefk(jit_State *J)
{
  IRRef tab = fleft->op1;
  IRRef ref = J->chain[IR_NEWREF];
  while (ref > tab) {
    IRIns *newref = IR(ref);
    if (tab == newref->op1) {
      if (fright->op1 == newref->op2)
        return ref;  /* Forward from NEWREF. */
      else
        goto docse;
    } else if (aa_table(J, tab, newref->op1) != ALIAS_NO) {
      goto docse;
    }
    ref = newref->prev;
  }
  /* No conflicting NEWREF: key location unchanged for HREFK of TDUP. */
  if (IR(tab)->o == IR_TDUP)
    fins->t.irt &= ~IRT_GUARD;  /* Drop HREFK guard. */
docse:
  return CSEFOLD;
}

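/* Note: dropping the guard is only safe because a TDUP takes its hash layout
** from the template table and no aliasing NEWREF was found above, so the
** constant node slot selected by HREFK cannot have moved.
*/
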
/* Check whether HREF of TNEW/TDUP can be folded to niltv. */
int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J)
{
  IRRef lim = fins->op1;  /* Search limit. */
  IRRef ref;

  /* The key for an ASTORE may end up in the hash part after a NEWREF. */
  if (irt_isnum(fright->t) && J->chain[IR_NEWREF] > lim) {
    ref = J->chain[IR_ASTORE];
    while (ref > lim) {
      if (ref < J->chain[IR_NEWREF])
        return 0;  /* Conflict. */
      ref = IR(ref)->prev;
    }
  }

  /* Search for conflicting stores. */
  ref = J->chain[IR_HSTORE];
  while (ref > lim) {
    IRIns *store = IR(ref);
    if (aa_ahref(J, fins, IR(store->op1)) != ALIAS_NO)
      return 0;  /* Conflict. */
    ref = store->prev;
  }

  return 1;  /* No conflict. Can fold to niltv. */
}

/* Check whether there's no aliasing NEWREF for the left operand. */
int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim)
{
  IRRef ta = fins->op1;
  IRRef ref = J->chain[IR_NEWREF];
  while (ref > lim) {
    IRIns *newref = IR(ref);
    if (ta == newref->op1 || aa_table(J, ta, newref->op1) != ALIAS_NO)
      return 0;  /* Conflict. */
    ref = newref->prev;
  }
  return 1;  /* No conflict. Can safely FOLD/CSE. */
}

/* ASTORE/HSTORE elimination. */
TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J)
{
  IRRef xref = fins->op1;  /* xREF reference. */
  IRRef val = fins->op2;  /* Stored value reference. */
  IRIns *xr = IR(xref);
  IRRef1 *refp = &J->chain[fins->o];
  IRRef ref = *refp;
  while (ref > xref) {  /* Search for redundant or conflicting stores. */
    IRIns *store = IR(ref);
    switch (aa_ahref(J, xr, IR(store->op1))) {
    case ALIAS_NO:
      break;  /* Continue searching. */
    case ALIAS_MAY:  /* Store to MAYBE the same location. */
      if (store->op2 != val)  /* Conflict if the value is different. */
        goto doemit;
      break;  /* Otherwise continue searching. */
    case ALIAS_MUST:  /* Store to the same location. */
      if (store->op2 == val)  /* Same value: drop the new store. */
        return DROPFOLD;
      /* Different value: try to eliminate the redundant store. */
      if (ref > J->chain[IR_LOOP]) {  /* Quick check to avoid crossing LOOP. */
        IRIns *ir;
        /* Check for any intervening guards (includes conflicting loads). */
        for (ir = IR(J->cur.nins-1); ir > store; ir--)
          if (irt_isguard(ir->t) || ir->o == IR_CALLL)
            goto doemit;  /* No elimination possible. */
        /* Remove redundant store from chain and replace with NOP. */
        *refp = store->prev;
        lj_ir_nop(store);
        /* Now emit the new store instead. */
      }
      goto doemit;
    }
    ref = *(refp = &store->prev);
  }
doemit:
  return EMITFOLD;  /* Otherwise we have a conflict or simply no match. */
}

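/* Informal examples of the elimination above:
**   t.x = 1; t.x = 2   -- the first store is replaced by a NOP, provided no
**                         guarded instruction or CALLL occurs in between.
**   t.x = y; t.x = y   -- the second store is simply dropped (DROPFOLD).
*/
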
/* -- ULOAD forwarding ---------------------------------------------------- */

/* The current alias analysis for upvalues is very simplistic. It only
** disambiguates between the unique upvalues of the same function.
** This is good enough for now, since most upvalues are read-only.
**
** A more precise analysis would be feasible with the help of the parser:
** generate a unique key for every upvalue, even across all prototypes.
** Lacking a realistic use-case, it's unclear whether this is beneficial.
*/
static AliasRet aa_uref(IRIns *refa, IRIns *refb)
{
  if (refa->o != refb->o)
    return ALIAS_NO;  /* Different UREFx type. */
  if (refa->op1 == refb->op1) {  /* Same function. */
    if (refa->op2 == refb->op2)
      return ALIAS_MUST;  /* Same function, same upvalue idx. */
    else
      return ALIAS_NO;  /* Same function, different upvalue idx. */
  } else {  /* Different functions, check disambiguation hash values. */
    if (((refa->op2 ^ refb->op2) & 0xff))
      return ALIAS_NO;  /* Upvalues with different hash values cannot alias. */
    else
      return ALIAS_MAY;  /* No conclusion can be drawn for same hash value. */
  }
}

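/* The low 8 bits of op2 act as the disambiguation hash here: upvalues of
** different functions can only alias if those bits match, which keeps the
** check cheap at the cost of some ALIAS_MAY answers on hash collisions.
*/
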
/* ULOAD forwarding. */
TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J)
{
  IRRef uref = fins->op1;
  IRRef lim = REF_BASE;  /* Search limit. */
  IRIns *xr = IR(uref);
  IRRef ref;

  /* Search for conflicting stores. */
  ref = J->chain[IR_USTORE];
  while (ref > lim) {
    IRIns *store = IR(ref);
    switch (aa_uref(xr, IR(store->op1))) {
    case ALIAS_NO:   break;  /* Continue searching. */
    case ALIAS_MAY:  lim = ref; goto cselim;  /* Limit search for load. */
    case ALIAS_MUST: return store->op2;  /* Store forwarding. */
    }
    ref = store->prev;
  }

cselim:
  /* Try to find a matching load. Below the conflicting store, if any. */

  ref = J->chain[IR_ULOAD];
  while (ref > lim) {
    IRIns *ir = IR(ref);
    if (ir->op1 == uref ||
        (IR(ir->op1)->op12 == IR(uref)->op12 && IR(ir->op1)->o == IR(uref)->o))
      return ref;  /* Match for identical or equal UREFx (non-CSEable UREFO). */
    ref = ir->prev;
  }
  return lj_ir_emit(J);
}

/* USTORE elimination. */
TRef LJ_FASTCALL lj_opt_dse_ustore(jit_State *J)
{
  IRRef xref = fins->op1;  /* xREF reference. */
  IRRef val = fins->op2;  /* Stored value reference. */
  IRIns *xr = IR(xref);
  IRRef1 *refp = &J->chain[IR_USTORE];
  IRRef ref = *refp;
  while (ref > xref) {  /* Search for redundant or conflicting stores. */
    IRIns *store = IR(ref);
    switch (aa_uref(xr, IR(store->op1))) {
    case ALIAS_NO:
      break;  /* Continue searching. */
    case ALIAS_MAY:  /* Store to MAYBE the same location. */
      if (store->op2 != val)  /* Conflict if the value is different. */
        goto doemit;
      break;  /* Otherwise continue searching. */
    case ALIAS_MUST:  /* Store to the same location. */
      if (store->op2 == val)  /* Same value: drop the new store. */
        return DROPFOLD;
      /* Different value: try to eliminate the redundant store. */
      if (ref > J->chain[IR_LOOP]) {  /* Quick check to avoid crossing LOOP. */
        IRIns *ir;
        /* Check for any intervening guards (includes conflicting loads). */
        for (ir = IR(J->cur.nins-1); ir > store; ir--)
          if (irt_isguard(ir->t))
            goto doemit;  /* No elimination possible. */
        /* Remove redundant store from chain and replace with NOP. */
        *refp = store->prev;
        lj_ir_nop(store);
        if (ref+1 < J->cur.nins &&
            store[1].o == IR_OBAR && store[1].op1 == xref) {
          IRRef1 *bp = &J->chain[IR_OBAR];
          IRIns *obar;
          for (obar = IR(*bp); *bp > ref+1; obar = IR(*bp))
            bp = &obar->prev;
          /* Remove OBAR, too. */
          *bp = obar->prev;
          lj_ir_nop(obar);
        }
        /* Now emit the new store instead. */
      }
      goto doemit;
    }
    ref = *(refp = &store->prev);
  }
doemit:
  return EMITFOLD;  /* Otherwise we have a conflict or simply no match. */
}

/* -- FLOAD forwarding and FSTORE elimination ----------------------------- */

/* Alias analysis for field access.
** Field loads are cheap and field stores are rare.
** Simple disambiguation based on field types is good enough.
*/
static AliasRet aa_fref(jit_State *J, IRIns *refa, IRIns *refb)
{
  if (refa->op2 != refb->op2)
    return ALIAS_NO;  /* Different fields. */
  if (refa->op1 == refb->op1)
    return ALIAS_MUST;  /* Same field, same object. */
  else if (refa->op2 >= IRFL_TAB_META && refa->op2 <= IRFL_TAB_NOMM)
    return aa_table(J, refa->op1, refb->op1);  /* Disambiguate tables. */
  else
    return ALIAS_MAY;  /* Same field, possibly different object. */
}

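/* E.g. a store to the metatable field (IRFL_TAB_META) never aliases a load
** of the array size field (IRFL_TAB_ASIZE): different field IDs give
** ALIAS_NO. For table fields, aa_table() can further tell two distinct
** allocations apart.
*/
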
/* Only the loads for mutable fields end up here (see FOLD). */
TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J)
{
  IRRef oref = fins->op1;  /* Object reference. */
  IRRef fid = fins->op2;  /* Field ID. */
  IRRef lim = oref;  /* Search limit. */
  IRRef ref;

  /* Search for conflicting stores. */
  ref = J->chain[IR_FSTORE];
  while (ref > oref) {
    IRIns *store = IR(ref);
    switch (aa_fref(J, fins, IR(store->op1))) {
    case ALIAS_NO:   break;  /* Continue searching. */
    case ALIAS_MAY:  lim = ref; goto cselim;  /* Limit search for load. */
    case ALIAS_MUST: return store->op2;  /* Store forwarding. */
    }
    ref = store->prev;
  }

  /* No conflicting store: const-fold field loads from allocations. */
  if (fid == IRFL_TAB_META) {
    IRIns *ir = IR(oref);
    if (ir->o == IR_TNEW || ir->o == IR_TDUP)
      return lj_ir_knull(J, IRT_TAB);
  }

cselim:
  /* Try to find a matching load. Below the conflicting store, if any. */
  return lj_opt_cselim(J, lim);
}

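/* E.g. when a freshly created table (TNEW/TDUP) is indexed, the metatable
** check loads IRFL_TAB_META; folding that load to NULL here lets later folds
** drop the metamethod check entirely.
*/
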
/* FSTORE elimination. */
TRef LJ_FASTCALL lj_opt_dse_fstore(jit_State *J)
{
  IRRef fref = fins->op1;  /* FREF reference. */
  IRRef val = fins->op2;  /* Stored value reference. */
  IRIns *xr = IR(fref);
  IRRef1 *refp = &J->chain[IR_FSTORE];
  IRRef ref = *refp;
  while (ref > fref) {  /* Search for redundant or conflicting stores. */
    IRIns *store = IR(ref);
    switch (aa_fref(J, xr, IR(store->op1))) {
    case ALIAS_NO:
      break;  /* Continue searching. */
    case ALIAS_MAY:
      if (store->op2 != val)  /* Conflict if the value is different. */
        goto doemit;
      break;  /* Otherwise continue searching. */
    case ALIAS_MUST:
      if (store->op2 == val)  /* Same value: drop the new store. */
        return DROPFOLD;
      /* Different value: try to eliminate the redundant store. */
      if (ref > J->chain[IR_LOOP]) {  /* Quick check to avoid crossing LOOP. */
        IRIns *ir;
        /* Check for any intervening guards or conflicting loads. */
        for (ir = IR(J->cur.nins-1); ir > store; ir--)
          if (irt_isguard(ir->t) || (ir->o == IR_FLOAD && ir->op2 == xr->op2))
            goto doemit;  /* No elimination possible. */
        /* Remove redundant store from chain and replace with NOP. */
        *refp = store->prev;
        lj_ir_nop(store);
        /* Now emit the new store instead. */
      }
      goto doemit;
    }
    ref = *(refp = &store->prev);
  }
doemit:
  return EMITFOLD;  /* Otherwise we have a conflict or simply no match. */
}

/* -- XLOAD forwarding and XSTORE elimination ----------------------------- */

/* Find cdata allocation for a reference (if any). */
static IRIns *aa_findcnew(jit_State *J, IRIns *ir)
{
  while (ir->o == IR_ADD) {
    if (!irref_isk(ir->op1)) {
      IRIns *ir1 = aa_findcnew(J, IR(ir->op1));  /* Left-recursion. */
      if (ir1) return ir1;
    }
    if (irref_isk(ir->op2)) return NULL;
    ir = IR(ir->op2);  /* Flatten right-recursion. */
  }
  return ir->o == IR_CNEW ? ir : NULL;
}

/* Alias analysis for two cdata allocations. */
static AliasRet aa_cnew(jit_State *J, IRIns *refa, IRIns *refb)
{
  IRIns *cnewa = aa_findcnew(J, refa);
  IRIns *cnewb = aa_findcnew(J, refb);
  if (cnewa == cnewb)
    return ALIAS_MAY;  /* Same allocation or neither is an allocation. */
  if (cnewa && cnewb)
    return ALIAS_NO;  /* Two different allocations never alias. */
  if (cnewb) { cnewa = cnewb; refb = refa; }
  return aa_escape(J, cnewa, refb);
}

/* Alias analysis for XLOAD/XSTORE. */
static AliasRet aa_xref(jit_State *J, IRIns *refa, IRIns *xa, IRIns *xb)
{
  ptrdiff_t ofsa = 0, ofsb = 0;
  IRIns *refb = IR(xb->op1);
  IRIns *basea = refa, *baseb = refb;
  if (refa == refb && irt_sametype(xa->t, xb->t))
    return ALIAS_MUST;  /* Shortcut for same refs with identical type. */
  /* Offset-based disambiguation. */
  if (refa->o == IR_ADD && irref_isk(refa->op2)) {
    IRIns *irk = IR(refa->op2);
    basea = IR(refa->op1);
    ofsa = (LJ_64 && irk->o == IR_KINT64) ? (ptrdiff_t)ir_k64(irk)->u64 :
                                            (ptrdiff_t)irk->i;
  }
  if (refb->o == IR_ADD && irref_isk(refb->op2)) {
    IRIns *irk = IR(refb->op2);
    baseb = IR(refb->op1);
    ofsb = (LJ_64 && irk->o == IR_KINT64) ? (ptrdiff_t)ir_k64(irk)->u64 :
                                            (ptrdiff_t)irk->i;
  }
  /* Treat constified pointers like base vs. base+offset. */
  if (basea->o == IR_KPTR && baseb->o == IR_KPTR) {
    ofsb += (char *)ir_kptr(baseb) - (char *)ir_kptr(basea);
    baseb = basea;
  }
  /* This implements (very) strict aliasing rules.
  ** Different types do NOT alias, except for differences in signedness.
  ** Type punning through unions is allowed (but forces a reload).
  */
  if (basea == baseb) {
    ptrdiff_t sza = irt_size(xa->t), szb = irt_size(xb->t);
    if (ofsa == ofsb) {
      if (sza == szb && irt_isfp(xa->t) == irt_isfp(xb->t))
        return ALIAS_MUST;  /* Same-sized, same-kind. May need to convert. */
    } else if (ofsa + sza <= ofsb || ofsb + szb <= ofsa) {
      return ALIAS_NO;  /* Non-overlapping base+-o1 vs. base+-o2. */
    }
    /* NYI: extract, extend or reinterpret bits (int <-> fp). */
    return ALIAS_MAY;  /* Overlapping or type punning: force reload. */
  }
  if (!irt_sametype(xa->t, xb->t) &&
      !(irt_typerange(xa->t, IRT_I8, IRT_U64) &&
        ((xa->t.irt - IRT_I8) ^ (xb->t.irt - IRT_I8)) == 1))
    return ALIAS_NO;
  /* NYI: structural disambiguation. */
  return aa_cnew(J, basea, baseb);  /* Try to disambiguate allocations. */
}

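/* Roughly, for FFI accesses: an int32_t store and a uint32_t load at the
** same address are treated as ALIAS_MUST (signedness is ignored), accesses
** to [p+0..3] and [p+4..7] as ALIAS_NO, and an overlapping int/float pun as
** ALIAS_MAY, which forces a reload instead of a bit reinterpretation.
*/
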
/* Return CSEd reference or 0. Caveat: swaps lower ref to the right! */
static IRRef reassoc_trycse(jit_State *J, IROp op, IRRef op1, IRRef op2)
{
  IRRef ref = J->chain[op];
  IRRef lim = op1;
  if (op2 > lim) { lim = op2; op2 = op1; op1 = lim; }
  while (ref > lim) {
    IRIns *ir = IR(ref);
    if (ir->op1 == op1 && ir->op2 == op2)
      return ref;
    ref = ir->prev;
  }
  return 0;
}

/* Reassociate index references. */
static IRRef reassoc_xref(jit_State *J, IRIns *ir)
{
  ptrdiff_t ofs = 0;
  if (ir->o == IR_ADD && irref_isk(ir->op2)) {  /* Get constant offset. */
    IRIns *irk = IR(ir->op2);
    ofs = (LJ_64 && irk->o == IR_KINT64) ? (ptrdiff_t)ir_k64(irk)->u64 :
                                           (ptrdiff_t)irk->i;
    ir = IR(ir->op1);
  }
  if (ir->o == IR_ADD) {  /* Add of base + index. */
    /* Index ref > base ref for loop-carried dependences. Only check op1. */
    IRIns *ir2, *ir1 = IR(ir->op1);
    int32_t shift = 0;
    IRRef idxref;
    /* Determine index shifts. Don't bother with IR_MUL here. */
    if (ir1->o == IR_BSHL && irref_isk(ir1->op2))
      shift = IR(ir1->op2)->i;
    else if (ir1->o == IR_ADD && ir1->op1 == ir1->op2)
      shift = 1;
    else
      ir1 = ir;
    ir2 = IR(ir1->op1);
    /* A non-reassociated add. Must be a loop-carried dependence. */
    if (ir2->o == IR_ADD && irt_isint(ir2->t) && irref_isk(ir2->op2))
      ofs += (ptrdiff_t)IR(ir2->op2)->i << shift;
    else
      return 0;
    idxref = ir2->op1;
    /* Try to CSE the reassociated chain. Give up if not found. */
    if (ir1 != ir &&
        !(idxref = reassoc_trycse(J, ir1->o, idxref,
                                  ir1->o == IR_BSHL ? ir1->op2 : idxref)))
      return 0;
    if (!(idxref = reassoc_trycse(J, IR_ADD, idxref, ir->op2)))
      return 0;
    if (ofs != 0) {
      IRRef refk = tref_ref(lj_ir_kintp(J, ofs));
      if (!(idxref = reassoc_trycse(J, IR_ADD, idxref, refk)))
        return 0;
    }
    return idxref;  /* Success, found a reassociated index reference. Phew. */
  }
  return 0;  /* Failure. */
}

/* XLOAD forwarding. */
TRef LJ_FASTCALL lj_opt_fwd_xload(jit_State *J)
{
  IRRef xref = fins->op1;
  IRIns *xr = IR(xref);
  IRRef lim = xref;  /* Search limit. */
  IRRef ref;

  if ((fins->op2 & IRXLOAD_READONLY))
    goto cselim;
  if ((fins->op2 & IRXLOAD_VOLATILE))
    goto doemit;

  /* Search for conflicting stores. */
  ref = J->chain[IR_XSTORE];
retry:
  if (J->chain[IR_CALLXS] > lim) lim = J->chain[IR_CALLXS];
  if (J->chain[IR_XBAR] > lim) lim = J->chain[IR_XBAR];
  while (ref > lim) {
    IRIns *store = IR(ref);
    switch (aa_xref(J, xr, fins, store)) {
    case ALIAS_NO:   break;  /* Continue searching. */
    case ALIAS_MAY:  lim = ref; goto cselim;  /* Limit search for load. */
    case ALIAS_MUST:
      /* Emit conversion if the loaded type doesn't match the forwarded type. */
      if (!irt_sametype(fins->t, IR(store->op2)->t)) {
        IRType dt = irt_type(fins->t), st = irt_type(IR(store->op2)->t);
        if (dt == IRT_I8 || dt == IRT_I16) {  /* Trunc + sign-extend. */
          st = dt | IRCONV_SEXT;
          dt = IRT_INT;
        } else if (dt == IRT_U8 || dt == IRT_U16) {  /* Trunc + zero-extend. */
          st = dt;
          dt = IRT_INT;
        }
        fins->ot = IRT(IR_CONV, dt);
        fins->op1 = store->op2;
        fins->op2 = (dt<<5)|st;
        return RETRYFOLD;
      }
      return store->op2;  /* Store forwarding. */
    }
    ref = store->prev;
  }

cselim:
  /* Try to find a matching load. Below the conflicting store, if any. */
  ref = J->chain[IR_XLOAD];
  while (ref > lim) {
    /* CSE for XLOAD depends on the type, but not on the IRXLOAD_* flags. */
    if (IR(ref)->op1 == xref && irt_sametype(IR(ref)->t, fins->t))
      return ref;
    ref = IR(ref)->prev;
  }

  /* Reassociate XLOAD across PHIs to handle a[i-1] forwarding case. */
  if (!(fins->op2 & IRXLOAD_READONLY) && J->chain[IR_LOOP] &&
      xref == fins->op1 && (xref = reassoc_xref(J, xr)) != 0) {
    ref = J->chain[IR_XSTORE];
    while (ref > lim)  /* Skip stores that have already been checked. */
      ref = IR(ref)->prev;
    lim = xref;
    xr = IR(xref);
    goto retry;  /* Retry with the reassociated reference. */
  }
doemit:
  return EMITFOLD;
}

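/* Example of the conversion path above: storing an int to an int8_t element
** and loading it back forwards the stored value through a CONV int.i8 with
** sign-extension instead of re-reading memory.
*/
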
/* XSTORE elimination. */
TRef LJ_FASTCALL lj_opt_dse_xstore(jit_State *J)
{
  IRRef xref = fins->op1;
  IRIns *xr = IR(xref);
  IRRef lim = xref;  /* Search limit. */
  IRRef val = fins->op2;  /* Stored value reference. */
  IRRef1 *refp = &J->chain[IR_XSTORE];
  IRRef ref = *refp;
  if (J->chain[IR_CALLXS] > lim) lim = J->chain[IR_CALLXS];
  if (J->chain[IR_XBAR] > lim) lim = J->chain[IR_XBAR];
  if (J->chain[IR_XSNEW] > lim) lim = J->chain[IR_XSNEW];
  while (ref > lim) {  /* Search for redundant or conflicting stores. */
    IRIns *store = IR(ref);
    switch (aa_xref(J, xr, fins, store)) {
    case ALIAS_NO:
      break;  /* Continue searching. */
    case ALIAS_MAY:
      if (store->op2 != val)  /* Conflict if the value is different. */
        goto doemit;
      break;  /* Otherwise continue searching. */
    case ALIAS_MUST:
      if (store->op2 == val)  /* Same value: drop the new store. */
        return DROPFOLD;
      /* Different value: try to eliminate the redundant store. */
      if (ref > J->chain[IR_LOOP]) {  /* Quick check to avoid crossing LOOP. */
        IRIns *ir;
        /* Check for any intervening guards or any XLOADs (no AA performed). */
        for (ir = IR(J->cur.nins-1); ir > store; ir--)
          if (irt_isguard(ir->t) || ir->o == IR_XLOAD)
            goto doemit;  /* No elimination possible. */
        /* Remove redundant store from chain and replace with NOP. */
        *refp = store->prev;
        lj_ir_nop(store);
        /* Now emit the new store instead. */
      }
      goto doemit;
    }
    ref = *(refp = &store->prev);
  }
doemit:
  return EMITFOLD;  /* Otherwise we have a conflict or simply no match. */
}

/* -- Forwarding of lj_tab_len -------------------------------------------- */

/* This is rather simplistic right now, but better than nothing. */
TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J)
{
  IRRef tab = fins->op1;  /* Table reference. */
  IRRef lim = tab;  /* Search limit. */
  IRRef ref;

  /* Any ASTORE is a conflict and limits the search. */
  if (J->chain[IR_ASTORE] > lim) lim = J->chain[IR_ASTORE];

  /* Search for conflicting HSTORE with numeric key. */
  ref = J->chain[IR_HSTORE];
  while (ref > lim) {
    IRIns *store = IR(ref);
    IRIns *href = IR(store->op1);
    IRIns *key = IR(href->op2);
    if (irt_isnum(key->o == IR_KSLOT ? IR(key->op1)->t : key->t)) {
      lim = ref;  /* Conflicting store found, limits search for TLEN. */
      break;
    }
    ref = store->prev;
  }

  /* Try to find a matching load. Below the conflicting store, if any. */
  return lj_opt_cselim(J, lim);
}

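/* Informally: a second #t on the same trace can reuse the first result as
** long as no ASTORE and no HSTORE with a numeric key appears in between;
** as the comment above says, this is deliberately simplistic.
*/
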
/* -- ASTORE/HSTORE previous type analysis -------------------------------- */

/* Check whether the previous value for a table store is non-nil.
** This can be derived either from a previous store or from a previous
** load (because all loads from tables perform a type check).
**
** The result of the analysis can be used to avoid the metatable check
** and the guard against HREF returning niltv. Both of these are cheap,
** so let's not spend too much effort on the analysis.
**
** A result of 1 is exact: previous value CANNOT be nil.
** A result of 0 is inexact: previous value MAY be nil.
*/
int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref)
{
  /* First check stores. */
  IRRef ref = J->chain[loadop+IRDELTA_L2S];
  while (ref > xref) {
    IRIns *store = IR(ref);
    if (store->op1 == xref) {  /* Same xREF. */
      /* A nil store MAY alias, but a non-nil store MUST alias. */
      return !irt_isnil(store->t);
    } else if (irt_isnil(store->t)) {  /* Must check any nil store. */
      IRRef skref = IR(store->op1)->op2;
      IRRef xkref = IR(xref)->op2;
      /* Same key type MAY alias. Need ALOAD check due to multiple int types. */
      if (loadop == IR_ALOAD || irt_sametype(IR(skref)->t, IR(xkref)->t)) {
        if (skref == xkref || !irref_isk(skref) || !irref_isk(xkref))
          return 0;  /* A nil store with same const key or var key MAY alias. */
        /* Different const keys CANNOT alias. */
      }  /* Different key types CANNOT alias. */
    }  /* Other non-nil stores MAY alias. */
    ref = store->prev;
  }

  /* Check loads since nothing could be derived from stores. */
  ref = J->chain[loadop];
  while (ref > xref) {
    IRIns *load = IR(ref);
    if (load->op1 == xref) {  /* Same xREF. */
      /* A nil load MAY alias, but a non-nil load MUST alias. */
      return !irt_isnil(load->t);
    }  /* Other non-nil loads MAY alias. */
    ref = load->prev;
  }
  return 0;  /* Nothing derived at all, previous value MAY be nil. */
}

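/* Typical use (informal): for 't[k] = v' where an earlier 'x = t[k]' is still
** on the trace, the load's type check already proved the slot non-nil, so the
** recorder can skip the __newindex metatable check and the niltv guard.
*/
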
/* ------------------------------------------------------------------------ */

#undef IR
#undef fins
#undef fleft
#undef fright

#endif