FFI: Save GetLastError() around ffi.load() and symbol resolving, too.
[luajit-2.0.git] / src / lj_ir.c
blob457d918768da54a79aa8e2c7cc658fabe4fb2427
1 /*
2 ** SSA IR (Intermediate Representation) emitter.
3 ** Copyright (C) 2005-2011 Mike Pall. See Copyright Notice in luajit.h
4 */
6 #define lj_ir_c
7 #define LUA_CORE
9 /* For pointers to libc/libm functions. */
10 #include <stdio.h>
11 #include <math.h>
13 #include "lj_obj.h"
15 #if LJ_HASJIT
17 #include "lj_gc.h"
18 #include "lj_str.h"
19 #include "lj_tab.h"
20 #include "lj_ir.h"
21 #include "lj_jit.h"
22 #include "lj_ircall.h"
23 #include "lj_iropt.h"
24 #include "lj_trace.h"
25 #if LJ_HASFFI
26 #include "lj_ctype.h"
27 #include "lj_cdata.h"
28 #include "lj_carith.h"
29 #endif
30 #include "lj_vm.h"
31 #include "lj_lib.h"
/* Some local macros to save typing. Undef'd at the end. */
#define IR(ref)			(&J->cur.ir[(ref)])
#define fins			(&J->fold.ins)

/* Pass IR on to next optimization in chain (FOLD). */
#define emitir(ot, a, b)	(lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))
40 /* -- IR tables ----------------------------------------------------------- */
42 /* IR instruction modes. */
43 LJ_DATADEF const uint8_t lj_ir_mode[IR__MAX+1] = {
44 IRDEF(IRMODE)
48 /* C call info for CALL* instructions. */
49 LJ_DATADEF const CCallInfo lj_ir_callinfo[] = {
50 #define IRCALLCI(cond, name, nargs, kind, type, flags) \
51 { (ASMFunction)IRCALLCOND_##cond(name), \
52 (nargs)|(CCI_CALL_##kind)|(IRT_##type<<CCI_OTSHIFT)|(flags) },
53 IRCALLDEF(IRCALLCI)
54 #undef IRCALLCI
55 { NULL, 0 }
58 /* -- IR emitter ---------------------------------------------------------- */
60 /* Grow IR buffer at the top. */
61 void LJ_FASTCALL lj_ir_growtop(jit_State *J)
63 IRIns *baseir = J->irbuf + J->irbotlim;
64 MSize szins = J->irtoplim - J->irbotlim;
65 if (szins) {
66 baseir = (IRIns *)lj_mem_realloc(J->L, baseir, szins*sizeof(IRIns),
67 2*szins*sizeof(IRIns));
68 J->irtoplim = J->irbotlim + 2*szins;
69 } else {
70 baseir = (IRIns *)lj_mem_realloc(J->L, NULL, 0, LJ_MIN_IRSZ*sizeof(IRIns));
71 J->irbotlim = REF_BASE - LJ_MIN_IRSZ/4;
72 J->irtoplim = J->irbotlim + LJ_MIN_IRSZ;
74 J->cur.ir = J->irbuf = baseir - J->irbotlim;
77 /* Grow IR buffer at the bottom or shift it up. */
78 static void lj_ir_growbot(jit_State *J)
80 IRIns *baseir = J->irbuf + J->irbotlim;
81 MSize szins = J->irtoplim - J->irbotlim;
82 lua_assert(szins != 0);
83 lua_assert(J->cur.nk == J->irbotlim);
84 if (J->cur.nins + (szins >> 1) < J->irtoplim) {
85 /* More than half of the buffer is free on top: shift up by a quarter. */
86 MSize ofs = szins >> 2;
87 memmove(baseir + ofs, baseir, (J->cur.nins - J->irbotlim)*sizeof(IRIns));
88 J->irbotlim -= ofs;
89 J->irtoplim -= ofs;
90 J->cur.ir = J->irbuf = baseir - J->irbotlim;
91 } else {
92 /* Double the buffer size, but split the growth amongst top/bottom. */
93 IRIns *newbase = lj_mem_newt(J->L, 2*szins*sizeof(IRIns), IRIns);
94 MSize ofs = szins >= 256 ? 128 : (szins >> 1); /* Limit bottom growth. */
95 memcpy(newbase + ofs, baseir, (J->cur.nins - J->irbotlim)*sizeof(IRIns));
96 lj_mem_free(G(J->L), baseir, szins*sizeof(IRIns));
97 J->irbotlim -= ofs;
98 J->irtoplim = J->irbotlim + 2*szins;
99 J->cur.ir = J->irbuf = newbase - J->irbotlim;
103 /* Emit IR without any optimizations. */
104 TRef LJ_FASTCALL lj_ir_emit(jit_State *J)
106 IRRef ref = lj_ir_nextins(J);
107 IRIns *ir = IR(ref);
108 IROp op = fins->o;
109 ir->prev = J->chain[op];
110 J->chain[op] = (IRRef1)ref;
111 ir->o = op;
112 ir->op1 = fins->op1;
113 ir->op2 = fins->op2;
114 J->guardemit.irt |= fins->t.irt;
115 return TREF(ref, irt_t((ir->t = fins->t)));
118 /* Emit call to a C function. */
119 TRef lj_ir_call(jit_State *J, IRCallID id, ...)
121 const CCallInfo *ci = &lj_ir_callinfo[id];
122 uint32_t n = CCI_NARGS(ci);
123 TRef tr = TREF_NIL;
124 va_list argp;
125 va_start(argp, id);
126 if ((ci->flags & CCI_L)) n--;
127 if (n > 0)
128 tr = va_arg(argp, IRRef);
129 while (n-- > 1)
130 tr = emitir(IRT(IR_CARG, IRT_NIL), tr, va_arg(argp, IRRef));
131 va_end(argp);
132 if (CCI_OP(ci) == IR_CALLS)
133 J->needsnap = 1; /* Need snapshot after call with side effect. */
134 return emitir(CCI_OPTYPE(ci), tr, id);
137 /* -- Interning of constants ---------------------------------------------- */
140 ** IR instructions for constants are kept between J->cur.nk >= ref < REF_BIAS.
141 ** They are chained like all other instructions, but grow downwards.
142 ** The are interned (like strings in the VM) to facilitate reference
143 ** comparisons. The same constant must get the same reference.
146 /* Get ref of next IR constant and optionally grow IR.
147 ** Note: this may invalidate all IRIns *!
149 static LJ_AINLINE IRRef ir_nextk(jit_State *J)
151 IRRef ref = J->cur.nk;
152 if (LJ_UNLIKELY(ref <= J->irbotlim)) lj_ir_growbot(J);
153 J->cur.nk = --ref;
154 return ref;
157 /* Intern int32_t constant. */
158 TRef LJ_FASTCALL lj_ir_kint(jit_State *J, int32_t k)
160 IRIns *ir, *cir = J->cur.ir;
161 IRRef ref;
162 for (ref = J->chain[IR_KINT]; ref; ref = cir[ref].prev)
163 if (cir[ref].i == k)
164 goto found;
165 ref = ir_nextk(J);
166 ir = IR(ref);
167 ir->i = k;
168 ir->t.irt = IRT_INT;
169 ir->o = IR_KINT;
170 ir->prev = J->chain[IR_KINT];
171 J->chain[IR_KINT] = (IRRef1)ref;
172 found:
173 return TREF(ref, IRT_INT);
176 /* The MRef inside the KNUM/KINT64 IR instructions holds the address of the
177 ** 64 bit constant. The constants themselves are stored in a chained array
178 ** and shared across traces.
180 ** Rationale for choosing this data structure:
181 ** - The address of the constants is embedded in the generated machine code
182 ** and must never move. A resizable array or hash table wouldn't work.
183 ** - Most apps need very few non-32 bit integer constants (less than a dozen).
184 ** - Linear search is hard to beat in terms of speed and low complexity.
186 typedef struct K64Array {
187 MRef next; /* Pointer to next list. */
188 MSize numk; /* Number of used elements in this array. */
189 TValue k[LJ_MIN_K64SZ]; /* Array of constants. */
190 } K64Array;
192 /* Free all chained arrays. */
193 void lj_ir_k64_freeall(jit_State *J)
195 K64Array *k;
196 for (k = mref(J->k64, K64Array); k; ) {
197 K64Array *next = mref(k->next, K64Array);
198 lj_mem_free(J2G(J), k, sizeof(K64Array));
199 k = next;
203 /* Find 64 bit constant in chained array or add it. */
204 cTValue *lj_ir_k64_find(jit_State *J, uint64_t u64)
206 K64Array *k, *kp = NULL;
207 TValue *ntv;
208 MSize idx;
209 /* Search for the constant in the whole chain of arrays. */
210 for (k = mref(J->k64, K64Array); k; k = mref(k->next, K64Array)) {
211 kp = k; /* Remember previous element in list. */
212 for (idx = 0; idx < k->numk; idx++) { /* Search one array. */
213 TValue *tv = &k->k[idx];
214 if (tv->u64 == u64) /* Needed for +-0/NaN/absmask. */
215 return tv;
218 /* Constant was not found, need to add it. */
219 if (!(kp && kp->numk < LJ_MIN_K64SZ)) { /* Allocate a new array. */
220 K64Array *kn = lj_mem_newt(J->L, sizeof(K64Array), K64Array);
221 setmref(kn->next, NULL);
222 kn->numk = 0;
223 if (kp)
224 setmref(kp->next, kn); /* Chain to the end of the list. */
225 else
226 setmref(J->k64, kn); /* Link first array. */
227 kp = kn;
229 ntv = &kp->k[kp->numk++]; /* Add to current array. */
230 ntv->u64 = u64;
231 return ntv;
234 /* Intern 64 bit constant, given by its address. */
235 TRef lj_ir_k64(jit_State *J, IROp op, cTValue *tv)
237 IRIns *ir, *cir = J->cur.ir;
238 IRRef ref;
239 IRType t = op == IR_KNUM ? IRT_NUM : IRT_I64;
240 for (ref = J->chain[op]; ref; ref = cir[ref].prev)
241 if (ir_k64(&cir[ref]) == tv)
242 goto found;
243 ref = ir_nextk(J);
244 ir = IR(ref);
245 lua_assert(checkptr32(tv));
246 setmref(ir->ptr, tv);
247 ir->t.irt = t;
248 ir->o = op;
249 ir->prev = J->chain[op];
250 J->chain[op] = (IRRef1)ref;
251 found:
252 return TREF(ref, t);
255 /* Intern FP constant, given by its 64 bit pattern. */
256 TRef lj_ir_knum_u64(jit_State *J, uint64_t u64)
258 return lj_ir_k64(J, IR_KNUM, lj_ir_k64_find(J, u64));
261 /* Intern 64 bit integer constant. */
262 TRef lj_ir_kint64(jit_State *J, uint64_t u64)
264 return lj_ir_k64(J, IR_KINT64, lj_ir_k64_find(J, u64));
267 /* Check whether a number is int and return it. -0 is NOT considered an int. */
268 static int numistrueint(lua_Number n, int32_t *kp)
270 int32_t k = lj_num2int(n);
271 if (n == (lua_Number)k) {
272 if (kp) *kp = k;
273 if (k == 0) { /* Special check for -0. */
274 TValue tv;
275 setnumV(&tv, n);
276 if (tv.u32.hi != 0)
277 return 0;
279 return 1;
281 return 0;
284 /* Intern number as int32_t constant if possible, otherwise as FP constant. */
285 TRef lj_ir_knumint(jit_State *J, lua_Number n)
287 int32_t k;
288 if (numistrueint(n, &k))
289 return lj_ir_kint(J, k);
290 else
291 return lj_ir_knum(J, n);
294 /* Intern GC object "constant". */
295 TRef lj_ir_kgc(jit_State *J, GCobj *o, IRType t)
297 IRIns *ir, *cir = J->cur.ir;
298 IRRef ref;
299 lua_assert(!isdead(J2G(J), o));
300 for (ref = J->chain[IR_KGC]; ref; ref = cir[ref].prev)
301 if (ir_kgc(&cir[ref]) == o)
302 goto found;
303 ref = ir_nextk(J);
304 ir = IR(ref);
305 /* NOBARRIER: Current trace is a GC root. */
306 setgcref(ir->gcr, o);
307 ir->t.irt = (uint8_t)t;
308 ir->o = IR_KGC;
309 ir->prev = J->chain[IR_KGC];
310 J->chain[IR_KGC] = (IRRef1)ref;
311 found:
312 return TREF(ref, t);
315 /* Intern 32 bit pointer constant. */
316 TRef lj_ir_kptr_(jit_State *J, IROp op, void *ptr)
318 IRIns *ir, *cir = J->cur.ir;
319 IRRef ref;
320 lua_assert((void *)(intptr_t)i32ptr(ptr) == ptr);
321 for (ref = J->chain[op]; ref; ref = cir[ref].prev)
322 if (mref(cir[ref].ptr, void) == ptr)
323 goto found;
324 ref = ir_nextk(J);
325 ir = IR(ref);
326 setmref(ir->ptr, ptr);
327 ir->t.irt = IRT_P32;
328 ir->o = op;
329 ir->prev = J->chain[op];
330 J->chain[op] = (IRRef1)ref;
331 found:
332 return TREF(ref, IRT_P32);
335 /* Intern typed NULL constant. */
336 TRef lj_ir_knull(jit_State *J, IRType t)
338 IRIns *ir, *cir = J->cur.ir;
339 IRRef ref;
340 for (ref = J->chain[IR_KNULL]; ref; ref = cir[ref].prev)
341 if (irt_t(cir[ref].t) == t)
342 goto found;
343 ref = ir_nextk(J);
344 ir = IR(ref);
345 ir->i = 0;
346 ir->t.irt = (uint8_t)t;
347 ir->o = IR_KNULL;
348 ir->prev = J->chain[IR_KNULL];
349 J->chain[IR_KNULL] = (IRRef1)ref;
350 found:
351 return TREF(ref, t);
354 /* Intern key slot. */
355 TRef lj_ir_kslot(jit_State *J, TRef key, IRRef slot)
357 IRIns *ir, *cir = J->cur.ir;
358 IRRef2 op12 = IRREF2((IRRef1)key, (IRRef1)slot);
359 IRRef ref;
360 /* Const part is not touched by CSE/DCE, so 0-65535 is ok for IRMlit here. */
361 lua_assert(tref_isk(key) && slot == (IRRef)(IRRef1)slot);
362 for (ref = J->chain[IR_KSLOT]; ref; ref = cir[ref].prev)
363 if (cir[ref].op12 == op12)
364 goto found;
365 ref = ir_nextk(J);
366 ir = IR(ref);
367 ir->op12 = op12;
368 ir->t.irt = IRT_P32;
369 ir->o = IR_KSLOT;
370 ir->prev = J->chain[IR_KSLOT];
371 J->chain[IR_KSLOT] = (IRRef1)ref;
372 found:
373 return TREF(ref, IRT_P32);
376 /* -- Access to IR constants ---------------------------------------------- */
378 /* Copy value of IR constant. */
379 void lj_ir_kvalue(lua_State *L, TValue *tv, const IRIns *ir)
381 UNUSED(L);
382 lua_assert(ir->o != IR_KSLOT); /* Common mistake. */
383 switch (ir->o) {
384 case IR_KPRI: setitype(tv, irt_toitype(ir->t)); break;
385 case IR_KINT: setintV(tv, ir->i); break;
386 case IR_KGC: setgcV(L, tv, ir_kgc(ir), irt_toitype(ir->t)); break;
387 case IR_KPTR: case IR_KKPTR: case IR_KNULL:
388 setlightudV(tv, mref(ir->ptr, void));
389 break;
390 case IR_KNUM: setnumV(tv, ir_knum(ir)->n); break;
391 #if LJ_HASFFI
392 case IR_KINT64: {
393 GCcdata *cd = lj_cdata_new_(L, CTID_INT64, 8);
394 *(uint64_t *)cdataptr(cd) = ir_kint64(ir)->u64;
395 setcdataV(L, tv, cd);
396 break;
398 #endif
399 default: lua_assert(0); break;
403 /* -- Convert IR operand types -------------------------------------------- */
405 /* Convert from string to number. */
406 TRef LJ_FASTCALL lj_ir_tonumber(jit_State *J, TRef tr)
408 if (!tref_isnumber(tr)) {
409 if (tref_isstr(tr))
410 tr = emitir(IRTG(IR_STRTO, IRT_NUM), tr, 0);
411 else
412 lj_trace_err(J, LJ_TRERR_BADTYPE);
414 return tr;
417 /* Convert from integer or string to number. */
418 TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr)
420 if (!tref_isnum(tr)) {
421 if (tref_isinteger(tr))
422 tr = emitir(IRTN(IR_CONV), tr, IRCONV_NUM_INT);
423 else if (tref_isstr(tr))
424 tr = emitir(IRTG(IR_STRTO, IRT_NUM), tr, 0);
425 else
426 lj_trace_err(J, LJ_TRERR_BADTYPE);
428 return tr;
431 /* Convert from integer or number to string. */
432 TRef LJ_FASTCALL lj_ir_tostr(jit_State *J, TRef tr)
434 if (!tref_isstr(tr)) {
435 if (!tref_isnumber(tr))
436 lj_trace_err(J, LJ_TRERR_BADTYPE);
437 tr = emitir(IRT(IR_TOSTR, IRT_STR), tr, 0);
439 return tr;
442 /* -- Miscellaneous IR ops ------------------------------------------------ */
444 /* Evaluate numeric comparison. */
445 int lj_ir_numcmp(lua_Number a, lua_Number b, IROp op)
447 switch (op) {
448 case IR_EQ: return (a == b);
449 case IR_NE: return (a != b);
450 case IR_LT: return (a < b);
451 case IR_GE: return (a >= b);
452 case IR_LE: return (a <= b);
453 case IR_GT: return (a > b);
454 case IR_ULT: return !(a >= b);
455 case IR_UGE: return !(a < b);
456 case IR_ULE: return !(a > b);
457 case IR_UGT: return !(a <= b);
458 default: lua_assert(0); return 0;
462 /* Evaluate string comparison. */
463 int lj_ir_strcmp(GCstr *a, GCstr *b, IROp op)
465 int res = lj_str_cmp(a, b);
466 switch (op) {
467 case IR_LT: return (res < 0);
468 case IR_GE: return (res >= 0);
469 case IR_LE: return (res <= 0);
470 case IR_GT: return (res > 0);
471 default: lua_assert(0); return 0;
475 /* Rollback IR to previous state. */
476 void lj_ir_rollback(jit_State *J, IRRef ref)
478 IRRef nins = J->cur.nins;
479 while (nins > ref) {
480 IRIns *ir;
481 nins--;
482 ir = IR(nins);
483 J->chain[ir->o] = ir->prev;
485 J->cur.nins = nins;
488 #undef IR
489 #undef fins
490 #undef emitir
492 #endif