1 /*
2 ** Lua parser (source code -> bytecode).
3 ** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
4 **
5 ** Major portions taken verbatim or adapted from the Lua interpreter.
6 ** Copyright (C) 1994-2008 Lua.org, PUC-Rio. See Copyright Notice in lua.h
7 */
9 #define lj_parse_c
10 #define LUA_CORE
12 #include "lj_obj.h"
13 #include "lj_gc.h"
14 #include "lj_err.h"
15 #include "lj_debug.h"
16 #include "lj_buf.h"
17 #include "lj_str.h"
18 #include "lj_tab.h"
19 #include "lj_func.h"
20 #include "lj_state.h"
21 #include "lj_bc.h"
22 #if LJ_HASFFI
23 #include "lj_ctype.h"
24 #endif
25 #include "lj_strfmt.h"
26 #include "lj_lex.h"
27 #include "lj_parse.h"
28 #include "lj_vm.h"
29 #include "lj_vmevent.h"
31 /* -- Parser structures and definitions ----------------------------------- */
33 /* Expression kinds. */
34 typedef enum {
35 /* Constant expressions must be first and in this order: */
36 VKNIL,
37 VKFALSE,
38 VKTRUE,
39 VKSTR, /* sval = string value */
40 VKNUM, /* nval = number value */
41 VKLAST = VKNUM,
42 VKCDATA, /* nval = cdata value, not treated as a constant expression */
43 /* Non-constant expressions follow: */
44 VLOCAL, /* info = local register, aux = vstack index */
45 VUPVAL, /* info = upvalue index, aux = vstack index */
46 VGLOBAL, /* sval = string value */
47 VINDEXED, /* info = table register, aux = index reg/byte/string const */
48 VJMP, /* info = instruction PC */
49 VRELOCABLE, /* info = instruction PC */
50 VNONRELOC, /* info = result register */
51 VCALL, /* info = instruction PC, aux = base */
52 VVOID
53 } ExpKind;
55 /* Expression descriptor. */
56 typedef struct ExpDesc {
57 union {
58 struct {
59 uint32_t info; /* Primary info. */
60 uint32_t aux; /* Secondary info. */
61 } s;
62 TValue nval; /* Number value. */
63 GCstr *sval; /* String value. */
64 } u;
65 ExpKind k;
66 BCPos t; /* True condition jump list. */
67 BCPos f; /* False condition jump list. */
68 } ExpDesc;
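/* Note on t/f: every expression carries two pending-jump lists, one taken
** when the expression evaluates to true and one when it evaluates to false.
** They are threaded through the branch instructions already emitted for the
** expression (see jmp_append/jmp_patch below) and are resolved when the
** expression is discharged to a register or consumed by a conditional.
*/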
70 /* Macros for expressions. */
71 #define expr_hasjump(e) ((e)->t != (e)->f)
73 #define expr_isk(e) ((e)->k <= VKLAST)
74 #define expr_isk_nojump(e) (expr_isk(e) && !expr_hasjump(e))
75 #define expr_isnumk(e) ((e)->k == VKNUM)
76 #define expr_isnumk_nojump(e) (expr_isnumk(e) && !expr_hasjump(e))
77 #define expr_isstrk(e) ((e)->k == VKSTR)
79 #define expr_numtv(e) check_exp(expr_isnumk((e)), &(e)->u.nval)
80 #define expr_numberV(e) numberVnum(expr_numtv((e)))
82 /* Initialize expression. */
83 static LJ_AINLINE void expr_init(ExpDesc *e, ExpKind k, uint32_t info)
85 e->k = k;
86 e->u.s.info = info;
87 e->f = e->t = NO_JMP;
90 /* Check number constant for +-0. */
91 static int expr_numiszero(ExpDesc *e)
93 TValue *o = expr_numtv(e);
94 return tvisint(o) ? (intV(o) == 0) : tviszero(o);
97 /* Per-function linked list of scope blocks. */
98 typedef struct FuncScope {
99 struct FuncScope *prev; /* Link to outer scope. */
100 MSize vstart; /* Start of block-local variables. */
101 uint8_t nactvar; /* Number of active vars outside the scope. */
102 uint8_t flags; /* Scope flags. */
103 } FuncScope;
105 #define FSCOPE_LOOP 0x01 /* Scope is a (breakable) loop. */
106 #define FSCOPE_BREAK 0x02 /* Break used in scope. */
107 #define FSCOPE_GOLA 0x04 /* Goto or label used in scope. */
108 #define FSCOPE_UPVAL 0x08 /* Upvalue in scope. */
109 #define FSCOPE_NOCLOSE 0x10 /* Do not close upvalues. */
111 #define NAME_BREAK ((GCstr *)(uintptr_t)1)
113 /* Index into variable stack. */
114 typedef uint16_t VarIndex;
115 #define LJ_MAX_VSTACK (65536 - LJ_MAX_UPVAL)
117 /* Variable/goto/label info. */
118 #define VSTACK_VAR_RW 0x01 /* R/W variable. */
119 #define VSTACK_GOTO 0x02 /* Pending goto. */
120 #define VSTACK_LABEL 0x04 /* Label. */
122 /* Per-function state. */
123 typedef struct FuncState {
124 GCtab *kt; /* Hash table for constants. */
125 LexState *ls; /* Lexer state. */
126 lua_State *L; /* Lua state. */
127 FuncScope *bl; /* Current scope. */
128 struct FuncState *prev; /* Enclosing function. */
129 BCPos pc; /* Next bytecode position. */
130 BCPos lasttarget; /* Bytecode position of last jump target. */
131 BCPos jpc; /* Pending jump list to next bytecode. */
132 BCReg freereg; /* First free register. */
133 BCReg nactvar; /* Number of active local variables. */
134 BCReg nkn, nkgc; /* Number of lua_Number/GCobj constants. */
135 BCLine linedefined; /* First line of the function definition. */
136 BCInsLine *bcbase; /* Base of bytecode stack. */
137 BCPos bclim; /* Limit of bytecode stack. */
138 MSize vbase; /* Base of variable stack for this function. */
139 uint8_t flags; /* Prototype flags. */
140 uint8_t numparams; /* Number of parameters. */
141 uint8_t framesize; /* Fixed frame size. */
142 uint8_t nuv; /* Number of upvalues. */
143 VarIndex varmap[LJ_MAX_LOCVAR]; /* Map from register to variable idx. */
144 VarIndex uvmap[LJ_MAX_UPVAL]; /* Map from upvalue to variable idx. */
145 VarIndex uvtmp[LJ_MAX_UPVAL]; /* Temporary upvalue map. */
146 } FuncState;
148 /* Binary and unary operators. ORDER OPR */
149 typedef enum BinOpr {
150 OPR_ADD, OPR_SUB, OPR_MUL, OPR_DIV, OPR_MOD, OPR_POW, /* ORDER ARITH */
151 OPR_CONCAT,
152 OPR_NE, OPR_EQ,
153 OPR_LT, OPR_GE, OPR_LE, OPR_GT,
154 OPR_AND, OPR_OR,
155 OPR_NOBINOPR
156 } BinOpr;
158 LJ_STATIC_ASSERT((int)BC_ISGE-(int)BC_ISLT == (int)OPR_GE-(int)OPR_LT);
159 LJ_STATIC_ASSERT((int)BC_ISLE-(int)BC_ISLT == (int)OPR_LE-(int)OPR_LT);
160 LJ_STATIC_ASSERT((int)BC_ISGT-(int)BC_ISLT == (int)OPR_GT-(int)OPR_LT);
161 LJ_STATIC_ASSERT((int)BC_SUBVV-(int)BC_ADDVV == (int)OPR_SUB-(int)OPR_ADD);
162 LJ_STATIC_ASSERT((int)BC_MULVV-(int)BC_ADDVV == (int)OPR_MUL-(int)OPR_ADD);
163 LJ_STATIC_ASSERT((int)BC_DIVVV-(int)BC_ADDVV == (int)OPR_DIV-(int)OPR_ADD);
164 LJ_STATIC_ASSERT((int)BC_MODVV-(int)BC_ADDVV == (int)OPR_MOD-(int)OPR_ADD);
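/* The asserts above pin down the ORDER OPR/ORDER ARITH relationship: the
** bytecode emitters derive opcodes by plain offset arithmetic, e.g.
** op = opr-OPR_ADD+BC_ADDVV in bcemit_arith() and
** op = opr-OPR_LT+BC_ISLT in bcemit_comp().
*/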
166 #ifdef LUA_USE_ASSERT
167 #define lj_assertFS(c, ...) (lj_assertG_(G(fs->L), (c), __VA_ARGS__))
168 #else
169 #define lj_assertFS(c, ...) ((void)fs)
170 #endif
172 /* -- Error handling ------------------------------------------------------ */
174 LJ_NORET LJ_NOINLINE static void err_syntax(LexState *ls, ErrMsg em)
176 lj_lex_error(ls, ls->tok, em);
179 LJ_NORET LJ_NOINLINE static void err_token(LexState *ls, LexToken tok)
181 lj_lex_error(ls, ls->tok, LJ_ERR_XTOKEN, lj_lex_token2str(ls, tok));
184 LJ_NORET static void err_limit(FuncState *fs, uint32_t limit, const char *what)
186 if (fs->linedefined == 0)
187 lj_lex_error(fs->ls, 0, LJ_ERR_XLIMM, limit, what);
188 else
189 lj_lex_error(fs->ls, 0, LJ_ERR_XLIMF, fs->linedefined, limit, what);
192 #define checklimit(fs, v, l, m) if ((v) >= (l)) err_limit(fs, l, m)
193 #define checklimitgt(fs, v, l, m) if ((v) > (l)) err_limit(fs, l, m)
194 #define checkcond(ls, c, em) { if (!(c)) err_syntax(ls, em); }
196 /* -- Management of constants --------------------------------------------- */
198 /* Return bytecode encoding for primitive constant. */
199 #define const_pri(e) check_exp((e)->k <= VKTRUE, (e)->k)
201 #define tvhaskslot(o) ((o)->u32.hi == 0)
202 #define tvkslot(o) ((o)->u32.lo)
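/* Constants are interned via the per-function table fs->kt: the constant
** itself is the key and the value holds its slot index as a raw u64, so the
** high word is zero (tvhaskslot). Strings/cdata anchored against GC by
** lj_parse_keepstr/lj_parse_keepcdata store boolean true instead, whose tag
** keeps the high word nonzero, so they never look like a slot index.
*/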
204 /* Add a number constant. */
205 static BCReg const_num(FuncState *fs, ExpDesc *e)
207 lua_State *L = fs->L;
208 TValue *o;
209 lj_assertFS(expr_isnumk(e), "bad usage");
210 o = lj_tab_set(L, fs->kt, &e->u.nval);
211 if (tvhaskslot(o))
212 return tvkslot(o);
213 o->u64 = fs->nkn;
214 return fs->nkn++;
217 /* Add a GC object constant. */
218 static BCReg const_gc(FuncState *fs, GCobj *gc, uint32_t itype)
220 lua_State *L = fs->L;
221 TValue key, *o;
222 setgcV(L, &key, gc, itype);
223 /* NOBARRIER: the key is new or kept alive. */
224 o = lj_tab_set(L, fs->kt, &key);
225 if (tvhaskslot(o))
226 return tvkslot(o);
227 o->u64 = fs->nkgc;
228 return fs->nkgc++;
231 /* Add a string constant. */
232 static BCReg const_str(FuncState *fs, ExpDesc *e)
234 lj_assertFS(expr_isstrk(e) || e->k == VGLOBAL, "bad usage");
235 return const_gc(fs, obj2gco(e->u.sval), LJ_TSTR);
238 /* Anchor string constant to avoid GC. */
239 GCstr *lj_parse_keepstr(LexState *ls, const char *str, size_t len)
241 /* NOBARRIER: the key is new or kept alive. */
242 lua_State *L = ls->L;
243 GCstr *s = lj_str_new(L, str, len);
244 TValue *tv = lj_tab_setstr(L, ls->fs->kt, s);
245 if (tvisnil(tv)) setboolV(tv, 1);
246 lj_gc_check(L);
247 return s;
250 #if LJ_HASFFI
251 /* Anchor cdata to avoid GC. */
252 void lj_parse_keepcdata(LexState *ls, TValue *tv, GCcdata *cd)
254 /* NOBARRIER: the key is new or kept alive. */
255 lua_State *L = ls->L;
256 setcdataV(L, tv, cd);
257 setboolV(lj_tab_set(L, ls->fs->kt, tv), 1);
259 #endif
261 /* -- Jump list handling -------------------------------------------------- */
263 /* Get next element in jump list. */
264 static BCPos jmp_next(FuncState *fs, BCPos pc)
266 ptrdiff_t delta = bc_j(fs->bcbase[pc].ins);
267 if ((BCPos)delta == NO_JMP)
268 return NO_JMP;
269 else
270 return (BCPos)(((ptrdiff_t)pc+1)+delta);
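/* Pending jumps form an intrusive singly-linked list threaded through the
** jump offset field of the jump instructions themselves: NO_JMP terminates
** the list, otherwise the offset temporarily points at the next element.
** jmp_append() links two lists by patching the last element, and
** jmp_patch()/jmp_patchval() later replace the offsets with real targets.
*/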
273 /* Check if any of the instructions on the jump list produce no value. */
274 static int jmp_novalue(FuncState *fs, BCPos list)
276 for (; list != NO_JMP; list = jmp_next(fs, list)) {
277 BCIns p = fs->bcbase[list >= 1 ? list-1 : list].ins;
278 if (!(bc_op(p) == BC_ISTC || bc_op(p) == BC_ISFC || bc_a(p) == NO_REG))
279 return 1;
281 return 0;
284 /* Patch register of test instructions. */
285 static int jmp_patchtestreg(FuncState *fs, BCPos pc, BCReg reg)
287 BCInsLine *ilp = &fs->bcbase[pc >= 1 ? pc-1 : pc];
288 BCOp op = bc_op(ilp->ins);
289 if (op == BC_ISTC || op == BC_ISFC) {
290 if (reg != NO_REG && reg != bc_d(ilp->ins)) {
291 setbc_a(&ilp->ins, reg);
292 } else { /* Nothing to store or already in the right register. */
293 setbc_op(&ilp->ins, op+(BC_IST-BC_ISTC));
294 setbc_a(&ilp->ins, 0);
296 } else if (bc_a(ilp->ins) == NO_REG) {
297 if (reg == NO_REG) {
298 ilp->ins = BCINS_AJ(BC_JMP, bc_a(fs->bcbase[pc].ins), 0);
299 } else {
300 setbc_a(&ilp->ins, reg);
301 if (reg >= bc_a(ilp[1].ins))
302 setbc_a(&ilp[1].ins, reg+1);
304 } else {
305 return 0; /* Cannot patch other instructions. */
307 return 1;
310 /* Drop values for all instructions on jump list. */
311 static void jmp_dropval(FuncState *fs, BCPos list)
313 for (; list != NO_JMP; list = jmp_next(fs, list))
314 jmp_patchtestreg(fs, list, NO_REG);
317 /* Patch jump instruction to target. */
318 static void jmp_patchins(FuncState *fs, BCPos pc, BCPos dest)
320 BCIns *jmp = &fs->bcbase[pc].ins;
321 BCPos offset = dest-(pc+1)+BCBIAS_J;
322 lj_assertFS(dest != NO_JMP, "uninitialized jump target");
323 if (offset > BCMAX_D)
324 err_syntax(fs->ls, LJ_ERR_XJUMP);
325 setbc_d(jmp, offset);
328 /* Append to jump list. */
329 static void jmp_append(FuncState *fs, BCPos *l1, BCPos l2)
331 if (l2 == NO_JMP) {
332 return;
333 } else if (*l1 == NO_JMP) {
334 *l1 = l2;
335 } else {
336 BCPos list = *l1;
337 BCPos next;
338 while ((next = jmp_next(fs, list)) != NO_JMP) /* Find last element. */
339 list = next;
340 jmp_patchins(fs, list, l2);
344 /* Patch jump list and preserve produced values. */
345 static void jmp_patchval(FuncState *fs, BCPos list, BCPos vtarget,
346 BCReg reg, BCPos dtarget)
348 while (list != NO_JMP) {
349 BCPos next = jmp_next(fs, list);
350 if (jmp_patchtestreg(fs, list, reg))
351 jmp_patchins(fs, list, vtarget); /* Jump to target with value. */
352 else
353 jmp_patchins(fs, list, dtarget); /* Jump to default target. */
354 list = next;
358 /* Jump to following instruction. Append to list of pending jumps. */
359 static void jmp_tohere(FuncState *fs, BCPos list)
361 fs->lasttarget = fs->pc;
362 jmp_append(fs, &fs->jpc, list);
365 /* Patch jump list to target. */
366 static void jmp_patch(FuncState *fs, BCPos list, BCPos target)
368 if (target == fs->pc) {
369 jmp_tohere(fs, list);
370 } else {
371 lj_assertFS(target < fs->pc, "bad jump target");
372 jmp_patchval(fs, list, target, NO_REG, target);
376 /* -- Bytecode register allocator ----------------------------------------- */
378 /* Bump frame size. */
379 static void bcreg_bump(FuncState *fs, BCReg n)
381 BCReg sz = fs->freereg + n;
382 if (sz > fs->framesize) {
383 if (sz >= LJ_MAX_SLOTS)
384 err_syntax(fs->ls, LJ_ERR_XSLOTS);
385 fs->framesize = (uint8_t)sz;
389 /* Reserve registers. */
390 static void bcreg_reserve(FuncState *fs, BCReg n)
392 bcreg_bump(fs, n);
393 fs->freereg += n;
396 /* Free register. */
397 static void bcreg_free(FuncState *fs, BCReg reg)
399 if (reg >= fs->nactvar) {
400 fs->freereg--;
401 lj_assertFS(reg == fs->freereg, "bad regfree");
405 /* Free register for expression. */
406 static void expr_free(FuncState *fs, ExpDesc *e)
408 if (e->k == VNONRELOC)
409 bcreg_free(fs, e->u.s.info);
412 /* -- Bytecode emitter ---------------------------------------------------- */
414 /* Emit bytecode instruction. */
415 static BCPos bcemit_INS(FuncState *fs, BCIns ins)
417 BCPos pc = fs->pc;
418 LexState *ls = fs->ls;
419 jmp_patchval(fs, fs->jpc, pc, NO_REG, pc);
420 fs->jpc = NO_JMP;
421 if (LJ_UNLIKELY(pc >= fs->bclim)) {
422 ptrdiff_t base = fs->bcbase - ls->bcstack;
423 checklimit(fs, ls->sizebcstack, LJ_MAX_BCINS, "bytecode instructions");
424 lj_mem_growvec(fs->L, ls->bcstack, ls->sizebcstack, LJ_MAX_BCINS,BCInsLine);
425 fs->bclim = (BCPos)(ls->sizebcstack - base);
426 fs->bcbase = ls->bcstack + base;
428 fs->bcbase[pc].ins = ins;
429 fs->bcbase[pc].line = ls->lastline;
430 fs->pc = pc+1;
431 return pc;
434 #define bcemit_ABC(fs, o, a, b, c) bcemit_INS(fs, BCINS_ABC(o, a, b, c))
435 #define bcemit_AD(fs, o, a, d) bcemit_INS(fs, BCINS_AD(o, a, d))
436 #define bcemit_AJ(fs, o, a, j) bcemit_INS(fs, BCINS_AJ(o, a, j))
438 #define bcptr(fs, e) (&(fs)->bcbase[(e)->u.s.info].ins)
440 /* -- Bytecode emitter for expressions ------------------------------------ */
442 /* Discharge non-constant expression to any register. */
443 static void expr_discharge(FuncState *fs, ExpDesc *e)
445 BCIns ins;
446 if (e->k == VUPVAL) {
447 ins = BCINS_AD(BC_UGET, 0, e->u.s.info);
448 } else if (e->k == VGLOBAL) {
449 ins = BCINS_AD(BC_GGET, 0, const_str(fs, e));
450 } else if (e->k == VINDEXED) {
451 BCReg rc = e->u.s.aux;
452 if ((int32_t)rc < 0) {
453 ins = BCINS_ABC(BC_TGETS, 0, e->u.s.info, ~rc);
454 } else if (rc > BCMAX_C) {
455 ins = BCINS_ABC(BC_TGETB, 0, e->u.s.info, rc-(BCMAX_C+1));
456 } else {
457 bcreg_free(fs, rc);
458 ins = BCINS_ABC(BC_TGETV, 0, e->u.s.info, rc);
460 bcreg_free(fs, e->u.s.info);
461 } else if (e->k == VCALL) {
462 e->u.s.info = e->u.s.aux;
463 e->k = VNONRELOC;
464 return;
465 } else if (e->k == VLOCAL) {
466 e->k = VNONRELOC;
467 return;
468 } else {
469 return;
471 e->u.s.info = bcemit_INS(fs, ins);
472 e->k = VRELOCABLE;
475 /* Emit bytecode to set a range of registers to nil. */
476 static void bcemit_nil(FuncState *fs, BCReg from, BCReg n)
478 if (fs->pc > fs->lasttarget) { /* No jumps to current position? */
479 BCIns *ip = &fs->bcbase[fs->pc-1].ins;
480 BCReg pto, pfrom = bc_a(*ip);
481 switch (bc_op(*ip)) { /* Try to merge with the previous instruction. */
482 case BC_KPRI:
483 if (bc_d(*ip) != ~LJ_TNIL) break;
484 if (from == pfrom) {
485 if (n == 1) return;
486 } else if (from == pfrom+1) {
487 from = pfrom;
488 n++;
489 } else {
490 break;
492 *ip = BCINS_AD(BC_KNIL, from, from+n-1); /* Replace KPRI. */
493 return;
494 case BC_KNIL:
495 pto = bc_d(*ip);
496 if (pfrom <= from && from <= pto+1) { /* Can we connect both ranges? */
497 if (from+n-1 > pto)
498 setbc_d(ip, from+n-1); /* Patch previous instruction range. */
499 return;
501 break;
502 default:
503 break;
506 /* Emit new instruction or replace old instruction. */
507 bcemit_INS(fs, n == 1 ? BCINS_AD(BC_KPRI, from, VKNIL) :
508 BCINS_AD(BC_KNIL, from, from+n-1));
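/* Example: "local a local b local c" first emits KPRI for a; the nil fills
** for b and c are then merged into it, yielding a single KNIL covering all
** three registers -- provided no jump targets the current position
** (fs->pc > fs->lasttarget).
*/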
511 /* Discharge an expression to a specific register. Ignore branches. */
512 static void expr_toreg_nobranch(FuncState *fs, ExpDesc *e, BCReg reg)
514 BCIns ins;
515 expr_discharge(fs, e);
516 if (e->k == VKSTR) {
517 ins = BCINS_AD(BC_KSTR, reg, const_str(fs, e));
518 } else if (e->k == VKNUM) {
519 #if LJ_DUALNUM
520 cTValue *tv = expr_numtv(e);
521 if (tvisint(tv) && checki16(intV(tv)))
522 ins = BCINS_AD(BC_KSHORT, reg, (BCReg)(uint16_t)intV(tv));
523 else
524 #else
525 lua_Number n = expr_numberV(e);
526 int32_t k = lj_num2int(n);
527 if (checki16(k) && n == (lua_Number)k)
528 ins = BCINS_AD(BC_KSHORT, reg, (BCReg)(uint16_t)k);
529 else
530 #endif
531 ins = BCINS_AD(BC_KNUM, reg, const_num(fs, e));
532 #if LJ_HASFFI
533 } else if (e->k == VKCDATA) {
534 fs->flags |= PROTO_FFI;
535 ins = BCINS_AD(BC_KCDATA, reg,
536 const_gc(fs, obj2gco(cdataV(&e->u.nval)), LJ_TCDATA));
537 #endif
538 } else if (e->k == VRELOCABLE) {
539 setbc_a(bcptr(fs, e), reg);
540 goto noins;
541 } else if (e->k == VNONRELOC) {
542 if (reg == e->u.s.info)
543 goto noins;
544 ins = BCINS_AD(BC_MOV, reg, e->u.s.info);
545 } else if (e->k == VKNIL) {
546 bcemit_nil(fs, reg, 1);
547 goto noins;
548 } else if (e->k <= VKTRUE) {
549 ins = BCINS_AD(BC_KPRI, reg, const_pri(e));
550 } else {
551 lj_assertFS(e->k == VVOID || e->k == VJMP, "bad expr type %d", e->k);
552 return;
554 bcemit_INS(fs, ins);
555 noins:
556 e->u.s.info = reg;
557 e->k = VNONRELOC;
560 /* Forward declaration. */
561 static BCPos bcemit_jmp(FuncState *fs);
563 /* Discharge an expression to a specific register. */
564 static void expr_toreg(FuncState *fs, ExpDesc *e, BCReg reg)
566 expr_toreg_nobranch(fs, e, reg);
567 if (e->k == VJMP)
568 jmp_append(fs, &e->t, e->u.s.info); /* Add it to the true jump list. */
569 if (expr_hasjump(e)) { /* Discharge expression with branches. */
570 BCPos jend, jfalse = NO_JMP, jtrue = NO_JMP;
571 if (jmp_novalue(fs, e->t) || jmp_novalue(fs, e->f)) {
572 BCPos jval = (e->k == VJMP) ? NO_JMP : bcemit_jmp(fs);
573 jfalse = bcemit_AD(fs, BC_KPRI, reg, VKFALSE);
574 bcemit_AJ(fs, BC_JMP, fs->freereg, 1);
575 jtrue = bcemit_AD(fs, BC_KPRI, reg, VKTRUE);
576 jmp_tohere(fs, jval);
578 jend = fs->pc;
579 fs->lasttarget = jend;
580 jmp_patchval(fs, e->f, jend, reg, jfalse);
581 jmp_patchval(fs, e->t, jend, reg, jtrue);
583 e->f = e->t = NO_JMP;
584 e->u.s.info = reg;
585 e->k = VNONRELOC;
588 /* Discharge an expression to the next free register. */
589 static void expr_tonextreg(FuncState *fs, ExpDesc *e)
591 expr_discharge(fs, e);
592 expr_free(fs, e);
593 bcreg_reserve(fs, 1);
594 expr_toreg(fs, e, fs->freereg - 1);
597 /* Discharge an expression to any register. */
598 static BCReg expr_toanyreg(FuncState *fs, ExpDesc *e)
600 expr_discharge(fs, e);
601 if (e->k == VNONRELOC) {
602 if (!expr_hasjump(e)) return e->u.s.info; /* Already in a register. */
603 if (e->u.s.info >= fs->nactvar) {
604 expr_toreg(fs, e, e->u.s.info); /* Discharge to temp. register. */
605 return e->u.s.info;
608 expr_tonextreg(fs, e); /* Discharge to next register. */
609 return e->u.s.info;
612 /* Partially discharge expression to a value. */
613 static void expr_toval(FuncState *fs, ExpDesc *e)
615 if (expr_hasjump(e))
616 expr_toanyreg(fs, e);
617 else
618 expr_discharge(fs, e);
621 /* Emit store for LHS expression. */
622 static void bcemit_store(FuncState *fs, ExpDesc *var, ExpDesc *e)
624 BCIns ins;
625 if (var->k == VLOCAL) {
626 fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
627 expr_free(fs, e);
628 expr_toreg(fs, e, var->u.s.info);
629 return;
630 } else if (var->k == VUPVAL) {
631 fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
632 expr_toval(fs, e);
633 if (e->k <= VKTRUE)
634 ins = BCINS_AD(BC_USETP, var->u.s.info, const_pri(e));
635 else if (e->k == VKSTR)
636 ins = BCINS_AD(BC_USETS, var->u.s.info, const_str(fs, e));
637 else if (e->k == VKNUM)
638 ins = BCINS_AD(BC_USETN, var->u.s.info, const_num(fs, e));
639 else
640 ins = BCINS_AD(BC_USETV, var->u.s.info, expr_toanyreg(fs, e));
641 } else if (var->k == VGLOBAL) {
642 BCReg ra = expr_toanyreg(fs, e);
643 ins = BCINS_AD(BC_GSET, ra, const_str(fs, var));
644 } else {
645 BCReg ra, rc;
646 lj_assertFS(var->k == VINDEXED, "bad expr type %d", var->k);
647 ra = expr_toanyreg(fs, e);
648 rc = var->u.s.aux;
649 if ((int32_t)rc < 0) {
650 ins = BCINS_ABC(BC_TSETS, ra, var->u.s.info, ~rc);
651 } else if (rc > BCMAX_C) {
652 ins = BCINS_ABC(BC_TSETB, ra, var->u.s.info, rc-(BCMAX_C+1));
653 } else {
654 #ifdef LUA_USE_ASSERT
655 /* Free late alloced key reg to avoid assert on free of value reg. */
656 /* This can only happen when called from expr_table(). */
657 if (e->k == VNONRELOC && ra >= fs->nactvar && rc >= ra)
658 bcreg_free(fs, rc);
659 #endif
660 ins = BCINS_ABC(BC_TSETV, ra, var->u.s.info, rc);
663 bcemit_INS(fs, ins);
664 expr_free(fs, e);
667 /* Emit method lookup expression. */
668 static void bcemit_method(FuncState *fs, ExpDesc *e, ExpDesc *key)
670 BCReg idx, func, fr2, obj = expr_toanyreg(fs, e);
671 expr_free(fs, e);
672 func = fs->freereg;
673 fr2 = fs->ls->fr2;
674 bcemit_AD(fs, BC_MOV, func+1+fr2, obj); /* Copy object to 1st argument. */
675 lj_assertFS(expr_isstrk(key), "bad usage");
676 idx = const_str(fs, key);
677 if (idx <= BCMAX_C) {
678 bcreg_reserve(fs, 2+fr2);
679 bcemit_ABC(fs, BC_TGETS, func, obj, idx);
680 } else {
681 bcreg_reserve(fs, 3+fr2);
682 bcemit_AD(fs, BC_KSTR, func+2+fr2, idx);
683 bcemit_ABC(fs, BC_TGETV, func, obj, func+2+fr2);
684 fs->freereg--;
686 e->u.s.info = func;
687 e->k = VNONRELOC;
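/* fr2 is nonzero when compiling for an LJ_FR2 target, where the frame link
** occupies two slots: the call reserves one extra register and the copied
** object lands at func+1+fr2, i.e. in the first argument slot.
*/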
690 /* -- Bytecode emitter for branches --------------------------------------- */
692 /* Emit unconditional branch. */
693 static BCPos bcemit_jmp(FuncState *fs)
695 BCPos jpc = fs->jpc;
696 BCPos j = fs->pc - 1;
697 BCIns *ip = &fs->bcbase[j].ins;
698 fs->jpc = NO_JMP;
699 if ((int32_t)j >= (int32_t)fs->lasttarget && bc_op(*ip) == BC_UCLO) {
700 setbc_j(ip, NO_JMP);
701 fs->lasttarget = j+1;
702 } else {
703 j = bcemit_AJ(fs, BC_JMP, fs->freereg, NO_JMP);
705 jmp_append(fs, &j, jpc);
706 return j;
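/* If the preceding instruction is a BC_UCLO that is not a jump target, its
** jump operand is reused instead of emitting a separate BC_JMP, since UCLO
** closes upvalues and then jumps itself.
*/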
709 /* Invert branch condition of bytecode instruction. */
710 static void invertcond(FuncState *fs, ExpDesc *e)
712 BCIns *ip = &fs->bcbase[e->u.s.info - 1].ins;
713 setbc_op(ip, bc_op(*ip)^1);
716 /* Emit conditional branch. */
717 static BCPos bcemit_branch(FuncState *fs, ExpDesc *e, int cond)
719 BCPos pc;
720 if (e->k == VRELOCABLE) {
721 BCIns *ip = bcptr(fs, e);
722 if (bc_op(*ip) == BC_NOT) {
723 *ip = BCINS_AD(cond ? BC_ISF : BC_IST, 0, bc_d(*ip));
724 return bcemit_jmp(fs);
727 if (e->k != VNONRELOC) {
728 bcreg_reserve(fs, 1);
729 expr_toreg_nobranch(fs, e, fs->freereg-1);
731 bcemit_AD(fs, cond ? BC_ISTC : BC_ISFC, NO_REG, e->u.s.info);
732 pc = bcemit_jmp(fs);
733 expr_free(fs, e);
734 return pc;
737 /* Emit branch on true condition. */
738 static void bcemit_branch_t(FuncState *fs, ExpDesc *e)
740 BCPos pc;
741 expr_discharge(fs, e);
742 if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
743 pc = NO_JMP; /* Never jump. */
744 else if (e->k == VJMP)
745 invertcond(fs, e), pc = e->u.s.info;
746 else if (e->k == VKFALSE || e->k == VKNIL)
747 expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
748 else
749 pc = bcemit_branch(fs, e, 0);
750 jmp_append(fs, &e->f, pc);
751 jmp_tohere(fs, e->t);
752 e->t = NO_JMP;
755 /* Emit branch on false condition. */
756 static void bcemit_branch_f(FuncState *fs, ExpDesc *e)
758 BCPos pc;
759 expr_discharge(fs, e);
760 if (e->k == VKNIL || e->k == VKFALSE)
761 pc = NO_JMP; /* Never jump. */
762 else if (e->k == VJMP)
763 pc = e->u.s.info;
764 else if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
765 expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
766 else
767 pc = bcemit_branch(fs, e, 1);
768 jmp_append(fs, &e->t, pc);
769 jmp_tohere(fs, e->f);
770 e->f = NO_JMP;
773 /* -- Bytecode emitter for operators -------------------------------------- */
775 /* Try constant-folding of arithmetic operators. */
776 static int foldarith(BinOpr opr, ExpDesc *e1, ExpDesc *e2)
778 TValue o;
779 lua_Number n;
780 if (!expr_isnumk_nojump(e1) || !expr_isnumk_nojump(e2)) return 0;
781 n = lj_vm_foldarith(expr_numberV(e1), expr_numberV(e2), (int)opr-OPR_ADD);
782 setnumV(&o, n);
783 if (tvisnan(&o) || tvismzero(&o)) return 0; /* Avoid NaN and -0 as consts. */
784 if (LJ_DUALNUM) {
785 int32_t k = lj_num2int(n);
786 if ((lua_Number)k == n) {
787 setintV(&e1->u.nval, k);
788 return 1;
791 setnumV(&e1->u.nval, n);
792 return 1;
795 /* Emit arithmetic operator. */
796 static void bcemit_arith(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
798 BCReg rb, rc, t;
799 uint32_t op;
800 if (foldarith(opr, e1, e2))
801 return;
802 if (opr == OPR_POW) {
803 op = BC_POW;
804 rc = expr_toanyreg(fs, e2);
805 rb = expr_toanyreg(fs, e1);
806 } else {
807 op = opr-OPR_ADD+BC_ADDVV;
808 /* Must discharge 2nd operand first since VINDEXED might free regs. */
809 expr_toval(fs, e2);
810 if (expr_isnumk(e2) && (rc = const_num(fs, e2)) <= BCMAX_C)
811 op -= BC_ADDVV-BC_ADDVN;
812 else
813 rc = expr_toanyreg(fs, e2);
814 /* 1st operand discharged by bcemit_binop_left, but need KNUM/KSHORT. */
815 lj_assertFS(expr_isnumk(e1) || e1->k == VNONRELOC,
816 "bad expr type %d", e1->k);
817 expr_toval(fs, e1);
818 /* Avoid two consts to satisfy bytecode constraints. */
819 if (expr_isnumk(e1) && !expr_isnumk(e2) &&
820 (t = const_num(fs, e1)) <= BCMAX_B) {
821 rb = rc; rc = t; op -= BC_ADDVV-BC_ADDNV;
822 } else {
823 rb = expr_toanyreg(fs, e1);
826 /* Using expr_free might cause asserts if the order is wrong. */
827 if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar) fs->freereg--;
828 if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar) fs->freereg--;
829 e1->u.s.info = bcemit_ABC(fs, op, 0, rb, rc);
830 e1->k = VRELOCABLE;
833 /* Emit comparison operator. */
834 static void bcemit_comp(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
836 ExpDesc *eret = e1;
837 BCIns ins;
838 expr_toval(fs, e1);
839 if (opr == OPR_EQ || opr == OPR_NE) {
840 BCOp op = opr == OPR_EQ ? BC_ISEQV : BC_ISNEV;
841 BCReg ra;
842 if (expr_isk(e1)) { e1 = e2; e2 = eret; } /* Need constant in 2nd arg. */
843 ra = expr_toanyreg(fs, e1); /* First arg must be in a reg. */
844 expr_toval(fs, e2);
845 switch (e2->k) {
846 case VKNIL: case VKFALSE: case VKTRUE:
847 ins = BCINS_AD(op+(BC_ISEQP-BC_ISEQV), ra, const_pri(e2));
848 break;
849 case VKSTR:
850 ins = BCINS_AD(op+(BC_ISEQS-BC_ISEQV), ra, const_str(fs, e2));
851 break;
852 case VKNUM:
853 ins = BCINS_AD(op+(BC_ISEQN-BC_ISEQV), ra, const_num(fs, e2));
854 break;
855 default:
856 ins = BCINS_AD(op, ra, expr_toanyreg(fs, e2));
857 break;
859 } else {
860 uint32_t op = opr-OPR_LT+BC_ISLT;
861 BCReg ra, rd;
862 if ((op-BC_ISLT) & 1) { /* GT -> LT, GE -> LE */
863 e1 = e2; e2 = eret; /* Swap operands. */
864 op = ((op-BC_ISLT)^3)+BC_ISLT;
865 expr_toval(fs, e1);
866 ra = expr_toanyreg(fs, e1);
867 rd = expr_toanyreg(fs, e2);
868 } else {
869 rd = expr_toanyreg(fs, e2);
870 ra = expr_toanyreg(fs, e1);
872 ins = BCINS_AD(op, ra, rd);
874 /* Using expr_free might cause asserts if the order is wrong. */
875 if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar) fs->freereg--;
876 if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar) fs->freereg--;
877 bcemit_INS(fs, ins);
878 eret->u.s.info = bcemit_jmp(fs);
879 eret->k = VJMP;
882 /* Fixup left side of binary operator. */
883 static void bcemit_binop_left(FuncState *fs, BinOpr op, ExpDesc *e)
885 if (op == OPR_AND) {
886 bcemit_branch_t(fs, e);
887 } else if (op == OPR_OR) {
888 bcemit_branch_f(fs, e);
889 } else if (op == OPR_CONCAT) {
890 expr_tonextreg(fs, e);
891 } else if (op == OPR_EQ || op == OPR_NE) {
892 if (!expr_isk_nojump(e)) expr_toanyreg(fs, e);
893 } else {
894 if (!expr_isnumk_nojump(e)) expr_toanyreg(fs, e);
898 /* Emit binary operator. */
899 static void bcemit_binop(FuncState *fs, BinOpr op, ExpDesc *e1, ExpDesc *e2)
901 if (op <= OPR_POW) {
902 bcemit_arith(fs, op, e1, e2);
903 } else if (op == OPR_AND) {
904 lj_assertFS(e1->t == NO_JMP, "jump list not closed");
905 expr_discharge(fs, e2);
906 jmp_append(fs, &e2->f, e1->f);
907 *e1 = *e2;
908 } else if (op == OPR_OR) {
909 lj_assertFS(e1->f == NO_JMP, "jump list not closed");
910 expr_discharge(fs, e2);
911 jmp_append(fs, &e2->t, e1->t);
912 *e1 = *e2;
913 } else if (op == OPR_CONCAT) {
914 expr_toval(fs, e2);
915 if (e2->k == VRELOCABLE && bc_op(*bcptr(fs, e2)) == BC_CAT) {
916 lj_assertFS(e1->u.s.info == bc_b(*bcptr(fs, e2))-1,
917 "bad CAT stack layout");
918 expr_free(fs, e1);
919 setbc_b(bcptr(fs, e2), e1->u.s.info);
920 e1->u.s.info = e2->u.s.info;
921 } else {
922 expr_tonextreg(fs, e2);
923 expr_free(fs, e2);
924 expr_free(fs, e1);
925 e1->u.s.info = bcemit_ABC(fs, BC_CAT, 0, e1->u.s.info, e2->u.s.info);
927 e1->k = VRELOCABLE;
928 } else {
929 lj_assertFS(op == OPR_NE || op == OPR_EQ ||
930 op == OPR_LT || op == OPR_GE || op == OPR_LE || op == OPR_GT,
931 "bad binop %d", op);
932 bcemit_comp(fs, op, e1, e2);
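/* Concatenation is right-associative, so for "a..b..c" the right operand of
** the outer concat is already a BC_CAT; its operand range is extended
** downward (setbc_b) to include the new left operand, and a single CAT
** instruction covers the whole chain.
*/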
936 /* Emit unary operator. */
937 static void bcemit_unop(FuncState *fs, BCOp op, ExpDesc *e)
939 if (op == BC_NOT) {
940 /* Swap true and false lists. */
941 { BCPos temp = e->f; e->f = e->t; e->t = temp; }
942 jmp_dropval(fs, e->f);
943 jmp_dropval(fs, e->t);
944 expr_discharge(fs, e);
945 if (e->k == VKNIL || e->k == VKFALSE) {
946 e->k = VKTRUE;
947 return;
948 } else if (expr_isk(e) || (LJ_HASFFI && e->k == VKCDATA)) {
949 e->k = VKFALSE;
950 return;
951 } else if (e->k == VJMP) {
952 invertcond(fs, e);
953 return;
954 } else if (e->k == VRELOCABLE) {
955 bcreg_reserve(fs, 1);
956 setbc_a(bcptr(fs, e), fs->freereg-1);
957 e->u.s.info = fs->freereg-1;
958 e->k = VNONRELOC;
959 } else {
960 lj_assertFS(e->k == VNONRELOC, "bad expr type %d", e->k);
962 } else {
963 lj_assertFS(op == BC_UNM || op == BC_LEN, "bad unop %d", op);
964 if (op == BC_UNM && !expr_hasjump(e)) { /* Constant-fold negations. */
965 #if LJ_HASFFI
966 if (e->k == VKCDATA) { /* Fold in-place since cdata is not interned. */
967 GCcdata *cd = cdataV(&e->u.nval);
968 uint64_t *p = (uint64_t *)cdataptr(cd);
969 if (cd->ctypeid == CTID_COMPLEX_DOUBLE)
970 p[1] ^= U64x(80000000,00000000);
971 else
972 *p = ~*p+1u;
973 return;
974 } else
975 #endif
976 if (expr_isnumk(e) && !expr_numiszero(e)) { /* Avoid folding to -0. */
977 TValue *o = expr_numtv(e);
978 if (tvisint(o)) {
979 int32_t k = intV(o), negk = (int32_t)(~(uint32_t)k+1u);
980 if (k == negk)
981 setnumV(o, -(lua_Number)k);
982 else
983 setintV(o, negk);
984 return;
985 } else {
986 o->u64 ^= U64x(80000000,00000000);
987 return;
991 expr_toanyreg(fs, e);
993 expr_free(fs, e);
994 e->u.s.info = bcemit_AD(fs, op, 0, e->u.s.info);
995 e->k = VRELOCABLE;
998 /* -- Lexer support ------------------------------------------------------- */
1000 /* Check and consume optional token. */
1001 static int lex_opt(LexState *ls, LexToken tok)
1003 if (ls->tok == tok) {
1004 lj_lex_next(ls);
1005 return 1;
1007 return 0;
1010 /* Check and consume token. */
1011 static void lex_check(LexState *ls, LexToken tok)
1013 if (ls->tok != tok)
1014 err_token(ls, tok);
1015 lj_lex_next(ls);
1018 /* Check for matching token. */
1019 static void lex_match(LexState *ls, LexToken what, LexToken who, BCLine line)
1021 if (!lex_opt(ls, what)) {
1022 if (line == ls->linenumber) {
1023 err_token(ls, what);
1024 } else {
1025 const char *swhat = lj_lex_token2str(ls, what);
1026 const char *swho = lj_lex_token2str(ls, who);
1027 lj_lex_error(ls, ls->tok, LJ_ERR_XMATCH, swhat, swho, line);
1032 /* Check for string token. */
1033 static GCstr *lex_str(LexState *ls)
1035 GCstr *s;
1036 if (ls->tok != TK_name && (LJ_52 || ls->tok != TK_goto))
1037 err_token(ls, TK_name);
1038 s = strV(&ls->tokval);
1039 lj_lex_next(ls);
1040 return s;
1043 /* -- Variable handling --------------------------------------------------- */
1045 #define var_get(ls, fs, i) ((ls)->vstack[(fs)->varmap[(i)]])
1047 /* Define a new local variable. */
1048 static void var_new(LexState *ls, BCReg n, GCstr *name)
1050 FuncState *fs = ls->fs;
1051 MSize vtop = ls->vtop;
1052 checklimit(fs, fs->nactvar+n, LJ_MAX_LOCVAR, "local variables");
1053 if (LJ_UNLIKELY(vtop >= ls->sizevstack)) {
1054 if (ls->sizevstack >= LJ_MAX_VSTACK)
1055 lj_lex_error(ls, 0, LJ_ERR_XLIMC, LJ_MAX_VSTACK);
1056 lj_mem_growvec(ls->L, ls->vstack, ls->sizevstack, LJ_MAX_VSTACK, VarInfo);
1058 lj_assertFS((uintptr_t)name < VARNAME__MAX ||
1059 lj_tab_getstr(fs->kt, name) != NULL,
1060 "unanchored variable name");
1061 /* NOBARRIER: name is anchored in fs->kt and ls->vstack is not a GCobj. */
1062 setgcref(ls->vstack[vtop].name, obj2gco(name));
1063 fs->varmap[fs->nactvar+n] = (uint16_t)vtop;
1064 ls->vtop = vtop+1;
1067 #define var_new_lit(ls, n, v) \
1068 var_new(ls, (n), lj_parse_keepstr(ls, "" v, sizeof(v)-1))
1070 #define var_new_fixed(ls, n, vn) \
1071 var_new(ls, (n), (GCstr *)(uintptr_t)(vn))
1073 /* Add local variables. */
1074 static void var_add(LexState *ls, BCReg nvars)
1076 FuncState *fs = ls->fs;
1077 BCReg nactvar = fs->nactvar;
1078 while (nvars--) {
1079 VarInfo *v = &var_get(ls, fs, nactvar);
1080 v->startpc = fs->pc;
1081 v->slot = nactvar++;
1082 v->info = 0;
1084 fs->nactvar = nactvar;
1087 /* Remove local variables. */
1088 static void var_remove(LexState *ls, BCReg tolevel)
1090 FuncState *fs = ls->fs;
1091 while (fs->nactvar > tolevel)
1092 var_get(ls, fs, --fs->nactvar).endpc = fs->pc;
1095 /* Lookup local variable name. */
1096 static BCReg var_lookup_local(FuncState *fs, GCstr *n)
1098 int i;
1099 for (i = fs->nactvar-1; i >= 0; i--) {
1100 if (n == strref(var_get(fs->ls, fs, i).name))
1101 return (BCReg)i;
1103 return (BCReg)-1; /* Not found. */
1106 /* Lookup or add upvalue index. */
1107 static MSize var_lookup_uv(FuncState *fs, MSize vidx, ExpDesc *e)
1109 MSize i, n = fs->nuv;
1110 for (i = 0; i < n; i++)
1111 if (fs->uvmap[i] == vidx)
1112 return i; /* Already exists. */
1113 /* Otherwise create a new one. */
1114 checklimit(fs, fs->nuv, LJ_MAX_UPVAL, "upvalues");
1115 lj_assertFS(e->k == VLOCAL || e->k == VUPVAL, "bad expr type %d", e->k);
1116 fs->uvmap[n] = (uint16_t)vidx;
1117 fs->uvtmp[n] = (uint16_t)(e->k == VLOCAL ? vidx : LJ_MAX_VSTACK+e->u.s.info);
1118 fs->nuv = n+1;
1119 return n;
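/* uvtmp encodes where an upvalue comes from: values < LJ_MAX_VSTACK are the
** vstack index of a local in the enclosing function, values >= LJ_MAX_VSTACK
** stand for upvalue (value - LJ_MAX_VSTACK) of the enclosing function.
** fs_fixup_uv2() decodes this encoding later.
*/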
1122 /* Forward declaration. */
1123 static void fscope_uvmark(FuncState *fs, BCReg level);
1125 /* Recursively lookup variables in enclosing functions. */
1126 static MSize var_lookup_(FuncState *fs, GCstr *name, ExpDesc *e, int first)
1128 if (fs) {
1129 BCReg reg = var_lookup_local(fs, name);
1130 if ((int32_t)reg >= 0) { /* Local in this function? */
1131 expr_init(e, VLOCAL, reg);
1132 if (!first)
1133 fscope_uvmark(fs, reg); /* Scope now has an upvalue. */
1134 return (MSize)(e->u.s.aux = (uint32_t)fs->varmap[reg]);
1135 } else {
1136 MSize vidx = var_lookup_(fs->prev, name, e, 0); /* Var in outer func? */
1137 if ((int32_t)vidx >= 0) { /* Yes, make it an upvalue here. */
1138 e->u.s.info = (uint8_t)var_lookup_uv(fs, vidx, e);
1139 e->k = VUPVAL;
1140 return vidx;
1143 } else { /* Not found in any function, must be a global. */
1144 expr_init(e, VGLOBAL, 0);
1145 e->u.sval = name;
1147 return (MSize)-1; /* Global. */
1150 /* Lookup variable name. */
1151 #define var_lookup(ls, e) \
1152 var_lookup_((ls)->fs, lex_str(ls), (e), 1)
1154 /* -- Goto and label handling --------------------------------------------- */
1156 /* Add a new goto or label. */
1157 static MSize gola_new(LexState *ls, GCstr *name, uint8_t info, BCPos pc)
1159 FuncState *fs = ls->fs;
1160 MSize vtop = ls->vtop;
1161 if (LJ_UNLIKELY(vtop >= ls->sizevstack)) {
1162 if (ls->sizevstack >= LJ_MAX_VSTACK)
1163 lj_lex_error(ls, 0, LJ_ERR_XLIMC, LJ_MAX_VSTACK);
1164 lj_mem_growvec(ls->L, ls->vstack, ls->sizevstack, LJ_MAX_VSTACK, VarInfo);
1166 lj_assertFS(name == NAME_BREAK || lj_tab_getstr(fs->kt, name) != NULL,
1167 "unanchored label name");
1168 /* NOBARRIER: name is anchored in fs->kt and ls->vstack is not a GCobj. */
1169 setgcref(ls->vstack[vtop].name, obj2gco(name));
1170 ls->vstack[vtop].startpc = pc;
1171 ls->vstack[vtop].slot = (uint8_t)fs->nactvar;
1172 ls->vstack[vtop].info = info;
1173 ls->vtop = vtop+1;
1174 return vtop;
1177 #define gola_isgoto(v) ((v)->info & VSTACK_GOTO)
1178 #define gola_islabel(v) ((v)->info & VSTACK_LABEL)
1179 #define gola_isgotolabel(v) ((v)->info & (VSTACK_GOTO|VSTACK_LABEL))
1181 /* Patch goto to jump to label. */
1182 static void gola_patch(LexState *ls, VarInfo *vg, VarInfo *vl)
1184 FuncState *fs = ls->fs;
1185 BCPos pc = vg->startpc;
1186 setgcrefnull(vg->name); /* Invalidate pending goto. */
1187 setbc_a(&fs->bcbase[pc].ins, vl->slot);
1188 jmp_patch(fs, pc, vl->startpc);
1191 /* Patch goto to close upvalues. */
1192 static void gola_close(LexState *ls, VarInfo *vg)
1194 FuncState *fs = ls->fs;
1195 BCPos pc = vg->startpc;
1196 BCIns *ip = &fs->bcbase[pc].ins;
1197 lj_assertFS(gola_isgoto(vg), "expected goto");
1198 lj_assertFS(bc_op(*ip) == BC_JMP || bc_op(*ip) == BC_UCLO,
1199 "bad bytecode op %d", bc_op(*ip));
1200 setbc_a(ip, vg->slot);
1201 if (bc_op(*ip) == BC_JMP) {
1202 BCPos next = jmp_next(fs, pc);
1203 if (next != NO_JMP) jmp_patch(fs, next, pc); /* Jump to UCLO. */
1204 setbc_op(ip, BC_UCLO); /* Turn into UCLO. */
1205 setbc_j(ip, NO_JMP);
1209 /* Resolve pending forward gotos for label. */
1210 static void gola_resolve(LexState *ls, FuncScope *bl, MSize idx)
1212 VarInfo *vg = ls->vstack + bl->vstart;
1213 VarInfo *vl = ls->vstack + idx;
1214 for (; vg < vl; vg++)
1215 if (gcrefeq(vg->name, vl->name) && gola_isgoto(vg)) {
1216 if (vg->slot < vl->slot) {
1217 GCstr *name = strref(var_get(ls, ls->fs, vg->slot).name);
1218 lj_assertLS((uintptr_t)name >= VARNAME__MAX, "expected goto name");
1219 ls->linenumber = ls->fs->bcbase[vg->startpc].line;
1220 lj_assertLS(strref(vg->name) != NAME_BREAK, "unexpected break");
1221 lj_lex_error(ls, 0, LJ_ERR_XGSCOPE,
1222 strdata(strref(vg->name)), strdata(name));
1224 gola_patch(ls, vg, vl);
1228 /* Fixup remaining gotos and labels for scope. */
1229 static void gola_fixup(LexState *ls, FuncScope *bl)
1231 VarInfo *v = ls->vstack + bl->vstart;
1232 VarInfo *ve = ls->vstack + ls->vtop;
1233 for (; v < ve; v++) {
1234 GCstr *name = strref(v->name);
1235 if (name != NULL) { /* Only consider remaining valid gotos/labels. */
1236 if (gola_islabel(v)) {
1237 VarInfo *vg;
1238 setgcrefnull(v->name); /* Invalidate label that goes out of scope. */
1239 for (vg = v+1; vg < ve; vg++) /* Resolve pending backward gotos. */
1240 if (strref(vg->name) == name && gola_isgoto(vg)) {
1241 if ((bl->flags&FSCOPE_UPVAL) && vg->slot > v->slot)
1242 gola_close(ls, vg);
1243 gola_patch(ls, vg, v);
1245 } else if (gola_isgoto(v)) {
1246 if (bl->prev) { /* Propagate goto or break to outer scope. */
1247 bl->prev->flags |= name == NAME_BREAK ? FSCOPE_BREAK : FSCOPE_GOLA;
1248 v->slot = bl->nactvar;
1249 if ((bl->flags & FSCOPE_UPVAL))
1250 gola_close(ls, v);
1251 } else { /* No outer scope: undefined goto label or no loop. */
1252 ls->linenumber = ls->fs->bcbase[v->startpc].line;
1253 if (name == NAME_BREAK)
1254 lj_lex_error(ls, 0, LJ_ERR_XBREAK);
1255 else
1256 lj_lex_error(ls, 0, LJ_ERR_XLUNDEF, strdata(name));
1263 /* Find existing label. */
1264 static VarInfo *gola_findlabel(LexState *ls, GCstr *name)
1266 VarInfo *v = ls->vstack + ls->fs->bl->vstart;
1267 VarInfo *ve = ls->vstack + ls->vtop;
1268 for (; v < ve; v++)
1269 if (strref(v->name) == name && gola_islabel(v))
1270 return v;
1271 return NULL;
1274 /* -- Scope handling ------------------------------------------------------ */
1276 /* Begin a scope. */
1277 static void fscope_begin(FuncState *fs, FuncScope *bl, int flags)
1279 bl->nactvar = (uint8_t)fs->nactvar;
1280 bl->flags = flags;
1281 bl->vstart = fs->ls->vtop;
1282 bl->prev = fs->bl;
1283 fs->bl = bl;
1284 lj_assertFS(fs->freereg == fs->nactvar, "bad regalloc");
1287 /* End a scope. */
1288 static void fscope_end(FuncState *fs)
1290 FuncScope *bl = fs->bl;
1291 LexState *ls = fs->ls;
1292 fs->bl = bl->prev;
1293 var_remove(ls, bl->nactvar);
1294 fs->freereg = fs->nactvar;
1295 lj_assertFS(bl->nactvar == fs->nactvar, "bad regalloc");
1296 if ((bl->flags & (FSCOPE_UPVAL|FSCOPE_NOCLOSE)) == FSCOPE_UPVAL)
1297 bcemit_AJ(fs, BC_UCLO, bl->nactvar, 0);
1298 if ((bl->flags & FSCOPE_BREAK)) {
1299 if ((bl->flags & FSCOPE_LOOP)) {
1300 MSize idx = gola_new(ls, NAME_BREAK, VSTACK_LABEL, fs->pc);
1301 ls->vtop = idx; /* Drop break label immediately. */
1302 gola_resolve(ls, bl, idx);
1303 } else { /* Need the fixup step to propagate the breaks. */
1304 gola_fixup(ls, bl);
1305 return;
1308 if ((bl->flags & FSCOPE_GOLA)) {
1309 gola_fixup(ls, bl);
1313 /* Mark scope as having an upvalue. */
1314 static void fscope_uvmark(FuncState *fs, BCReg level)
1316 FuncScope *bl;
1317 for (bl = fs->bl; bl && bl->nactvar > level; bl = bl->prev)
1319 if (bl)
1320 bl->flags |= FSCOPE_UPVAL;
1323 /* -- Function state management ------------------------------------------- */
1325 /* Fixup bytecode for prototype. */
1326 static void fs_fixup_bc(FuncState *fs, GCproto *pt, BCIns *bc, MSize n)
1328 BCInsLine *base = fs->bcbase;
1329 MSize i;
1330 BCIns op;
1331 pt->sizebc = n;
1332 if (fs->ls->fr2 != LJ_FR2) op = BC_NOT; /* Mark non-native prototype. */
1333 else if ((fs->flags & PROTO_VARARG)) op = BC_FUNCV;
1334 else op = BC_FUNCF;
1335 bc[0] = BCINS_AD(op, fs->framesize, 0);
1336 for (i = 1; i < n; i++)
1337 bc[i] = base[i].ins;
1340 /* Fixup upvalues for child prototype, step #2. */
1341 static void fs_fixup_uv2(FuncState *fs, GCproto *pt)
1343 VarInfo *vstack = fs->ls->vstack;
1344 uint16_t *uv = proto_uv(pt);
1345 MSize i, n = pt->sizeuv;
1346 for (i = 0; i < n; i++) {
1347 VarIndex vidx = uv[i];
1348 if (vidx >= LJ_MAX_VSTACK)
1349 uv[i] = vidx - LJ_MAX_VSTACK;
1350 else if ((vstack[vidx].info & VSTACK_VAR_RW))
1351 uv[i] = vstack[vidx].slot | PROTO_UV_LOCAL;
1352 else
1353 uv[i] = vstack[vidx].slot | PROTO_UV_LOCAL | PROTO_UV_IMMUTABLE;
1357 /* Fixup constants for prototype. */
1358 static void fs_fixup_k(FuncState *fs, GCproto *pt, void *kptr)
1360 GCtab *kt;
1361 TValue *array;
1362 Node *node;
1363 MSize i, hmask;
1364 checklimitgt(fs, fs->nkn, BCMAX_D+1, "constants");
1365 checklimitgt(fs, fs->nkgc, BCMAX_D+1, "constants");
1366 setmref(pt->k, kptr);
1367 pt->sizekn = fs->nkn;
1368 pt->sizekgc = fs->nkgc;
1369 kt = fs->kt;
1370 array = tvref(kt->array);
1371 for (i = 0; i < kt->asize; i++)
1372 if (tvhaskslot(&array[i])) {
1373 TValue *tv = &((TValue *)kptr)[tvkslot(&array[i])];
1374 if (LJ_DUALNUM)
1375 setintV(tv, (int32_t)i);
1376 else
1377 setnumV(tv, (lua_Number)i);
1379 node = noderef(kt->node);
1380 hmask = kt->hmask;
1381 for (i = 0; i <= hmask; i++) {
1382 Node *n = &node[i];
1383 if (tvhaskslot(&n->val)) {
1384 ptrdiff_t kidx = (ptrdiff_t)tvkslot(&n->val);
1385 lj_assertFS(!tvisint(&n->key), "unexpected integer key");
1386 if (tvisnum(&n->key)) {
1387 TValue *tv = &((TValue *)kptr)[kidx];
1388 if (LJ_DUALNUM) {
1389 lua_Number nn = numV(&n->key);
1390 int32_t k = lj_num2int(nn);
1391 lj_assertFS(!tvismzero(&n->key), "unexpected -0 key");
1392 if ((lua_Number)k == nn)
1393 setintV(tv, k);
1394 else
1395 *tv = n->key;
1396 } else {
1397 *tv = n->key;
1399 } else {
1400 GCobj *o = gcV(&n->key);
1401 setgcref(((GCRef *)kptr)[~kidx], o);
1402 lj_gc_objbarrier(fs->L, pt, o);
1403 if (tvisproto(&n->key))
1404 fs_fixup_uv2(fs, gco2pt(o));
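/* Constant layout in the prototype: number constants occupy a TValue array
** growing upward from pt->k, while GC object constants form a GCRef array
** growing downward from the same pointer and are addressed with negated
** indices (~kidx). fs_finish() reserves nkgc GCRefs directly below ofsk to
** make room for this.
*/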
1410 /* Fixup upvalues for prototype, step #1. */
1411 static void fs_fixup_uv1(FuncState *fs, GCproto *pt, uint16_t *uv)
1413 setmref(pt->uv, uv);
1414 pt->sizeuv = fs->nuv;
1415 memcpy(uv, fs->uvtmp, fs->nuv*sizeof(VarIndex));
1418 #ifndef LUAJIT_DISABLE_DEBUGINFO
1419 /* Prepare lineinfo for prototype. */
1420 static size_t fs_prep_line(FuncState *fs, BCLine numline)
1422 return (fs->pc-1) << (numline < 256 ? 0 : numline < 65536 ? 1 : 2);
1425 /* Fixup lineinfo for prototype. */
1426 static void fs_fixup_line(FuncState *fs, GCproto *pt,
1427 void *lineinfo, BCLine numline)
1429 BCInsLine *base = fs->bcbase + 1;
1430 BCLine first = fs->linedefined;
1431 MSize i = 0, n = fs->pc-1;
1432 pt->firstline = fs->linedefined;
1433 pt->numline = numline;
1434 setmref(pt->lineinfo, lineinfo);
1435 if (LJ_LIKELY(numline < 256)) {
1436 uint8_t *li = (uint8_t *)lineinfo;
1437 do {
1438 BCLine delta = base[i].line - first;
1439 lj_assertFS(delta >= 0 && delta < 256, "bad line delta");
1440 li[i] = (uint8_t)delta;
1441 } while (++i < n);
1442 } else if (LJ_LIKELY(numline < 65536)) {
1443 uint16_t *li = (uint16_t *)lineinfo;
1444 do {
1445 BCLine delta = base[i].line - first;
1446 lj_assertFS(delta >= 0 && delta < 65536, "bad line delta");
1447 li[i] = (uint16_t)delta;
1448 } while (++i < n);
1449 } else {
1450 uint32_t *li = (uint32_t *)lineinfo;
1451 do {
1452 BCLine delta = base[i].line - first;
1453 lj_assertFS(delta >= 0, "bad line delta");
1454 li[i] = (uint32_t)delta;
1455 } while (++i < n);
1459 /* Prepare variable info for prototype. */
1460 static size_t fs_prep_var(LexState *ls, FuncState *fs, size_t *ofsvar)
1462 VarInfo *vs = ls->vstack, *ve;
1463 MSize i, n;
1464 BCPos lastpc;
1465 lj_buf_reset(&ls->sb); /* Copy to temp. string buffer. */
1466 /* Store upvalue names. */
1467 for (i = 0, n = fs->nuv; i < n; i++) {
1468 GCstr *s = strref(vs[fs->uvmap[i]].name);
1469 MSize len = s->len+1;
1470 char *p = lj_buf_more(&ls->sb, len);
1471 p = lj_buf_wmem(p, strdata(s), len);
1472 ls->sb.w = p;
1474 *ofsvar = sbuflen(&ls->sb);
1475 lastpc = 0;
1476 /* Store local variable names and compressed ranges. */
1477 for (ve = vs + ls->vtop, vs += fs->vbase; vs < ve; vs++) {
1478 if (!gola_isgotolabel(vs)) {
1479 GCstr *s = strref(vs->name);
1480 BCPos startpc;
1481 char *p;
1482 if ((uintptr_t)s < VARNAME__MAX) {
1483 p = lj_buf_more(&ls->sb, 1 + 2*5);
1484 *p++ = (char)(uintptr_t)s;
1485 } else {
1486 MSize len = s->len+1;
1487 p = lj_buf_more(&ls->sb, len + 2*5);
1488 p = lj_buf_wmem(p, strdata(s), len);
1490 startpc = vs->startpc;
1491 p = lj_strfmt_wuleb128(p, startpc-lastpc);
1492 p = lj_strfmt_wuleb128(p, vs->endpc-startpc);
1493 ls->sb.w = p;
1494 lastpc = startpc;
1497 lj_buf_putb(&ls->sb, '\0'); /* Terminator for varinfo. */
1498 return sbuflen(&ls->sb);
1501 /* Fixup variable info for prototype. */
1502 static void fs_fixup_var(LexState *ls, GCproto *pt, uint8_t *p, size_t ofsvar)
1504 setmref(pt->uvinfo, p);
1505 setmref(pt->varinfo, (char *)p + ofsvar);
1506 memcpy(p, ls->sb.b, sbuflen(&ls->sb)); /* Copy from temp. buffer. */
1508 #else
1510 /* Initialize with empty debug info, if disabled. */
1511 #define fs_prep_line(fs, numline) (UNUSED(numline), 0)
1512 #define fs_fixup_line(fs, pt, li, numline) \
1513 pt->firstline = pt->numline = 0, setmref((pt)->lineinfo, NULL)
1514 #define fs_prep_var(ls, fs, ofsvar) (UNUSED(ofsvar), 0)
1515 #define fs_fixup_var(ls, pt, p, ofsvar) \
1516 setmref((pt)->uvinfo, NULL), setmref((pt)->varinfo, NULL)
1518 #endif
1520 /* Check if bytecode op returns. */
1521 static int bcopisret(BCOp op)
1523 switch (op) {
1524 case BC_CALLMT: case BC_CALLT:
1525 case BC_RETM: case BC_RET: case BC_RET0: case BC_RET1:
1526 return 1;
1527 default:
1528 return 0;
1532 /* Fixup return instruction for prototype. */
1533 static void fs_fixup_ret(FuncState *fs)
1535 BCPos lastpc = fs->pc;
1536 if (lastpc <= fs->lasttarget || !bcopisret(bc_op(fs->bcbase[lastpc-1].ins))) {
1537 if ((fs->bl->flags & FSCOPE_UPVAL))
1538 bcemit_AJ(fs, BC_UCLO, 0, 0);
1539 bcemit_AD(fs, BC_RET0, 0, 1); /* Need final return. */
1541 fs->bl->flags |= FSCOPE_NOCLOSE; /* Handled above. */
1542 fscope_end(fs);
1543 lj_assertFS(fs->bl == NULL, "bad scope nesting");
1544 /* May need to fixup returns encoded before first function was created. */
1545 if (fs->flags & PROTO_FIXUP_RETURN) {
1546 BCPos pc;
1547 for (pc = 1; pc < lastpc; pc++) {
1548 BCIns ins = fs->bcbase[pc].ins;
1549 BCPos offset;
1550 switch (bc_op(ins)) {
1551 case BC_CALLMT: case BC_CALLT:
1552 case BC_RETM: case BC_RET: case BC_RET0: case BC_RET1:
1553 offset = bcemit_INS(fs, ins); /* Copy original instruction. */
1554 fs->bcbase[offset].line = fs->bcbase[pc].line;
1555 offset = offset-(pc+1)+BCBIAS_J;
1556 if (offset > BCMAX_D)
1557 err_syntax(fs->ls, LJ_ERR_XFIXUP);
1558 /* Replace with UCLO plus branch. */
1559 fs->bcbase[pc].ins = BCINS_AD(BC_UCLO, 0, offset);
1560 break;
1561 case BC_FNEW:
1562 return; /* We're done. */
1563 default:
1564 break;
1570 /* Finish a FuncState and return the new prototype. */
1571 static GCproto *fs_finish(LexState *ls, BCLine line)
1573 lua_State *L = ls->L;
1574 FuncState *fs = ls->fs;
1575 BCLine numline = line - fs->linedefined;
1576 size_t sizept, ofsk, ofsuv, ofsli, ofsdbg, ofsvar;
1577 GCproto *pt;
1579 /* Apply final fixups. */
1580 fs_fixup_ret(fs);
1582 /* Calculate total size of prototype including all colocated arrays. */
1583 sizept = sizeof(GCproto) + fs->pc*sizeof(BCIns) + fs->nkgc*sizeof(GCRef);
1584 sizept = (sizept + sizeof(TValue)-1) & ~(sizeof(TValue)-1);
1585 ofsk = sizept; sizept += fs->nkn*sizeof(TValue);
1586 ofsuv = sizept; sizept += ((fs->nuv+1)&~1)*2;
1587 ofsli = sizept; sizept += fs_prep_line(fs, numline);
1588 ofsdbg = sizept; sizept += fs_prep_var(ls, fs, &ofsvar);
1590 /* Allocate prototype and initialize its fields. */
1591 pt = (GCproto *)lj_mem_newgco(L, (MSize)sizept);
1592 pt->gct = ~LJ_TPROTO;
1593 pt->sizept = (MSize)sizept;
1594 pt->trace = 0;
1595 pt->flags = (uint8_t)(fs->flags & ~(PROTO_HAS_RETURN|PROTO_FIXUP_RETURN));
1596 pt->numparams = fs->numparams;
1597 pt->framesize = fs->framesize;
1598 setgcref(pt->chunkname, obj2gco(ls->chunkname));
1600 /* Close potentially uninitialized gap between bc and kgc. */
1601 *(uint32_t *)((char *)pt + ofsk - sizeof(GCRef)*(fs->nkgc+1)) = 0;
1602 fs_fixup_bc(fs, pt, (BCIns *)((char *)pt + sizeof(GCproto)), fs->pc);
1603 fs_fixup_k(fs, pt, (void *)((char *)pt + ofsk));
1604 fs_fixup_uv1(fs, pt, (uint16_t *)((char *)pt + ofsuv));
1605 fs_fixup_line(fs, pt, (void *)((char *)pt + ofsli), numline);
1606 fs_fixup_var(ls, pt, (uint8_t *)((char *)pt + ofsdbg), ofsvar);
1608 lj_vmevent_send(L, BC,
1609 setprotoV(L, L->top++, pt);
1612 L->top--; /* Pop table of constants. */
1613 ls->vtop = fs->vbase; /* Reset variable stack. */
1614 ls->fs = fs->prev;
1615 lj_assertL(ls->fs != NULL || ls->tok == TK_eof, "bad parser state");
1616 return pt;
1619 /* Initialize a new FuncState. */
1620 static void fs_init(LexState *ls, FuncState *fs)
1622 lua_State *L = ls->L;
1623 fs->prev = ls->fs; ls->fs = fs; /* Append to list. */
1624 fs->ls = ls;
1625 fs->vbase = ls->vtop;
1626 fs->L = L;
1627 fs->pc = 0;
1628 fs->lasttarget = 0;
1629 fs->jpc = NO_JMP;
1630 fs->freereg = 0;
1631 fs->nkgc = 0;
1632 fs->nkn = 0;
1633 fs->nactvar = 0;
1634 fs->nuv = 0;
1635 fs->bl = NULL;
1636 fs->flags = 0;
1637 fs->framesize = 1; /* Minimum frame size. */
1638 fs->kt = lj_tab_new(L, 0, 0);
1639 /* Anchor table of constants in stack to avoid being collected. */
1640 settabV(L, L->top, fs->kt);
1641 incr_top(L);
1644 /* -- Expressions --------------------------------------------------------- */
1646 /* Forward declaration. */
1647 static void expr(LexState *ls, ExpDesc *v);
1649 /* Return string expression. */
1650 static void expr_str(LexState *ls, ExpDesc *e)
1652 expr_init(e, VKSTR, 0);
1653 e->u.sval = lex_str(ls);
1656 /* Return index expression. */
1657 static void expr_index(FuncState *fs, ExpDesc *t, ExpDesc *e)
1659 /* Already called: expr_toval(fs, e). */
1660 t->k = VINDEXED;
1661 if (expr_isnumk(e)) {
1662 #if LJ_DUALNUM
1663 if (tvisint(expr_numtv(e))) {
1664 int32_t k = intV(expr_numtv(e));
1665 if (checku8(k)) {
1666 t->u.s.aux = BCMAX_C+1+(uint32_t)k; /* 256..511: const byte key */
1667 return;
1670 #else
1671 lua_Number n = expr_numberV(e);
1672 int32_t k = lj_num2int(n);
1673 if (checku8(k) && n == (lua_Number)k) {
1674 t->u.s.aux = BCMAX_C+1+(uint32_t)k; /* 256..511: const byte key */
1675 return;
1677 #endif
1678 } else if (expr_isstrk(e)) {
1679 BCReg idx = const_str(fs, e);
1680 if (idx <= BCMAX_C) {
1681 t->u.s.aux = ~idx; /* -256..-1: const string key */
1682 return;
1685 t->u.s.aux = expr_toanyreg(fs, e); /* 0..255: register */
1688 /* Parse index expression with named field. */
1689 static void expr_field(LexState *ls, ExpDesc *v)
1691 FuncState *fs = ls->fs;
1692 ExpDesc key;
1693 expr_toanyreg(fs, v);
1694 lj_lex_next(ls); /* Skip dot or colon. */
1695 expr_str(ls, &key);
1696 expr_index(fs, v, &key);
1699 /* Parse index expression with brackets. */
1700 static void expr_bracket(LexState *ls, ExpDesc *v)
1702 lj_lex_next(ls); /* Skip '['. */
1703 expr(ls, v);
1704 expr_toval(ls->fs, v);
1705 lex_check(ls, ']');
1708 /* Get value of constant expression. */
1709 static void expr_kvalue(FuncState *fs, TValue *v, ExpDesc *e)
1711 UNUSED(fs);
1712 if (e->k <= VKTRUE) {
1713 setpriV(v, ~(uint32_t)e->k);
1714 } else if (e->k == VKSTR) {
1715 setgcVraw(v, obj2gco(e->u.sval), LJ_TSTR);
1716 } else {
1717 lj_assertFS(tvisnumber(expr_numtv(e)), "bad number constant");
1718 *v = *expr_numtv(e);
1722 /* Parse table constructor expression. */
1723 static void expr_table(LexState *ls, ExpDesc *e)
1725 FuncState *fs = ls->fs;
1726 BCLine line = ls->linenumber;
1727 GCtab *t = NULL;
1728 int vcall = 0, needarr = 0, fixt = 0;
1729 uint32_t narr = 1; /* First array index. */
1730 uint32_t nhash = 0; /* Number of hash entries. */
1731 BCReg freg = fs->freereg;
1732 BCPos pc = bcemit_AD(fs, BC_TNEW, freg, 0);
1733 expr_init(e, VNONRELOC, freg);
1734 bcreg_reserve(fs, 1);
1735 freg++;
1736 lex_check(ls, '{');
1737 while (ls->tok != '}') {
1738 ExpDesc key, val;
1739 vcall = 0;
1740 if (ls->tok == '[') {
1741 expr_bracket(ls, &key); /* Already calls expr_toval. */
1742 if (!expr_isk(&key)) expr_index(fs, e, &key);
1743 if (expr_isnumk(&key) && expr_numiszero(&key)) needarr = 1; else nhash++;
1744 lex_check(ls, '=');
1745 } else if ((ls->tok == TK_name || (!LJ_52 && ls->tok == TK_goto)) &&
1746 lj_lex_lookahead(ls) == '=') {
1747 expr_str(ls, &key);
1748 lex_check(ls, '=');
1749 nhash++;
1750 } else {
1751 expr_init(&key, VKNUM, 0);
1752 setintV(&key.u.nval, (int)narr);
1753 narr++;
1754 needarr = vcall = 1;
1756 expr(ls, &val);
1757 if (expr_isk(&key) && key.k != VKNIL &&
1758 (key.k == VKSTR || expr_isk_nojump(&val))) {
1759 TValue k, *v;
1760 if (!t) { /* Create template table on demand. */
1761 BCReg kidx;
1762 t = lj_tab_new(fs->L, needarr ? narr : 0, hsize2hbits(nhash));
1763 kidx = const_gc(fs, obj2gco(t), LJ_TTAB);
1764 fs->bcbase[pc].ins = BCINS_AD(BC_TDUP, freg-1, kidx);
1766 vcall = 0;
1767 expr_kvalue(fs, &k, &key);
1768 v = lj_tab_set(fs->L, t, &k);
1769 lj_gc_anybarriert(fs->L, t);
1770 if (expr_isk_nojump(&val)) { /* Add const key/value to template table. */
1771 expr_kvalue(fs, v, &val);
1772 } else { /* Otherwise create dummy string key (avoids lj_tab_newkey). */
1773 settabV(fs->L, v, t); /* Preserve key with table itself as value. */
1774 fixt = 1; /* Fix this later, after all resizes. */
1775 goto nonconst;
1777 } else {
1778 nonconst:
1779 if (val.k != VCALL) { expr_toanyreg(fs, &val); vcall = 0; }
1780 if (expr_isk(&key)) expr_index(fs, e, &key);
1781 bcemit_store(fs, e, &val);
1783 fs->freereg = freg;
1784 if (!lex_opt(ls, ',') && !lex_opt(ls, ';')) break;
1786 lex_match(ls, '}', '{', line);
1787 if (vcall) {
1788 BCInsLine *ilp = &fs->bcbase[fs->pc-1];
1789 ExpDesc en;
1790 lj_assertFS(bc_a(ilp->ins) == freg &&
1791 bc_op(ilp->ins) == (narr > 256 ? BC_TSETV : BC_TSETB),
1792 "bad CALL code generation");
1793 expr_init(&en, VKNUM, 0);
1794 en.u.nval.u32.lo = narr-1;
1795 en.u.nval.u32.hi = 0x43300000; /* Biased integer to avoid denormals. */
1796 if (narr > 256) { fs->pc--; ilp--; }
1797 ilp->ins = BCINS_AD(BC_TSETM, freg, const_num(fs, &en));
1798 setbc_b(&ilp[-1].ins, 0);
1800 if (pc == fs->pc-1) { /* Make expr relocable if possible. */
1801 e->u.s.info = pc;
1802 fs->freereg--;
1803 e->k = VRELOCABLE;
1804 } else {
1805 e->k = VNONRELOC; /* May have been changed by expr_index. */
1807 if (!t) { /* Construct TNEW RD: hhhhhaaaaaaaaaaa. */
1808 BCIns *ip = &fs->bcbase[pc].ins;
1809 if (!needarr) narr = 0;
1810 else if (narr < 3) narr = 3;
1811 else if (narr > 0x7ff) narr = 0x7ff;
1812 setbc_d(ip, narr|(hsize2hbits(nhash)<<11));
1813 } else {
1814 if (needarr && t->asize < narr)
1815 lj_tab_reasize(fs->L, t, narr-1);
1816 if (fixt) { /* Fix value for dummy keys in template table. */
1817 Node *node = noderef(t->node);
1818 uint32_t i, hmask = t->hmask;
1819 for (i = 0; i <= hmask; i++) {
1820 Node *n = &node[i];
1821 if (tvistab(&n->val)) {
1822 lj_assertFS(tabV(&n->val) == t, "bad dummy key in template table");
1823 setnilV(&n->val); /* Turn value into nil. */
1827 lj_gc_check(fs->L);
1831 /* Parse function parameters. */
1832 static BCReg parse_params(LexState *ls, int needself)
1834 FuncState *fs = ls->fs;
1835 BCReg nparams = 0;
1836 lex_check(ls, '(');
1837 if (needself)
1838 var_new_lit(ls, nparams++, "self");
1839 if (ls->tok != ')') {
1840 do {
1841 if (ls->tok == TK_name || (!LJ_52 && ls->tok == TK_goto)) {
1842 var_new(ls, nparams++, lex_str(ls));
1843 } else if (ls->tok == TK_dots) {
1844 lj_lex_next(ls);
1845 fs->flags |= PROTO_VARARG;
1846 break;
1847 } else {
1848 err_syntax(ls, LJ_ERR_XPARAM);
1850 } while (lex_opt(ls, ','));
1852 var_add(ls, nparams);
1853 lj_assertFS(fs->nactvar == nparams, "bad regalloc");
1854 bcreg_reserve(fs, nparams);
1855 lex_check(ls, ')');
1856 return nparams;
1859 /* Forward declaration. */
1860 static void parse_chunk(LexState *ls);
1862 /* Parse body of a function. */
1863 static void parse_body(LexState *ls, ExpDesc *e, int needself, BCLine line)
1865 FuncState fs, *pfs = ls->fs;
1866 FuncScope bl;
1867 GCproto *pt;
1868 ptrdiff_t oldbase = pfs->bcbase - ls->bcstack;
1869 fs_init(ls, &fs);
1870 fscope_begin(&fs, &bl, 0);
1871 fs.linedefined = line;
1872 fs.numparams = (uint8_t)parse_params(ls, needself);
1873 fs.bcbase = pfs->bcbase + pfs->pc;
1874 fs.bclim = pfs->bclim - pfs->pc;
1875 bcemit_AD(&fs, BC_FUNCF, 0, 0); /* Placeholder. */
1876 parse_chunk(ls);
1877 if (ls->tok != TK_end) lex_match(ls, TK_end, TK_function, line);
1878 pt = fs_finish(ls, (ls->lastline = ls->linenumber));
1879 pfs->bcbase = ls->bcstack + oldbase; /* May have been reallocated. */
1880 pfs->bclim = (BCPos)(ls->sizebcstack - oldbase);
1881 /* Store new prototype in the constant array of the parent. */
1882 expr_init(e, VRELOCABLE,
1883 bcemit_AD(pfs, BC_FNEW, 0, const_gc(pfs, obj2gco(pt), LJ_TPROTO)));
1884 #if LJ_HASFFI
1885 pfs->flags |= (fs.flags & PROTO_FFI);
1886 #endif
1887 if (!(pfs->flags & PROTO_CHILD)) {
1888 if (pfs->flags & PROTO_HAS_RETURN)
1889 pfs->flags |= PROTO_FIXUP_RETURN;
1890 pfs->flags |= PROTO_CHILD;
1892 lj_lex_next(ls);
1893 }
1894
1895 /* Parse expression list. Last expression is left open. */
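/* Leaving the last expression open lets the caller widen or narrow it later:
** e.g. for 'local a, b, c = f()' assign_adjust() patches the call to yield
** three results instead of one.
*/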
1896 static BCReg expr_list(LexState *ls, ExpDesc *v)
1898 BCReg n = 1;
1899 expr(ls, v);
1900 while (lex_opt(ls, ',')) {
1901 expr_tonextreg(ls->fs, v);
1902 expr(ls, v);
1903 n++;
1905 return n;
1906 }
1907
1908 /* Parse function argument list. */
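/* Covers the three Lua call forms: 'f(a, b)', 'f{...}' and 'f"str"'. A fixed
** argument list emits BC_CALL; if the last argument is itself an open call or
** '...', BC_CALLM forwards all of its results. The !LJ_52 check below rejects
** a '(' on a new line after the callee as ambiguous syntax.
*/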
1909 static void parse_args(LexState *ls, ExpDesc *e)
1911 FuncState *fs = ls->fs;
1912 ExpDesc args;
1913 BCIns ins;
1914 BCReg base;
1915 BCLine line = ls->linenumber;
1916 if (ls->tok == '(') {
1917 #if !LJ_52
1918 if (line != ls->lastline)
1919 err_syntax(ls, LJ_ERR_XAMBIG);
1920 #endif
1921 lj_lex_next(ls);
1922 if (ls->tok == ')') { /* f(). */
1923 args.k = VVOID;
1924 } else {
1925 expr_list(ls, &args);
1926 if (args.k == VCALL) /* f(a, b, g()) or f(a, b, ...). */
1927 setbc_b(bcptr(fs, &args), 0); /* Pass on multiple results. */
1929 lex_match(ls, ')', '(', line);
1930 } else if (ls->tok == '{') {
1931 expr_table(ls, &args);
1932 } else if (ls->tok == TK_string) {
1933 expr_init(&args, VKSTR, 0);
1934 args.u.sval = strV(&ls->tokval);
1935 lj_lex_next(ls);
1936 } else {
1937 err_syntax(ls, LJ_ERR_XFUNARG);
1938 return; /* Silence compiler. */
1940 lj_assertFS(e->k == VNONRELOC, "bad expr type %d", e->k);
1941 base = e->u.s.info; /* Base register for call. */
1942 if (args.k == VCALL) {
1943 ins = BCINS_ABC(BC_CALLM, base, 2, args.u.s.aux - base - 1 - ls->fr2);
1944 } else {
1945 if (args.k != VVOID)
1946 expr_tonextreg(fs, &args);
1947 ins = BCINS_ABC(BC_CALL, base, 2, fs->freereg - base - ls->fr2);
1949 expr_init(e, VCALL, bcemit_INS(fs, ins));
1950 e->u.s.aux = base;
1951 fs->bcbase[fs->pc - 1].line = line;
1952 fs->freereg = base+1; /* Leave one result by default. */
1953 }
1954
1955 /* Parse primary expression. */
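/* A prefix (name or parenthesized expression) followed by any number of
** suffixes: e.g. 'a.b[i]:m(x)' walks field -> index -> method call. Before a
** call, an extra register is reserved when fr2 is set (two-slot frame layout).
*/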
1956 static void expr_primary(LexState *ls, ExpDesc *v)
1958 FuncState *fs = ls->fs;
1959 /* Parse prefix expression. */
1960 if (ls->tok == '(') {
1961 BCLine line = ls->linenumber;
1962 lj_lex_next(ls);
1963 expr(ls, v);
1964 lex_match(ls, ')', '(', line);
1965 expr_discharge(ls->fs, v);
1966 } else if (ls->tok == TK_name || (!LJ_52 && ls->tok == TK_goto)) {
1967 var_lookup(ls, v);
1968 } else {
1969 err_syntax(ls, LJ_ERR_XSYMBOL);
1971 for (;;) { /* Parse multiple expression suffixes. */
1972 if (ls->tok == '.') {
1973 expr_field(ls, v);
1974 } else if (ls->tok == '[') {
1975 ExpDesc key;
1976 expr_toanyreg(fs, v);
1977 expr_bracket(ls, &key);
1978 expr_index(fs, v, &key);
1979 } else if (ls->tok == ':') {
1980 ExpDesc key;
1981 lj_lex_next(ls);
1982 expr_str(ls, &key);
1983 bcemit_method(fs, v, &key);
1984 parse_args(ls, v);
1985 } else if (ls->tok == '(' || ls->tok == TK_string || ls->tok == '{') {
1986 expr_tonextreg(fs, v);
1987 if (ls->fr2) bcreg_reserve(fs, 1);
1988 parse_args(ls, v);
1989 } else {
1990 break;
1995 /* Parse simple expression. */
1996 static void expr_simple(LexState *ls, ExpDesc *v)
1998 switch (ls->tok) {
1999 case TK_number:
2000 expr_init(v, (LJ_HASFFI && tviscdata(&ls->tokval)) ? VKCDATA : VKNUM, 0);
2001 copyTV(ls->L, &v->u.nval, &ls->tokval);
2002 break;
2003 case TK_string:
2004 expr_init(v, VKSTR, 0);
2005 v->u.sval = strV(&ls->tokval);
2006 break;
2007 case TK_nil:
2008 expr_init(v, VKNIL, 0);
2009 break;
2010 case TK_true:
2011 expr_init(v, VKTRUE, 0);
2012 break;
2013 case TK_false:
2014 expr_init(v, VKFALSE, 0);
2015 break;
2016 case TK_dots: { /* Vararg. */
2017 FuncState *fs = ls->fs;
2018 BCReg base;
2019 checkcond(ls, fs->flags & PROTO_VARARG, LJ_ERR_XDOTS);
2020 bcreg_reserve(fs, 1);
2021 base = fs->freereg-1;
2022 expr_init(v, VCALL, bcemit_ABC(fs, BC_VARG, base, 2, fs->numparams));
2023 v->u.s.aux = base;
2024 break;
2026 case '{': /* Table constructor. */
2027 expr_table(ls, v);
2028 return;
2029 case TK_function:
2030 lj_lex_next(ls);
2031 parse_body(ls, v, 0, ls->linenumber);
2032 return;
2033 default:
2034 expr_primary(ls, v);
2035 return;
2037 lj_lex_next(ls);
2040 /* Manage syntactic levels to avoid blowing up the stack. */
2041 static void synlevel_begin(LexState *ls)
2043 if (++ls->level >= LJ_MAX_XLEVEL)
2044 lj_lex_error(ls, 0, LJ_ERR_XLEVELS);
2047 #define synlevel_end(ls) ((ls)->level--)
2049 /* Convert token to binary operator. */
2050 static BinOpr token2binop(LexToken tok)
2052 switch (tok) {
2053 case '+': return OPR_ADD;
2054 case '-': return OPR_SUB;
2055 case '*': return OPR_MUL;
2056 case '/': return OPR_DIV;
2057 case '%': return OPR_MOD;
2058 case '^': return OPR_POW;
2059 case TK_concat: return OPR_CONCAT;
2060 case TK_ne: return OPR_NE;
2061 case TK_eq: return OPR_EQ;
2062 case '<': return OPR_LT;
2063 case TK_le: return OPR_LE;
2064 case '>': return OPR_GT;
2065 case TK_ge: return OPR_GE;
2066 case TK_and: return OPR_AND;
2067 case TK_or: return OPR_OR;
2068 default: return OPR_NOBINOPR;
2072 /* Priorities for each binary operator. ORDER OPR. */
2073 static const struct {
2074 uint8_t left; /* Left priority. */
2075 uint8_t right; /* Right priority. */
2076 } priority[] = {
2077 {6,6}, {6,6}, {7,7}, {7,7}, {7,7}, /* ADD SUB MUL DIV MOD */
2078 {10,9}, {5,4}, /* POW CONCAT (right associative) */
2079 {3,3}, {3,3}, /* EQ NE */
2080 {3,3}, {3,3}, {3,3}, {3,3}, /* LT GE GT LE */
2081 {2,2}, {1,1} /* AND OR */
2082 };
2083
2084 #define UNARY_PRIORITY 8 /* Priority for unary operators. */
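/* Rough reading of the table above: 'or' binds weakest, then 'and',
** comparisons, '..' (right associative), '+'/'-', '*'/'/'/'%', unary
** operators and finally '^' (also right associative). Hence '-x^2' parses
** as '-(x^2)' and 'a..b..c' as 'a..(b..c)'.
*/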
2086 /* Forward declaration. */
2087 static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit);
2089 /* Parse unary expression. */
2090 static void expr_unop(LexState *ls, ExpDesc *v)
2092 BCOp op;
2093 if (ls->tok == TK_not) {
2094 op = BC_NOT;
2095 } else if (ls->tok == '-') {
2096 op = BC_UNM;
2097 } else if (ls->tok == '#') {
2098 op = BC_LEN;
2099 } else {
2100 expr_simple(ls, v);
2101 return;
2103 lj_lex_next(ls);
2104 expr_binop(ls, v, UNARY_PRIORITY);
2105 bcemit_unop(ls->fs, op, v);
2108 /* Parse binary expressions with priority higher than the limit. */
2109 static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit)
2111 BinOpr op;
2112 synlevel_begin(ls);
2113 expr_unop(ls, v);
2114 op = token2binop(ls->tok);
2115 while (op != OPR_NOBINOPR && priority[op].left > limit) {
2116 ExpDesc v2;
2117 BinOpr nextop;
2118 lj_lex_next(ls);
2119 bcemit_binop_left(ls->fs, op, v);
2120 /* Parse binary expression with higher priority. */
2121 nextop = expr_binop(ls, &v2, priority[op].right);
2122 bcemit_binop(ls->fs, op, v, &v2);
2123 op = nextop;
2125 synlevel_end(ls);
2126 return op; /* Return unconsumed binary operator (if any). */
2129 /* Parse expression. */
2130 static void expr(LexState *ls, ExpDesc *v)
2132 expr_binop(ls, v, 0); /* Priority 0: parse whole expression. */
2135 /* Assign expression to the next register. */
2136 static void expr_next(LexState *ls)
2138 ExpDesc e;
2139 expr(ls, &e);
2140 expr_tonextreg(ls->fs, &e);
2143 /* Parse conditional expression. */
2144 static BCPos expr_cond(LexState *ls)
2146 ExpDesc v;
2147 expr(ls, &v);
2148 if (v.k == VKNIL) v.k = VKFALSE;
2149 bcemit_branch_t(ls->fs, &v);
2150 return v.f;
2153 /* -- Assignments --------------------------------------------------------- */
2155 /* List of LHS variables. */
2156 typedef struct LHSVarList {
2157 ExpDesc v; /* LHS variable. */
2158 struct LHSVarList *prev; /* Link to previous LHS variable. */
2159 } LHSVarList;
2161 /* Eliminate write-after-read hazards for local variable assignment. */
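/* Stores are emitted right to left, so in 't[i], t = 1, 2' the write to 't'
** happens before the store to 't[i]'; the original table (and likewise an
** index 'i') is therefore renamed into a scratch register via BC_MOV first.
*/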
2162 static void assign_hazard(LexState *ls, LHSVarList *lh, const ExpDesc *v)
2164 FuncState *fs = ls->fs;
2165 BCReg reg = v->u.s.info; /* Check against this variable. */
2166 BCReg tmp = fs->freereg; /* Rename to this temp. register (if needed). */
2167 int hazard = 0;
2168 for (; lh; lh = lh->prev) {
2169 if (lh->v.k == VINDEXED) {
2170 if (lh->v.u.s.info == reg) { /* t[i], t = 1, 2 */
2171 hazard = 1;
2172 lh->v.u.s.info = tmp;
2174 if (lh->v.u.s.aux == reg) { /* t[i], i = 1, 2 */
2175 hazard = 1;
2176 lh->v.u.s.aux = tmp;
2180 if (hazard) {
2181 bcemit_AD(fs, BC_MOV, tmp, reg); /* Rename conflicting variable. */
2182 bcreg_reserve(fs, 1);
2183 }
2184 }
2185
2186 /* Adjust LHS/RHS of an assignment. */
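/* E.g. 'local a, b, c = f()' patches the call to return three results,
** 'local a, b = 1' nil-fills the missing value, and surplus right-hand-side
** expressions are evaluated and then dropped.
*/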
2187 static void assign_adjust(LexState *ls, BCReg nvars, BCReg nexps, ExpDesc *e)
2189 FuncState *fs = ls->fs;
2190 int32_t extra = (int32_t)nvars - (int32_t)nexps;
2191 if (e->k == VCALL) {
2192 extra++; /* Compensate for the VCALL itself. */
2193 if (extra < 0) extra = 0;
2194 setbc_b(bcptr(fs, e), extra+1); /* Fixup call results. */
2195 if (extra > 1) bcreg_reserve(fs, (BCReg)extra-1);
2196 } else {
2197 if (e->k != VVOID)
2198 expr_tonextreg(fs, e); /* Close last expression. */
2199 if (extra > 0) { /* Leftover LHS are set to nil. */
2200 BCReg reg = fs->freereg;
2201 bcreg_reserve(fs, (BCReg)extra);
2202 bcemit_nil(fs, reg, (BCReg)extra);
2205 if (nexps > nvars)
2206 ls->fs->freereg -= nexps - nvars; /* Drop leftover regs. */
2209 /* Recursively parse assignment statement. */
2210 static void parse_assignment(LexState *ls, LHSVarList *lh, BCReg nvars)
2212 ExpDesc e;
2213 checkcond(ls, VLOCAL <= lh->v.k && lh->v.k <= VINDEXED, LJ_ERR_XSYNTAX);
2214 if (lex_opt(ls, ',')) { /* Collect LHS list and recurse upwards. */
2215 LHSVarList vl;
2216 vl.prev = lh;
2217 expr_primary(ls, &vl.v);
2218 if (vl.v.k == VLOCAL)
2219 assign_hazard(ls, lh, &vl.v);
2220 checklimit(ls->fs, ls->level + nvars, LJ_MAX_XLEVEL, "variable names");
2221 parse_assignment(ls, &vl, nvars+1);
2222 } else { /* Parse RHS. */
2223 BCReg nexps;
2224 lex_check(ls, '=');
2225 nexps = expr_list(ls, &e);
2226 if (nexps == nvars) {
2227 if (e.k == VCALL) {
2228 if (bc_op(*bcptr(ls->fs, &e)) == BC_VARG) { /* Vararg assignment. */
2229 ls->fs->freereg--;
2230 e.k = VRELOCABLE;
2231 } else { /* Multiple call results. */
2232 e.u.s.info = e.u.s.aux; /* Base of call is not relocatable. */
2233 e.k = VNONRELOC;
2236 bcemit_store(ls->fs, &lh->v, &e);
2237 return;
2239 assign_adjust(ls, nvars, nexps, &e);
2241 /* Assign RHS to LHS and recurse downwards. */
2242 expr_init(&e, VNONRELOC, ls->fs->freereg-1);
2243 bcemit_store(ls->fs, &lh->v, &e);
2246 /* Parse call statement or assignment. */
2247 static void parse_call_assign(LexState *ls)
2249 FuncState *fs = ls->fs;
2250 LHSVarList vl;
2251 expr_primary(ls, &vl.v);
2252 if (vl.v.k == VCALL) { /* Function call statement. */
2253 setbc_b(bcptr(fs, &vl.v), 1); /* No results. */
2254 } else { /* Start of an assignment. */
2255 vl.prev = NULL;
2256 parse_assignment(ls, &vl, 1);
2257 }
2258 }
2259
2260 /* Parse 'local' statement. */
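/* Note the ordering for 'local function f() ... end': the local slot is
** added before the body is parsed, so 'f' can refer to itself recursively;
** a plain 'local f = function() end' only activates 'f' after the RHS.
*/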
2261 static void parse_local(LexState *ls)
2263 if (lex_opt(ls, TK_function)) { /* Local function declaration. */
2264 ExpDesc v, b;
2265 FuncState *fs = ls->fs;
2266 var_new(ls, 0, lex_str(ls));
2267 expr_init(&v, VLOCAL, fs->freereg);
2268 v.u.s.aux = fs->varmap[fs->freereg];
2269 bcreg_reserve(fs, 1);
2270 var_add(ls, 1);
2271 parse_body(ls, &b, 0, ls->linenumber);
2272 /* bcemit_store(fs, &v, &b) without setting VSTACK_VAR_RW. */
2273 expr_free(fs, &b);
2274 expr_toreg(fs, &b, v.u.s.info);
2275 /* The upvalue is in scope, but the local is only valid after the store. */
2276 var_get(ls, fs, fs->nactvar - 1).startpc = fs->pc;
2277 } else { /* Local variable declaration. */
2278 ExpDesc e;
2279 BCReg nexps, nvars = 0;
2280 do { /* Collect LHS. */
2281 var_new(ls, nvars++, lex_str(ls));
2282 } while (lex_opt(ls, ','));
2283 if (lex_opt(ls, '=')) { /* Optional RHS. */
2284 nexps = expr_list(ls, &e);
2285 } else { /* Or implicitly set to nil. */
2286 e.k = VVOID;
2287 nexps = 0;
2289 assign_adjust(ls, nvars, nexps, &e);
2290 var_add(ls, nvars);
2294 /* Parse 'function' statement. */
2295 static void parse_func(LexState *ls, BCLine line)
2297 FuncState *fs;
2298 ExpDesc v, b;
2299 int needself = 0;
2300 lj_lex_next(ls); /* Skip 'function'. */
2301 /* Parse function name. */
2302 var_lookup(ls, &v);
2303 while (ls->tok == '.') /* Multiple dot-separated fields. */
2304 expr_field(ls, &v);
2305 if (ls->tok == ':') { /* Optional colon to signify method call. */
2306 needself = 1;
2307 expr_field(ls, &v);
2309 parse_body(ls, &b, needself, line);
2310 fs = ls->fs;
2311 bcemit_store(fs, &v, &b);
2312 fs->bcbase[fs->pc - 1].line = line; /* Set line for the store. */
2315 /* -- Control transfer statements ----------------------------------------- */
2317 /* Check for end of block. */
2318 static int parse_isend(LexToken tok)
2320 switch (tok) {
2321 case TK_else: case TK_elseif: case TK_end: case TK_until: case TK_eof:
2322 return 1;
2323 default:
2324 return 0;
2325 }
2326 }
2327
2328 /* Parse 'return' statement. */
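/* Sketch of the cases below: a bare 'return' emits BC_RET0, 'return f(x)'
** turns the call into a tail call (BC_CALLT), 'return x' emits BC_RET1,
** 'return ...' and 'return a, f()' forward open results via BC_RETM, and
** 'return a, b' uses BC_RET with contiguous registers.
*/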
2329 static void parse_return(LexState *ls)
2331 BCIns ins;
2332 FuncState *fs = ls->fs;
2333 lj_lex_next(ls); /* Skip 'return'. */
2334 fs->flags |= PROTO_HAS_RETURN;
2335 if (parse_isend(ls->tok) || ls->tok == ';') { /* Bare return. */
2336 ins = BCINS_AD(BC_RET0, 0, 1);
2337 } else { /* Return with one or more values. */
2338 ExpDesc e; /* Receives the _last_ expression in the list. */
2339 BCReg nret = expr_list(ls, &e);
2340 if (nret == 1) { /* Return one result. */
2341 if (e.k == VCALL) { /* Check for tail call. */
2342 BCIns *ip = bcptr(fs, &e);
2343 /* It doesn't pay off to add BC_VARGT just for 'return ...'. */
2344 if (bc_op(*ip) == BC_VARG) goto notailcall;
2345 fs->pc--;
2346 ins = BCINS_AD(bc_op(*ip)-BC_CALL+BC_CALLT, bc_a(*ip), bc_c(*ip));
2347 } else { /* Can return the result from any register. */
2348 ins = BCINS_AD(BC_RET1, expr_toanyreg(fs, &e), 2);
2350 } else {
2351 if (e.k == VCALL) { /* Append all results from a call. */
2352 notailcall:
2353 setbc_b(bcptr(fs, &e), 0);
2354 ins = BCINS_AD(BC_RETM, fs->nactvar, e.u.s.aux - fs->nactvar);
2355 } else {
2356 expr_tonextreg(fs, &e); /* Force contiguous registers. */
2357 ins = BCINS_AD(BC_RET, fs->nactvar, nret+1);
2361 if (fs->flags & PROTO_CHILD)
2362 bcemit_AJ(fs, BC_UCLO, 0, 0); /* May need to close upvalues first. */
2363 bcemit_INS(fs, ins);
2366 /* Parse 'break' statement. */
2367 static void parse_break(LexState *ls)
2369 ls->fs->bl->flags |= FSCOPE_BREAK;
2370 gola_new(ls, NAME_BREAK, VSTACK_GOTO, bcemit_jmp(ls->fs));
2373 /* Parse 'goto' statement. */
2374 static void parse_goto(LexState *ls)
2376 FuncState *fs = ls->fs;
2377 GCstr *name = lex_str(ls);
2378 VarInfo *vl = gola_findlabel(ls, name);
2379 if (vl) /* Treat backwards goto within same scope like a loop. */
2380 bcemit_AJ(fs, BC_LOOP, vl->slot, -1); /* No BC range check. */
2381 fs->bl->flags |= FSCOPE_GOLA;
2382 gola_new(ls, name, VSTACK_GOTO, bcemit_jmp(fs));
2385 /* Parse label. */
2386 static void parse_label(LexState *ls)
2388 FuncState *fs = ls->fs;
2389 GCstr *name;
2390 MSize idx;
2391 fs->lasttarget = fs->pc;
2392 fs->bl->flags |= FSCOPE_GOLA;
2393 lj_lex_next(ls); /* Skip '::'. */
2394 name = lex_str(ls);
2395 if (gola_findlabel(ls, name))
2396 lj_lex_error(ls, 0, LJ_ERR_XLDUP, strdata(name));
2397 idx = gola_new(ls, name, VSTACK_LABEL, fs->pc);
2398 lex_check(ls, TK_label);
2399 /* Recursively parse trailing statements: labels and ';' (Lua 5.2 only). */
2400 for (;;) {
2401 if (ls->tok == TK_label) {
2402 synlevel_begin(ls);
2403 parse_label(ls);
2404 synlevel_end(ls);
2405 } else if (LJ_52 && ls->tok == ';') {
2406 lj_lex_next(ls);
2407 } else {
2408 break;
2411 /* Trailing label is considered to be outside of scope. */
2412 if (parse_isend(ls->tok) && ls->tok != TK_until)
2413 ls->vstack[idx].slot = fs->bl->nactvar;
2414 gola_resolve(ls, fs->bl, idx);
2417 /* -- Blocks, loops and conditional statements ---------------------------- */
2419 /* Parse a block. */
2420 static void parse_block(LexState *ls)
2422 FuncState *fs = ls->fs;
2423 FuncScope bl;
2424 fscope_begin(fs, &bl, 0);
2425 parse_chunk(ls);
2426 fscope_end(fs);
2429 /* Parse 'while' statement. */
2430 static void parse_while(LexState *ls, BCLine line)
2432 FuncState *fs = ls->fs;
2433 BCPos start, loop, condexit;
2434 FuncScope bl;
2435 lj_lex_next(ls); /* Skip 'while'. */
2436 start = fs->lasttarget = fs->pc;
2437 condexit = expr_cond(ls);
2438 fscope_begin(fs, &bl, FSCOPE_LOOP);
2439 lex_check(ls, TK_do);
2440 loop = bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
2441 parse_block(ls);
2442 jmp_patch(fs, bcemit_jmp(fs), start);
2443 lex_match(ls, TK_end, TK_while, line);
2444 fscope_end(fs);
2445 jmp_tohere(fs, condexit);
2446 jmp_patchins(fs, loop, fs->pc);
2447 }
2448
2449 /* Parse 'repeat' statement. */
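/* The inner scope stays open while the condition is parsed, so the 'until'
** expression can see body locals, e.g. 'repeat local ok = step() until ok'.
** If the body created upvalues, the UCLO path below closes them first.
*/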
2450 static void parse_repeat(LexState *ls, BCLine line)
2452 FuncState *fs = ls->fs;
2453 BCPos loop = fs->lasttarget = fs->pc;
2454 BCPos condexit;
2455 FuncScope bl1, bl2;
2456 fscope_begin(fs, &bl1, FSCOPE_LOOP); /* Breakable loop scope. */
2457 fscope_begin(fs, &bl2, 0); /* Inner scope. */
2458 lj_lex_next(ls); /* Skip 'repeat'. */
2459 bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
2460 parse_chunk(ls);
2461 lex_match(ls, TK_until, TK_repeat, line);
2462 condexit = expr_cond(ls); /* Parse condition (still inside inner scope). */
2463 if (!(bl2.flags & FSCOPE_UPVAL)) { /* No upvalues? Just end inner scope. */
2464 fscope_end(fs);
2465 } else { /* Otherwise generate: cond: UCLO+JMP out, !cond: UCLO+JMP loop. */
2466 parse_break(ls); /* Break from loop and close upvalues. */
2467 jmp_tohere(fs, condexit);
2468 fscope_end(fs); /* End inner scope and close upvalues. */
2469 condexit = bcemit_jmp(fs);
2471 jmp_patch(fs, condexit, loop); /* Jump backwards if !cond. */
2472 jmp_patchins(fs, loop, fs->pc);
2473 fscope_end(fs); /* End loop scope. */
2474 }
2475
2476 /* Parse numeric 'for'. */
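/* E.g. 'for i = 1, 10 do ... end': three hidden slots hold index/stop/step
** (the step defaults to 1 via BC_KSHORT), a visible copy of 'i' sits above
** them, and the loop is inverted into BC_FORI ... body ... BC_FORL.
*/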
2477 static void parse_for_num(LexState *ls, GCstr *varname, BCLine line)
2479 FuncState *fs = ls->fs;
2480 BCReg base = fs->freereg;
2481 FuncScope bl;
2482 BCPos loop, loopend;
2483 /* Hidden control variables. */
2484 var_new_fixed(ls, FORL_IDX, VARNAME_FOR_IDX);
2485 var_new_fixed(ls, FORL_STOP, VARNAME_FOR_STOP);
2486 var_new_fixed(ls, FORL_STEP, VARNAME_FOR_STEP);
2487 /* Visible copy of index variable. */
2488 var_new(ls, FORL_EXT, varname);
2489 lex_check(ls, '=');
2490 expr_next(ls);
2491 lex_check(ls, ',');
2492 expr_next(ls);
2493 if (lex_opt(ls, ',')) {
2494 expr_next(ls);
2495 } else {
2496 bcemit_AD(fs, BC_KSHORT, fs->freereg, 1); /* Default step is 1. */
2497 bcreg_reserve(fs, 1);
2499 var_add(ls, 3); /* Hidden control variables. */
2500 lex_check(ls, TK_do);
2501 loop = bcemit_AJ(fs, BC_FORI, base, NO_JMP);
2502 fscope_begin(fs, &bl, 0); /* Scope for visible variables. */
2503 var_add(ls, 1);
2504 bcreg_reserve(fs, 1);
2505 parse_block(ls);
2506 fscope_end(fs);
2507 /* Perform loop inversion. Loop control instructions are at the end. */
2508 loopend = bcemit_AJ(fs, BC_FORL, base, NO_JMP);
2509 fs->bcbase[loopend].line = line; /* Fix line for control ins. */
2510 jmp_patchins(fs, loopend, loop+1);
2511 jmp_patchins(fs, loop, fs->pc);
2512 }
2513
2514 /* Try to predict whether the iterator is next() and specialize the bytecode.
2515 ** Detecting next() and pairs() by name is simplistic, but quite effective.
2516 ** The interpreter backs off if the check for the closure fails at runtime.
2517 */
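/* E.g. 'for k, v in pairs(t) do end' or 'for k in next, t do end' qualify:
** the iterator expression is a local, upvalue or global literally named
** 'pairs' or 'next', so parse_for_iter() emits BC_ISNEXT/BC_ITERN instead
** of the generic BC_JMP/BC_ITERC pair.
*/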
2518 static int predict_next(LexState *ls, FuncState *fs, BCPos pc)
2520 BCIns ins;
2521 GCstr *name;
2522 cTValue *o;
2523 if (pc >= fs->bclim) return 0;
2524 ins = fs->bcbase[pc].ins;
2525 switch (bc_op(ins)) {
2526 case BC_MOV:
2527 if (bc_d(ins) >= fs->nactvar) return 0;
2528 name = gco2str(gcref(var_get(ls, fs, bc_d(ins)).name));
2529 break;
2530 case BC_UGET:
2531 name = gco2str(gcref(ls->vstack[fs->uvmap[bc_d(ins)]].name));
2532 break;
2533 case BC_GGET:
2534 /* There's no inverse index (yet), so look up the strings. */
2535 o = lj_tab_getstr(fs->kt, lj_str_newlit(ls->L, "pairs"));
2536 if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
2537 return 1;
2538 o = lj_tab_getstr(fs->kt, lj_str_newlit(ls->L, "next"));
2539 if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
2540 return 1;
2541 return 0;
2542 default:
2543 return 0;
2545 return (name->len == 5 && !strcmp(strdata(name), "pairs")) ||
2546 (name->len == 4 && !strcmp(strdata(name), "next"));
2547 }
2548
2549 /* Parse 'for' iterator. */
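/* Roughly: 'for k, v in gen, state, ctl do ... end' keeps gen/state/ctl in
** three hidden slots; each iteration calls gen(state, ctl) via BC_ITERC (or
** BC_ITERN when specialized) and BC_ITERL repeats while the first result is
** not nil.
*/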
2550 static void parse_for_iter(LexState *ls, GCstr *indexname)
2552 FuncState *fs = ls->fs;
2553 ExpDesc e;
2554 BCReg nvars = 0;
2555 BCLine line;
2556 BCReg base = fs->freereg + 3;
2557 BCPos loop, loopend, exprpc = fs->pc;
2558 FuncScope bl;
2559 int isnext;
2560 /* Hidden control variables. */
2561 var_new_fixed(ls, nvars++, VARNAME_FOR_GEN);
2562 var_new_fixed(ls, nvars++, VARNAME_FOR_STATE);
2563 var_new_fixed(ls, nvars++, VARNAME_FOR_CTL);
2564 /* Visible variables returned from iterator. */
2565 var_new(ls, nvars++, indexname);
2566 while (lex_opt(ls, ','))
2567 var_new(ls, nvars++, lex_str(ls));
2568 lex_check(ls, TK_in);
2569 line = ls->linenumber;
2570 assign_adjust(ls, 3, expr_list(ls, &e), &e);
2571 /* The iterator needs another 3 [4] slots (func [pc] | state ctl). */
2572 bcreg_bump(fs, 3+ls->fr2);
2573 isnext = (nvars <= 5 && predict_next(ls, fs, exprpc));
2574 var_add(ls, 3); /* Hidden control variables. */
2575 lex_check(ls, TK_do);
2576 loop = bcemit_AJ(fs, isnext ? BC_ISNEXT : BC_JMP, base, NO_JMP);
2577 fscope_begin(fs, &bl, 0); /* Scope for visible variables. */
2578 var_add(ls, nvars-3);
2579 bcreg_reserve(fs, nvars-3);
2580 parse_block(ls);
2581 fscope_end(fs);
2582 /* Perform loop inversion. Loop control instructions are at the end. */
2583 jmp_patchins(fs, loop, fs->pc);
2584 bcemit_ABC(fs, isnext ? BC_ITERN : BC_ITERC, base, nvars-3+1, 2+1);
2585 loopend = bcemit_AJ(fs, BC_ITERL, base, NO_JMP);
2586 fs->bcbase[loopend-1].line = line; /* Fix line for control ins. */
2587 fs->bcbase[loopend].line = line;
2588 jmp_patchins(fs, loopend, loop+1);
2591 /* Parse 'for' statement. */
2592 static void parse_for(LexState *ls, BCLine line)
2594 FuncState *fs = ls->fs;
2595 GCstr *varname;
2596 FuncScope bl;
2597 fscope_begin(fs, &bl, FSCOPE_LOOP);
2598 lj_lex_next(ls); /* Skip 'for'. */
2599 varname = lex_str(ls); /* Get first variable name. */
2600 if (ls->tok == '=')
2601 parse_for_num(ls, varname, line);
2602 else if (ls->tok == ',' || ls->tok == TK_in)
2603 parse_for_iter(ls, varname);
2604 else
2605 err_syntax(ls, LJ_ERR_XFOR);
2606 lex_match(ls, TK_end, TK_for, line);
2607 fscope_end(fs); /* Resolve break list. */
2610 /* Parse condition and 'then' block. */
2611 static BCPos parse_then(LexState *ls)
2613 BCPos condexit;
2614 lj_lex_next(ls); /* Skip 'if' or 'elseif'. */
2615 condexit = expr_cond(ls);
2616 lex_check(ls, TK_then);
2617 parse_block(ls);
2618 return condexit;
2621 /* Parse 'if' statement. */
2622 static void parse_if(LexState *ls, BCLine line)
2624 FuncState *fs = ls->fs;
2625 BCPos flist;
2626 BCPos escapelist = NO_JMP;
2627 flist = parse_then(ls);
2628 while (ls->tok == TK_elseif) { /* Parse multiple 'elseif' blocks. */
2629 jmp_append(fs, &escapelist, bcemit_jmp(fs));
2630 jmp_tohere(fs, flist);
2631 flist = parse_then(ls);
2633 if (ls->tok == TK_else) { /* Parse optional 'else' block. */
2634 jmp_append(fs, &escapelist, bcemit_jmp(fs));
2635 jmp_tohere(fs, flist);
2636 lj_lex_next(ls); /* Skip 'else'. */
2637 parse_block(ls);
2638 } else {
2639 jmp_append(fs, &escapelist, flist);
2641 jmp_tohere(fs, escapelist);
2642 lex_match(ls, TK_end, TK_if, line);
2645 /* -- Parse statements ---------------------------------------------------- */
2647 /* Parse a statement. Returns 1 if it must be the last one in a chunk. */
2648 static int parse_stmt(LexState *ls)
2650 BCLine line = ls->linenumber;
2651 switch (ls->tok) {
2652 case TK_if:
2653 parse_if(ls, line);
2654 break;
2655 case TK_while:
2656 parse_while(ls, line);
2657 break;
2658 case TK_do:
2659 lj_lex_next(ls);
2660 parse_block(ls);
2661 lex_match(ls, TK_end, TK_do, line);
2662 break;
2663 case TK_for:
2664 parse_for(ls, line);
2665 break;
2666 case TK_repeat:
2667 parse_repeat(ls, line);
2668 break;
2669 case TK_function:
2670 parse_func(ls, line);
2671 break;
2672 case TK_local:
2673 lj_lex_next(ls);
2674 parse_local(ls);
2675 break;
2676 case TK_return:
2677 parse_return(ls);
2678 return 1; /* Must be last. */
2679 case TK_break:
2680 lj_lex_next(ls);
2681 parse_break(ls);
2682 return !LJ_52; /* Must be last in Lua 5.1. */
2683 #if LJ_52
2684 case ';':
2685 lj_lex_next(ls);
2686 break;
2687 #endif
2688 case TK_label:
2689 parse_label(ls);
2690 break;
2691 case TK_goto:
2692 if (LJ_52 || lj_lex_lookahead(ls) == TK_name) {
2693 lj_lex_next(ls);
2694 parse_goto(ls);
2695 break;
2697 /* fallthrough */
2698 default:
2699 parse_call_assign(ls);
2700 break;
2702 return 0;
2705 /* A chunk is a list of statements optionally separated by semicolons. */
2706 static void parse_chunk(LexState *ls)
2708 int islast = 0;
2709 synlevel_begin(ls);
2710 while (!islast && !parse_isend(ls->tok)) {
2711 islast = parse_stmt(ls);
2712 lex_opt(ls, ';');
2713 lj_assertLS(ls->fs->framesize >= ls->fs->freereg &&
2714 ls->fs->freereg >= ls->fs->nactvar,
2715 "bad regalloc");
2716 ls->fs->freereg = ls->fs->nactvar; /* Free registers after each stmt. */
2718 synlevel_end(ls);
2721 /* Entry point of bytecode parser. */
2722 GCproto *lj_parse(LexState *ls)
2724 FuncState fs;
2725 FuncScope bl;
2726 GCproto *pt;
2727 lua_State *L = ls->L;
2728 #ifdef LUAJIT_DISABLE_DEBUGINFO
2729 ls->chunkname = lj_str_newlit(L, "=");
2730 #else
2731 ls->chunkname = lj_str_newz(L, ls->chunkarg);
2732 #endif
2733 setstrV(L, L->top, ls->chunkname); /* Anchor chunkname string. */
2734 incr_top(L);
2735 ls->level = 0;
2736 fs_init(ls, &fs);
2737 fs.linedefined = 0;
2738 fs.numparams = 0;
2739 fs.bcbase = NULL;
2740 fs.bclim = 0;
2741 fs.flags |= PROTO_VARARG; /* Main chunk is always a vararg func. */
2742 fscope_begin(&fs, &bl, 0);
2743 bcemit_AD(&fs, BC_FUNCV, 0, 0); /* Placeholder. */
2744 lj_lex_next(ls); /* Read-ahead first token. */
2745 parse_chunk(ls);
2746 if (ls->tok != TK_eof)
2747 err_token(ls, TK_eof);
2748 pt = fs_finish(ls, ls->linenumber);
2749 L->top--; /* Drop chunkname. */
2750 lj_assertL(fs.prev == NULL && ls->fs == NULL, "mismatched frame nesting");
2751 lj_assertL(pt->sizeuv == 0, "toplevel proto has upvalues");
2752 return pt;