/*
** Lua parser (source code -> bytecode).
** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
**
** Major portions taken verbatim or adapted from the Lua interpreter.
** Copyright (C) 1994-2008 Lua.org, PUC-Rio. See Copyright Notice in lua.h
*/

#include "lj_strfmt.h"
#include "lj_vmevent.h"
/* -- Parser structures and definitions ----------------------------------- */

/* Expression kinds. */
typedef enum {
  /* Constant expressions must be first and in this order: */
  VKNIL,
  VKFALSE,
  VKTRUE,
  VKSTR,	/* sval = string value */
  VKNUM,	/* nval = number value */
  VKLAST = VKNUM,
  VKCDATA,	/* nval = cdata value, not treated as a constant expression */
  /* Non-constant expressions follow: */
  VLOCAL,	/* info = local register, aux = vstack index */
  VUPVAL,	/* info = upvalue index, aux = vstack index */
  VGLOBAL,	/* sval = string value */
  VINDEXED,	/* info = table register, aux = index reg/byte/string const */
  VJMP,		/* info = instruction PC */
  VRELOCABLE,	/* info = instruction PC */
  VNONRELOC,	/* info = result register */
  VCALL,	/* info = instruction PC, aux = base */
  VVOID
} ExpKind;
/* Expression descriptor. */
typedef struct ExpDesc {
  union {
    struct {
      uint32_t info;	/* Primary info. */
      uint32_t aux;	/* Secondary info. */
    } s;
    TValue nval;	/* Number value. */
    GCstr *sval;	/* String value. */
  } u;
  ExpKind k;
  BCPos t;	/* True condition jump list. */
  BCPos f;	/* False condition jump list. */
} ExpDesc;
/* Macros for expressions. */
#define expr_hasjump(e)		((e)->t != (e)->f)

#define expr_isk(e)		((e)->k <= VKLAST)
#define expr_isk_nojump(e)	(expr_isk(e) && !expr_hasjump(e))
#define expr_isnumk(e)		((e)->k == VKNUM)
#define expr_isnumk_nojump(e)	(expr_isnumk(e) && !expr_hasjump(e))
#define expr_isstrk(e)		((e)->k == VKSTR)

#define expr_numtv(e)		check_exp(expr_isnumk((e)), &(e)->u.nval)
#define expr_numberV(e)		numberVnum(expr_numtv((e)))
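/*
** Note: the t/f fields of an ExpDesc are the pending branch lists for the
** expression's true/false exits. Both start out as NO_JMP, so
** expr_hasjump() simply tests whether any unresolved branches remain.
*/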
/* Initialize expression. */
static LJ_AINLINE void expr_init(ExpDesc *e, ExpKind k, uint32_t info)
{
  e->k = k;
  e->u.s.info = info;
  e->f = e->t = NO_JMP;
}
/* Check number constant for +-0. */
static int expr_numiszero(ExpDesc *e)
{
  TValue *o = expr_numtv(e);
  return tvisint(o) ? (intV(o) == 0) : tviszero(o);
}
/* Per-function linked list of scope blocks. */
typedef struct FuncScope {
  struct FuncScope *prev;	/* Link to outer scope. */
  MSize vstart;			/* Start of block-local variables. */
  uint8_t nactvar;		/* Number of active vars outside the scope. */
  uint8_t flags;		/* Scope flags. */
} FuncScope;

#define FSCOPE_LOOP		0x01	/* Scope is a (breakable) loop. */
#define FSCOPE_BREAK		0x02	/* Break used in scope. */
#define FSCOPE_GOLA		0x04	/* Goto or label used in scope. */
#define FSCOPE_UPVAL		0x08	/* Upvalue in scope. */
#define FSCOPE_NOCLOSE		0x10	/* Do not close upvalues. */

#define NAME_BREAK		((GCstr *)(uintptr_t)1)

/* Index into variable stack. */
typedef uint16_t VarIndex;
#define LJ_MAX_VSTACK		(65536 - LJ_MAX_UPVAL)

/* Variable/goto/label info. */
#define VSTACK_VAR_RW		0x01	/* R/W variable. */
#define VSTACK_GOTO		0x02	/* Pending goto. */
#define VSTACK_LABEL		0x04	/* Label. */
/* Per-function state. */
typedef struct FuncState {
  GCtab *kt;			/* Hash table for constants. */
  LexState *ls;			/* Lexer state. */
  lua_State *L;			/* Lua state. */
  FuncScope *bl;		/* Current scope. */
  struct FuncState *prev;	/* Enclosing function. */
  BCPos pc;			/* Next bytecode position. */
  BCPos lasttarget;		/* Bytecode position of last jump target. */
  BCPos jpc;			/* Pending jump list to next bytecode. */
  BCReg freereg;		/* First free register. */
  BCReg nactvar;		/* Number of active local variables. */
  BCReg nkn, nkgc;		/* Number of lua_Number/GCobj constants */
  BCLine linedefined;		/* First line of the function definition. */
  BCInsLine *bcbase;		/* Base of bytecode stack. */
  BCPos bclim;			/* Limit of bytecode stack. */
  MSize vbase;			/* Base of variable stack for this function. */
  uint8_t flags;		/* Prototype flags. */
  uint8_t numparams;		/* Number of parameters. */
  uint8_t framesize;		/* Fixed frame size. */
  uint8_t nuv;			/* Number of upvalues */
  VarIndex varmap[LJ_MAX_LOCVAR];  /* Map from register to variable idx. */
  VarIndex uvmap[LJ_MAX_UPVAL];	/* Map from upvalue to variable idx. */
  VarIndex uvtmp[LJ_MAX_UPVAL];	/* Temporary upvalue map. */
} FuncState;
/* Binary and unary operators. ORDER OPR */
typedef enum BinOpr {
  OPR_ADD, OPR_SUB, OPR_MUL, OPR_DIV, OPR_MOD, OPR_POW,  /* ORDER ARITH */
  OPR_CONCAT,
  OPR_NE, OPR_EQ,
  OPR_LT, OPR_GE, OPR_LE, OPR_GT,
  OPR_AND, OPR_OR,
  OPR_NOBINOPR
} BinOpr;

LJ_STATIC_ASSERT((int)BC_ISGE-(int)BC_ISLT == (int)OPR_GE-(int)OPR_LT);
LJ_STATIC_ASSERT((int)BC_ISLE-(int)BC_ISLT == (int)OPR_LE-(int)OPR_LT);
LJ_STATIC_ASSERT((int)BC_ISGT-(int)BC_ISLT == (int)OPR_GT-(int)OPR_LT);
LJ_STATIC_ASSERT((int)BC_SUBVV-(int)BC_ADDVV == (int)OPR_SUB-(int)OPR_ADD);
LJ_STATIC_ASSERT((int)BC_MULVV-(int)BC_ADDVV == (int)OPR_MUL-(int)OPR_ADD);
LJ_STATIC_ASSERT((int)BC_DIVVV-(int)BC_ADDVV == (int)OPR_DIV-(int)OPR_ADD);
LJ_STATIC_ASSERT((int)BC_MODVV-(int)BC_ADDVV == (int)OPR_MOD-(int)OPR_ADD);
#ifdef LUA_USE_ASSERT
#define lj_assertFS(c, ...)	(lj_assertG_(G(fs->L), (c), __VA_ARGS__))
#else
#define lj_assertFS(c, ...)	((void)fs)
#endif
/* -- Error handling ------------------------------------------------------ */

LJ_NORET LJ_NOINLINE static void err_syntax(LexState *ls, ErrMsg em)
{
  lj_lex_error(ls, ls->tok, em);
}

LJ_NORET LJ_NOINLINE static void err_token(LexState *ls, LexToken tok)
{
  lj_lex_error(ls, ls->tok, LJ_ERR_XTOKEN, lj_lex_token2str(ls, tok));
}

LJ_NORET static void err_limit(FuncState *fs, uint32_t limit, const char *what)
{
  if (fs->linedefined == 0)
    lj_lex_error(fs->ls, 0, LJ_ERR_XLIMM, limit, what);
  else
    lj_lex_error(fs->ls, 0, LJ_ERR_XLIMF, fs->linedefined, limit, what);
}

#define checklimit(fs, v, l, m)		if ((v) >= (l)) err_limit(fs, l, m)
#define checklimitgt(fs, v, l, m)	if ((v) > (l)) err_limit(fs, l, m)
#define checkcond(ls, c, em)		{ if (!(c)) err_syntax(ls, em); }
/* -- Management of constants --------------------------------------------- */

/* Return bytecode encoding for primitive constant. */
#define const_pri(e)		check_exp((e)->k <= VKTRUE, (e)->k)

#define tvhaskslot(o)	((o)->u32.hi == 0)
#define tvkslot(o)	((o)->u32.lo)
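/*
** Note: fs->kt maps each constant (used as a table key) to its slot index.
** The index is stored in the value's low 32 bits with the high 32 bits set
** to zero, which is exactly what tvhaskslot()/tvkslot() test and extract.
*/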
/* Add a number constant. */
static BCReg const_num(FuncState *fs, ExpDesc *e)
{
  lua_State *L = fs->L;
  TValue *o;
  lj_assertFS(expr_isnumk(e), "bad usage");
  o = lj_tab_set(L, fs->kt, &e->u.nval);
  if (tvhaskslot(o))
    return tvkslot(o);
  o->u64 = fs->nkn;
  return fs->nkn++;
}
/* Add a GC object constant. */
static BCReg const_gc(FuncState *fs, GCobj *gc, uint32_t itype)
{
  lua_State *L = fs->L;
  TValue key, *o;
  setgcV(L, &key, gc, itype);
  /* NOBARRIER: the key is new or kept alive. */
  o = lj_tab_set(L, fs->kt, &key);
  if (tvhaskslot(o))
    return tvkslot(o);
  o->u64 = fs->nkgc;
  return fs->nkgc++;
}
/* Add a string constant. */
static BCReg const_str(FuncState *fs, ExpDesc *e)
{
  lj_assertFS(expr_isstrk(e) || e->k == VGLOBAL, "bad usage");
  return const_gc(fs, obj2gco(e->u.sval), LJ_TSTR);
}
/* Anchor string constant to avoid GC. */
GCstr *lj_parse_keepstr(LexState *ls, const char *str, size_t len)
{
  /* NOBARRIER: the key is new or kept alive. */
  lua_State *L = ls->L;
  GCstr *s = lj_str_new(L, str, len);
  TValue *tv = lj_tab_setstr(L, ls->fs->kt, s);
  if (tvisnil(tv)) setboolV(tv, 1);
  lj_gc_check(L);
  return s;
}
#if LJ_HASFFI
/* Anchor cdata to avoid GC. */
void lj_parse_keepcdata(LexState *ls, TValue *tv, GCcdata *cd)
{
  /* NOBARRIER: the key is new or kept alive. */
  lua_State *L = ls->L;
  setcdataV(L, tv, cd);
  setboolV(lj_tab_set(L, ls->fs->kt, tv), 1);
}
#endif
/* -- Jump list handling -------------------------------------------------- */

/* Get next element in jump list. */
static BCPos jmp_next(FuncState *fs, BCPos pc)
{
  ptrdiff_t delta = bc_j(fs->bcbase[pc].ins);
  if ((BCPos)delta == NO_JMP)
    return NO_JMP;
  else
    return (BCPos)(((ptrdiff_t)pc+1)+delta);
}
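/*
** Note: a jump list is not a separate data structure. Pending jumps are
** chained through the biased jump offsets of the emitted instructions
** themselves, with NO_JMP terminating the chain.
*/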
/* Check if any of the instructions on the jump list produce no value. */
static int jmp_novalue(FuncState *fs, BCPos list)
{
  for (; list != NO_JMP; list = jmp_next(fs, list)) {
    BCIns p = fs->bcbase[list >= 1 ? list-1 : list].ins;
    if (!(bc_op(p) == BC_ISTC || bc_op(p) == BC_ISFC || bc_a(p) == NO_REG))
      return 1;
  }
  return 0;
}
/* Patch register of test instructions. */
static int jmp_patchtestreg(FuncState *fs, BCPos pc, BCReg reg)
{
  BCInsLine *ilp = &fs->bcbase[pc >= 1 ? pc-1 : pc];
  BCOp op = bc_op(ilp->ins);
  if (op == BC_ISTC || op == BC_ISFC) {
    if (reg != NO_REG && reg != bc_d(ilp->ins)) {
      setbc_a(&ilp->ins, reg);
    } else {  /* Nothing to store or already in the right register. */
      setbc_op(&ilp->ins, op+(BC_IST-BC_ISTC));
      setbc_a(&ilp->ins, 0);
    }
  } else if (bc_a(ilp->ins) == NO_REG) {
    if (reg == NO_REG) {
      ilp->ins = BCINS_AJ(BC_JMP, bc_a(fs->bcbase[pc].ins), 0);
    } else {
      setbc_a(&ilp->ins, reg);
      if (reg >= bc_a(ilp[1].ins))
	setbc_a(&ilp[1].ins, reg+1);
    }
  } else {
    return 0;  /* Cannot patch other instructions. */
  }
  return 1;
}
/* Drop values for all instructions on jump list. */
static void jmp_dropval(FuncState *fs, BCPos list)
{
  for (; list != NO_JMP; list = jmp_next(fs, list))
    jmp_patchtestreg(fs, list, NO_REG);
}
/* Patch jump instruction to target. */
static void jmp_patchins(FuncState *fs, BCPos pc, BCPos dest)
{
  BCIns *jmp = &fs->bcbase[pc].ins;
  BCPos offset = dest-(pc+1)+BCBIAS_J;
  lj_assertFS(dest != NO_JMP, "uninitialized jump target");
  if (offset > BCMAX_D)
    err_syntax(fs->ls, LJ_ERR_XJUMP);
  setbc_d(jmp, offset);
}
/* Append to jump list. */
static void jmp_append(FuncState *fs, BCPos *l1, BCPos l2)
{
  if (l2 == NO_JMP) {
    return;
  } else if (*l1 == NO_JMP) {
    *l1 = l2;
  } else {
    BCPos list = *l1;
    BCPos next;
    while ((next = jmp_next(fs, list)) != NO_JMP)  /* Find last element. */
      list = next;
    jmp_patchins(fs, list, l2);
  }
}
/* Patch jump list and preserve produced values. */
static void jmp_patchval(FuncState *fs, BCPos list, BCPos vtarget,
			 BCReg reg, BCPos dtarget)
{
  while (list != NO_JMP) {
    BCPos next = jmp_next(fs, list);
    if (jmp_patchtestreg(fs, list, reg))
      jmp_patchins(fs, list, vtarget);  /* Jump to target with value. */
    else
      jmp_patchins(fs, list, dtarget);  /* Jump to default target. */
    list = next;
  }
}
/* Jump to following instruction. Append to list of pending jumps. */
static void jmp_tohere(FuncState *fs, BCPos list)
{
  fs->lasttarget = fs->pc;
  jmp_append(fs, &fs->jpc, list);
}
/* Patch jump list to target. */
static void jmp_patch(FuncState *fs, BCPos list, BCPos target)
{
  if (target == fs->pc) {
    jmp_tohere(fs, list);
  } else {
    lj_assertFS(target < fs->pc, "bad jump target");
    jmp_patchval(fs, list, target, NO_REG, target);
  }
}
/* -- Bytecode register allocator ----------------------------------------- */

/* Bump frame size. */
static void bcreg_bump(FuncState *fs, BCReg n)
{
  BCReg sz = fs->freereg + n;
  if (sz > fs->framesize) {
    if (sz >= LJ_MAX_SLOTS)
      err_syntax(fs->ls, LJ_ERR_XSLOTS);
    fs->framesize = (uint8_t)sz;
  }
}
/* Reserve registers. */
static void bcreg_reserve(FuncState *fs, BCReg n)
{
  bcreg_bump(fs, n);
  fs->freereg += n;
}

/* Free register. */
static void bcreg_free(FuncState *fs, BCReg reg)
{
  if (reg >= fs->nactvar) {
    fs->freereg--;
    lj_assertFS(reg == fs->freereg, "bad regfree");
  }
}
/* Free register for expression. */
static void expr_free(FuncState *fs, ExpDesc *e)
{
  if (e->k == VNONRELOC)
    bcreg_free(fs, e->u.s.info);
}
/* -- Bytecode emitter ---------------------------------------------------- */

/* Emit bytecode instruction. */
static BCPos bcemit_INS(FuncState *fs, BCIns ins)
{
  BCPos pc = fs->pc;
  LexState *ls = fs->ls;
  jmp_patchval(fs, fs->jpc, pc, NO_REG, pc);
  fs->jpc = NO_JMP;
  if (LJ_UNLIKELY(pc >= fs->bclim)) {
    ptrdiff_t base = fs->bcbase - ls->bcstack;
    checklimit(fs, ls->sizebcstack, LJ_MAX_BCINS, "bytecode instructions");
    lj_mem_growvec(fs->L, ls->bcstack, ls->sizebcstack, LJ_MAX_BCINS, BCInsLine);
    fs->bclim = (BCPos)(ls->sizebcstack - base);
    fs->bcbase = ls->bcstack + base;
  }
  fs->bcbase[pc].ins = ins;
  fs->bcbase[pc].line = ls->lastline;
  fs->pc = pc+1;
  return pc;
}

#define bcemit_ABC(fs, o, a, b, c)	bcemit_INS(fs, BCINS_ABC(o, a, b, c))
#define bcemit_AD(fs, o, a, d)		bcemit_INS(fs, BCINS_AD(o, a, d))
#define bcemit_AJ(fs, o, a, j)		bcemit_INS(fs, BCINS_AJ(o, a, j))

#define bcptr(fs, e)			(&(fs)->bcbase[(e)->u.s.info].ins)
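/*
** Note: bcemit_INS() first redirects any jumps pending on fs->jpc to the
** instruction being emitted, so "jump to here" lists are resolved as soon
** as the next instruction materializes.
*/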
/* -- Bytecode emitter for expressions ------------------------------------ */

/* Discharge non-constant expression to any register. */
static void expr_discharge(FuncState *fs, ExpDesc *e)
{
  BCIns ins;
  if (e->k == VUPVAL) {
    ins = BCINS_AD(BC_UGET, 0, e->u.s.info);
  } else if (e->k == VGLOBAL) {
    ins = BCINS_AD(BC_GGET, 0, const_str(fs, e));
  } else if (e->k == VINDEXED) {
    BCReg rc = e->u.s.aux;
    if ((int32_t)rc < 0) {
      ins = BCINS_ABC(BC_TGETS, 0, e->u.s.info, ~rc);
    } else if (rc > BCMAX_C) {
      ins = BCINS_ABC(BC_TGETB, 0, e->u.s.info, rc-(BCMAX_C+1));
    } else {
      bcreg_free(fs, rc);
      ins = BCINS_ABC(BC_TGETV, 0, e->u.s.info, rc);
    }
    bcreg_free(fs, e->u.s.info);
  } else if (e->k == VCALL) {
    e->u.s.info = e->u.s.aux;
    e->k = VNONRELOC;
    return;
  } else if (e->k == VLOCAL) {
    e->k = VNONRELOC;
    return;
  } else {
    return;
  }
  e->u.s.info = bcemit_INS(fs, ins);
  e->k = VRELOCABLE;
}
/* Emit bytecode to set a range of registers to nil. */
static void bcemit_nil(FuncState *fs, BCReg from, BCReg n)
{
  if (fs->pc > fs->lasttarget) {  /* No jumps to current position? */
    BCIns *ip = &fs->bcbase[fs->pc-1].ins;
    BCReg pto, pfrom = bc_a(*ip);
    switch (bc_op(*ip)) {  /* Try to merge with the previous instruction. */
    case BC_KPRI:
      if (bc_d(*ip) != ~LJ_TNIL) break;
      if (from == pfrom) {
	if (n == 1) return;
      } else if (from == pfrom+1) {
	from = pfrom;
	n++;
      } else {
	break;
      }
      *ip = BCINS_AD(BC_KNIL, from, from+n-1);  /* Replace KPRI. */
      return;
    case BC_KNIL:
      pto = bc_d(*ip);
      if (pfrom <= from && from <= pto+1) {  /* Can we connect both ranges? */
	if (from+n-1 > pto)
	  setbc_d(ip, from+n-1);  /* Patch previous instruction range. */
	return;
      }
      break;
    default:
      break;
    }
  }
  /* Emit new instruction or replace old instruction. */
  bcemit_INS(fs, n == 1 ? BCINS_AD(BC_KPRI, from, VKNIL) :
		 BCINS_AD(BC_KNIL, from, from+n-1));
}
/* Discharge an expression to a specific register. Ignore branches. */
static void expr_toreg_nobranch(FuncState *fs, ExpDesc *e, BCReg reg)
{
  BCIns ins;
  expr_discharge(fs, e);
  if (e->k == VKSTR) {
    ins = BCINS_AD(BC_KSTR, reg, const_str(fs, e));
  } else if (e->k == VKNUM) {
#if LJ_DUALNUM
    cTValue *tv = expr_numtv(e);
    if (tvisint(tv) && checki16(intV(tv)))
      ins = BCINS_AD(BC_KSHORT, reg, (BCReg)(uint16_t)intV(tv));
    else
#else
    lua_Number n = expr_numberV(e);
    int32_t k = lj_num2int(n);
    if (checki16(k) && n == (lua_Number)k)
      ins = BCINS_AD(BC_KSHORT, reg, (BCReg)(uint16_t)k);
    else
#endif
      ins = BCINS_AD(BC_KNUM, reg, const_num(fs, e));
#if LJ_HASFFI
  } else if (e->k == VKCDATA) {
    fs->flags |= PROTO_FFI;
    ins = BCINS_AD(BC_KCDATA, reg,
		   const_gc(fs, obj2gco(cdataV(&e->u.nval)), LJ_TCDATA));
#endif
  } else if (e->k == VRELOCABLE) {
    setbc_a(bcptr(fs, e), reg);
    goto noins;
  } else if (e->k == VNONRELOC) {
    if (reg == e->u.s.info)
      goto noins;
    ins = BCINS_AD(BC_MOV, reg, e->u.s.info);
  } else if (e->k == VKNIL) {
    bcemit_nil(fs, reg, 1);
    goto noins;
  } else if (e->k <= VKTRUE) {
    ins = BCINS_AD(BC_KPRI, reg, const_pri(e));
  } else {
    lj_assertFS(e->k == VVOID || e->k == VJMP, "bad expr type %d", e->k);
    return;
  }
  bcemit_INS(fs, ins);
noins:
  e->u.s.info = reg;
  e->k = VNONRELOC;
}
/* Forward declaration. */
static BCPos bcemit_jmp(FuncState *fs);
/* Discharge an expression to a specific register. */
static void expr_toreg(FuncState *fs, ExpDesc *e, BCReg reg)
{
  expr_toreg_nobranch(fs, e, reg);
  if (e->k == VJMP)
    jmp_append(fs, &e->t, e->u.s.info);  /* Add it to the true jump list. */
  if (expr_hasjump(e)) {  /* Discharge expression with branches. */
    BCPos jend, jfalse = NO_JMP, jtrue = NO_JMP;
    if (jmp_novalue(fs, e->t) || jmp_novalue(fs, e->f)) {
      BCPos jval = (e->k == VJMP) ? NO_JMP : bcemit_jmp(fs);
      jfalse = bcemit_AD(fs, BC_KPRI, reg, VKFALSE);
      bcemit_AJ(fs, BC_JMP, fs->freereg, 1);
      jtrue = bcemit_AD(fs, BC_KPRI, reg, VKTRUE);
      jmp_tohere(fs, jval);
    }
    jend = fs->pc;
    fs->lasttarget = jend;
    jmp_patchval(fs, e->f, jend, reg, jfalse);
    jmp_patchval(fs, e->t, jend, reg, jtrue);
  }
  e->f = e->t = NO_JMP;
  e->u.s.info = reg;
  e->k = VNONRELOC;
}
/* Discharge an expression to the next free register. */
static void expr_tonextreg(FuncState *fs, ExpDesc *e)
{
  expr_discharge(fs, e);
  expr_free(fs, e);
  bcreg_reserve(fs, 1);
  expr_toreg(fs, e, fs->freereg - 1);
}
/* Discharge an expression to any register. */
static BCReg expr_toanyreg(FuncState *fs, ExpDesc *e)
{
  expr_discharge(fs, e);
  if (e->k == VNONRELOC) {
    if (!expr_hasjump(e)) return e->u.s.info;  /* Already in a register. */
    if (e->u.s.info >= fs->nactvar) {
      expr_toreg(fs, e, e->u.s.info);  /* Discharge to temp. register. */
      return e->u.s.info;
    }
  }
  expr_tonextreg(fs, e);  /* Discharge to next register. */
  return e->u.s.info;
}
/* Partially discharge expression to a value. */
static void expr_toval(FuncState *fs, ExpDesc *e)
{
  if (expr_hasjump(e))
    expr_toanyreg(fs, e);
  else
    expr_discharge(fs, e);
}
/* Emit store for LHS expression. */
static void bcemit_store(FuncState *fs, ExpDesc *var, ExpDesc *e)
{
  BCIns ins;
  if (var->k == VLOCAL) {
    fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
    expr_free(fs, e);
    expr_toreg(fs, e, var->u.s.info);
    return;
  } else if (var->k == VUPVAL) {
    fs->ls->vstack[var->u.s.aux].info |= VSTACK_VAR_RW;
    expr_toval(fs, e);
    if (e->k <= VKTRUE)
      ins = BCINS_AD(BC_USETP, var->u.s.info, const_pri(e));
    else if (e->k == VKSTR)
      ins = BCINS_AD(BC_USETS, var->u.s.info, const_str(fs, e));
    else if (e->k == VKNUM)
      ins = BCINS_AD(BC_USETN, var->u.s.info, const_num(fs, e));
    else
      ins = BCINS_AD(BC_USETV, var->u.s.info, expr_toanyreg(fs, e));
  } else if (var->k == VGLOBAL) {
    BCReg ra = expr_toanyreg(fs, e);
    ins = BCINS_AD(BC_GSET, ra, const_str(fs, var));
  } else {
    BCReg ra, rc;
    lj_assertFS(var->k == VINDEXED, "bad expr type %d", var->k);
    ra = expr_toanyreg(fs, e);
    rc = var->u.s.aux;
    if ((int32_t)rc < 0) {
      ins = BCINS_ABC(BC_TSETS, ra, var->u.s.info, ~rc);
    } else if (rc > BCMAX_C) {
      ins = BCINS_ABC(BC_TSETB, ra, var->u.s.info, rc-(BCMAX_C+1));
    } else {
#ifdef LUA_USE_ASSERT
      /* Free late alloced key reg to avoid assert on free of value reg. */
      /* This can only happen when called from expr_table(). */
      if (e->k == VNONRELOC && ra >= fs->nactvar && rc >= ra)
	bcreg_free(fs, rc);
#endif
      ins = BCINS_ABC(BC_TSETV, ra, var->u.s.info, rc);
    }
  }
  bcemit_INS(fs, ins);
}
/* Emit method lookup expression. */
static void bcemit_method(FuncState *fs, ExpDesc *e, ExpDesc *key)
{
  BCReg idx, func, obj = expr_toanyreg(fs, e);
  expr_free(fs, e);
  func = fs->freereg;
  bcemit_AD(fs, BC_MOV, func+1+LJ_FR2, obj);  /* Copy object to 1st argument. */
  lj_assertFS(expr_isstrk(key), "bad usage");
  idx = const_str(fs, key);
  if (idx <= BCMAX_C) {
    bcreg_reserve(fs, 2+LJ_FR2);
    bcemit_ABC(fs, BC_TGETS, func, obj, idx);
  } else {
    bcreg_reserve(fs, 3+LJ_FR2);
    bcemit_AD(fs, BC_KSTR, func+2+LJ_FR2, idx);
    bcemit_ABC(fs, BC_TGETV, func, obj, func+2+LJ_FR2);
    fs->freereg--;
  }
  e->u.s.info = func;
  e->k = VNONRELOC;
}
/* -- Bytecode emitter for branches --------------------------------------- */

/* Emit unconditional branch. */
static BCPos bcemit_jmp(FuncState *fs)
{
  BCPos jpc = fs->jpc;
  BCPos j = fs->pc - 1;
  BCIns *ip = &fs->bcbase[j].ins;
  fs->jpc = NO_JMP;
  if ((int32_t)j >= (int32_t)fs->lasttarget && bc_op(*ip) == BC_UCLO) {
    setbc_j(ip, NO_JMP);
    fs->lasttarget = j+1;
  } else {
    j = bcemit_AJ(fs, BC_JMP, fs->freereg, NO_JMP);
  }
  jmp_append(fs, &j, jpc);
  return j;
}
/* Invert branch condition of bytecode instruction. */
static void invertcond(FuncState *fs, ExpDesc *e)
{
  BCIns *ip = &fs->bcbase[e->u.s.info - 1].ins;
  setbc_op(ip, bc_op(*ip)^1);
}
/* Emit conditional branch. */
static BCPos bcemit_branch(FuncState *fs, ExpDesc *e, int cond)
{
  BCPos pc;
  if (e->k == VRELOCABLE) {
    BCIns *ip = bcptr(fs, e);
    if (bc_op(*ip) == BC_NOT) {
      *ip = BCINS_AD(cond ? BC_ISF : BC_IST, 0, bc_d(*ip));
      return bcemit_jmp(fs);
    }
  }
  if (e->k != VNONRELOC) {
    bcreg_reserve(fs, 1);
    expr_toreg_nobranch(fs, e, fs->freereg-1);
  }
  bcemit_AD(fs, cond ? BC_ISTC : BC_ISFC, NO_REG, e->u.s.info);
  pc = bcemit_jmp(fs);
  expr_free(fs, e);
  return pc;
}
/* Emit branch on true condition. */
static void bcemit_branch_t(FuncState *fs, ExpDesc *e)
{
  BCPos pc;
  expr_discharge(fs, e);
  if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
    pc = NO_JMP;  /* Never jump. */
  else if (e->k == VJMP)
    invertcond(fs, e), pc = e->u.s.info;
  else if (e->k == VKFALSE || e->k == VKNIL)
    expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
  else
    pc = bcemit_branch(fs, e, 0);
  jmp_append(fs, &e->f, pc);
  jmp_tohere(fs, e->t);
  e->t = NO_JMP;
}
/* Emit branch on false condition. */
static void bcemit_branch_f(FuncState *fs, ExpDesc *e)
{
  BCPos pc;
  expr_discharge(fs, e);
  if (e->k == VKNIL || e->k == VKFALSE)
    pc = NO_JMP;  /* Never jump. */
  else if (e->k == VJMP)
    pc = e->u.s.info;
  else if (e->k == VKSTR || e->k == VKNUM || e->k == VKTRUE)
    expr_toreg_nobranch(fs, e, NO_REG), pc = bcemit_jmp(fs);
  else
    pc = bcemit_branch(fs, e, 1);
  jmp_append(fs, &e->t, pc);
  jmp_tohere(fs, e->f);
  e->f = NO_JMP;
}
/* -- Bytecode emitter for operators -------------------------------------- */

/* Try constant-folding of arithmetic operators. */
static int foldarith(BinOpr opr, ExpDesc *e1, ExpDesc *e2)
{
  TValue o;
  lua_Number n;
  if (!expr_isnumk_nojump(e1) || !expr_isnumk_nojump(e2)) return 0;
  n = lj_vm_foldarith(expr_numberV(e1), expr_numberV(e2), (int)opr-OPR_ADD);
  setnumV(&o, n);
  if (tvisnan(&o) || tvismzero(&o)) return 0;  /* Avoid NaN and -0 as consts. */
  if (LJ_DUALNUM) {
    int32_t k = lj_num2int(n);
    if ((lua_Number)k == n) {
      setintV(&e1->u.nval, k);
      return 1;
    }
  }
  setnumV(&e1->u.nval, n);
  return 1;
}
/* Emit arithmetic operator. */
static void bcemit_arith(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
{
  BCReg rb, rc, t;
  uint32_t op;
  if (foldarith(opr, e1, e2))
    return;
  if (opr == OPR_POW) {
    op = BC_POW;
    rc = expr_toanyreg(fs, e2);
    rb = expr_toanyreg(fs, e1);
  } else {
    op = opr-OPR_ADD+BC_ADDVV;
    /* Must discharge 2nd operand first since VINDEXED might free regs. */
    expr_toval(fs, e2);
    if (expr_isnumk(e2) && (rc = const_num(fs, e2)) <= BCMAX_C)
      op -= BC_ADDVV-BC_ADDVN;
    else
      rc = expr_toanyreg(fs, e2);
    /* 1st operand discharged by bcemit_binop_left, but need KNUM/KSHORT. */
    lj_assertFS(expr_isnumk(e1) || e1->k == VNONRELOC,
		"bad expr type %d", e1->k);
    expr_toval(fs, e1);
    /* Avoid two consts to satisfy bytecode constraints. */
    if (expr_isnumk(e1) && !expr_isnumk(e2) &&
	(t = const_num(fs, e1)) <= BCMAX_B) {
      rb = rc; rc = t; op -= BC_ADDVV-BC_ADDNV;
    } else {
      rb = expr_toanyreg(fs, e1);
    }
  }
  /* Using expr_free might cause asserts if the order is wrong. */
  if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar) fs->freereg--;
  if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar) fs->freereg--;
  e1->u.s.info = bcemit_ABC(fs, op, 0, rb, rc);
  e1->k = VRELOCABLE;
}
/* Emit comparison operator. */
static void bcemit_comp(FuncState *fs, BinOpr opr, ExpDesc *e1, ExpDesc *e2)
{
  ExpDesc *eret = e1;
  BCIns ins;
  expr_toval(fs, e1);
  if (opr == OPR_EQ || opr == OPR_NE) {
    BCOp op = opr == OPR_EQ ? BC_ISEQV : BC_ISNEV;
    BCReg ra;
    if (expr_isk(e1)) { e1 = e2; e2 = eret; }  /* Need constant in 2nd arg. */
    ra = expr_toanyreg(fs, e1);  /* First arg must be in a reg. */
    expr_toval(fs, e2);
    switch (e2->k) {
    case VKNIL: case VKFALSE: case VKTRUE:
      ins = BCINS_AD(op+(BC_ISEQP-BC_ISEQV), ra, const_pri(e2));
      break;
    case VKSTR:
      ins = BCINS_AD(op+(BC_ISEQS-BC_ISEQV), ra, const_str(fs, e2));
      break;
    case VKNUM:
      ins = BCINS_AD(op+(BC_ISEQN-BC_ISEQV), ra, const_num(fs, e2));
      break;
    default:
      ins = BCINS_AD(op, ra, expr_toanyreg(fs, e2));
      break;
    }
  } else {
    uint32_t op = opr-OPR_LT+BC_ISLT;
    BCReg ra, rd;
    if ((op-BC_ISLT) & 1) {  /* GT -> LT, GE -> LE */
      e1 = e2; e2 = eret;  /* Swap operands. */
      op = ((op-BC_ISLT)^3)+BC_ISLT;
      expr_toval(fs, e1);
      ra = expr_toanyreg(fs, e1);
      rd = expr_toanyreg(fs, e2);
    } else {
      rd = expr_toanyreg(fs, e2);
      ra = expr_toanyreg(fs, e1);
    }
    ins = BCINS_AD(op, ra, rd);
  }
  /* Using expr_free might cause asserts if the order is wrong. */
  if (e1->k == VNONRELOC && e1->u.s.info >= fs->nactvar) fs->freereg--;
  if (e2->k == VNONRELOC && e2->u.s.info >= fs->nactvar) fs->freereg--;
  bcemit_INS(fs, ins);
  eret->u.s.info = bcemit_jmp(fs);
  eret->k = VJMP;
}
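/*
** Note: ordered comparisons normalize GT/GE to LT/LE by swapping operands
** (see bcemit_comp above); the result is left as a VJMP expression whose
** info field holds the pc of the emitted conditional jump.
*/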
/* Fixup left side of binary operator. */
static void bcemit_binop_left(FuncState *fs, BinOpr op, ExpDesc *e)
{
  if (op == OPR_AND) {
    bcemit_branch_t(fs, e);
  } else if (op == OPR_OR) {
    bcemit_branch_f(fs, e);
  } else if (op == OPR_CONCAT) {
    expr_tonextreg(fs, e);
  } else if (op == OPR_EQ || op == OPR_NE) {
    if (!expr_isk_nojump(e)) expr_toanyreg(fs, e);
  } else {
    if (!expr_isnumk_nojump(e)) expr_toanyreg(fs, e);
  }
}
/* Emit binary operator. */
static void bcemit_binop(FuncState *fs, BinOpr op, ExpDesc *e1, ExpDesc *e2)
{
  if (op <= OPR_POW) {
    bcemit_arith(fs, op, e1, e2);
  } else if (op == OPR_AND) {
    lj_assertFS(e1->t == NO_JMP, "jump list not closed");
    expr_discharge(fs, e2);
    jmp_append(fs, &e2->f, e1->f);
    *e1 = *e2;
  } else if (op == OPR_OR) {
    lj_assertFS(e1->f == NO_JMP, "jump list not closed");
    expr_discharge(fs, e2);
    jmp_append(fs, &e2->t, e1->t);
    *e1 = *e2;
  } else if (op == OPR_CONCAT) {
    expr_toval(fs, e2);
    if (e2->k == VRELOCABLE && bc_op(*bcptr(fs, e2)) == BC_CAT) {
      lj_assertFS(e1->u.s.info == bc_b(*bcptr(fs, e2))-1,
		  "bad CAT stack layout");
      expr_free(fs, e1);
      setbc_b(bcptr(fs, e2), e1->u.s.info);
      e1->u.s.info = e2->u.s.info;
    } else {
      expr_tonextreg(fs, e2);
      expr_free(fs, e2);
      expr_free(fs, e1);
      e1->u.s.info = bcemit_ABC(fs, BC_CAT, 0, e1->u.s.info, e2->u.s.info);
    }
    e1->k = VRELOCABLE;
  } else {
    lj_assertFS(op == OPR_NE || op == OPR_EQ ||
		op == OPR_LT || op == OPR_GE || op == OPR_LE || op == OPR_GT,
		"bad binop %d", op);
    bcemit_comp(fs, op, e1, e2);
  }
}
/* Emit unary operator. */
static void bcemit_unop(FuncState *fs, BCOp op, ExpDesc *e)
{
  if (op == BC_NOT) {
    /* Swap true and false lists. */
    { BCPos temp = e->f; e->f = e->t; e->t = temp; }
    jmp_dropval(fs, e->f);
    jmp_dropval(fs, e->t);
    expr_discharge(fs, e);
    if (e->k == VKNIL || e->k == VKFALSE) {
      e->k = VKTRUE;
      return;
    } else if (expr_isk(e) || (LJ_HASFFI && e->k == VKCDATA)) {
      e->k = VKFALSE;
      return;
    } else if (e->k == VJMP) {
      invertcond(fs, e);
      return;
    } else if (e->k == VRELOCABLE) {
      bcreg_reserve(fs, 1);
      setbc_a(bcptr(fs, e), fs->freereg-1);
      e->u.s.info = fs->freereg-1;
      e->k = VNONRELOC;
    } else {
      lj_assertFS(e->k == VNONRELOC, "bad expr type %d", e->k);
    }
  } else {
    lj_assertFS(op == BC_UNM || op == BC_LEN, "bad unop %d", op);
    if (op == BC_UNM && !expr_hasjump(e)) {  /* Constant-fold negations. */
#if LJ_HASFFI
      if (e->k == VKCDATA) {  /* Fold in-place since cdata is not interned. */
	GCcdata *cd = cdataV(&e->u.nval);
	uint64_t *p = (uint64_t *)cdataptr(cd);
	if (cd->ctypeid == CTID_COMPLEX_DOUBLE)
	  p[1] ^= U64x(80000000,00000000);
	else
	  *p = ~*p+1u;
	return;
      } else
#endif
      if (expr_isnumk(e) && !expr_numiszero(e)) {  /* Avoid folding to -0. */
	TValue *o = expr_numtv(e);
	if (tvisint(o)) {
	  int32_t k = intV(o), negk = (int32_t)(~(uint32_t)k+1u);
	  if (k == negk)
	    setnumV(o, -(lua_Number)k);
	  else
	    setintV(o, negk);
	  return;
	} else {
	  o->u64 ^= U64x(80000000,00000000);
	  return;
	}
      }
    }
    expr_toanyreg(fs, e);
  }
  expr_free(fs, e);
  e->u.s.info = bcemit_AD(fs, op, 0, e->u.s.info);
  e->k = VRELOCABLE;
}
/* -- Lexer support ------------------------------------------------------- */

/* Check and consume optional token. */
static int lex_opt(LexState *ls, LexToken tok)
{
  if (ls->tok == tok) {
    lj_lex_next(ls);
    return 1;
  }
  return 0;
}

/* Check and consume token. */
static void lex_check(LexState *ls, LexToken tok)
{
  if (ls->tok != tok)
    err_token(ls, tok);
  lj_lex_next(ls);
}

/* Check for matching token. */
static void lex_match(LexState *ls, LexToken what, LexToken who, BCLine line)
{
  if (!lex_opt(ls, what)) {
    if (line == ls->linenumber) {
      err_token(ls, what);
    } else {
      const char *swhat = lj_lex_token2str(ls, what);
      const char *swho = lj_lex_token2str(ls, who);
      lj_lex_error(ls, ls->tok, LJ_ERR_XMATCH, swhat, swho, line);
    }
  }
}

/* Check for string token. */
static GCstr *lex_str(LexState *ls)
{
  GCstr *s;
  if (ls->tok != TK_name && (LJ_52 || ls->tok != TK_goto))
    err_token(ls, TK_name);
  s = strV(&ls->tokval);
  lj_lex_next(ls);
  return s;
}
1042 /* -- Variable handling --------------------------------------------------- */
1044 #define var_get(ls, fs, i) ((ls)->vstack[(fs)->varmap[(i)]])
1046 /* Define a new local variable. */
1047 static void var_new(LexState
*ls
, BCReg n
, GCstr
*name
)
1049 FuncState
*fs
= ls
->fs
;
1050 MSize vtop
= ls
->vtop
;
1051 checklimit(fs
, fs
->nactvar
+n
, LJ_MAX_LOCVAR
, "local variables");
1052 if (LJ_UNLIKELY(vtop
>= ls
->sizevstack
)) {
1053 if (ls
->sizevstack
>= LJ_MAX_VSTACK
)
1054 lj_lex_error(ls
, 0, LJ_ERR_XLIMC
, LJ_MAX_VSTACK
);
1055 lj_mem_growvec(ls
->L
, ls
->vstack
, ls
->sizevstack
, LJ_MAX_VSTACK
, VarInfo
);
1057 lj_assertFS((uintptr_t)name
< VARNAME__MAX
||
1058 lj_tab_getstr(fs
->kt
, name
) != NULL
,
1059 "unanchored variable name");
1060 /* NOBARRIER: name is anchored in fs->kt and ls->vstack is not a GCobj. */
1061 setgcref(ls
->vstack
[vtop
].name
, obj2gco(name
));
1062 fs
->varmap
[fs
->nactvar
+n
] = (uint16_t)vtop
;
1066 #define var_new_lit(ls, n, v) \
1067 var_new(ls, (n), lj_parse_keepstr(ls, "" v, sizeof(v)-1))
1069 #define var_new_fixed(ls, n, vn) \
1070 var_new(ls, (n), (GCstr *)(uintptr_t)(vn))
1072 /* Add local variables. */
1073 static void var_add(LexState
*ls
, BCReg nvars
)
1075 FuncState
*fs
= ls
->fs
;
1076 BCReg nactvar
= fs
->nactvar
;
1078 VarInfo
*v
= &var_get(ls
, fs
, nactvar
);
1079 v
->startpc
= fs
->pc
;
1080 v
->slot
= nactvar
++;
1083 fs
->nactvar
= nactvar
;
1086 /* Remove local variables. */
1087 static void var_remove(LexState
*ls
, BCReg tolevel
)
1089 FuncState
*fs
= ls
->fs
;
1090 while (fs
->nactvar
> tolevel
)
1091 var_get(ls
, fs
, --fs
->nactvar
).endpc
= fs
->pc
;
1094 /* Lookup local variable name. */
1095 static BCReg
var_lookup_local(FuncState
*fs
, GCstr
*n
)
1098 for (i
= fs
->nactvar
-1; i
>= 0; i
--) {
1099 if (n
== strref(var_get(fs
->ls
, fs
, i
).name
))
1102 return (BCReg
)-1; /* Not found. */
1105 /* Lookup or add upvalue index. */
1106 static MSize
var_lookup_uv(FuncState
*fs
, MSize vidx
, ExpDesc
*e
)
1108 MSize i
, n
= fs
->nuv
;
1109 for (i
= 0; i
< n
; i
++)
1110 if (fs
->uvmap
[i
] == vidx
)
1111 return i
; /* Already exists. */
1112 /* Otherwise create a new one. */
1113 checklimit(fs
, fs
->nuv
, LJ_MAX_UPVAL
, "upvalues");
1114 lj_assertFS(e
->k
== VLOCAL
|| e
->k
== VUPVAL
, "bad expr type %d", e
->k
);
1115 fs
->uvmap
[n
] = (uint16_t)vidx
;
1116 fs
->uvtmp
[n
] = (uint16_t)(e
->k
== VLOCAL
? vidx
: LJ_MAX_VSTACK
+e
->u
.s
.info
);
1121 /* Forward declaration. */
1122 static void fscope_uvmark(FuncState
*fs
, BCReg level
);
1124 /* Recursively lookup variables in enclosing functions. */
1125 static MSize
var_lookup_(FuncState
*fs
, GCstr
*name
, ExpDesc
*e
, int first
)
1128 BCReg reg
= var_lookup_local(fs
, name
);
1129 if ((int32_t)reg
>= 0) { /* Local in this function? */
1130 expr_init(e
, VLOCAL
, reg
);
1132 fscope_uvmark(fs
, reg
); /* Scope now has an upvalue. */
1133 return (MSize
)(e
->u
.s
.aux
= (uint32_t)fs
->varmap
[reg
]);
1135 MSize vidx
= var_lookup_(fs
->prev
, name
, e
, 0); /* Var in outer func? */
1136 if ((int32_t)vidx
>= 0) { /* Yes, make it an upvalue here. */
1137 e
->u
.s
.info
= (uint8_t)var_lookup_uv(fs
, vidx
, e
);
1142 } else { /* Not found in any function, must be a global. */
1143 expr_init(e
, VGLOBAL
, 0);
1146 return (MSize
)-1; /* Global. */
1149 /* Lookup variable name. */
1150 #define var_lookup(ls, e) \
1151 var_lookup_((ls)->fs, lex_str(ls), (e), 1)
1153 /* -- Goto an label handling ---------------------------------------------- */
1155 /* Add a new goto or label. */
1156 static MSize
gola_new(LexState
*ls
, GCstr
*name
, uint8_t info
, BCPos pc
)
1158 FuncState
*fs
= ls
->fs
;
1159 MSize vtop
= ls
->vtop
;
1160 if (LJ_UNLIKELY(vtop
>= ls
->sizevstack
)) {
1161 if (ls
->sizevstack
>= LJ_MAX_VSTACK
)
1162 lj_lex_error(ls
, 0, LJ_ERR_XLIMC
, LJ_MAX_VSTACK
);
1163 lj_mem_growvec(ls
->L
, ls
->vstack
, ls
->sizevstack
, LJ_MAX_VSTACK
, VarInfo
);
1165 lj_assertFS(name
== NAME_BREAK
|| lj_tab_getstr(fs
->kt
, name
) != NULL
,
1166 "unanchored label name");
1167 /* NOBARRIER: name is anchored in fs->kt and ls->vstack is not a GCobj. */
1168 setgcref(ls
->vstack
[vtop
].name
, obj2gco(name
));
1169 ls
->vstack
[vtop
].startpc
= pc
;
1170 ls
->vstack
[vtop
].slot
= (uint8_t)fs
->nactvar
;
1171 ls
->vstack
[vtop
].info
= info
;
1176 #define gola_isgoto(v) ((v)->info & VSTACK_GOTO)
1177 #define gola_islabel(v) ((v)->info & VSTACK_LABEL)
1178 #define gola_isgotolabel(v) ((v)->info & (VSTACK_GOTO|VSTACK_LABEL))
1180 /* Patch goto to jump to label. */
1181 static void gola_patch(LexState
*ls
, VarInfo
*vg
, VarInfo
*vl
)
1183 FuncState
*fs
= ls
->fs
;
1184 BCPos pc
= vg
->startpc
;
1185 setgcrefnull(vg
->name
); /* Invalidate pending goto. */
1186 setbc_a(&fs
->bcbase
[pc
].ins
, vl
->slot
);
1187 jmp_patch(fs
, pc
, vl
->startpc
);
1190 /* Patch goto to close upvalues. */
1191 static void gola_close(LexState
*ls
, VarInfo
*vg
)
1193 FuncState
*fs
= ls
->fs
;
1194 BCPos pc
= vg
->startpc
;
1195 BCIns
*ip
= &fs
->bcbase
[pc
].ins
;
1196 lj_assertFS(gola_isgoto(vg
), "expected goto");
1197 lj_assertFS(bc_op(*ip
) == BC_JMP
|| bc_op(*ip
) == BC_UCLO
,
1198 "bad bytecode op %d", bc_op(*ip
));
1199 setbc_a(ip
, vg
->slot
);
1200 if (bc_op(*ip
) == BC_JMP
) {
1201 BCPos next
= jmp_next(fs
, pc
);
1202 if (next
!= NO_JMP
) jmp_patch(fs
, next
, pc
); /* Jump to UCLO. */
1203 setbc_op(ip
, BC_UCLO
); /* Turn into UCLO. */
1204 setbc_j(ip
, NO_JMP
);
1208 /* Resolve pending forward gotos for label. */
1209 static void gola_resolve(LexState
*ls
, FuncScope
*bl
, MSize idx
)
1211 VarInfo
*vg
= ls
->vstack
+ bl
->vstart
;
1212 VarInfo
*vl
= ls
->vstack
+ idx
;
1213 for (; vg
< vl
; vg
++)
1214 if (gcrefeq(vg
->name
, vl
->name
) && gola_isgoto(vg
)) {
1215 if (vg
->slot
< vl
->slot
) {
1216 GCstr
*name
= strref(var_get(ls
, ls
->fs
, vg
->slot
).name
);
1217 lj_assertLS((uintptr_t)name
>= VARNAME__MAX
, "expected goto name");
1218 ls
->linenumber
= ls
->fs
->bcbase
[vg
->startpc
].line
;
1219 lj_assertLS(strref(vg
->name
) != NAME_BREAK
, "unexpected break");
1220 lj_lex_error(ls
, 0, LJ_ERR_XGSCOPE
,
1221 strdata(strref(vg
->name
)), strdata(name
));
1223 gola_patch(ls
, vg
, vl
);
1227 /* Fixup remaining gotos and labels for scope. */
1228 static void gola_fixup(LexState
*ls
, FuncScope
*bl
)
1230 VarInfo
*v
= ls
->vstack
+ bl
->vstart
;
1231 VarInfo
*ve
= ls
->vstack
+ ls
->vtop
;
1232 for (; v
< ve
; v
++) {
1233 GCstr
*name
= strref(v
->name
);
1234 if (name
!= NULL
) { /* Only consider remaining valid gotos/labels. */
1235 if (gola_islabel(v
)) {
1237 setgcrefnull(v
->name
); /* Invalidate label that goes out of scope. */
1238 for (vg
= v
+1; vg
< ve
; vg
++) /* Resolve pending backward gotos. */
1239 if (strref(vg
->name
) == name
&& gola_isgoto(vg
)) {
1240 if ((bl
->flags
&FSCOPE_UPVAL
) && vg
->slot
> v
->slot
)
1242 gola_patch(ls
, vg
, v
);
1244 } else if (gola_isgoto(v
)) {
1245 if (bl
->prev
) { /* Propagate goto or break to outer scope. */
1246 bl
->prev
->flags
|= name
== NAME_BREAK
? FSCOPE_BREAK
: FSCOPE_GOLA
;
1247 v
->slot
= bl
->nactvar
;
1248 if ((bl
->flags
& FSCOPE_UPVAL
))
1250 } else { /* No outer scope: undefined goto label or no loop. */
1251 ls
->linenumber
= ls
->fs
->bcbase
[v
->startpc
].line
;
1252 if (name
== NAME_BREAK
)
1253 lj_lex_error(ls
, 0, LJ_ERR_XBREAK
);
1255 lj_lex_error(ls
, 0, LJ_ERR_XLUNDEF
, strdata(name
));
1262 /* Find existing label. */
1263 static VarInfo
*gola_findlabel(LexState
*ls
, GCstr
*name
)
1265 VarInfo
*v
= ls
->vstack
+ ls
->fs
->bl
->vstart
;
1266 VarInfo
*ve
= ls
->vstack
+ ls
->vtop
;
1268 if (strref(v
->name
) == name
&& gola_islabel(v
))
1273 /* -- Scope handling ------------------------------------------------------ */
1275 /* Begin a scope. */
1276 static void fscope_begin(FuncState
*fs
, FuncScope
*bl
, int flags
)
1278 bl
->nactvar
= (uint8_t)fs
->nactvar
;
1280 bl
->vstart
= fs
->ls
->vtop
;
1283 lj_assertFS(fs
->freereg
== fs
->nactvar
, "bad regalloc");
1287 static void fscope_end(FuncState
*fs
)
1289 FuncScope
*bl
= fs
->bl
;
1290 LexState
*ls
= fs
->ls
;
1292 var_remove(ls
, bl
->nactvar
);
1293 fs
->freereg
= fs
->nactvar
;
1294 lj_assertFS(bl
->nactvar
== fs
->nactvar
, "bad regalloc");
1295 if ((bl
->flags
& (FSCOPE_UPVAL
|FSCOPE_NOCLOSE
)) == FSCOPE_UPVAL
)
1296 bcemit_AJ(fs
, BC_UCLO
, bl
->nactvar
, 0);
1297 if ((bl
->flags
& FSCOPE_BREAK
)) {
1298 if ((bl
->flags
& FSCOPE_LOOP
)) {
1299 MSize idx
= gola_new(ls
, NAME_BREAK
, VSTACK_LABEL
, fs
->pc
);
1300 ls
->vtop
= idx
; /* Drop break label immediately. */
1301 gola_resolve(ls
, bl
, idx
);
1302 } else { /* Need the fixup step to propagate the breaks. */
1307 if ((bl
->flags
& FSCOPE_GOLA
)) {
1312 /* Mark scope as having an upvalue. */
1313 static void fscope_uvmark(FuncState
*fs
, BCReg level
)
1316 for (bl
= fs
->bl
; bl
&& bl
->nactvar
> level
; bl
= bl
->prev
)
1319 bl
->flags
|= FSCOPE_UPVAL
;
1322 /* -- Function state management ------------------------------------------- */
1324 /* Fixup bytecode for prototype. */
1325 static void fs_fixup_bc(FuncState
*fs
, GCproto
*pt
, BCIns
*bc
, MSize n
)
1327 BCInsLine
*base
= fs
->bcbase
;
1330 bc
[0] = BCINS_AD((fs
->flags
& PROTO_VARARG
) ? BC_FUNCV
: BC_FUNCF
,
1332 for (i
= 1; i
< n
; i
++)
1333 bc
[i
] = base
[i
].ins
;
1336 /* Fixup upvalues for child prototype, step #2. */
1337 static void fs_fixup_uv2(FuncState
*fs
, GCproto
*pt
)
1339 VarInfo
*vstack
= fs
->ls
->vstack
;
1340 uint16_t *uv
= proto_uv(pt
);
1341 MSize i
, n
= pt
->sizeuv
;
1342 for (i
= 0; i
< n
; i
++) {
1343 VarIndex vidx
= uv
[i
];
1344 if (vidx
>= LJ_MAX_VSTACK
)
1345 uv
[i
] = vidx
- LJ_MAX_VSTACK
;
1346 else if ((vstack
[vidx
].info
& VSTACK_VAR_RW
))
1347 uv
[i
] = vstack
[vidx
].slot
| PROTO_UV_LOCAL
;
1349 uv
[i
] = vstack
[vidx
].slot
| PROTO_UV_LOCAL
| PROTO_UV_IMMUTABLE
;
1353 /* Fixup constants for prototype. */
1354 static void fs_fixup_k(FuncState
*fs
, GCproto
*pt
, void *kptr
)
1360 checklimitgt(fs
, fs
->nkn
, BCMAX_D
+1, "constants");
1361 checklimitgt(fs
, fs
->nkgc
, BCMAX_D
+1, "constants");
1362 setmref(pt
->k
, kptr
);
1363 pt
->sizekn
= fs
->nkn
;
1364 pt
->sizekgc
= fs
->nkgc
;
1366 array
= tvref(kt
->array
);
1367 for (i
= 0; i
< kt
->asize
; i
++)
1368 if (tvhaskslot(&array
[i
])) {
1369 TValue
*tv
= &((TValue
*)kptr
)[tvkslot(&array
[i
])];
1371 setintV(tv
, (int32_t)i
);
1373 setnumV(tv
, (lua_Number
)i
);
1375 node
= noderef(kt
->node
);
1377 for (i
= 0; i
<= hmask
; i
++) {
1379 if (tvhaskslot(&n
->val
)) {
1380 ptrdiff_t kidx
= (ptrdiff_t)tvkslot(&n
->val
);
1381 lj_assertFS(!tvisint(&n
->key
), "unexpected integer key");
1382 if (tvisnum(&n
->key
)) {
1383 TValue
*tv
= &((TValue
*)kptr
)[kidx
];
1385 lua_Number nn
= numV(&n
->key
);
1386 int32_t k
= lj_num2int(nn
);
1387 lj_assertFS(!tvismzero(&n
->key
), "unexpected -0 key");
1388 if ((lua_Number
)k
== nn
)
1396 GCobj
*o
= gcV(&n
->key
);
1397 setgcref(((GCRef
*)kptr
)[~kidx
], o
);
1398 lj_gc_objbarrier(fs
->L
, pt
, o
);
1399 if (tvisproto(&n
->key
))
1400 fs_fixup_uv2(fs
, gco2pt(o
));
1406 /* Fixup upvalues for prototype, step #1. */
1407 static void fs_fixup_uv1(FuncState
*fs
, GCproto
*pt
, uint16_t *uv
)
1409 setmref(pt
->uv
, uv
);
1410 pt
->sizeuv
= fs
->nuv
;
1411 memcpy(uv
, fs
->uvtmp
, fs
->nuv
*sizeof(VarIndex
));
1414 #ifndef LUAJIT_DISABLE_DEBUGINFO
1415 /* Prepare lineinfo for prototype. */
1416 static size_t fs_prep_line(FuncState
*fs
, BCLine numline
)
1418 return (fs
->pc
-1) << (numline
< 256 ? 0 : numline
< 65536 ? 1 : 2);
1421 /* Fixup lineinfo for prototype. */
1422 static void fs_fixup_line(FuncState
*fs
, GCproto
*pt
,
1423 void *lineinfo
, BCLine numline
)
1425 BCInsLine
*base
= fs
->bcbase
+ 1;
1426 BCLine first
= fs
->linedefined
;
1427 MSize i
= 0, n
= fs
->pc
-1;
1428 pt
->firstline
= fs
->linedefined
;
1429 pt
->numline
= numline
;
1430 setmref(pt
->lineinfo
, lineinfo
);
1431 if (LJ_LIKELY(numline
< 256)) {
1432 uint8_t *li
= (uint8_t *)lineinfo
;
1434 BCLine delta
= base
[i
].line
- first
;
1435 lj_assertFS(delta
>= 0 && delta
< 256, "bad line delta");
1436 li
[i
] = (uint8_t)delta
;
1438 } else if (LJ_LIKELY(numline
< 65536)) {
1439 uint16_t *li
= (uint16_t *)lineinfo
;
1441 BCLine delta
= base
[i
].line
- first
;
1442 lj_assertFS(delta
>= 0 && delta
< 65536, "bad line delta");
1443 li
[i
] = (uint16_t)delta
;
1446 uint32_t *li
= (uint32_t *)lineinfo
;
1448 BCLine delta
= base
[i
].line
- first
;
1449 lj_assertFS(delta
>= 0, "bad line delta");
1450 li
[i
] = (uint32_t)delta
;
1455 /* Prepare variable info for prototype. */
1456 static size_t fs_prep_var(LexState
*ls
, FuncState
*fs
, size_t *ofsvar
)
1458 VarInfo
*vs
=ls
->vstack
, *ve
;
1461 lj_buf_reset(&ls
->sb
); /* Copy to temp. string buffer. */
1462 /* Store upvalue names. */
1463 for (i
= 0, n
= fs
->nuv
; i
< n
; i
++) {
1464 GCstr
*s
= strref(vs
[fs
->uvmap
[i
]].name
);
1465 MSize len
= s
->len
+1;
1466 char *p
= lj_buf_more(&ls
->sb
, len
);
1467 p
= lj_buf_wmem(p
, strdata(s
), len
);
1470 *ofsvar
= sbuflen(&ls
->sb
);
1472 /* Store local variable names and compressed ranges. */
1473 for (ve
= vs
+ ls
->vtop
, vs
+= fs
->vbase
; vs
< ve
; vs
++) {
1474 if (!gola_isgotolabel(vs
)) {
1475 GCstr
*s
= strref(vs
->name
);
1478 if ((uintptr_t)s
< VARNAME__MAX
) {
1479 p
= lj_buf_more(&ls
->sb
, 1 + 2*5);
1480 *p
++ = (char)(uintptr_t)s
;
1482 MSize len
= s
->len
+1;
1483 p
= lj_buf_more(&ls
->sb
, len
+ 2*5);
1484 p
= lj_buf_wmem(p
, strdata(s
), len
);
1486 startpc
= vs
->startpc
;
1487 p
= lj_strfmt_wuleb128(p
, startpc
-lastpc
);
1488 p
= lj_strfmt_wuleb128(p
, vs
->endpc
-startpc
);
1493 lj_buf_putb(&ls
->sb
, '\0'); /* Terminator for varinfo. */
1494 return sbuflen(&ls
->sb
);
1497 /* Fixup variable info for prototype. */
1498 static void fs_fixup_var(LexState
*ls
, GCproto
*pt
, uint8_t *p
, size_t ofsvar
)
1500 setmref(pt
->uvinfo
, p
);
1501 setmref(pt
->varinfo
, (char *)p
+ ofsvar
);
1502 memcpy(p
, ls
->sb
.b
, sbuflen(&ls
->sb
)); /* Copy from temp. buffer. */
1506 /* Initialize with empty debug info, if disabled. */
1507 #define fs_prep_line(fs, numline) (UNUSED(numline), 0)
1508 #define fs_fixup_line(fs, pt, li, numline) \
1509 pt->firstline = pt->numline = 0, setmref((pt)->lineinfo, NULL)
1510 #define fs_prep_var(ls, fs, ofsvar) (UNUSED(ofsvar), 0)
1511 #define fs_fixup_var(ls, pt, p, ofsvar) \
1512 setmref((pt)->uvinfo, NULL), setmref((pt)->varinfo, NULL)
1516 /* Check if bytecode op returns. */
1517 static int bcopisret(BCOp op
)
1520 case BC_CALLMT
: case BC_CALLT
:
1521 case BC_RETM
: case BC_RET
: case BC_RET0
: case BC_RET1
:
1528 /* Fixup return instruction for prototype. */
1529 static void fs_fixup_ret(FuncState
*fs
)
1531 BCPos lastpc
= fs
->pc
;
1532 if (lastpc
<= fs
->lasttarget
|| !bcopisret(bc_op(fs
->bcbase
[lastpc
-1].ins
))) {
1533 if ((fs
->bl
->flags
& FSCOPE_UPVAL
))
1534 bcemit_AJ(fs
, BC_UCLO
, 0, 0);
1535 bcemit_AD(fs
, BC_RET0
, 0, 1); /* Need final return. */
1537 fs
->bl
->flags
|= FSCOPE_NOCLOSE
; /* Handled above. */
1539 lj_assertFS(fs
->bl
== NULL
, "bad scope nesting");
1540 /* May need to fixup returns encoded before first function was created. */
1541 if (fs
->flags
& PROTO_FIXUP_RETURN
) {
1543 for (pc
= 1; pc
< lastpc
; pc
++) {
1544 BCIns ins
= fs
->bcbase
[pc
].ins
;
1546 switch (bc_op(ins
)) {
1547 case BC_CALLMT
: case BC_CALLT
:
1548 case BC_RETM
: case BC_RET
: case BC_RET0
: case BC_RET1
:
1549 offset
= bcemit_INS(fs
, ins
); /* Copy original instruction. */
1550 fs
->bcbase
[offset
].line
= fs
->bcbase
[pc
].line
;
1551 offset
= offset
-(pc
+1)+BCBIAS_J
;
1552 if (offset
> BCMAX_D
)
1553 err_syntax(fs
->ls
, LJ_ERR_XFIXUP
);
1554 /* Replace with UCLO plus branch. */
1555 fs
->bcbase
[pc
].ins
= BCINS_AD(BC_UCLO
, 0, offset
);
1558 return; /* We're done. */
1566 /* Finish a FuncState and return the new prototype. */
1567 static GCproto
*fs_finish(LexState
*ls
, BCLine line
)
1569 lua_State
*L
= ls
->L
;
1570 FuncState
*fs
= ls
->fs
;
1571 BCLine numline
= line
- fs
->linedefined
;
1572 size_t sizept
, ofsk
, ofsuv
, ofsli
, ofsdbg
, ofsvar
;
1575 /* Apply final fixups. */
1578 /* Calculate total size of prototype including all colocated arrays. */
1579 sizept
= sizeof(GCproto
) + fs
->pc
*sizeof(BCIns
) + fs
->nkgc
*sizeof(GCRef
);
1580 sizept
= (sizept
+ sizeof(TValue
)-1) & ~(sizeof(TValue
)-1);
1581 ofsk
= sizept
; sizept
+= fs
->nkn
*sizeof(TValue
);
1582 ofsuv
= sizept
; sizept
+= ((fs
->nuv
+1)&~1)*2;
1583 ofsli
= sizept
; sizept
+= fs_prep_line(fs
, numline
);
1584 ofsdbg
= sizept
; sizept
+= fs_prep_var(ls
, fs
, &ofsvar
);
1586 /* Allocate prototype and initialize its fields. */
1587 pt
= (GCproto
*)lj_mem_newgco(L
, (MSize
)sizept
);
1588 pt
->gct
= ~LJ_TPROTO
;
1589 pt
->sizept
= (MSize
)sizept
;
1591 pt
->flags
= (uint8_t)(fs
->flags
& ~(PROTO_HAS_RETURN
|PROTO_FIXUP_RETURN
));
1592 pt
->numparams
= fs
->numparams
;
1593 pt
->framesize
= fs
->framesize
;
1594 setgcref(pt
->chunkname
, obj2gco(ls
->chunkname
));
1596 /* Close potentially uninitialized gap between bc and kgc. */
1597 *(uint32_t *)((char *)pt
+ ofsk
- sizeof(GCRef
)*(fs
->nkgc
+1)) = 0;
1598 fs_fixup_bc(fs
, pt
, (BCIns
*)((char *)pt
+ sizeof(GCproto
)), fs
->pc
);
1599 fs_fixup_k(fs
, pt
, (void *)((char *)pt
+ ofsk
));
1600 fs_fixup_uv1(fs
, pt
, (uint16_t *)((char *)pt
+ ofsuv
));
1601 fs_fixup_line(fs
, pt
, (void *)((char *)pt
+ ofsli
), numline
);
1602 fs_fixup_var(ls
, pt
, (uint8_t *)((char *)pt
+ ofsdbg
), ofsvar
);
1604 lj_vmevent_send(L
, BC
,
1605 setprotoV(L
, L
->top
++, pt
);
1608 L
->top
--; /* Pop table of constants. */
1609 ls
->vtop
= fs
->vbase
; /* Reset variable stack. */
1611 lj_assertL(ls
->fs
!= NULL
|| ls
->tok
== TK_eof
, "bad parser state");
1615 /* Initialize a new FuncState. */
1616 static void fs_init(LexState
*ls
, FuncState
*fs
)
1618 lua_State
*L
= ls
->L
;
1619 fs
->prev
= ls
->fs
; ls
->fs
= fs
; /* Append to list. */
1621 fs
->vbase
= ls
->vtop
;
1633 fs
->framesize
= 1; /* Minimum frame size. */
1634 fs
->kt
= lj_tab_new(L
, 0, 0);
1635 /* Anchor table of constants in stack to avoid being collected. */
1636 settabV(L
, L
->top
, fs
->kt
);
1640 /* -- Expressions --------------------------------------------------------- */
1642 /* Forward declaration. */
1643 static void expr(LexState
*ls
, ExpDesc
*v
);
1645 /* Return string expression. */
1646 static void expr_str(LexState
*ls
, ExpDesc
*e
)
1648 expr_init(e
, VKSTR
, 0);
1649 e
->u
.sval
= lex_str(ls
);
1652 /* Return index expression. */
1653 static void expr_index(FuncState
*fs
, ExpDesc
*t
, ExpDesc
*e
)
1655 /* Already called: expr_toval(fs, e). */
1657 if (expr_isnumk(e
)) {
1659 if (tvisint(expr_numtv(e
))) {
1660 int32_t k
= intV(expr_numtv(e
));
1662 t
->u
.s
.aux
= BCMAX_C
+1+(uint32_t)k
; /* 256..511: const byte key */
1667 lua_Number n
= expr_numberV(e
);
1668 int32_t k
= lj_num2int(n
);
1669 if (checku8(k
) && n
== (lua_Number
)k
) {
1670 t
->u
.s
.aux
= BCMAX_C
+1+(uint32_t)k
; /* 256..511: const byte key */
1674 } else if (expr_isstrk(e
)) {
1675 BCReg idx
= const_str(fs
, e
);
1676 if (idx
<= BCMAX_C
) {
1677 t
->u
.s
.aux
= ~idx
; /* -256..-1: const string key */
1681 t
->u
.s
.aux
= expr_toanyreg(fs
, e
); /* 0..255: register */
1684 /* Parse index expression with named field. */
1685 static void expr_field(LexState
*ls
, ExpDesc
*v
)
1687 FuncState
*fs
= ls
->fs
;
1689 expr_toanyreg(fs
, v
);
1690 lj_lex_next(ls
); /* Skip dot or colon. */
1692 expr_index(fs
, v
, &key
);
1695 /* Parse index expression with brackets. */
1696 static void expr_bracket(LexState
*ls
, ExpDesc
*v
)
1698 lj_lex_next(ls
); /* Skip '['. */
1700 expr_toval(ls
->fs
, v
);
1704 /* Get value of constant expression. */
1705 static void expr_kvalue(FuncState
*fs
, TValue
*v
, ExpDesc
*e
)
1708 if (e
->k
<= VKTRUE
) {
1709 setpriV(v
, ~(uint32_t)e
->k
);
1710 } else if (e
->k
== VKSTR
) {
1711 setgcVraw(v
, obj2gco(e
->u
.sval
), LJ_TSTR
);
1713 lj_assertFS(tvisnumber(expr_numtv(e
)), "bad number constant");
1714 *v
= *expr_numtv(e
);
1718 /* Parse table constructor expression. */
1719 static void expr_table(LexState
*ls
, ExpDesc
*e
)
1721 FuncState
*fs
= ls
->fs
;
1722 BCLine line
= ls
->linenumber
;
1724 int vcall
= 0, needarr
= 0, fixt
= 0;
1725 uint32_t narr
= 1; /* First array index. */
1726 uint32_t nhash
= 0; /* Number of hash entries. */
1727 BCReg freg
= fs
->freereg
;
1728 BCPos pc
= bcemit_AD(fs
, BC_TNEW
, freg
, 0);
1729 expr_init(e
, VNONRELOC
, freg
);
1730 bcreg_reserve(fs
, 1);
1733 while (ls
->tok
!= '}') {
1736 if (ls
->tok
== '[') {
1737 expr_bracket(ls
, &key
); /* Already calls expr_toval. */
1738 if (!expr_isk(&key
)) expr_index(fs
, e
, &key
);
1739 if (expr_isnumk(&key
) && expr_numiszero(&key
)) needarr
= 1; else nhash
++;
1741 } else if ((ls
->tok
== TK_name
|| (!LJ_52
&& ls
->tok
== TK_goto
)) &&
1742 lj_lex_lookahead(ls
) == '=') {
1747 expr_init(&key
, VKNUM
, 0);
1748 setintV(&key
.u
.nval
, (int)narr
);
1750 needarr
= vcall
= 1;
1753 if (expr_isk(&key
) && key
.k
!= VKNIL
&&
1754 (key
.k
== VKSTR
|| expr_isk_nojump(&val
))) {
1756 if (!t
) { /* Create template table on demand. */
1758 t
= lj_tab_new(fs
->L
, needarr
? narr
: 0, hsize2hbits(nhash
));
1759 kidx
= const_gc(fs
, obj2gco(t
), LJ_TTAB
);
1760 fs
->bcbase
[pc
].ins
= BCINS_AD(BC_TDUP
, freg
-1, kidx
);
1763 expr_kvalue(fs
, &k
, &key
);
1764 v
= lj_tab_set(fs
->L
, t
, &k
);
1765 lj_gc_anybarriert(fs
->L
, t
);
1766 if (expr_isk_nojump(&val
)) { /* Add const key/value to template table. */
1767 expr_kvalue(fs
, v
, &val
);
1768 } else { /* Otherwise create dummy string key (avoids lj_tab_newkey). */
1769 settabV(fs
->L
, v
, t
); /* Preserve key with table itself as value. */
1770 fixt
= 1; /* Fix this later, after all resizes. */
1775 if (val
.k
!= VCALL
) { expr_toanyreg(fs
, &val
); vcall
= 0; }
1776 if (expr_isk(&key
)) expr_index(fs
, e
, &key
);
1777 bcemit_store(fs
, e
, &val
);
1780 if (!lex_opt(ls
, ',') && !lex_opt(ls
, ';')) break;
1782 lex_match(ls
, '}', '{', line
);
1784 BCInsLine
*ilp
= &fs
->bcbase
[fs
->pc
-1];
1786 lj_assertFS(bc_a(ilp
->ins
) == freg
&&
1787 bc_op(ilp
->ins
) == (narr
> 256 ? BC_TSETV
: BC_TSETB
),
1788 "bad CALL code generation");
1789 expr_init(&en
, VKNUM
, 0);
1790 en
.u
.nval
.u32
.lo
= narr
-1;
1791 en
.u
.nval
.u32
.hi
= 0x43300000; /* Biased integer to avoid denormals. */
1792 if (narr
> 256) { fs
->pc
--; ilp
--; }
1793 ilp
->ins
= BCINS_AD(BC_TSETM
, freg
, const_num(fs
, &en
));
1794 setbc_b(&ilp
[-1].ins
, 0);
1796 if (pc
== fs
->pc
-1) { /* Make expr relocable if possible. */
1801 e
->k
= VNONRELOC
; /* May have been changed by expr_index. */
1803 if (!t
) { /* Construct TNEW RD: hhhhhaaaaaaaaaaa. */
1804 BCIns
*ip
= &fs
->bcbase
[pc
].ins
;
1805 if (!needarr
) narr
= 0;
1806 else if (narr
< 3) narr
= 3;
1807 else if (narr
> 0x7ff) narr
= 0x7ff;
1808 setbc_d(ip
, narr
|(hsize2hbits(nhash
)<<11));
1810 if (needarr
&& t
->asize
< narr
)
1811 lj_tab_reasize(fs
->L
, t
, narr
-1);
1812 if (fixt
) { /* Fix value for dummy keys in template table. */
1813 Node
*node
= noderef(t
->node
);
1814 uint32_t i
, hmask
= t
->hmask
;
1815 for (i
= 0; i
<= hmask
; i
++) {
1817 if (tvistab(&n
->val
)) {
1818 lj_assertFS(tabV(&n
->val
) == t
, "bad dummy key in template table");
1819 setnilV(&n
->val
); /* Turn value into nil. */
1827 /* Parse function parameters. */
1828 static BCReg
parse_params(LexState
*ls
, int needself
)
1830 FuncState
*fs
= ls
->fs
;
1834 var_new_lit(ls
, nparams
++, "self");
1835 if (ls
->tok
!= ')') {
1837 if (ls
->tok
== TK_name
|| (!LJ_52
&& ls
->tok
== TK_goto
)) {
1838 var_new(ls
, nparams
++, lex_str(ls
));
1839 } else if (ls
->tok
== TK_dots
) {
1841 fs
->flags
|= PROTO_VARARG
;
1844 err_syntax(ls
, LJ_ERR_XPARAM
);
1846 } while (lex_opt(ls
, ','));
1848 var_add(ls
, nparams
);
1849 lj_assertFS(fs
->nactvar
== nparams
, "bad regalloc");
1850 bcreg_reserve(fs
, nparams
);
1855 /* Forward declaration. */
1856 static void parse_chunk(LexState
*ls
);
1858 /* Parse body of a function. */
1859 static void parse_body(LexState
*ls
, ExpDesc
*e
, int needself
, BCLine line
)
1861 FuncState fs
, *pfs
= ls
->fs
;
1864 ptrdiff_t oldbase
= pfs
->bcbase
- ls
->bcstack
;
1866 fscope_begin(&fs
, &bl
, 0);
1867 fs
.linedefined
= line
;
1868 fs
.numparams
= (uint8_t)parse_params(ls
, needself
);
1869 fs
.bcbase
= pfs
->bcbase
+ pfs
->pc
;
1870 fs
.bclim
= pfs
->bclim
- pfs
->pc
;
1871 bcemit_AD(&fs
, BC_FUNCF
, 0, 0); /* Placeholder. */
1873 if (ls
->tok
!= TK_end
) lex_match(ls
, TK_end
, TK_function
, line
);
1874 pt
= fs_finish(ls
, (ls
->lastline
= ls
->linenumber
));
1875 pfs
->bcbase
= ls
->bcstack
+ oldbase
; /* May have been reallocated. */
1876 pfs
->bclim
= (BCPos
)(ls
->sizebcstack
- oldbase
);
1877 /* Store new prototype in the constant array of the parent. */
1878 expr_init(e
, VRELOCABLE
,
1879 bcemit_AD(pfs
, BC_FNEW
, 0, const_gc(pfs
, obj2gco(pt
), LJ_TPROTO
)));
1881 pfs
->flags
|= (fs
.flags
& PROTO_FFI
);
1883 if (!(pfs
->flags
& PROTO_CHILD
)) {
1884 if (pfs
->flags
& PROTO_HAS_RETURN
)
1885 pfs
->flags
|= PROTO_FIXUP_RETURN
;
1886 pfs
->flags
|= PROTO_CHILD
;
1891 /* Parse expression list. Last expression is left open. */
1892 static BCReg
expr_list(LexState
*ls
, ExpDesc
*v
)
1896 while (lex_opt(ls
, ',')) {
1897 expr_tonextreg(ls
->fs
, v
);
1904 /* Parse function argument list. */
1905 static void parse_args(LexState
*ls
, ExpDesc
*e
)
1907 FuncState
*fs
= ls
->fs
;
1911 BCLine line
= ls
->linenumber
;
1912 if (ls
->tok
== '(') {
1914 if (line
!= ls
->lastline
)
1915 err_syntax(ls
, LJ_ERR_XAMBIG
);
1918 if (ls
->tok
== ')') { /* f(). */
1921 expr_list(ls
, &args
);
1922 if (args
.k
== VCALL
) /* f(a, b, g()) or f(a, b, ...). */
1923 setbc_b(bcptr(fs
, &args
), 0); /* Pass on multiple results. */
1925 lex_match(ls
, ')', '(', line
);
1926 } else if (ls
->tok
== '{') {
1927 expr_table(ls
, &args
);
1928 } else if (ls
->tok
== TK_string
) {
1929 expr_init(&args
, VKSTR
, 0);
1930 args
.u
.sval
= strV(&ls
->tokval
);
1933 err_syntax(ls
, LJ_ERR_XFUNARG
);
1934 return; /* Silence compiler. */
1936 lj_assertFS(e
->k
== VNONRELOC
, "bad expr type %d", e
->k
);
1937 base
= e
->u
.s
.info
; /* Base register for call. */
1938 if (args
.k
== VCALL
) {
1939 ins
= BCINS_ABC(BC_CALLM
, base
, 2, args
.u
.s
.aux
- base
- 1 - LJ_FR2
);
1941 if (args
.k
!= VVOID
)
1942 expr_tonextreg(fs
, &args
);
1943 ins
= BCINS_ABC(BC_CALL
, base
, 2, fs
->freereg
- base
- LJ_FR2
);
1945 expr_init(e
, VCALL
, bcemit_INS(fs
, ins
));
1947 fs
->bcbase
[fs
->pc
- 1].line
= line
;
1948 fs
->freereg
= base
+1; /* Leave one result by default. */
1951 /* Parse primary expression. */
1952 static void expr_primary(LexState
*ls
, ExpDesc
*v
)
1954 FuncState
*fs
= ls
->fs
;
1955 /* Parse prefix expression. */
1956 if (ls
->tok
== '(') {
1957 BCLine line
= ls
->linenumber
;
1960 lex_match(ls
, ')', '(', line
);
1961 expr_discharge(ls
->fs
, v
);
1962 } else if (ls
->tok
== TK_name
|| (!LJ_52
&& ls
->tok
== TK_goto
)) {
1965 err_syntax(ls
, LJ_ERR_XSYMBOL
);
1967 for (;;) { /* Parse multiple expression suffixes. */
1968 if (ls
->tok
== '.') {
1970 } else if (ls
->tok
== '[') {
1972 expr_toanyreg(fs
, v
);
1973 expr_bracket(ls
, &key
);
1974 expr_index(fs
, v
, &key
);
1975 } else if (ls
->tok
== ':') {
1979 bcemit_method(fs
, v
, &key
);
1981 } else if (ls
->tok
== '(' || ls
->tok
== TK_string
|| ls
->tok
== '{') {
1982 expr_tonextreg(fs
, v
);
1983 if (LJ_FR2
) bcreg_reserve(fs
, 1);
1991 /* Parse simple expression. */
1992 static void expr_simple(LexState
*ls
, ExpDesc
*v
)
1996 expr_init(v
, (LJ_HASFFI
&& tviscdata(&ls
->tokval
)) ? VKCDATA
: VKNUM
, 0);
1997 copyTV(ls
->L
, &v
->u
.nval
, &ls
->tokval
);
2000 expr_init(v
, VKSTR
, 0);
2001 v
->u
.sval
= strV(&ls
->tokval
);
2004 expr_init(v
, VKNIL
, 0);
2007 expr_init(v
, VKTRUE
, 0);
2010 expr_init(v
, VKFALSE
, 0);
2012 case TK_dots
: { /* Vararg. */
2013 FuncState
*fs
= ls
->fs
;
2015 checkcond(ls
, fs
->flags
& PROTO_VARARG
, LJ_ERR_XDOTS
);
2016 bcreg_reserve(fs
, 1);
2017 base
= fs
->freereg
-1;
2018 expr_init(v
, VCALL
, bcemit_ABC(fs
, BC_VARG
, base
, 2, fs
->numparams
));
2022 case '{': /* Table constructor. */
2027 parse_body(ls
, v
, 0, ls
->linenumber
);
2030 expr_primary(ls
, v
);
2036 /* Manage syntactic levels to avoid blowing up the stack. */
2037 static void synlevel_begin(LexState
*ls
)
2039 if (++ls
->level
>= LJ_MAX_XLEVEL
)
2040 lj_lex_error(ls
, 0, LJ_ERR_XLEVELS
);
2043 #define synlevel_end(ls) ((ls)->level--)
/* Convert token to binary operator. */
static BinOpr token2binop(LexToken tok)
{
  switch (tok) {
  case '+':	return OPR_ADD;
  case '-':	return OPR_SUB;
  case '*':	return OPR_MUL;
  case '/':	return OPR_DIV;
  case '%':	return OPR_MOD;
  case '^':	return OPR_POW;
  case TK_concat: return OPR_CONCAT;
  case TK_ne:	return OPR_NE;
  case TK_eq:	return OPR_EQ;
  case '<':	return OPR_LT;
  case TK_le:	return OPR_LE;
  case '>':	return OPR_GT;
  case TK_ge:	return OPR_GE;
  case TK_and:	return OPR_AND;
  case TK_or:	return OPR_OR;
  default:	return OPR_NOBINOPR;
  }
}
/* Priorities for each binary operator. ORDER OPR. */
static const struct {
  uint8_t left;		/* Left priority. */
  uint8_t right;	/* Right priority. */
} priority[] = {
  {6,6}, {6,6}, {7,7}, {7,7}, {7,7},	/* ADD SUB MUL DIV MOD */
  {10,9}, {5,4},			/* POW CONCAT (right associative) */
  {3,3}, {3,3},				/* EQ NE */
  {3,3}, {3,3}, {3,3}, {3,3},		/* LT GE GT LE */
  {2,2}, {1,1}				/* AND OR */
};

#define UNARY_PRIORITY		8  /* Priority for unary operators. */
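/* Example (informal): with these priorities, a+b*c parses as a+(b*c) since
** '*' (7) binds tighter than '+' (6); a..b..c parses as a..(b..c) and
** a^b^c as a^(b^c) because their right priorities are lower than their left
** ones; and -x^2 parses as -(x^2) since '^' (left 10) binds tighter than
** the unary priority 8.
*/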
/* Forward declaration. */
static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit);

/* Parse unary expression. */
static void expr_unop(LexState *ls, ExpDesc *v)
{
  BCOp op;
  if (ls->tok == TK_not) {
    op = BC_NOT;
  } else if (ls->tok == '-') {
    op = BC_UNM;
  } else if (ls->tok == '#') {
    op = BC_LEN;
  } else {
    expr_simple(ls, v);
    return;
  }
  lj_lex_next(ls);
  expr_binop(ls, v, UNARY_PRIORITY);
  bcemit_unop(ls->fs, op, v);
}
/* Parse binary expressions with priority higher than the limit. */
static BinOpr expr_binop(LexState *ls, ExpDesc *v, uint32_t limit)
{
  BinOpr op;
  synlevel_begin(ls);
  expr_unop(ls, v);
  op = token2binop(ls->tok);
  while (op != OPR_NOBINOPR && priority[op].left > limit) {
    ExpDesc v2;
    BinOpr nextop;
    lj_lex_next(ls);
    bcemit_binop_left(ls->fs, op, v);
    /* Parse binary expression with higher priority. */
    nextop = expr_binop(ls, &v2, priority[op].right);
    bcemit_binop(ls->fs, op, v, &v2);
    op = nextop;
  }
  synlevel_end(ls);
  return op;  /* Return unconsumed binary operator (if any). */
}
/* Parse expression. */
static void expr(LexState *ls, ExpDesc *v)
{
  expr_binop(ls, v, 0);  /* Priority 0: parse whole expression. */
}

/* Assign expression to the next register. */
static void expr_next(LexState *ls)
{
  ExpDesc e;
  expr(ls, &e);
  expr_tonextreg(ls->fs, &e);
}

/* Parse conditional expression. */
static BCPos expr_cond(LexState *ls)
{
  ExpDesc v;
  expr(ls, &v);
  if (v.k == VKNIL) v.k = VKFALSE;
  bcemit_branch_t(ls->fs, &v);
  return v.f;
}
/* -- Assignments --------------------------------------------------------- */

/* List of LHS variables. */
typedef struct LHSVarList {
  ExpDesc v;			/* LHS variable. */
  struct LHSVarList *prev;	/* Link to previous LHS variable. */
} LHSVarList;
/* Eliminate write-after-read hazards for local variable assignment. */
static void assign_hazard(LexState *ls, LHSVarList *lh, const ExpDesc *v)
{
  FuncState *fs = ls->fs;
  BCReg reg = v->u.s.info;  /* Check against this variable. */
  BCReg tmp = fs->freereg;  /* Rename to this temp. register (if needed). */
  int hazard = 0;
  for (; lh; lh = lh->prev) {
    if (lh->v.k == VINDEXED) {
      if (lh->v.u.s.info == reg) {  /* t[i], t = 1, 2 */
	hazard = 1;
	lh->v.u.s.info = tmp;
      }
      if (lh->v.u.s.aux == reg) {  /* t[i], i = 1, 2 */
	hazard = 1;
	lh->v.u.s.aux = tmp;
      }
    }
  }
  if (hazard) {
    bcemit_AD(fs, BC_MOV, tmp, reg);  /* Rename conflicting variable. */
    bcreg_reserve(fs, 1);
  }
}
/* Adjust LHS/RHS of an assignment. */
static void assign_adjust(LexState *ls, BCReg nvars, BCReg nexps, ExpDesc *e)
{
  FuncState *fs = ls->fs;
  int32_t extra = (int32_t)nvars - (int32_t)nexps;
  if (e->k == VCALL) {
    extra++;  /* Compensate for the VCALL itself. */
    if (extra < 0) extra = 0;
    setbc_b(bcptr(fs, e), extra+1);  /* Fixup call results. */
    if (extra > 1) bcreg_reserve(fs, (BCReg)extra-1);
  } else {
    if (e->k != VVOID)
      expr_tonextreg(fs, e);  /* Close last expression. */
    if (extra > 0) {  /* Leftover LHS are set to nil. */
      BCReg reg = fs->freereg;
      bcreg_reserve(fs, (BCReg)extra);
      bcemit_nil(fs, reg, (BCReg)extra);
    }
  }
  if (nexps > nvars)
    ls->fs->freereg -= nexps - nvars;  /* Drop leftover regs. */
}
/* Recursively parse assignment statement. */
static void parse_assignment(LexState *ls, LHSVarList *lh, BCReg nvars)
{
  ExpDesc e;
  checkcond(ls, VLOCAL <= lh->v.k && lh->v.k <= VINDEXED, LJ_ERR_XSYNTAX);
  if (lex_opt(ls, ',')) {  /* Collect LHS list and recurse upwards. */
    LHSVarList vl;
    vl.prev = lh;
    expr_primary(ls, &vl.v);
    if (vl.v.k == VLOCAL)
      assign_hazard(ls, lh, &vl.v);
    checklimit(ls->fs, ls->level + nvars, LJ_MAX_XLEVEL, "variable names");
    parse_assignment(ls, &vl, nvars+1);
  } else {  /* Parse RHS. */
    BCReg nexps;
    lex_check(ls, '=');
    nexps = expr_list(ls, &e);
    if (nexps == nvars) {
      if (e.k == VCALL) {
	if (bc_op(*bcptr(ls->fs, &e)) == BC_VARG) {  /* Vararg assignment. */
	  ls->fs->freereg--;
	  e.k = VRELOCABLE;
	} else {  /* Multiple call results. */
	  e.u.s.info = e.u.s.aux;  /* Base of call is not relocatable. */
	}
      }
      bcemit_store(ls->fs, &lh->v, &e);
      return;
    }
    assign_adjust(ls, nvars, nexps, &e);
  }
  /* Assign RHS to LHS and recurse downwards. */
  expr_init(&e, VNONRELOC, ls->fs->freereg-1);
  bcemit_store(ls->fs, &lh->v, &e);
}
/* Parse call statement or assignment. */
static void parse_call_assign(LexState *ls)
{
  FuncState *fs = ls->fs;
  LHSVarList vl;
  expr_primary(ls, &vl.v);
  if (vl.v.k == VCALL) {  /* Function call statement. */
    setbc_b(bcptr(fs, &vl.v), 1);  /* No results. */
  } else {  /* Start of an assignment. */
    vl.prev = NULL;
    parse_assignment(ls, &vl, 1);
  }
}
/* Parse 'local' statement. */
static void parse_local(LexState *ls)
{
  if (lex_opt(ls, TK_function)) {  /* Local function declaration. */
    ExpDesc v, b;
    FuncState *fs = ls->fs;
    var_new(ls, 0, lex_str(ls));
    expr_init(&v, VLOCAL, fs->freereg);
    v.u.s.aux = fs->varmap[fs->freereg];
    bcreg_reserve(fs, 1);
    var_add(ls, 1);
    parse_body(ls, &b, 0, ls->linenumber);
    /* bcemit_store(fs, &v, &b) without setting VSTACK_VAR_RW. */
    expr_free(fs, &b);
    expr_toreg(fs, &b, v.u.s.info);
    /* The upvalue is in scope, but the local is only valid after the store. */
    var_get(ls, fs, fs->nactvar - 1).startpc = fs->pc;
  } else {  /* Local variable declaration. */
    ExpDesc e;
    BCReg nexps, nvars = 0;
    do {  /* Collect LHS. */
      var_new(ls, nvars++, lex_str(ls));
    } while (lex_opt(ls, ','));
    if (lex_opt(ls, '=')) {  /* Optional RHS. */
      nexps = expr_list(ls, &e);
    } else {  /* Or implicitly set to nil. */
      e.k = VVOID;
      nexps = 0;
    }
    assign_adjust(ls, nvars, nexps, &e);
    var_add(ls, nvars);
  }
}
/* Parse 'function' statement. */
static void parse_func(LexState *ls, BCLine line)
{
  FuncState *fs;
  ExpDesc v, b;
  int needself = 0;
  lj_lex_next(ls);  /* Skip 'function'. */
  /* Parse function name. */
  var_lookup(ls, &v);
  while (ls->tok == '.')  /* Multiple dot-separated fields. */
    expr_field(ls, &v);
  if (ls->tok == ':') {  /* Optional colon to signify method call. */
    needself = 1;
    expr_field(ls, &v);
  }
  parse_body(ls, &b, needself, line);
  fs = ls->fs;
  bcemit_store(fs, &v, &b);
  fs->bcbase[fs->pc - 1].line = line;  /* Set line for the store. */
}
/* -- Control transfer statements ----------------------------------------- */

/* Check for end of block. */
static int parse_isend(LexToken tok)
{
  switch (tok) {
  case TK_else: case TK_elseif: case TK_end: case TK_until: case TK_eof:
    return 1;
  default:
    return 0;
  }
}
/* Parse 'return' statement. */
static void parse_return(LexState *ls)
{
  BCIns ins;
  FuncState *fs = ls->fs;
  lj_lex_next(ls);  /* Skip 'return'. */
  fs->flags |= PROTO_HAS_RETURN;
  if (parse_isend(ls->tok) || ls->tok == ';') {  /* Bare return. */
    ins = BCINS_AD(BC_RET0, 0, 1);
  } else {  /* Return with one or more values. */
    ExpDesc e;  /* Receives the _last_ expression in the list. */
    BCReg nret = expr_list(ls, &e);
    if (nret == 1) {  /* Return one result. */
      if (e.k == VCALL) {  /* Check for tail call. */
	BCIns *ip = bcptr(fs, &e);
	/* It doesn't pay off to add BC_VARGT just for 'return ...'. */
	if (bc_op(*ip) == BC_VARG) goto notailcall;
	fs->pc--;
	ins = BCINS_AD(bc_op(*ip)-BC_CALL+BC_CALLT, bc_a(*ip), bc_c(*ip));
      } else {  /* Can return the result from any register. */
	ins = BCINS_AD(BC_RET1, expr_toanyreg(fs, &e), 2);
      }
    } else {
      if (e.k == VCALL) {  /* Append all results from a call. */
      notailcall:
	setbc_b(bcptr(fs, &e), 0);
	ins = BCINS_AD(BC_RETM, fs->nactvar, e.u.s.aux - fs->nactvar);
      } else {
	expr_tonextreg(fs, &e);  /* Force contiguous registers. */
	ins = BCINS_AD(BC_RET, fs->nactvar, nret+1);
      }
    }
  }
  if (fs->flags & PROTO_CHILD)
    bcemit_AJ(fs, BC_UCLO, 0, 0);  /* May need to close upvalues first. */
  bcemit_INS(fs, ins);
}
/* Parse 'break' statement. */
static void parse_break(LexState *ls)
{
  ls->fs->bl->flags |= FSCOPE_BREAK;
  gola_new(ls, NAME_BREAK, VSTACK_GOTO, bcemit_jmp(ls->fs));
}
/* Parse 'goto' statement. */
static void parse_goto(LexState *ls)
{
  FuncState *fs = ls->fs;
  GCstr *name = lex_str(ls);
  VarInfo *vl = gola_findlabel(ls, name);
  if (vl)  /* Treat backwards goto within same scope like a loop. */
    bcemit_AJ(fs, BC_LOOP, vl->slot, -1);  /* No BC range check. */
  fs->bl->flags |= FSCOPE_GOLA;
  gola_new(ls, name, VSTACK_GOTO, bcemit_jmp(fs));
}
/* Parse label. */
static void parse_label(LexState *ls)
{
  FuncState *fs = ls->fs;
  GCstr *name;
  MSize idx;
  fs->lasttarget = fs->pc;
  fs->bl->flags |= FSCOPE_GOLA;
  lj_lex_next(ls);  /* Skip '::'. */
  name = lex_str(ls);
  if (gola_findlabel(ls, name))
    lj_lex_error(ls, 0, LJ_ERR_XLDUP, strdata(name));
  idx = gola_new(ls, name, VSTACK_LABEL, fs->pc);
  lex_check(ls, TK_label);
  /* Recursively parse trailing statements: labels and ';' (Lua 5.2 only). */
  for (;;) {
    if (ls->tok == TK_label) {
      synlevel_begin(ls);
      parse_label(ls);
      synlevel_end(ls);
    } else if (LJ_52 && ls->tok == ';') {
      lj_lex_next(ls);
    } else {
      break;
    }
  }
  /* Trailing label is considered to be outside of scope. */
  if (parse_isend(ls->tok) && ls->tok != TK_until)
    ls->vstack[idx].slot = fs->bl->nactvar;
  gola_resolve(ls, fs->bl, idx);
}
/* -- Blocks, loops and conditional statements ---------------------------- */

/* Parse a block. */
static void parse_block(LexState *ls)
{
  FuncState *fs = ls->fs;
  FuncScope bl;
  fscope_begin(fs, &bl, 0);
  parse_chunk(ls);
  fscope_end(fs);
}
/* Parse 'while' statement. */
static void parse_while(LexState *ls, BCLine line)
{
  FuncState *fs = ls->fs;
  BCPos start, loop, condexit;
  FuncScope bl;
  lj_lex_next(ls);  /* Skip 'while'. */
  start = fs->lasttarget = fs->pc;
  condexit = expr_cond(ls);
  fscope_begin(fs, &bl, FSCOPE_LOOP);
  lex_check(ls, TK_do);
  loop = bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
  parse_block(ls);
  jmp_patch(fs, bcemit_jmp(fs), start);
  lex_match(ls, TK_end, TK_while, line);
  fscope_end(fs);
  jmp_tohere(fs, condexit);
  jmp_patchins(fs, loop, fs->pc);
}
/* Parse 'repeat' statement. */
static void parse_repeat(LexState *ls, BCLine line)
{
  FuncState *fs = ls->fs;
  BCPos loop = fs->lasttarget = fs->pc;
  BCPos condexit;
  FuncScope bl1, bl2;
  fscope_begin(fs, &bl1, FSCOPE_LOOP);  /* Breakable loop scope. */
  fscope_begin(fs, &bl2, 0);  /* Inner scope. */
  lj_lex_next(ls);  /* Skip 'repeat'. */
  bcemit_AD(fs, BC_LOOP, fs->nactvar, 0);
  parse_chunk(ls);
  lex_match(ls, TK_until, TK_repeat, line);
  condexit = expr_cond(ls);  /* Parse condition (still inside inner scope). */
  if (!(bl2.flags & FSCOPE_UPVAL)) {  /* No upvalues? Just end inner scope. */
    fscope_end(fs);
  } else {  /* Otherwise generate: cond: UCLO+JMP out, !cond: UCLO+JMP loop. */
    parse_break(ls);  /* Break from loop and close upvalues. */
    jmp_tohere(fs, condexit);
    fscope_end(fs);  /* End inner scope and close upvalues. */
    condexit = bcemit_jmp(fs);
  }
  jmp_patch(fs, condexit, loop);  /* Jump backwards if !cond. */
  jmp_patchins(fs, loop, fs->pc);
  fscope_end(fs);  /* End loop scope. */
}
/* Parse numeric 'for'. */
static void parse_for_num(LexState *ls, GCstr *varname, BCLine line)
{
  FuncState *fs = ls->fs;
  BCReg base = fs->freereg;
  FuncScope bl;
  BCPos loop, loopend;
  /* Hidden control variables. */
  var_new_fixed(ls, FORL_IDX, VARNAME_FOR_IDX);
  var_new_fixed(ls, FORL_STOP, VARNAME_FOR_STOP);
  var_new_fixed(ls, FORL_STEP, VARNAME_FOR_STEP);
  /* Visible copy of index variable. */
  var_new(ls, FORL_EXT, varname);
  lex_check(ls, '=');
  expr_next(ls);
  lex_check(ls, ',');
  expr_next(ls);
  if (lex_opt(ls, ',')) {
    expr_next(ls);
  } else {
    bcemit_AD(fs, BC_KSHORT, fs->freereg, 1);  /* Default step is 1. */
    bcreg_reserve(fs, 1);
  }
  var_add(ls, 3);  /* Hidden control variables. */
  lex_check(ls, TK_do);
  loop = bcemit_AJ(fs, BC_FORI, base, NO_JMP);
  fscope_begin(fs, &bl, 0);  /* Scope for visible variables. */
  var_add(ls, 1);
  bcreg_reserve(fs, 1);
  parse_block(ls);
  fscope_end(fs);
  /* Perform loop inversion. Loop control instructions are at the end. */
  loopend = bcemit_AJ(fs, BC_FORL, base, NO_JMP);
  fs->bcbase[loopend].line = line;  /* Fix line for control ins. */
  jmp_patchins(fs, loopend, loop+1);
  jmp_patchins(fs, loop, fs->pc);
}
/* Try to predict whether the iterator is next() and specialize the bytecode.
** Detecting next() and pairs() by name is simplistic, but quite effective.
** The interpreter backs off if the check for the closure fails at runtime.
*/
static int predict_next(LexState *ls, FuncState *fs, BCPos pc)
{
  BCIns ins;
  GCstr *name;
  cTValue *o;
  if (pc >= fs->bclim) return 0;
  ins = fs->bcbase[pc].ins;
  switch (bc_op(ins)) {
  case BC_MOV:
    if (bc_d(ins) >= fs->nactvar) return 0;
    name = gco2str(gcref(var_get(ls, fs, bc_d(ins)).name));
    break;
  case BC_UGET:
    name = gco2str(gcref(ls->vstack[fs->uvmap[bc_d(ins)]].name));
    break;
  case BC_GGET:
    /* There's no inverse index (yet), so lookup the strings. */
    o = lj_tab_getstr(fs->kt, lj_str_newlit(ls->L, "pairs"));
    if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
      return 1;
    o = lj_tab_getstr(fs->kt, lj_str_newlit(ls->L, "next"));
    if (o && tvhaskslot(o) && tvkslot(o) == bc_d(ins))
      return 1;
    return 0;
  default:
    return 0;
  }
  return (name->len == 5 && !strcmp(strdata(name), "pairs")) ||
	 (name->len == 4 && !strcmp(strdata(name), "next"));
}
/* Parse 'for' iterator. */
static void parse_for_iter(LexState *ls, GCstr *indexname)
{
  FuncState *fs = ls->fs;
  ExpDesc e;
  BCReg nvars = 0;
  BCLine line;
  BCReg base = fs->freereg + 3;
  BCPos loop, loopend, exprpc = fs->pc;
  FuncScope bl;
  int isnext;
  /* Hidden control variables. */
  var_new_fixed(ls, nvars++, VARNAME_FOR_GEN);
  var_new_fixed(ls, nvars++, VARNAME_FOR_STATE);
  var_new_fixed(ls, nvars++, VARNAME_FOR_CTL);
  /* Visible variables returned from iterator. */
  var_new(ls, nvars++, indexname);
  while (lex_opt(ls, ','))
    var_new(ls, nvars++, lex_str(ls));
  lex_check(ls, TK_in);
  line = ls->linenumber;
  assign_adjust(ls, 3, expr_list(ls, &e), &e);
  /* The iterator needs another 3 [4] slots (func [pc] | state ctl). */
  bcreg_bump(fs, 3+LJ_FR2);
  isnext = (nvars <= 5 && predict_next(ls, fs, exprpc));
  var_add(ls, 3);  /* Hidden control variables. */
  lex_check(ls, TK_do);
  loop = bcemit_AJ(fs, isnext ? BC_ISNEXT : BC_JMP, base, NO_JMP);
  fscope_begin(fs, &bl, 0);  /* Scope for visible variables. */
  var_add(ls, nvars-3);
  bcreg_reserve(fs, nvars-3);
  parse_block(ls);
  fscope_end(fs);
  /* Perform loop inversion. Loop control instructions are at the end. */
  jmp_patchins(fs, loop, fs->pc);
  bcemit_ABC(fs, isnext ? BC_ITERN : BC_ITERC, base, nvars-3+1, 2+1);
  loopend = bcemit_AJ(fs, BC_ITERL, base, NO_JMP);
  fs->bcbase[loopend-1].line = line;  /* Fix line for control ins. */
  fs->bcbase[loopend].line = line;
  jmp_patchins(fs, loopend, loop+1);
}
/* Parse 'for' statement. */
static void parse_for(LexState *ls, BCLine line)
{
  FuncState *fs = ls->fs;
  GCstr *varname;
  FuncScope bl;
  fscope_begin(fs, &bl, FSCOPE_LOOP);
  lj_lex_next(ls);  /* Skip 'for'. */
  varname = lex_str(ls);  /* Get first variable name. */
  if (ls->tok == '=')
    parse_for_num(ls, varname, line);
  else if (ls->tok == ',' || ls->tok == TK_in)
    parse_for_iter(ls, varname);
  else
    err_syntax(ls, LJ_ERR_XFOR);
  lex_match(ls, TK_end, TK_for, line);
  fscope_end(fs);  /* Resolve break list. */
}
/* Parse condition and 'then' block. */
static BCPos parse_then(LexState *ls)
{
  BCPos condexit;
  lj_lex_next(ls);  /* Skip 'if' or 'elseif'. */
  condexit = expr_cond(ls);
  lex_check(ls, TK_then);
  parse_block(ls);
  return condexit;
}
/* Parse 'if' statement. */
static void parse_if(LexState *ls, BCLine line)
{
  FuncState *fs = ls->fs;
  BCPos flist;
  BCPos escapelist = NO_JMP;
  flist = parse_then(ls);
  while (ls->tok == TK_elseif) {  /* Parse multiple 'elseif' blocks. */
    jmp_append(fs, &escapelist, bcemit_jmp(fs));
    jmp_tohere(fs, flist);
    flist = parse_then(ls);
  }
  if (ls->tok == TK_else) {  /* Parse optional 'else' block. */
    jmp_append(fs, &escapelist, bcemit_jmp(fs));
    jmp_tohere(fs, flist);
    lj_lex_next(ls);  /* Skip 'else'. */
    parse_block(ls);
  } else {
    jmp_append(fs, &escapelist, flist);
  }
  jmp_tohere(fs, escapelist);
  lex_match(ls, TK_end, TK_if, line);
}
/* -- Parse statements ---------------------------------------------------- */

/* Parse a statement. Returns 1 if it must be the last one in a chunk. */
static int parse_stmt(LexState *ls)
{
  BCLine line = ls->linenumber;
  switch (ls->tok) {
  case TK_if:
    parse_if(ls, line);
    break;
  case TK_while:
    parse_while(ls, line);
    break;
  case TK_do:
    lj_lex_next(ls);
    parse_block(ls);
    lex_match(ls, TK_end, TK_do, line);
    break;
  case TK_for:
    parse_for(ls, line);
    break;
  case TK_repeat:
    parse_repeat(ls, line);
    break;
  case TK_function:
    parse_func(ls, line);
    break;
  case TK_local:
    lj_lex_next(ls);
    parse_local(ls);
    break;
  case TK_return:
    parse_return(ls);
    return 1;  /* Must be last. */
  case TK_break:
    lj_lex_next(ls);
    parse_break(ls);
    return !LJ_52;  /* Must be last in Lua 5.1. */
#if LJ_52
  case ';':
    lj_lex_next(ls);
    break;
#endif
  case TK_label:
    parse_label(ls);
    break;
  case TK_goto:
    if (LJ_52 || lj_lex_lookahead(ls) == TK_name) {
      lj_lex_next(ls);
      parse_goto(ls);
      break;
    }
    /* fallthrough */
  default:
    parse_call_assign(ls);
    break;
  }
  return 0;
}
/* A chunk is a list of statements optionally separated by semicolons. */
static void parse_chunk(LexState *ls)
{
  int islast = 0;
  synlevel_begin(ls);
  while (!islast && !parse_isend(ls->tok)) {
    islast = parse_stmt(ls);
    lex_opt(ls, ';');
    lj_assertLS(ls->fs->framesize >= ls->fs->freereg &&
		ls->fs->freereg >= ls->fs->nactvar,
		"bad regalloc");
    ls->fs->freereg = ls->fs->nactvar;  /* Free registers after each stmt. */
  }
  synlevel_end(ls);
}
/* Entry point of bytecode parser. */
GCproto *lj_parse(LexState *ls)
{
  FuncState fs;
  FuncScope bl;
  GCproto *pt;
  lua_State *L = ls->L;
#ifdef LUAJIT_DISABLE_DEBUGINFO
  ls->chunkname = lj_str_newlit(L, "=");
#else
  ls->chunkname = lj_str_newz(L, ls->chunkarg);
#endif
  setstrV(L, L->top, ls->chunkname);  /* Anchor chunkname string. */
  incr_top(L);
  ls->level = 0;
  fs_init(ls, &fs);
  fs.linedefined = 0;
  fs.numparams = 0;
  fs.bcbase = NULL;
  fs.bclim = 0;
  fs.flags |= PROTO_VARARG;  /* Main chunk is always a vararg func. */
  fscope_begin(&fs, &bl, 0);
  bcemit_AD(&fs, BC_FUNCV, 0, 0);  /* Placeholder. */
  lj_lex_next(ls);  /* Read-ahead first token. */
  parse_chunk(ls);
  if (ls->tok != TK_eof)
    err_token(ls, TK_eof);
  pt = fs_finish(ls, ls->linenumber);
  L->top--;  /* Drop chunkname. */
  lj_assertL(fs.prev == NULL && ls->fs == NULL, "mismatched frame nesting");
  lj_assertL(pt->sizeuv == 0, "toplevel proto has upvalues");
  return pt;
}