2 ** FFI C callback handling.
3 ** Copyright (C) 2005-2015 Mike Pall. See Copyright Notice in luajit.h
18 #include "lj_ccallback.h"
19 #include "lj_target.h"
/* -- Target-specific handling of callback slots -------------------------- */

/* Total size of the callback mcode area: one or more VM pages. */
#define CALLBACK_MCODE_SIZE (LJ_PAGESIZE * LJ_NUM_CBPAGE)

/* NOTE(review): the #if line guarding this "callbacks disabled" branch
** (presumably #if LJ_OS_NOJIT or similar) is missing from this extract --
** restore it before compiling.
*/
/* Callbacks disabled. */
#define CALLBACK_SLOT2OFS(slot) (0*(slot))
#define CALLBACK_OFS2SLOT(ofs) (0*(ofs))
#define CALLBACK_MAX_SLOT 0

#elif LJ_TARGET_X86ORX64

/* Thunk area header size: x64 stores the 8 byte jump target up front. */
#define CALLBACK_MCODE_HEAD (LJ_64 ? 8 : 0)
/* Byte size of the dispatch code emitted once per group of 32 slots. */
#define CALLBACK_MCODE_GROUP (-2+1+2+(LJ_GC64 ? 10 : 5)+(LJ_64 ? 6 : 5))

/* Convert a callback slot number to its byte offset in the mcode area. */
#define CALLBACK_SLOT2OFS(slot) \
  (CALLBACK_MCODE_HEAD + CALLBACK_MCODE_GROUP*((slot)/32) + 4*(slot))
43 static MSize
CALLBACK_OFS2SLOT(MSize ofs
)
46 ofs
-= CALLBACK_MCODE_HEAD
;
47 group
= ofs
/ (32*4 + CALLBACK_MCODE_GROUP
);
48 return (ofs
% (32*4 + CALLBACK_MCODE_GROUP
))/4 + group
*32;
51 #define CALLBACK_MAX_SLOT \
52 (((CALLBACK_MCODE_SIZE-CALLBACK_MCODE_HEAD)/(CALLBACK_MCODE_GROUP+4*32))*32)
/* NOTE(review): the #elif LJ_TARGET_* guards separating the following four
** per-architecture CALLBACK_MCODE_HEAD definitions are missing from this
** extract (they would otherwise redefine each other) -- restore before use.
*/
#define CALLBACK_MCODE_HEAD 32

#define CALLBACK_MCODE_HEAD 32

#define CALLBACK_MCODE_HEAD 24

#define CALLBACK_MCODE_HEAD 24

/* Missing support for this architecture. */
#define CALLBACK_SLOT2OFS(slot) (0*(slot))
#define CALLBACK_OFS2SLOT(ofs) (0*(ofs))
#define CALLBACK_MAX_SLOT 0

/* Generic fallback when no target-specific mapping was defined above:
** every slot thunk occupies a fixed 8 bytes after the header.
*/
#ifndef CALLBACK_SLOT2OFS
#define CALLBACK_SLOT2OFS(slot) (CALLBACK_MCODE_HEAD + 8*(slot))
#define CALLBACK_OFS2SLOT(ofs) (((ofs)-CALLBACK_MCODE_HEAD)/8)
#define CALLBACK_MAX_SLOT (CALLBACK_OFS2SLOT(CALLBACK_MCODE_SIZE))
85 /* Convert callback slot number to callback function pointer. */
86 static void *callback_slot2ptr(CTState
*cts
, MSize slot
)
88 return (uint8_t *)cts
->cb
.mcode
+ CALLBACK_SLOT2OFS(slot
);
91 /* Convert callback function pointer to slot number. */
92 MSize
lj_ccallback_ptr2slot(CTState
*cts
, void *p
)
94 uintptr_t ofs
= (uintptr_t)((uint8_t *)p
-(uint8_t *)cts
->cb
.mcode
);
95 if (ofs
< CALLBACK_MCODE_SIZE
) {
96 MSize slot
= CALLBACK_OFS2SLOT((MSize
)ofs
);
97 if (CALLBACK_SLOT2OFS(slot
) == (MSize
)ofs
)
100 return ~0u; /* Not a known callback function pointer. */
/* Initialize machine code for callback function pointers. */
/* NOTE(review): the #if line guarding this "disabled" branch (presumably
** #if LJ_OS_NOJIT) is missing from this extract -- restore before use.
*/
/* Disabled callback support. */
#define callback_mcode_init(g, p) UNUSED(p)
#elif LJ_TARGET_X86ORX64
108 static void callback_mcode_init(global_State
*g
, uint8_t *page
)
111 uint8_t *target
= (uint8_t *)(void *)lj_vm_ffi_callback
;
114 *(void **)p
= target
; p
+= 8;
116 for (slot
= 0; slot
< CALLBACK_MAX_SLOT
; slot
++) {
117 /* mov al, slot; jmp group */
118 *p
++ = XI_MOVrib
| RID_EAX
; *p
++ = (uint8_t)slot
;
119 if ((slot
& 31) == 31 || slot
== CALLBACK_MAX_SLOT
-1) {
120 /* push ebp/rbp; mov ah, slot>>8; mov ebp, &g. */
121 *p
++ = XI_PUSH
+ RID_EBP
;
122 *p
++ = XI_MOVrib
| (RID_EAX
+4); *p
++ = (uint8_t)(slot
>> 8);
124 *p
++ = 0x48; *p
++ = XI_MOVri
| RID_EBP
;
125 *(uint64_t *)p
= (uint64_t)(g
); p
+= 8;
127 *p
++ = XI_MOVri
| RID_EBP
;
128 *(int32_t *)p
= i32ptr(g
); p
+= 4;
131 /* jmp [rip-pageofs] where lj_vm_ffi_callback is stored. */
132 *p
++ = XI_GROUP5
; *p
++ = XM_OFS0
+ (XOg_JMP
<<3) + RID_EBP
;
133 *(int32_t *)p
= (int32_t)(page
-(p
+4)); p
+= 4;
135 /* jmp lj_vm_ffi_callback. */
136 *p
++ = XI_JMP
; *(int32_t *)p
= target
-(p
+4); p
+= 4;
139 *p
++ = XI_JMPs
; *p
++ = (uint8_t)((2+2)*(31-(slot
&31)) - 2);
142 lua_assert(p
- page
<= CALLBACK_MCODE_SIZE
);
145 static void callback_mcode_init(global_State
*g
, uint32_t *page
)
148 void *target
= (void *)lj_vm_ffi_callback
;
150 /* This must match with the saveregs macro in buildvm_arm.dasc. */
151 *p
++ = ARMI_SUB
|ARMF_D(RID_R12
)|ARMF_N(RID_R12
)|ARMF_M(RID_PC
);
152 *p
++ = ARMI_PUSH
|ARMF_N(RID_SP
)|RSET_RANGE(RID_R4
,RID_R11
+1)|RID2RSET(RID_LR
);
153 *p
++ = ARMI_SUB
|ARMI_K12
|ARMF_D(RID_R12
)|ARMF_N(RID_R12
)|CALLBACK_MCODE_HEAD
;
154 *p
++ = ARMI_STR
|ARMI_LS_P
|ARMI_LS_W
|ARMF_D(RID_R12
)|ARMF_N(RID_SP
)|(CFRAME_SIZE
-4*9);
155 *p
++ = ARMI_LDR
|ARMI_LS_P
|ARMI_LS_U
|ARMF_D(RID_R12
)|ARMF_N(RID_PC
);
156 *p
++ = ARMI_LDR
|ARMI_LS_P
|ARMI_LS_U
|ARMF_D(RID_PC
)|ARMF_N(RID_PC
);
158 *p
++ = u32ptr(target
);
159 for (slot
= 0; slot
< CALLBACK_MAX_SLOT
; slot
++) {
160 *p
++ = ARMI_MOV
|ARMF_D(RID_R12
)|ARMF_M(RID_PC
);
161 *p
= ARMI_B
| ((page
-p
-2) & 0x00ffffffu
);
164 lua_assert(p
- page
<= CALLBACK_MCODE_SIZE
);
166 #elif LJ_TARGET_ARM64
167 static void callback_mcode_init(global_State
*g
, uint32_t *page
)
170 void *target
= (void *)lj_vm_ffi_callback
;
172 *p
++ = A64I_LDRLx
| A64F_D(RID_X11
) | A64F_S19(4);
173 *p
++ = A64I_LDRLx
| A64F_D(RID_X10
) | A64F_S19(5);
174 *p
++ = A64I_BR
| A64F_N(RID_X11
);
176 ((void **)p
)[0] = target
;
179 for (slot
= 0; slot
< CALLBACK_MAX_SLOT
; slot
++) {
180 *p
++ = A64I_MOVZw
| A64F_D(RID_X9
) | A64F_U16(slot
);
181 *p
= A64I_B
| A64F_S26((page
-p
) & 0x03ffffffu
);
184 lua_assert(p
- page
<= CALLBACK_MCODE_SIZE
);
187 static void callback_mcode_init(global_State
*g
, uint32_t *page
)
190 void *target
= (void *)lj_vm_ffi_callback
;
192 *p
++ = PPCI_LIS
| PPCF_T(RID_TMP
) | (u32ptr(target
) >> 16);
193 *p
++ = PPCI_LIS
| PPCF_T(RID_R12
) | (u32ptr(g
) >> 16);
194 *p
++ = PPCI_ORI
| PPCF_A(RID_TMP
)|PPCF_T(RID_TMP
) | (u32ptr(target
) & 0xffff);
195 *p
++ = PPCI_ORI
| PPCF_A(RID_R12
)|PPCF_T(RID_R12
) | (u32ptr(g
) & 0xffff);
196 *p
++ = PPCI_MTCTR
| PPCF_T(RID_TMP
);
198 for (slot
= 0; slot
< CALLBACK_MAX_SLOT
; slot
++) {
199 *p
++ = PPCI_LI
| PPCF_T(RID_R11
) | slot
;
200 *p
= PPCI_B
| (((page
-p
) & 0x00ffffffu
) << 2);
203 lua_assert(p
- page
<= CALLBACK_MCODE_SIZE
);
206 static void callback_mcode_init(global_State
*g
, uint32_t *page
)
209 void *target
= (void *)lj_vm_ffi_callback
;
211 *p
++ = MIPSI_SW
| MIPSF_T(RID_R1
)|MIPSF_S(RID_SP
) | 0;
212 *p
++ = MIPSI_LUI
| MIPSF_T(RID_R3
) | (u32ptr(target
) >> 16);
213 *p
++ = MIPSI_LUI
| MIPSF_T(RID_R2
) | (u32ptr(g
) >> 16);
214 *p
++ = MIPSI_ORI
| MIPSF_T(RID_R3
)|MIPSF_S(RID_R3
) |(u32ptr(target
)&0xffff);
215 *p
++ = MIPSI_JR
| MIPSF_S(RID_R3
);
216 *p
++ = MIPSI_ORI
| MIPSF_T(RID_R2
)|MIPSF_S(RID_R2
) | (u32ptr(g
)&0xffff);
217 for (slot
= 0; slot
< CALLBACK_MAX_SLOT
; slot
++) {
218 *p
= MIPSI_B
| ((page
-p
-1) & 0x0000ffffu
);
220 *p
++ = MIPSI_LI
| MIPSF_T(RID_R1
) | slot
;
222 lua_assert(p
- page
<= CALLBACK_MCODE_SIZE
);
/* Missing support for this architecture. */
/* NOTE(review): the preceding #else and the closing #endif of the target
** dispatch are not visible in this extract.
*/
#define callback_mcode_init(g, p) UNUSED(p)
/* -- Machine code management --------------------------------------------- */

#if LJ_TARGET_WINDOWS

#define WIN32_LEAN_AND_MEAN
/* NOTE(review): the #include <windows.h> that normally follows this define
** is not visible in this extract.
*/

#elif LJ_TARGET_POSIX

#include <sys/mman.h>
/* Some platforms (BSD) only define MAP_ANON; alias it. */
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
245 /* Allocate and initialize area for callback function pointers. */
246 static void callback_mcode_new(CTState
*cts
)
248 size_t sz
= (size_t)CALLBACK_MCODE_SIZE
;
250 if (CALLBACK_MAX_SLOT
== 0)
251 lj_err_caller(cts
->L
, LJ_ERR_FFI_CBACKOV
);
252 #if LJ_TARGET_WINDOWS
253 p
= VirtualAlloc(NULL
, sz
, MEM_RESERVE
|MEM_COMMIT
, PAGE_READWRITE
);
255 lj_err_caller(cts
->L
, LJ_ERR_FFI_CBACKOV
);
256 #elif LJ_TARGET_POSIX
257 p
= mmap(NULL
, sz
, (PROT_READ
|PROT_WRITE
), MAP_PRIVATE
|MAP_ANONYMOUS
,
260 lj_err_caller(cts
->L
, LJ_ERR_FFI_CBACKOV
);
262 /* Fallback allocator. Fails if memory is not executable by default. */
263 p
= lj_mem_new(cts
->L
, sz
);
266 callback_mcode_init(cts
->g
, p
);
267 lj_mcode_sync(p
, (char *)p
+ sz
);
268 #if LJ_TARGET_WINDOWS
271 VirtualProtect(p
, sz
, PAGE_EXECUTE_READ
, &oprot
);
273 #elif LJ_TARGET_POSIX
274 mprotect(p
, sz
, (PROT_READ
|PROT_EXEC
));
278 /* Free area for callback function pointers. */
279 void lj_ccallback_mcode_free(CTState
*cts
)
281 size_t sz
= (size_t)CALLBACK_MCODE_SIZE
;
282 void *p
= cts
->cb
.mcode
;
283 if (p
== NULL
) return;
284 #if LJ_TARGET_WINDOWS
285 VirtualFree(p
, 0, MEM_RELEASE
);
287 #elif LJ_TARGET_POSIX
290 lj_mem_free(cts
->g
, p
, sz
);
/* -- C callback entry ---------------------------------------------------- */

/* Target-specific handling of register arguments. Similar to lj_ccall.c. */
/* NOTE(review): this extracted region is missing several #if/#elif/#endif
** lines and the tails of most CALLBACK_HANDLE_REGARG macros (the "goto
** done" / stack-fallthrough lines and closing braces). Only comments were
** added here; compare against the original and lj_ccall.c before compiling.
*/
#define CALLBACK_HANDLE_REGARG \
  if (!isfp) {  /* Only non-FP values may be passed in registers. */ \
    if (n > 1) {  /* Anything > 32 bit is passed on the stack. */ \
      if (!LJ_ABI_WIN) ngpr = maxgpr;  /* Prevent reordering. */ \
    } else if (ngpr + 1 <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
#elif LJ_TARGET_X64 && LJ_ABI_WIN

/* Windows/x64 argument registers are strictly positional (use ngpr). */
#define CALLBACK_HANDLE_REGARG \
  if (ngpr < maxgpr) { sp = &cts->cb.fpr[ngpr++]; UNUSED(nfpr); goto done; } \
  if (ngpr < maxgpr) { sp = &cts->cb.gpr[ngpr++]; goto done; } \
#define CALLBACK_HANDLE_REGARG \
  if (nfpr + n <= CCALL_NARG_FPR) { \
    sp = &cts->cb.fpr[nfpr]; \
  if (ngpr + n <= maxgpr) { \
    sp = &cts->cb.gpr[ngpr]; \
#define CALLBACK_HANDLE_REGARG_FP1 UNUSED(isfp);
/* ARM hard-float variant below backfills odd single-precision FPR halves
** via fprodd -- TODO(review) confirm against lj_ccall.c.
*/
#define CALLBACK_HANDLE_REGARG_FP2

#define CALLBACK_HANDLE_REGARG_FP1 \
    sp = &cts->cb.fpr[fprodd-1]; \
  } else if (nfpr + 1 <= CCALL_NARG_FPR) { \
    sp = &cts->cb.fpr[nfpr++]; \
  if (nfpr + 1 <= CCALL_NARG_FPR) { \
    sp = &cts->cb.fpr[nfpr++]; \
  fprodd = 0;  /* No reordering after the first FP value is on stack. */ \
#define CALLBACK_HANDLE_REGARG_FP2 }

#define CALLBACK_HANDLE_REGARG \
  CALLBACK_HANDLE_REGARG_FP1 \
  if (n > 1) ngpr = (ngpr + 1u) & ~1u;  /* Align to regpair. */ \
  if (ngpr + n <= maxgpr) { \
    sp = &cts->cb.gpr[ngpr]; \
  } CALLBACK_HANDLE_REGARG_FP2

#elif LJ_TARGET_ARM64

#define CALLBACK_HANDLE_REGARG \
  if (nfpr + n <= CCALL_NARG_FPR) { \
    sp = &cts->cb.fpr[nfpr]; \
  nfpr = CCALL_NARG_FPR;  /* Prevent reordering. */ \
  if (!LJ_TARGET_IOS && n > 1) \
    ngpr = (ngpr + 1u) & ~1u;  /* Align to regpair. */ \
  if (ngpr + n <= maxgpr) { \
    sp = &cts->cb.gpr[ngpr]; \
  ngpr = CCALL_NARG_GPR;  /* Prevent reordering. */ \
#define CALLBACK_HANDLE_REGARG \
  if (nfpr + 1 <= CCALL_NARG_FPR) { \
    sp = &cts->cb.fpr[nfpr++]; \
    cta = ctype_get(cts, CTID_DOUBLE);  /* FPRs always hold doubles. */ \
  } else {  /* Try to pass argument in GPRs. */ \
    lua_assert(ctype_isinteger(cta->info) && n == 2);  /* int64_t. */ \
    ngpr = (ngpr + 1u) & ~1u;  /* Align int64_t to regpair. */ \
  if (ngpr + n <= maxgpr) { \
    sp = &cts->cb.gpr[ngpr]; \
#define CALLBACK_HANDLE_RET \
  if (ctype_isfp(ctr->info) && ctr->size == sizeof(float)) \
    *(double *)dp = *(float *)dp;  /* FPRs always hold doubles. */

#define CALLBACK_HANDLE_REGARG \
  if (isfp && nfpr < CCALL_NARG_FPR) {  /* Try to pass argument in FPRs. */ \
    sp = (void *)((uint8_t *)&cts->cb.fpr[nfpr] + ((LJ_BE && n==1) ? 4 : 0)); \
  } else {  /* Try to pass argument in GPRs. */ \
    nfpr = CCALL_NARG_FPR; \
    if (n > 1) ngpr = (ngpr + 1u) & ~1u;  /* Align to regpair. */ \
    if (ngpr + n <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
#define CALLBACK_HANDLE_RET \
  if (ctype_isfp(ctr->info) && ctr->size == sizeof(float)) \
    ((float *)dp)[1] = *(float *)dp;

#error "Missing calling convention definitions for this architecture"
/* Convert and push callback arguments to Lua stack. */
/* NOTE(review): this function is incomplete in this extract -- the opening
** brace, several local declarations (o, gcsteps, ct, fn, fntp, nfpr, sp,
** isfp, sz, n, ...), the #if/#else/#endif lines selecting the LJ_FR2/x86
** variants, the argument-loop header/footer, the "else" branches and the
** closing lines are missing. Only comments were added; restore the missing
** lines from the original before compiling.
*/
static void callback_conv_args(CTState *cts, lua_State *L)
  intptr_t *stack = cts->cb.stack;  /* Raw stack args saved by the thunk. */
  MSize slot = cts->cb.slot;  /* Slot number stored by the mcode thunk. */
  CTypeID id = 0, rid, fid;
  MSize ngpr = 0, nsp = 0, maxgpr = CCALL_NARG_GPR;
  /* Look up the callback's ctype and target function by slot number. */
  if (slot < cts->cb.sizeid && (id = cts->cb.cbid[slot]) != 0) {
    ct = ctype_get(cts, id);
    rid = ctype_cid(ct->info);  /* Return type. x86: +(spadj<<16). */
    fn = funcV(lj_tab_getint(cts->miscmap, (int32_t)slot));
  } else {  /* Must set up frame first, before throwing the error. */
  /* Continuation returns from callback. */
  /* The two stores below are alternative #if branches (presumably LJ_FR2
  ** vs. not -- TODO confirm); only one is compiled.
  */
  (o++)->u64 = LJ_CONT_FFI_CALLBACK;
  o->u32.lo = LJ_CONT_FFI_CALLBACK;
  setframe_gc(o, obj2gco(fn), fntp);
  setframe_ftsz(o, ((char *)(o+1) - (char *)L->base) + FRAME_CONT);
  L->top = L->base = ++o;
  /* Unknown slot: throw only after the frame has been set up above. */
    lj_err_caller(cts->L, LJ_ERR_FFI_BADCBACK);
  setcframe_pc(L->cframe, proto_bc(funcproto(fn))+1);
  lj_state_checkstack(L, LUA_MINSTACK);  /* May throw. */
  o = L->base;  /* Might have been reallocated. */
  /* x86 has several different calling conventions. */
  switch (ctype_cconv(ct->info)) {
  case CTCC_FASTCALL: maxgpr = 2; break;
  case CTCC_THISCALL: maxgpr = 1; break;
  default: maxgpr = 0; break;
  /* Walk the field (argument) chain of the function ctype. */
    CType *ctf = ctype_get(cts, fid);
    if (!ctype_isattrib(ctf->info)) {
      lua_assert(ctype_isfield(ctf->info));
      cta = ctype_rawchild(cts, ctf);
      isfp = ctype_isfp(cta->info);
      sz = (cta->size + CTSIZE_PTR-1) & ~(CTSIZE_PTR-1);
      n = sz / CTSIZE_PTR;  /* Number of GPRs or stack slots needed. */
      CALLBACK_HANDLE_REGARG  /* Handle register arguments. */
      /* Otherwise pass argument on stack. */
      if (CCALL_ALIGN_STACKARG && LJ_32 && sz == 8)
        nsp = (nsp + 1) & ~1u;  /* Align 64 bit argument on stack. */
      /* Big-endian: sub-word values sit in the high end of the slot. */
      if (LJ_BE && cta->size < CTSIZE_PTR)
        sp = (void *)((uint8_t *)sp + CTSIZE_PTR-cta->size);
      gcsteps += lj_cconv_tv_ct(cts, cta, 0, o++, sp);
  /* Store stack adjustment for returns from non-cdecl callbacks. */
  if (ctype_cconv(ct->info) != CTCC_CDECL) {
    /* Alternative #if branches (frame layout dependent). */
    (L->base-3)->u64 |= (nsp << (16+2));
    (L->base-2)->u32.hi |= (nsp << (16+2));
  /* Tail (GC step loop body and closing braces) missing from extract. */
  while (gcsteps-- > 0)
/* Convert Lua object to callback result. */
/* NOTE(review): incomplete extract -- the opening brace, the #if/#else
** lines selecting between the two ctr initializations, the "else" of the
** sign/zero-extension branch and several closing braces are missing. Only
** comments were added here.
*/
static void callback_conv_result(CTState *cts, lua_State *L, TValue *o)
  /* Fetch return ctype id stashed in the continuation frame; the two lines
  ** below are alternative #if branches (frame layout dependent).
  */
  CType *ctr = ctype_raw(cts, (uint16_t)(L->base-3)->u64);
  CType *ctr = ctype_raw(cts, (uint16_t)(L->base-2)->u32.hi);
  if (!ctype_isvoid(ctr->info)) {
    uint8_t *dp = (uint8_t *)&cts->cb.gpr[0];  /* Result goes to GPR area. */
    if (ctype_isfp(ctr->info))
      dp = (uint8_t *)&cts->cb.fpr[0];  /* ...or FPR area for FP results. */
    lj_cconv_ct_tv(cts, ctr, dp, o, 0);
#ifdef CALLBACK_HANDLE_RET
    /* Extend returned integers to (at least) 32 bits. */
    if (ctype_isinteger_or_bool(ctr->info) && ctr->size < 4) {
      if (ctr->info & CTF_UNSIGNED)
        *(uint32_t *)dp = ctr->size == 1 ? (uint32_t)*(uint8_t *)dp :
            (uint32_t)*(uint16_t *)dp;
      /* NOTE(review): the "else" line for the signed case is missing. */
        *(int32_t *)dp = ctr->size == 1 ? (int32_t)*(int8_t *)dp :
            (int32_t)*(int16_t *)dp;
    /* Tell the assembler VM which FP width to return -- presumably the
    ** x86 branch; confirm against the original #if guards.
    */
    if (ctype_isfp(ctr->info))
      cts->cb.gpr[2] = ctr->size == sizeof(float) ? 1 : 2;
594 /* Enter callback. */
595 lua_State
* LJ_FASTCALL
lj_ccallback_enter(CTState
*cts
, void *cf
)
597 lua_State
*L
= cts
->L
;
598 global_State
*g
= cts
->g
;
599 lua_assert(L
!= NULL
);
600 if (tvref(g
->jit_base
)) {
601 setstrV(L
, L
->top
++, lj_err_str(L
, LJ_ERR_FFI_BADCBACK
));
602 if (g
->panic
) g
->panic(L
);
605 lj_trace_abort(g
); /* Never record across callback. */
607 cframe_prev(cf
) = L
->cframe
;
609 cframe_errfunc(cf
) = -1;
612 callback_conv_args(cts
, L
);
613 return L
; /* Now call the function on this stack. */
616 /* Leave callback. */
617 void LJ_FASTCALL
lj_ccallback_leave(CTState
*cts
, TValue
*o
)
619 lua_State
*L
= cts
->L
;
621 TValue
*obase
= L
->base
;
622 L
->base
= L
->top
; /* Keep continuation frame for throwing errors. */
624 /* PC of RET* is lost. Point to last line for result conv. errors. */
627 GCproto
*pt
= funcproto(fn
);
628 setcframe_pc(L
->cframe
, proto_bc(pt
)+pt
->sizebc
+1);
631 callback_conv_result(cts
, L
, o
);
632 /* Finally drop C frame and continuation frame. */
633 L
->top
-= 2+2*LJ_FR2
;
635 L
->cframe
= cframe_prev(L
->cframe
);
636 cts
->cb
.slot
= 0; /* Blacklist C function that called the callback. */
639 /* -- C callback management ----------------------------------------------- */
641 /* Get an unused slot in the callback slot table. */
642 static MSize
callback_slot_new(CTState
*cts
, CType
*ct
)
644 CTypeID id
= ctype_typeid(cts
, ct
);
645 CTypeID1
*cbid
= cts
->cb
.cbid
;
647 for (top
= cts
->cb
.topid
; top
< cts
->cb
.sizeid
; top
++)
648 if (LJ_LIKELY(cbid
[top
] == 0))
650 #if CALLBACK_MAX_SLOT
651 if (top
>= CALLBACK_MAX_SLOT
)
653 lj_err_caller(cts
->L
, LJ_ERR_FFI_CBACKOV
);
655 callback_mcode_new(cts
);
656 lj_mem_growvec(cts
->L
, cbid
, cts
->cb
.sizeid
, CALLBACK_MAX_SLOT
, CTypeID1
);
658 memset(cbid
+top
, 0, (cts
->cb
.sizeid
-top
)*sizeof(CTypeID1
));
661 cts
->cb
.topid
= top
+1;
665 /* Check for function pointer and supported argument/result types. */
666 static CType
*callback_checkfunc(CTState
*cts
, CType
*ct
)
669 if (!ctype_isptr(ct
->info
) || (LJ_64
&& ct
->size
!= CTSIZE_PTR
))
671 ct
= ctype_rawchild(cts
, ct
);
672 if (ctype_isfunc(ct
->info
)) {
673 CType
*ctr
= ctype_rawchild(cts
, ct
);
674 CTypeID fid
= ct
->sib
;
675 if (!(ctype_isvoid(ctr
->info
) || ctype_isenum(ctr
->info
) ||
676 ctype_isptr(ctr
->info
) || (ctype_isnum(ctr
->info
) && ctr
->size
<= 8)))
678 if ((ct
->info
& CTF_VARARG
))
681 CType
*ctf
= ctype_get(cts
, fid
);
682 if (!ctype_isattrib(ctf
->info
)) {
684 lua_assert(ctype_isfield(ctf
->info
));
685 cta
= ctype_rawchild(cts
, ctf
);
686 if (!(ctype_isenum(cta
->info
) || ctype_isptr(cta
->info
) ||
687 (ctype_isnum(cta
->info
) && cta
->size
<= 8)) ||
688 ++narg
>= LUA_MINSTACK
-3)
698 /* Create a new callback and return the callback function pointer. */
699 void *lj_ccallback_new(CTState
*cts
, CType
*ct
, GCfunc
*fn
)
701 ct
= callback_checkfunc(cts
, ct
);
703 MSize slot
= callback_slot_new(cts
, ct
);
704 GCtab
*t
= cts
->miscmap
;
705 setfuncV(cts
->L
, lj_tab_setint(cts
->L
, t
, (int32_t)slot
), fn
);
706 lj_gc_anybarriert(cts
->L
, t
);
707 return callback_slot2ptr(cts
, slot
);
709 return NULL
; /* Bad conversion. */