/*
** FFI C callback handling.
** Copyright (C) 2005-2012 Mike Pall. See Copyright Notice in luajit.h
*/

#include "lj_obj.h"

#if LJ_HASFFI

#include "lj_gc.h"
#include "lj_err.h"
#include "lj_tab.h"
#include "lj_state.h"
#include "lj_frame.h"
#include "lj_ctype.h"
#include "lj_cconv.h"
#include "lj_ccall.h"
#include "lj_ccallback.h"
#include "lj_target.h"
#include "lj_mcode.h"
#include "lj_vm.h"

/* -- Target-specific handling of callback slots -------------------------- */

#define CALLBACK_MCODE_SIZE  (LJ_PAGESIZE * LJ_NUM_CBPAGE)

#if LJ_TARGET_X86ORX64

#define CALLBACK_MCODE_HEAD   (LJ_64 ? 8 : 0)
#define CALLBACK_MCODE_GROUP  (-2+1+2+5+(LJ_64 ? 6 : 5))
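
/*
** Rough layout of the x86/x64 callback mcode area (see callback_mcode_init
** below): on x64 an 8 byte head stores the address of lj_vm_ffi_callback,
** each slot gets a 4 byte "mov al, slot; jmp" stub, and every group of 32
** slots shares a common tail that loads g and jumps to the callback handler.
*/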

#define CALLBACK_SLOT2OFS(slot) \
  (CALLBACK_MCODE_HEAD + CALLBACK_MCODE_GROUP*((slot)/32) + 4*(slot))

static MSize CALLBACK_OFS2SLOT(MSize ofs)
{
  MSize group;
  ofs -= CALLBACK_MCODE_HEAD;
  group = ofs / (32*4 + CALLBACK_MCODE_GROUP);
  return (ofs % (32*4 + CALLBACK_MCODE_GROUP))/4 + group*32;
}

#define CALLBACK_MAX_SLOT \
  (((CALLBACK_MCODE_SIZE-CALLBACK_MCODE_HEAD)/(CALLBACK_MCODE_GROUP+4*32))*32)

#elif LJ_TARGET_ARM

#define CALLBACK_MCODE_HEAD  32
#define CALLBACK_SLOT2OFS(slot)  (CALLBACK_MCODE_HEAD + 8*(slot))
#define CALLBACK_OFS2SLOT(ofs)  (((ofs)-CALLBACK_MCODE_HEAD)/8)
#define CALLBACK_MAX_SLOT  (CALLBACK_OFS2SLOT(CALLBACK_MCODE_SIZE))

#elif LJ_TARGET_PPC

#define CALLBACK_MCODE_HEAD  24
#define CALLBACK_SLOT2OFS(slot)  (CALLBACK_MCODE_HEAD + 8*(slot))
#define CALLBACK_OFS2SLOT(ofs)  (((ofs)-CALLBACK_MCODE_HEAD)/8)
#define CALLBACK_MAX_SLOT  (CALLBACK_OFS2SLOT(CALLBACK_MCODE_SIZE))

#elif LJ_TARGET_MIPS

#define CALLBACK_MCODE_HEAD  24
#define CALLBACK_SLOT2OFS(slot)  (CALLBACK_MCODE_HEAD + 8*(slot))
#define CALLBACK_OFS2SLOT(ofs)  (((ofs)-CALLBACK_MCODE_HEAD)/8)
#define CALLBACK_MAX_SLOT  (CALLBACK_OFS2SLOT(CALLBACK_MCODE_SIZE))

#else

/* Missing support for this architecture. */
#define CALLBACK_SLOT2OFS(slot)  (0*(slot))
#define CALLBACK_OFS2SLOT(ofs)  (0*(ofs))
#define CALLBACK_MAX_SLOT  0

#endif

/* Convert callback slot number to callback function pointer. */
static void *callback_slot2ptr(CTState *cts, MSize slot)
{
  return (uint8_t *)cts->cb.mcode + CALLBACK_SLOT2OFS(slot);
}

/* Convert callback function pointer to slot number. */
MSize lj_ccallback_ptr2slot(CTState *cts, void *p)
{
  uintptr_t ofs = (uintptr_t)((uint8_t *)p - (uint8_t *)cts->cb.mcode);
  if (ofs < CALLBACK_MCODE_SIZE) {
    MSize slot = CALLBACK_OFS2SLOT((MSize)ofs);
    if (CALLBACK_SLOT2OFS(slot) == (MSize)ofs)
      return slot;
  }
  return ~0u;  /* Not a known callback function pointer. */
}
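
/*
** Each per-slot stub generated below records its slot number (in a register
** or via its own address) and branches to a common head, which loads g and
** jumps to lj_vm_ffi_callback to dispatch to the Lua function for that slot.
*/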

/* Initialize machine code for callback function pointers. */
#if LJ_TARGET_X86ORX64
static void callback_mcode_init(global_State *g, uint8_t *page)
{
  uint8_t *p = page;
  uint8_t *target = (uint8_t *)(void *)lj_vm_ffi_callback;
  MSize slot;
#if LJ_64
  *(void **)p = target; p += 8;
#endif
  for (slot = 0; slot < CALLBACK_MAX_SLOT; slot++) {
    /* mov al, slot; jmp group */
    *p++ = XI_MOVrib | RID_EAX; *p++ = (uint8_t)slot;
    if ((slot & 31) == 31 || slot == CALLBACK_MAX_SLOT-1) {
      /* push ebp/rbp; mov ah, slot>>8; mov ebp, &g. */
      *p++ = XI_PUSH + RID_EBP;
      *p++ = XI_MOVrib | (RID_EAX+4); *p++ = (uint8_t)(slot >> 8);
      *p++ = XI_MOVri | RID_EBP;
      *(int32_t *)p = i32ptr(g); p += 4;
#if LJ_64
      /* jmp [rip-pageofs] where lj_vm_ffi_callback is stored. */
      *p++ = XI_GROUP5; *p++ = XM_OFS0 + (XOg_JMP<<3) + RID_EBP;
      *(int32_t *)p = (int32_t)(page-(p+4)); p += 4;
#else
      /* jmp lj_vm_ffi_callback. */
      *p++ = XI_JMP; *(int32_t *)p = target-(p+4); p += 4;
#endif
    } else {
      *p++ = XI_JMPs; *p++ = (uint8_t)((2+2)*(31-(slot&31)) - 2);
    }
  }
  lua_assert(p - page <= CALLBACK_MCODE_SIZE);
}
#elif LJ_TARGET_ARM
static void callback_mcode_init(global_State *g, uint32_t *page)
{
  uint32_t *p = page;
  void *target = (void *)lj_vm_ffi_callback;
  MSize slot;
  /* This must match with the saveregs macro in buildvm_arm.dasc. */
  *p++ = ARMI_SUB|ARMF_D(RID_R12)|ARMF_N(RID_R12)|ARMF_M(RID_PC);
  *p++ = ARMI_PUSH|ARMF_N(RID_SP)|RSET_RANGE(RID_R4,RID_R11+1)|RID2RSET(RID_LR);
  *p++ = ARMI_SUB|ARMI_K12|ARMF_D(RID_R12)|ARMF_N(RID_R12)|CALLBACK_MCODE_HEAD;
  *p++ = ARMI_STR|ARMI_LS_P|ARMI_LS_W|ARMF_D(RID_R12)|ARMF_N(RID_SP)|(CFRAME_SIZE-4*9);
  *p++ = ARMI_LDR|ARMI_LS_P|ARMI_LS_U|ARMF_D(RID_R12)|ARMF_N(RID_PC);
  *p++ = ARMI_LDR|ARMI_LS_P|ARMI_LS_U|ARMF_D(RID_PC)|ARMF_N(RID_PC);
  *p++ = u32ptr(g);
  *p++ = u32ptr(target);
  for (slot = 0; slot < CALLBACK_MAX_SLOT; slot++) {
    *p++ = ARMI_MOV|ARMF_D(RID_R12)|ARMF_M(RID_PC);
    *p = ARMI_B | ((page-p-2) & 0x00ffffffu);
    p++;
  }
  lua_assert(p - page <= CALLBACK_MCODE_SIZE);
}
#elif LJ_TARGET_PPC
static void callback_mcode_init(global_State *g, uint32_t *page)
{
  uint32_t *p = page;
  void *target = (void *)lj_vm_ffi_callback;
  MSize slot;
  *p++ = PPCI_LIS | PPCF_T(RID_TMP) | (u32ptr(target) >> 16);
  *p++ = PPCI_LIS | PPCF_T(RID_R12) | (u32ptr(g) >> 16);
  *p++ = PPCI_ORI | PPCF_A(RID_TMP)|PPCF_T(RID_TMP) | (u32ptr(target) & 0xffff);
  *p++ = PPCI_ORI | PPCF_A(RID_R12)|PPCF_T(RID_R12) | (u32ptr(g) & 0xffff);
  *p++ = PPCI_MTCTR | PPCF_T(RID_TMP);
  *p++ = PPCI_BCTR;
  for (slot = 0; slot < CALLBACK_MAX_SLOT; slot++) {
    *p++ = PPCI_LI | PPCF_T(RID_R11) | slot;
    *p = PPCI_B | (((page-p) & 0x00ffffffu) << 2);
    p++;
  }
  lua_assert(p - page <= CALLBACK_MCODE_SIZE);
}
#elif LJ_TARGET_MIPS
static void callback_mcode_init(global_State *g, uint32_t *page)
{
  uint32_t *p = page;
  void *target = (void *)lj_vm_ffi_callback;
  MSize slot;
  *p++ = MIPSI_SW | MIPSF_T(RID_R1)|MIPSF_S(RID_SP) | 0;
  *p++ = MIPSI_LUI | MIPSF_T(RID_R3) | (u32ptr(target) >> 16);
  *p++ = MIPSI_LUI | MIPSF_T(RID_R2) | (u32ptr(g) >> 16);
  *p++ = MIPSI_ORI | MIPSF_T(RID_R3)|MIPSF_S(RID_R3) | (u32ptr(target)&0xffff);
  *p++ = MIPSI_JR | MIPSF_S(RID_R3);
  *p++ = MIPSI_ORI | MIPSF_T(RID_R2)|MIPSF_S(RID_R2) | (u32ptr(g)&0xffff);
  for (slot = 0; slot < CALLBACK_MAX_SLOT; slot++) {
    *p = MIPSI_B | ((page-p-1) & 0x0000ffffu);
    p++;
    *p++ = MIPSI_LI | MIPSF_T(RID_R1) | slot;
  }
  lua_assert(p - page <= CALLBACK_MCODE_SIZE);
}
#else
/* Missing support for this architecture. */
#define callback_mcode_init(g, p)  UNUSED(p)
#endif

/* -- Machine code management --------------------------------------------- */

#if LJ_TARGET_WINDOWS

#define WIN32_LEAN_AND_MEAN
#include <windows.h>

#elif LJ_TARGET_POSIX

#include <sys/mman.h>
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS  MAP_ANON
#endif

#endif
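
/*
** The callback area is allocated writable, filled with the dispatch stubs
** and only then switched to read+execute, so the finished page is not left
** writable and executable at the same time (except with the fallback
** allocator, which relies on memory being executable by default).
*/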

/* Allocate and initialize area for callback function pointers. */
static void callback_mcode_new(CTState *cts)
{
  size_t sz = (size_t)CALLBACK_MCODE_SIZE;
  void *p;
  if (CALLBACK_MAX_SLOT == 0)
    lj_err_caller(cts->L, LJ_ERR_FFI_CBACKOV);
#if LJ_TARGET_WINDOWS
  p = VirtualAlloc(NULL, sz, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE);
  if (!p)
    lj_err_caller(cts->L, LJ_ERR_FFI_CBACKOV);
#elif LJ_TARGET_POSIX
  p = mmap(NULL, sz, (PROT_READ|PROT_WRITE), MAP_PRIVATE|MAP_ANONYMOUS,
           -1, 0);
  if (p == MAP_FAILED)
    lj_err_caller(cts->L, LJ_ERR_FFI_CBACKOV);
#else
  /* Fallback allocator. Fails if memory is not executable by default. */
  p = lj_mem_new(cts->L, sz);
#endif
  cts->cb.mcode = p;
  callback_mcode_init(cts->g, p);
  lj_mcode_sync(p, (char *)p + sz);
#if LJ_TARGET_WINDOWS
  {
    DWORD oprot;
    VirtualProtect(p, sz, PAGE_EXECUTE_READ, &oprot);
  }
#elif LJ_TARGET_POSIX
  mprotect(p, sz, (PROT_READ|PROT_EXEC));
#endif
}

/* Free area for callback function pointers. */
void lj_ccallback_mcode_free(CTState *cts)
{
  size_t sz = (size_t)CALLBACK_MCODE_SIZE;
  void *p = cts->cb.mcode;
  if (p == NULL) return;
#if LJ_TARGET_WINDOWS
  VirtualFree(p, 0, MEM_RELEASE);
  UNUSED(sz);
#elif LJ_TARGET_POSIX
  munmap(p, sz);
#else
  lj_mem_free(cts->g, p, sz);
#endif
}

/* -- C callback entry ---------------------------------------------------- */

/* Target-specific handling of register arguments. Similar to lj_ccall.c. */
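/*
** Each CALLBACK_HANDLE_REGARG macro reads the incoming argument registers
** saved in cts->cb.gpr/fpr and jumps to done on success; otherwise control
** falls through to the stack-argument path in callback_conv_args.
*/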
#if LJ_TARGET_X86

#define CALLBACK_HANDLE_REGARG \
  if (!isfp) {  /* Only non-FP values may be passed in registers. */ \
    if (n > 1) {  /* Anything > 32 bit is passed on the stack. */ \
      if (!LJ_ABI_WIN) ngpr = maxgpr;  /* Prevent reordering. */ \
    } else if (ngpr + 1 <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
      ngpr += n; \
      goto done; \
    } \
  }

#elif LJ_TARGET_X64 && LJ_ABI_WIN

/* Windows/x64 argument registers are strictly positional (use ngpr). */
#define CALLBACK_HANDLE_REGARG \
  if (isfp) { \
    if (ngpr < 4) { sp = &cts->cb.fpr[ngpr++]; nfpr = ngpr; goto done; } \
  } else { \
    if (ngpr < 4) { sp = &cts->cb.gpr[ngpr++]; goto done; } \
  }

#elif LJ_TARGET_X64

#define CALLBACK_HANDLE_REGARG \
  if (isfp) { \
    if (nfpr + n <= CCALL_NARG_FPR) { \
      sp = &cts->cb.fpr[nfpr]; \
      nfpr += n; \
      goto done; \
    } \
  } else { \
    if (ngpr + n <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
      ngpr += n; \
      goto done; \
    } \
  }

#elif LJ_TARGET_ARM

#define CALLBACK_HANDLE_REGARG \
  UNUSED(isfp); \
  if (n > 1) ngpr = (ngpr + 1u) & ~1u;  /* Align to regpair. */ \
  if (ngpr + n <= maxgpr) { \
    sp = &cts->cb.gpr[ngpr]; \
    ngpr += n; \
    goto done; \
  }

#elif LJ_TARGET_PPC

#define CALLBACK_HANDLE_REGARG \
  if (isfp) { \
    if (nfpr + 1 <= CCALL_NARG_FPR) { \
      sp = &cts->cb.fpr[nfpr]; \
      nfpr += 1; \
      cta = ctype_get(cts, CTID_DOUBLE);  /* FPRs always hold doubles. */ \
      goto done; \
    } \
  } else {  /* Try to pass argument in GPRs. */ \
    if (n > 1) { \
      lua_assert(ctype_isinteger(cta->info) && n == 2);  /* int64_t. */ \
      ngpr = (ngpr + 1u) & ~1u;  /* Align int64_t to regpair. */ \
    } \
    if (ngpr + n <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
      ngpr += n; \
      goto done; \
    } \
  }

#define CALLBACK_HANDLE_RET \
  if (ctype_isfp(ctr->info) && ctr->size == sizeof(float)) \
    *(double *)dp = *(float *)dp;  /* FPRs always hold doubles. */

#elif LJ_TARGET_MIPS

#define CALLBACK_HANDLE_REGARG \
  if (isfp && nfpr < CCALL_NARG_FPR) {  /* Try to pass argument in FPRs. */ \
    sp = (void *)((uint8_t *)&cts->cb.fpr[nfpr] + ((LJ_BE && n==1) ? 4 : 0)); \
    nfpr++; ngpr += n; \
    goto done; \
  } else {  /* Try to pass argument in GPRs. */ \
    nfpr = CCALL_NARG_FPR; \
    if (n > 1) ngpr = (ngpr + 1u) & ~1u;  /* Align to regpair. */ \
    if (ngpr + n <= maxgpr) { \
      sp = &cts->cb.gpr[ngpr]; \
      ngpr += n; \
      goto done; \
    } \
  }

#define CALLBACK_HANDLE_RET \
  if (ctype_isfp(ctr->info) && ctr->size == sizeof(float)) \
    ((float *)dp)[1] = *(float *)dp;

#else
#error "Missing calling convention definitions for this architecture"
#endif
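
/*
** callback_conv_args() below first sets up a continuation frame underneath
** the arguments: one TValue holds the LJ_CONT_FFI_CALLBACK continuation and
** the return ctype ID, the next one the callback function itself. The C
** arguments are then converted from the saved registers and stack and pushed
** as the arguments of the Lua call.
*/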

/* Convert and push callback arguments to Lua stack. */
static void callback_conv_args(CTState *cts, lua_State *L)
{
  TValue *o = L->top;
  intptr_t *stack = cts->cb.stack;
  MSize slot = cts->cb.slot;
  CTypeID id = 0, rid, fid;
  CType *ct;
  GCfunc *fn;
  MSize ngpr = 0, nsp = 0, maxgpr = CCALL_NARG_GPR;
#if CCALL_NARG_FPR
  MSize nfpr = 0;
#endif

  if (slot < cts->cb.sizeid && (id = cts->cb.cbid[slot]) != 0) {
    ct = ctype_get(cts, id);
    rid = ctype_cid(ct->info);
    fn = funcV(lj_tab_getint(cts->miscmap, (int32_t)slot));
  } else {  /* Must set up frame first, before throwing the error. */
    ct = NULL;
    rid = 0;
    fn = (GCfunc *)L;
  }
  o->u32.lo = LJ_CONT_FFI_CALLBACK;  /* Continuation returns from callback. */
  o->u32.hi = rid;  /* Return type. x86: +(spadj<<16). */
  o++;
  setframe_gc(o, obj2gco(fn));
  setframe_ftsz(o, (int)((char *)(o+1) - (char *)L->base) + FRAME_CONT);
  L->top = L->base = ++o;
  if (!ct)
    lj_err_caller(cts->L, LJ_ERR_FFI_BADCBACK);
  if (isluafunc(fn))
    setcframe_pc(L->cframe, proto_bc(funcproto(fn))+1);
  lj_state_checkstack(L, LUA_MINSTACK);  /* May throw. */
  o = L->base;  /* Might have been reallocated. */

#if LJ_TARGET_X86
  /* x86 has several different calling conventions. */
  switch (ctype_cconv(ct->info)) {
  case CTCC_FASTCALL: maxgpr = 2; break;
  case CTCC_THISCALL: maxgpr = 1; break;
  default: maxgpr = 0; break;
  }
#endif

  fid = ct->sib;
  while (fid) {
    CType *ctf = ctype_get(cts, fid);
    if (!ctype_isattrib(ctf->info)) {
      CType *cta;
      void *sp;
      CTSize sz;
      int isfp;
      MSize n;
      lua_assert(ctype_isfield(ctf->info));
      cta = ctype_rawchild(cts, ctf);
      if (ctype_isenum(cta->info)) cta = ctype_child(cts, cta);
      isfp = ctype_isfp(cta->info);
      sz = (cta->size + CTSIZE_PTR-1) & ~(CTSIZE_PTR-1);
      n = sz / CTSIZE_PTR;  /* Number of GPRs or stack slots needed. */

      CALLBACK_HANDLE_REGARG  /* Handle register arguments. */

      /* Otherwise pass argument on stack. */
      if (CCALL_ALIGN_STACKARG && LJ_32 && sz == 8)
        nsp = (nsp + 1) & ~1u;  /* Align 64 bit argument on stack. */
      sp = &stack[nsp];
      nsp += n;

    done:
      if (LJ_BE && cta->size < CTSIZE_PTR)
        sp = (void *)((uint8_t *)sp + CTSIZE_PTR-cta->size);
      lj_cconv_tv_ct(cts, cta, 0, o++, sp);
    }
    fid = ctf->sib;
  }
  L->top = o;
#if LJ_TARGET_X86
  /* Store stack adjustment for returns from fastcall/stdcall callbacks. */
  switch (ctype_cconv(ct->info)) {
  case CTCC_FASTCALL: case CTCC_STDCALL:
    (L->base-2)->u32.hi |= (nsp << (16+2));
    break;
  }
#endif
}
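
/*
** The return ctype ID is read back from the continuation TValue set up in
** callback_conv_args(). On x86, cts->cb.gpr[2] records the FP result kind
** (1 = float, 2 = double, 0 otherwise) for the VM's callback return path.
*/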

/* Convert Lua object to callback result. */
static void callback_conv_result(CTState *cts, lua_State *L, TValue *o)
{
  CType *ctr = ctype_raw(cts, (uint16_t)(L->base-2)->u32.hi);
#if LJ_TARGET_X86
  cts->cb.gpr[2] = 0;
#endif
  if (!ctype_isvoid(ctr->info)) {
    uint8_t *dp = (uint8_t *)&cts->cb.gpr[0];
#if CCALL_NUM_FPR
    if (ctype_isfp(ctr->info))
      dp = (uint8_t *)&cts->cb.fpr[0];
#endif
    lj_cconv_ct_tv(cts, ctr, dp, o, 0);
#ifdef CALLBACK_HANDLE_RET
    CALLBACK_HANDLE_RET
#endif
    /* Extend returned integers to (at least) 32 bits. */
    if (ctype_isinteger_or_bool(ctr->info) && ctr->size < 4) {
      if (ctr->info & CTF_UNSIGNED)
        *(uint32_t *)dp = ctr->size == 1 ? (uint32_t)*(uint8_t *)dp :
                          (uint32_t)*(uint16_t *)dp;
      else
        *(int32_t *)dp = ctr->size == 1 ? (int32_t)*(int8_t *)dp :
                         (int32_t)*(int16_t *)dp;
    }
#if LJ_TARGET_X86
    if (ctype_isfp(ctr->info))
      cts->cb.gpr[2] = ctr->size == sizeof(float) ? 1 : 2;
#endif
  }
}
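
/*
** lj_ccallback_enter() and lj_ccallback_leave() bracket the actual call: the
** VM's callback handler calls enter to set up the C frame and the arguments,
** runs the Lua function on the returned lua_State, and then calls leave to
** convert the result and drop the C and continuation frames again.
*/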

/* Enter callback. */
lua_State * LJ_FASTCALL lj_ccallback_enter(CTState *cts, void *cf)
{
  lua_State *L = cts->L;
  lua_assert(L != NULL);
  if (gcref(cts->g->jit_L))
    lj_err_caller(gco2th(gcref(cts->g->jit_L)), LJ_ERR_FFI_BADCBACK);
  /* Setup C frame. */
  cframe_prev(cf) = L->cframe;
  setcframe_L(cf, L);
  cframe_errfunc(cf) = -1;
  cframe_nres(cf) = 0;
  L->cframe = cf;
  callback_conv_args(cts, L);
  return L;  /* Now call the function on this stack. */
}

/* Leave callback. */
void LJ_FASTCALL lj_ccallback_leave(CTState *cts, TValue *o)
{
  lua_State *L = cts->L;
  GCfunc *fn;
  TValue *obase = L->base;
  L->base = L->top;  /* Keep continuation frame for throwing errors. */
  if (o >= L->base) {
    /* PC of RET* is lost. Point to last line for result conv. errors. */
    fn = curr_func(L);
    if (isluafunc(fn)) {
      GCproto *pt = funcproto(fn);
      setcframe_pc(L->cframe, proto_bc(pt)+pt->sizebc+1);
    }
  }
  callback_conv_result(cts, L, o);
  /* Finally drop C frame and continuation frame. */
  L->cframe = cframe_prev(L->cframe);
  L->top -= 2;
  L->base = obase;
}

/* -- C callback management ----------------------------------------------- */

/* Get an unused slot in the callback slot table. */
static MSize callback_slot_new(CTState *cts, CType *ct)
{
  CTypeID id = ctype_typeid(cts, ct);
  CTypeID1 *cbid = cts->cb.cbid;
  MSize top;
  for (top = cts->cb.topid; top < cts->cb.sizeid; top++)
    if (LJ_LIKELY(cbid[top] == 0))
      goto found;
#if CALLBACK_MAX_SLOT
  if (top >= CALLBACK_MAX_SLOT)
#endif
    lj_err_caller(cts->L, LJ_ERR_FFI_CBACKOV);
  if (!cts->cb.mcode)
    callback_mcode_new(cts);
  lj_mem_growvec(cts->L, cbid, cts->cb.sizeid, CALLBACK_MAX_SLOT, CTypeID1);
  cts->cb.cbid = cbid;
  memset(cbid+top, 0, (cts->cb.sizeid-top)*sizeof(CTypeID1));
found:
  cbid[top] = id;
  cts->cb.topid = top+1;
  return top;
}

/* Check for function pointer and supported argument/result types. */
static CType *callback_checkfunc(CTState *cts, CType *ct)
{
  int narg = 0;
  if (!ctype_isptr(ct->info) || (LJ_64 && ct->size != CTSIZE_PTR))
    return NULL;
  ct = ctype_rawchild(cts, ct);
  if (ctype_isfunc(ct->info)) {
    CType *ctr = ctype_rawchild(cts, ct);
    CTypeID fid = ct->sib;
    if (!(ctype_isvoid(ctr->info) || ctype_isenum(ctr->info) ||
          ctype_isptr(ctr->info) || (ctype_isnum(ctr->info) && ctr->size <= 8)))
      return NULL;
    if ((ct->info & CTF_VARARG))
      return NULL;
    while (fid) {
      CType *ctf = ctype_get(cts, fid);
      if (!ctype_isattrib(ctf->info)) {
        CType *cta;
        lua_assert(ctype_isfield(ctf->info));
        cta = ctype_rawchild(cts, ctf);
        if (!(ctype_isenum(cta->info) || ctype_isptr(cta->info) ||
              (ctype_isnum(cta->info) && cta->size <= 8)) ||
            ++narg >= LUA_MINSTACK-3)
          return NULL;
      }
      fid = ctf->sib;
    }
    return ct;
  }
  return NULL;
}
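
/*
** From Lua, a callback is created by casting a Lua function to a C function
** pointer type, e.g. roughly:
**   ffi.cdef[[typedef int (*adder)(int, int);]]
**   local cb = ffi.cast("adder", function(a, b) return a + b end)
** lj_ccallback_new() below allocates the slot for such a cast and returns
** the machine-code stub pointer that C code can call.
*/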

/* Create a new callback and return the callback function pointer. */
void *lj_ccallback_new(CTState *cts, CType *ct, GCfunc *fn)
{
  ct = callback_checkfunc(cts, ct);
  if (ct) {
    MSize slot = callback_slot_new(cts, ct);
    GCtab *t = cts->miscmap;
    setfuncV(cts->L, lj_tab_setint(cts->L, t, (int32_t)slot), fn);
    lj_gc_anybarriert(cts->L, t);
    return callback_slot2ptr(cts, slot);
  }
  return NULL;  /* Bad conversion. */
}

#endif