/*
** Trace recorder for C data operations.
** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
*/
11 #if LJ_HASJIT && LJ_HASFFI
18 #include "lj_cparse.h"
20 #include "lj_carith.h"
26 #include "lj_ircall.h"
29 #include "lj_record.h"
30 #include "lj_ffrecord.h"
32 #include "lj_crecord.h"
33 #include "lj_dispatch.h"
34 #include "lj_strfmt.h"
36 /* Some local macros to save typing. Undef'd at the end. */
37 #define IR(ref) (&J->cur.ir[(ref)])
39 /* Pass IR on to next optimization in chain (FOLD). */
40 #define emitir(ot, a, b) (lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))
42 #define emitconv(a, dt, st, flags) \
43 emitir(IRT(IR_CONV, (dt)), (a), (st)|((dt) << 5)|(flags))
45 /* -- C type checks ------------------------------------------------------- */
47 static GCcdata
*argv2cdata(jit_State
*J
, TRef tr
, cTValue
*o
)
51 if (!tref_iscdata(tr
))
52 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
54 /* Specialize to the CTypeID. */
55 trtypeid
= emitir(IRT(IR_FLOAD
, IRT_U16
), tr
, IRFL_CDATA_CTYPEID
);
56 emitir(IRTG(IR_EQ
, IRT_INT
), trtypeid
, lj_ir_kint(J
, (int32_t)cd
->ctypeid
));
60 /* Specialize to the CTypeID held by a cdata constructor. */
61 static CTypeID
crec_constructor(jit_State
*J
, GCcdata
*cd
, TRef tr
)
64 lj_assertJ(tref_iscdata(tr
) && cd
->ctypeid
== CTID_CTYPEID
,
65 "expected CTypeID cdata");
66 id
= *(CTypeID
*)cdataptr(cd
);
67 tr
= emitir(IRT(IR_FLOAD
, IRT_INT
), tr
, IRFL_CDATA_INT
);
68 emitir(IRTG(IR_EQ
, IRT_INT
), tr
, lj_ir_kint(J
, (int32_t)id
));
72 static CTypeID
argv2ctype(jit_State
*J
, TRef tr
, cTValue
*o
)
78 /* Specialize to the string containing the C type declaration. */
79 emitir(IRTG(IR_EQ
, IRT_STR
), tr
, lj_ir_kstr(J
, s
));
81 cp
.cts
= ctype_cts(J
->L
);
83 cp
.srcname
= strdata(s
);
86 cp
.mode
= CPARSE_MODE_ABSTRACT
|CPARSE_MODE_NOIMPLICIT
;
87 if (lj_cparse(&cp
) || cp
.cts
->top
> oldtop
) /* Avoid new struct defs. */
88 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
91 GCcdata
*cd
= argv2cdata(J
, tr
, o
);
92 return cd
->ctypeid
== CTID_CTYPEID
? crec_constructor(J
, cd
, tr
) :
97 /* Convert CType to IRType (if possible). */
98 static IRType
crec_ct2irt(CTState
*cts
, CType
*ct
)
100 if (ctype_isenum(ct
->info
)) ct
= ctype_child(cts
, ct
);
101 if (LJ_LIKELY(ctype_isnum(ct
->info
))) {
102 if ((ct
->info
& CTF_FP
)) {
103 if (ct
->size
== sizeof(double))
105 else if (ct
->size
== sizeof(float))
108 uint32_t b
= lj_fls(ct
->size
);
110 return IRT_I8
+ 2*b
+ ((ct
->info
& CTF_UNSIGNED
) ? 1 : 0);
112 } else if (ctype_isptr(ct
->info
)) {
113 return (LJ_64
&& ct
->size
== 8) ? IRT_P64
: IRT_P32
;
114 } else if (ctype_iscomplex(ct
->info
)) {
115 if (ct
->size
== 2*sizeof(double))
117 else if (ct
->size
== 2*sizeof(float))
123 /* -- Optimized memory fill and copy -------------------------------------- */
125 /* Maximum length and unroll of inlined copy/fill. */
126 #define CREC_COPY_MAXUNROLL 16
127 #define CREC_COPY_MAXLEN 128
129 #define CREC_FILL_MAXUNROLL 16
131 /* Number of windowed registers used for optimized memory copy. */
133 #define CREC_COPY_REGWIN 2
134 #elif LJ_TARGET_PPC || LJ_TARGET_MIPS
135 #define CREC_COPY_REGWIN 8
137 #define CREC_COPY_REGWIN 4
140 /* List of memory offsets for copy/fill. */
141 typedef struct CRecMemList
{
142 CTSize ofs
; /* Offset in bytes. */
143 IRType tp
; /* Type of load/store. */
144 TRef trofs
; /* TRef of interned offset. */
145 TRef trval
; /* TRef of load value. */
148 /* Generate copy list for element-wise struct copy. */
149 static MSize
crec_copy_struct(CRecMemList
*ml
, CTState
*cts
, CType
*ct
)
151 CTypeID fid
= ct
->sib
;
154 CType
*df
= ctype_get(cts
, fid
);
156 if (ctype_isfield(df
->info
)) {
159 if (!gcref(df
->name
)) continue; /* Ignore unnamed fields. */
160 cct
= ctype_rawchild(cts
, df
); /* Field type. */
161 tp
= crec_ct2irt(cts
, cct
);
162 if (tp
== IRT_CDATA
) return 0; /* NYI: aggregates. */
163 if (mlp
>= CREC_COPY_MAXUNROLL
) return 0;
164 ml
[mlp
].ofs
= df
->size
;
167 if (ctype_iscomplex(cct
->info
)) {
168 if (mlp
>= CREC_COPY_MAXUNROLL
) return 0;
169 ml
[mlp
].ofs
= df
->size
+ (cct
->size
>> 1);
173 } else if (!ctype_isconstval(df
->info
)) {
174 /* NYI: bitfields and sub-structures. */
181 /* Generate unrolled copy list, from highest to lowest step size/alignment. */
182 static MSize
crec_copy_unroll(CRecMemList
*ml
, CTSize len
, CTSize step
,
187 if (tp
== IRT_CDATA
) tp
= IRT_U8
+ 2*lj_fls(step
);
189 while (ofs
+ step
<= len
) {
190 if (mlp
>= CREC_COPY_MAXUNROLL
) return 0;
203 ** Emit copy list with windowed loads/stores.
204 ** LJ_TARGET_UNALIGNED: may emit unaligned loads/stores (not marked as such).
206 static void crec_copy_emit(jit_State
*J
, CRecMemList
*ml
, MSize mlp
,
207 TRef trdst
, TRef trsrc
)
209 MSize i
, j
, rwin
= 0;
210 for (i
= 0, j
= 0; i
< mlp
; ) {
211 TRef trofs
= lj_ir_kintp(J
, ml
[i
].ofs
);
212 TRef trsptr
= emitir(IRT(IR_ADD
, IRT_PTR
), trsrc
, trofs
);
213 ml
[i
].trval
= emitir(IRT(IR_XLOAD
, ml
[i
].tp
), trsptr
, 0);
216 rwin
+= (LJ_SOFTFP32
&& ml
[i
].tp
== IRT_NUM
) ? 2 : 1;
217 if (rwin
>= CREC_COPY_REGWIN
|| i
>= mlp
) { /* Flush buffered stores. */
219 for ( ; j
< i
; j
++) {
220 TRef trdptr
= emitir(IRT(IR_ADD
, IRT_PTR
), trdst
, ml
[j
].trofs
);
221 emitir(IRT(IR_XSTORE
, ml
[j
].tp
), trdptr
, ml
[j
].trval
);
227 /* Optimized memory copy. */
228 static void crec_copy(jit_State
*J
, TRef trdst
, TRef trsrc
, TRef trlen
,
231 if (tref_isk(trlen
)) { /* Length must be constant. */
232 CRecMemList ml
[CREC_COPY_MAXUNROLL
];
234 CTSize step
= 1, len
= (CTSize
)IR(tref_ref(trlen
))->i
;
235 IRType tp
= IRT_CDATA
;
237 if (len
== 0) return; /* Shortcut. */
238 if (len
> CREC_COPY_MAXLEN
) goto fallback
;
240 CTState
*cts
= ctype_ctsG(J2G(J
));
241 lj_assertJ(ctype_isarray(ct
->info
) || ctype_isstruct(ct
->info
),
242 "copy of non-aggregate");
243 if (ctype_isarray(ct
->info
)) {
244 CType
*cct
= ctype_rawchild(cts
, ct
);
245 tp
= crec_ct2irt(cts
, cct
);
246 if (tp
== IRT_CDATA
) goto rawcopy
;
247 step
= lj_ir_type_size
[tp
];
248 lj_assertJ((len
& (step
-1)) == 0, "copy of fractional size");
249 } else if ((ct
->info
& CTF_UNION
)) {
250 step
= (1u << ctype_align(ct
->info
));
253 mlp
= crec_copy_struct(ml
, cts
, ct
);
259 if (LJ_TARGET_UNALIGNED
|| step
>= CTSIZE_PTR
)
262 mlp
= crec_copy_unroll(ml
, len
, step
, tp
);
265 crec_copy_emit(J
, ml
, mlp
, trdst
, trsrc
);
267 emitir(IRT(IR_XBAR
, IRT_NIL
), 0, 0);
272 /* Call memcpy. Always needs a barrier to disable alias analysis. */
273 lj_ir_call(J
, IRCALL_memcpy
, trdst
, trsrc
, trlen
);
274 emitir(IRT(IR_XBAR
, IRT_NIL
), 0, 0);
277 /* Generate unrolled fill list, from highest to lowest step size/alignment. */
278 static MSize
crec_fill_unroll(CRecMemList
*ml
, CTSize len
, CTSize step
)
282 IRType tp
= IRT_U8
+ 2*lj_fls(step
);
284 while (ofs
+ step
<= len
) {
285 if (mlp
>= CREC_COPY_MAXUNROLL
) return 0;
298 ** Emit stores for fill list.
299 ** LJ_TARGET_UNALIGNED: may emit unaligned stores (not marked as such).
301 static void crec_fill_emit(jit_State
*J
, CRecMemList
*ml
, MSize mlp
,
302 TRef trdst
, TRef trfill
)
305 for (i
= 0; i
< mlp
; i
++) {
306 TRef trofs
= lj_ir_kintp(J
, ml
[i
].ofs
);
307 TRef trdptr
= emitir(IRT(IR_ADD
, IRT_PTR
), trdst
, trofs
);
308 emitir(IRT(IR_XSTORE
, ml
[i
].tp
), trdptr
, trfill
);
312 /* Optimized memory fill. */
313 static void crec_fill(jit_State
*J
, TRef trdst
, TRef trlen
, TRef trfill
,
316 if (tref_isk(trlen
)) { /* Length must be constant. */
317 CRecMemList ml
[CREC_FILL_MAXUNROLL
];
319 CTSize len
= (CTSize
)IR(tref_ref(trlen
))->i
;
320 if (len
== 0) return; /* Shortcut. */
321 if (LJ_TARGET_UNALIGNED
|| step
>= CTSIZE_PTR
)
323 if (step
* CREC_FILL_MAXUNROLL
< len
) goto fallback
;
324 mlp
= crec_fill_unroll(ml
, len
, step
);
325 if (!mlp
) goto fallback
;
326 if (tref_isk(trfill
) || ml
[0].tp
!= IRT_U8
)
327 trfill
= emitconv(trfill
, IRT_INT
, IRT_U8
, 0);
328 if (ml
[0].tp
!= IRT_U8
) { /* Scatter U8 to U16/U32/U64. */
329 if (CTSIZE_PTR
== 8 && ml
[0].tp
== IRT_U64
) {
330 if (tref_isk(trfill
)) /* Pointless on x64 with zero-extended regs. */
331 trfill
= emitconv(trfill
, IRT_U64
, IRT_U32
, 0);
332 trfill
= emitir(IRT(IR_MUL
, IRT_U64
), trfill
,
333 lj_ir_kint64(J
, U64x(01010101,01010101)));
335 trfill
= emitir(IRTI(IR_MUL
), trfill
,
336 lj_ir_kint(J
, ml
[0].tp
== IRT_U16
? 0x0101 : 0x01010101));
339 crec_fill_emit(J
, ml
, mlp
, trdst
, trfill
);
342 /* Call memset. Always needs a barrier to disable alias analysis. */
343 lj_ir_call(J
, IRCALL_memset
, trdst
, trfill
, trlen
); /* Note: arg order! */
345 emitir(IRT(IR_XBAR
, IRT_NIL
), 0, 0);
348 /* -- Convert C type to C type -------------------------------------------- */
351 ** This code mirrors the code in lj_cconv.c. It performs the same steps
352 ** for the trace recorder that lj_cconv.c does for the interpreter.
354 ** One major difference is that we can get away with much fewer checks
355 ** here. E.g. checks for casts, constness or correct types can often be
356 ** omitted, even if they might fail. The interpreter subsequently throws
357 ** an error, which aborts the trace.
359 ** All operations are specialized to their C types, so the on-trace
360 ** outcome must be the same as the outcome in the interpreter. If the
361 ** interpreter doesn't throw an error, then the trace is correct, too.
362 ** Care must be taken not to generate invalid (temporary) IR or to
366 /* Determine whether a passed number or cdata number is non-zero. */
367 static int crec_isnonzero(CType
*s
, void *p
)
373 if ((s
->info
& CTF_FP
)) {
374 if (s
->size
== sizeof(float))
375 return (*(float *)p
!= 0);
377 return (*(double *)p
!= 0);
380 return (*(uint8_t *)p
!= 0);
381 else if (s
->size
== 2)
382 return (*(uint16_t *)p
!= 0);
383 else if (s
->size
== 4)
384 return (*(uint32_t *)p
!= 0);
386 return (*(uint64_t *)p
!= 0);
390 static TRef
crec_ct_ct(jit_State
*J
, CType
*d
, CType
*s
, TRef dp
, TRef sp
,
393 IRType dt
= crec_ct2irt(ctype_ctsG(J2G(J
)), d
);
394 IRType st
= crec_ct2irt(ctype_ctsG(J2G(J
)), s
);
395 CTSize dsize
= d
->size
, ssize
= s
->size
;
396 CTInfo dinfo
= d
->info
, sinfo
= s
->info
;
398 if (ctype_type(dinfo
) > CT_MAYCONVERT
|| ctype_type(sinfo
) > CT_MAYCONVERT
)
402 ** Note: Unlike lj_cconv_ct_ct(), sp holds the _value_ of pointers and
403 ** numbers up to 8 bytes. Otherwise sp holds a pointer.
406 switch (cconv_idx2(dinfo
, sinfo
)) {
407 /* Destination is a bool. */
409 goto xstore
; /* Source operand is already normalized. */
412 if (st
!= IRT_CDATA
) {
413 /* Specialize to the result of a comparison against 0. */
414 TRef zero
= (st
== IRT_NUM
|| st
== IRT_FLOAT
) ? lj_ir_knum(J
, 0) :
415 (st
== IRT_I64
|| st
== IRT_U64
) ? lj_ir_kint64(J
, 0) :
417 int isnz
= crec_isnonzero(s
, svisnz
);
418 emitir(IRTG(isnz
? IR_NE
: IR_EQ
, st
), sp
, zero
);
419 sp
= lj_ir_kint(J
, isnz
);
424 /* Destination is an integer. */
428 if (dt
== IRT_CDATA
|| st
== IRT_CDATA
) goto err_nyi
;
429 /* Extend 32 to 64 bit integer. */
430 if (dsize
== 8 && ssize
< 8 && !(LJ_64
&& (sinfo
& CTF_UNSIGNED
)))
431 sp
= emitconv(sp
, dt
, ssize
< 4 ? IRT_INT
: st
,
432 (sinfo
& CTF_UNSIGNED
) ? 0 : IRCONV_SEXT
);
433 else if (dsize
< 8 && ssize
== 8) /* Truncate from 64 bit integer. */
434 sp
= emitconv(sp
, dsize
< 4 ? IRT_INT
: dt
, st
, 0);
435 else if (st
== IRT_INT
)
436 sp
= lj_opt_narrow_toint(J
, sp
);
438 if (dt
== IRT_I64
|| dt
== IRT_U64
) lj_needsplit(J
);
439 if (dp
== 0) return sp
;
440 emitir(IRT(IR_XSTORE
, dt
), dp
, sp
);
443 sp
= emitir(IRT(IR_XLOAD
, st
), sp
, 0); /* Load re. */
446 if (dt
== IRT_CDATA
|| st
== IRT_CDATA
) goto err_nyi
;
447 sp
= emitconv(sp
, dsize
< 4 ? IRT_INT
: dt
, st
, IRCONV_ANY
);
451 sinfo
= CTINFO(CT_NUM
, CTF_UNSIGNED
);
454 if (((dsize
^ ssize
) & 8) == 0) { /* Must insert no-op type conversion. */
455 sp
= emitconv(sp
, dsize
< 4 ? IRT_INT
: dt
, IRT_PTR
, 0);
460 /* Destination is a floating-point number. */
464 if (dt
== IRT_CDATA
|| st
== IRT_CDATA
) goto err_nyi
;
465 sp
= emitconv(sp
, dt
, ssize
< 4 ? IRT_INT
: st
, 0);
468 sp
= emitir(IRT(IR_XLOAD
, st
), sp
, 0); /* Load re. */
472 if (dt
== IRT_CDATA
|| st
== IRT_CDATA
) goto err_nyi
;
473 if (dt
!= st
) sp
= emitconv(sp
, dt
, st
, 0);
476 /* Destination is a complex number. */
480 TRef ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), dp
, lj_ir_kintp(J
, (dsize
>> 1)));
481 emitir(IRT(IR_XSTORE
, dt
), ptr
, lj_ir_knum(J
, 0));
484 if ((sinfo
& CTF_FP
)) goto conv_F_F
; else goto conv_F_I
;
487 if (dt
== IRT_CDATA
|| st
== IRT_CDATA
) goto err_nyi
;
490 re
= emitir(IRT(IR_XLOAD
, st
), sp
, 0);
491 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), sp
, lj_ir_kintp(J
, (ssize
>> 1)));
492 im
= emitir(IRT(IR_XLOAD
, st
), ptr
, 0);
494 re
= emitconv(re
, dt
, st
, 0);
495 im
= emitconv(im
, dt
, st
, 0);
497 emitir(IRT(IR_XSTORE
, dt
), dp
, re
);
498 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), dp
, lj_ir_kintp(J
, (dsize
>> 1)));
499 emitir(IRT(IR_XSTORE
, dt
), ptr
, im
);
503 /* Destination is a vector. */
510 /* Destination is a pointer. */
514 /* There are only 32 bit pointers/addresses on 32 bit machines.
515 ** Also ok on x64, since all 32 bit ops clear the upper part of the reg.
519 if (st
== IRT_CDATA
) goto err_nyi
;
520 if (!LJ_64
&& ssize
== 8) /* Truncate from 64 bit integer. */
521 sp
= emitconv(sp
, IRT_U32
, st
, 0);
524 if (st
== IRT_CDATA
) goto err_nyi
;
525 /* The signed conversion is cheaper. x64 really has 47 bit pointers. */
526 sp
= emitconv(sp
, (LJ_64
&& dsize
== 8) ? IRT_I64
: IRT_U32
,
530 /* Destination is an array. */
532 /* Destination is a struct/union. */
534 if (dp
== 0) goto err_conv
;
535 crec_copy(J
, dp
, sp
, lj_ir_kint(J
, dsize
), d
);
541 lj_trace_err(J
, LJ_TRERR_NYICONV
);
547 /* -- Convert C type to TValue (load) ------------------------------------- */
549 static TRef
crec_tv_ct(jit_State
*J
, CType
*s
, CTypeID sid
, TRef sp
)
551 CTState
*cts
= ctype_ctsG(J2G(J
));
552 IRType t
= crec_ct2irt(cts
, s
);
553 CTInfo sinfo
= s
->info
;
554 if (ctype_isnum(sinfo
)) {
557 goto err_nyi
; /* NYI: copyval of >64 bit integers. */
558 tr
= emitir(IRT(IR_XLOAD
, t
), sp
, 0);
559 if (t
== IRT_FLOAT
|| t
== IRT_U32
) { /* Keep uint32_t/float as numbers. */
560 return emitconv(tr
, IRT_NUM
, t
, 0);
561 } else if (t
== IRT_I64
|| t
== IRT_U64
) { /* Box 64 bit integer. */
564 } else if ((sinfo
& CTF_BOOL
)) {
565 /* Assume not equal to zero. Fixup and emit pending guard later. */
566 lj_ir_set(J
, IRTGI(IR_NE
), tr
, lj_ir_kint(J
, 0));
567 J
->postproc
= LJ_POST_FIXGUARD
;
572 } else if (ctype_isptr(sinfo
) || ctype_isenum(sinfo
)) {
573 sp
= emitir(IRT(IR_XLOAD
, t
), sp
, 0); /* Box pointers and enums. */
574 } else if (ctype_isrefarray(sinfo
) || ctype_isstruct(sinfo
)) {
576 sid
= lj_ctype_intern(cts
, CTINFO_REF(sid
), CTSIZE_PTR
); /* Create ref. */
577 } else if (ctype_iscomplex(sinfo
)) { /* Unbox/box complex. */
578 ptrdiff_t esz
= (ptrdiff_t)(s
->size
>> 1);
579 TRef ptr
, tr1
, tr2
, dp
;
580 dp
= emitir(IRTG(IR_CNEW
, IRT_CDATA
), lj_ir_kint(J
, sid
), TREF_NIL
);
581 tr1
= emitir(IRT(IR_XLOAD
, t
), sp
, 0);
582 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), sp
, lj_ir_kintp(J
, esz
));
583 tr2
= emitir(IRT(IR_XLOAD
, t
), ptr
, 0);
584 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), dp
, lj_ir_kintp(J
, sizeof(GCcdata
)));
585 emitir(IRT(IR_XSTORE
, t
), ptr
, tr1
);
586 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), dp
, lj_ir_kintp(J
, sizeof(GCcdata
)+esz
));
587 emitir(IRT(IR_XSTORE
, t
), ptr
, tr2
);
590 /* NYI: copyval of vectors. */
592 lj_trace_err(J
, LJ_TRERR_NYICONV
);
594 /* Box pointer, ref, enum or 64 bit integer. */
595 return emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, sid
), sp
);
598 /* -- Convert TValue to C type (store) ------------------------------------ */
600 static TRef
crec_ct_tv(jit_State
*J
, CType
*d
, TRef dp
, TRef sp
, cTValue
*sval
)
602 CTState
*cts
= ctype_ctsG(J2G(J
));
603 CTypeID sid
= CTID_P_VOID
;
606 if (LJ_LIKELY(tref_isinteger(sp
))) {
608 svisnz
= (void *)(intptr_t)(tvisint(sval
)?(intV(sval
)!=0):!tviszero(sval
));
609 } else if (tref_isnum(sp
)) {
611 svisnz
= (void *)(intptr_t)(tvisint(sval
)?(intV(sval
)!=0):!tviszero(sval
));
612 } else if (tref_isbool(sp
)) {
613 sp
= lj_ir_kint(J
, tref_istrue(sp
) ? 1 : 0);
615 } else if (tref_isnil(sp
)) {
616 sp
= lj_ir_kptr(J
, NULL
);
617 } else if (tref_isudata(sp
)) {
618 GCudata
*ud
= udataV(sval
);
619 if (ud
->udtype
== UDTYPE_IO_FILE
|| ud
->udtype
== UDTYPE_BUFFER
) {
620 TRef tr
= emitir(IRT(IR_FLOAD
, IRT_U8
), sp
, IRFL_UDATA_UDTYPE
);
621 emitir(IRTGI(IR_EQ
), tr
, lj_ir_kint(J
, ud
->udtype
));
622 sp
= emitir(IRT(IR_FLOAD
, IRT_PTR
), sp
,
623 ud
->udtype
== UDTYPE_IO_FILE
? IRFL_UDATA_FILE
:
626 sp
= emitir(IRT(IR_ADD
, IRT_PTR
), sp
, lj_ir_kintp(J
, sizeof(GCudata
)));
628 } else if (tref_isstr(sp
)) {
629 if (ctype_isenum(d
->info
)) { /* Match string against enum constant. */
630 GCstr
*str
= strV(sval
);
632 CType
*cct
= lj_ctype_getfield(cts
, d
, str
, &ofs
);
633 /* Specialize to the name of the enum constant. */
634 emitir(IRTG(IR_EQ
, IRT_STR
), sp
, lj_ir_kstr(J
, str
));
635 if (cct
&& ctype_isconstval(cct
->info
)) {
636 lj_assertJ(ctype_child(cts
, cct
)->size
== 4,
637 "only 32 bit const supported"); /* NYI */
638 svisnz
= (void *)(intptr_t)(ofs
!= 0);
639 sp
= lj_ir_kint(J
, (int32_t)ofs
);
640 sid
= ctype_cid(cct
->info
);
641 } /* else: interpreter will throw. */
642 } else if (ctype_isrefarray(d
->info
)) { /* Copy string to array. */
643 lj_trace_err(J
, LJ_TRERR_BADTYPE
); /* NYI */
644 } else { /* Otherwise pass the string data as a const char[]. */
645 /* Don't use STRREF. It folds with SNEW, which loses the trailing NUL. */
646 sp
= emitir(IRT(IR_ADD
, IRT_PTR
), sp
, lj_ir_kintp(J
, sizeof(GCstr
)));
649 } else if (tref_islightud(sp
)) {
651 lj_trace_err(J
, LJ_TRERR_NYICONV
);
653 } else { /* NYI: tref_istab(sp). */
655 sid
= argv2cdata(J
, sp
, sval
)->ctypeid
;
656 s
= ctype_raw(cts
, sid
);
657 svisnz
= cdataptr(cdataV(sval
));
658 if (ctype_isfunc(s
->info
)) {
659 sid
= lj_ctype_intern(cts
, CTINFO(CT_PTR
, CTALIGN_PTR
|sid
), CTSIZE_PTR
);
660 s
= ctype_get(cts
, sid
);
663 t
= crec_ct2irt(cts
, s
);
665 if (ctype_isptr(s
->info
)) {
666 sp
= emitir(IRT(IR_FLOAD
, t
), sp
, IRFL_CDATA_PTR
);
667 if (ctype_isref(s
->info
)) {
668 svisnz
= *(void **)svisnz
;
669 s
= ctype_rawchild(cts
, s
);
670 if (ctype_isenum(s
->info
)) s
= ctype_child(cts
, s
);
671 t
= crec_ct2irt(cts
, s
);
675 } else if (t
== IRT_I64
|| t
== IRT_U64
) {
676 sp
= emitir(IRT(IR_FLOAD
, t
), sp
, IRFL_CDATA_INT64
);
679 } else if (t
== IRT_INT
|| t
== IRT_U32
) {
680 if (ctype_isenum(s
->info
)) s
= ctype_child(cts
, s
);
681 sp
= emitir(IRT(IR_FLOAD
, t
), sp
, IRFL_CDATA_INT
);
684 sp
= emitir(IRT(IR_ADD
, IRT_PTR
), sp
, lj_ir_kintp(J
, sizeof(GCcdata
)));
686 if (ctype_isnum(s
->info
) && t
!= IRT_CDATA
)
687 sp
= emitir(IRT(IR_XLOAD
, t
), sp
, 0); /* Load number value. */
690 s
= ctype_get(cts
, sid
);
692 if (ctype_isenum(d
->info
)) d
= ctype_child(cts
, d
);
693 return crec_ct_ct(J
, d
, s
, dp
, sp
, svisnz
);
696 /* -- C data metamethods -------------------------------------------------- */
698 /* This would be rather difficult in FOLD, so do it here:
699 ** (base+k)+(idx*sz)+ofs ==> (base+idx*sz)+(ofs+k)
700 ** (base+(idx+k)*sz)+ofs ==> (base+idx*sz)+(ofs+k*sz)
702 static TRef
crec_reassoc_ofs(jit_State
*J
, TRef tr
, ptrdiff_t *ofsp
, MSize sz
)
704 IRIns
*ir
= IR(tref_ref(tr
));
705 if (LJ_LIKELY(J
->flags
& JIT_F_OPT_FOLD
) && irref_isk(ir
->op2
) &&
706 (ir
->o
== IR_ADD
|| ir
->o
== IR_ADDOV
|| ir
->o
== IR_SUBOV
)) {
707 IRIns
*irk
= IR(ir
->op2
);
709 if (LJ_64
&& irk
->o
== IR_KINT64
)
710 k
= (ptrdiff_t)ir_kint64(irk
)->u64
* sz
;
712 k
= (ptrdiff_t)irk
->i
* sz
;
713 if (ir
->o
== IR_SUBOV
) *ofsp
-= k
; else *ofsp
+= k
;
714 tr
= ir
->op1
; /* Not a TRef, but the caller doesn't care. */
719 /* Tailcall to function. */
720 static void crec_tailcall(jit_State
*J
, RecordFFData
*rd
, cTValue
*tv
)
722 TRef kfunc
= lj_ir_kfunc(J
, funcV(tv
));
725 J
->base
[-1] = TREF_FRAME
;
727 J
->base
[-1] = kfunc
| TREF_FRAME
;
729 rd
->nres
= -1; /* Pending tailcall. */
732 /* Record ctype __index/__newindex metamethods. */
733 static void crec_index_meta(jit_State
*J
, CTState
*cts
, CType
*ct
,
736 CTypeID id
= ctype_typeid(cts
, ct
);
737 cTValue
*tv
= lj_ctype_meta(cts
, id
, rd
->data
? MM_newindex
: MM_index
);
739 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
741 crec_tailcall(J
, rd
, tv
);
742 } else if (rd
->data
== 0 && tvistab(tv
) && tref_isstr(J
->base
[1])) {
743 /* Specialize to result of __index lookup. */
744 cTValue
*o
= lj_tab_get(J
->L
, tabV(tv
), &rd
->argv
[1]);
745 J
->base
[0] = lj_record_constify(J
, o
);
747 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
748 /* Always specialize to the key. */
749 emitir(IRTG(IR_EQ
, IRT_STR
), J
->base
[1], lj_ir_kstr(J
, strV(&rd
->argv
[1])));
751 /* NYI: resolving of non-function metamethods. */
752 /* NYI: non-string keys for __index table. */
753 /* NYI: stores to __newindex table. */
754 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
758 /* Record bitfield load/store. */
759 static void crec_index_bf(jit_State
*J
, RecordFFData
*rd
, TRef ptr
, CTInfo info
)
761 IRType t
= IRT_I8
+ 2*lj_fls(ctype_bitcsz(info
)) + ((info
&CTF_UNSIGNED
)?1:0);
762 TRef tr
= emitir(IRT(IR_XLOAD
, t
), ptr
, 0);
763 CTSize pos
= ctype_bitpos(info
), bsz
= ctype_bitbsz(info
), shift
= 32 - bsz
;
764 lj_assertJ(t
<= IRT_U32
, "only 32 bit bitfields supported"); /* NYI */
765 if (rd
->data
== 0) { /* __index metamethod. */
766 if ((info
& CTF_BOOL
)) {
767 tr
= emitir(IRTI(IR_BAND
), tr
, lj_ir_kint(J
, (int32_t)((1u << pos
))));
768 /* Assume not equal to zero. Fixup and emit pending guard later. */
769 lj_ir_set(J
, IRTGI(IR_NE
), tr
, lj_ir_kint(J
, 0));
770 J
->postproc
= LJ_POST_FIXGUARD
;
772 } else if (!(info
& CTF_UNSIGNED
)) {
773 tr
= emitir(IRTI(IR_BSHL
), tr
, lj_ir_kint(J
, shift
- pos
));
774 tr
= emitir(IRTI(IR_BSAR
), tr
, lj_ir_kint(J
, shift
));
776 lj_assertJ(bsz
< 32, "unexpected full bitfield index");
777 tr
= emitir(IRTI(IR_BSHR
), tr
, lj_ir_kint(J
, pos
));
778 tr
= emitir(IRTI(IR_BAND
), tr
, lj_ir_kint(J
, (int32_t)((1u << bsz
)-1)));
779 /* We can omit the U32 to NUM conversion, since bsz < 32. */
782 } else { /* __newindex metamethod. */
783 CTState
*cts
= ctype_ctsG(J2G(J
));
784 CType
*ct
= ctype_get(cts
,
785 (info
& CTF_BOOL
) ? CTID_BOOL
:
786 (info
& CTF_UNSIGNED
) ? CTID_UINT32
: CTID_INT32
);
787 int32_t mask
= (int32_t)(((1u << bsz
)-1) << pos
);
788 TRef sp
= crec_ct_tv(J
, ct
, 0, J
->base
[2], &rd
->argv
[2]);
789 sp
= emitir(IRTI(IR_BSHL
), sp
, lj_ir_kint(J
, pos
));
790 /* Use of the target type avoids forwarding conversions. */
791 sp
= emitir(IRT(IR_BAND
, t
), sp
, lj_ir_kint(J
, mask
));
792 tr
= emitir(IRT(IR_BAND
, t
), tr
, lj_ir_kint(J
, (int32_t)~mask
));
793 tr
= emitir(IRT(IR_BOR
, t
), tr
, sp
);
794 emitir(IRT(IR_XSTORE
, t
), ptr
, tr
);
800 void LJ_FASTCALL
recff_cdata_index(jit_State
*J
, RecordFFData
*rd
)
802 TRef idx
, ptr
= J
->base
[0];
803 ptrdiff_t ofs
= sizeof(GCcdata
);
804 GCcdata
*cd
= argv2cdata(J
, ptr
, &rd
->argv
[0]);
805 CTState
*cts
= ctype_ctsG(J2G(J
));
806 CType
*ct
= ctype_raw(cts
, cd
->ctypeid
);
809 /* Resolve pointer or reference for cdata object. */
810 if (ctype_isptr(ct
->info
)) {
811 IRType t
= (LJ_64
&& ct
->size
== 8) ? IRT_P64
: IRT_P32
;
812 if (ctype_isref(ct
->info
)) ct
= ctype_rawchild(cts
, ct
);
813 ptr
= emitir(IRT(IR_FLOAD
, t
), ptr
, IRFL_CDATA_PTR
);
815 ptr
= crec_reassoc_ofs(J
, ptr
, &ofs
, 1);
820 if (tref_isnumber(idx
)) {
821 idx
= lj_opt_narrow_cindex(J
, idx
);
822 if (ctype_ispointer(ct
->info
)) {
825 if ((ct
->info
& CTF_COMPLEX
))
826 idx
= emitir(IRT(IR_BAND
, IRT_INTP
), idx
, lj_ir_kintp(J
, 1));
827 sz
= lj_ctype_size(cts
, (sid
= ctype_cid(ct
->info
)));
828 idx
= crec_reassoc_ofs(J
, idx
, &ofs
, sz
);
829 #if LJ_TARGET_ARM || LJ_TARGET_PPC
830 /* Hoist base add to allow fusion of index/shift into operands. */
831 if (LJ_LIKELY(J
->flags
& JIT_F_OPT_LOOP
) && ofs
833 && (sz
== 1 || sz
== 4)
836 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), ptr
, lj_ir_kintp(J
, ofs
));
840 idx
= emitir(IRT(IR_MUL
, IRT_INTP
), idx
, lj_ir_kintp(J
, sz
));
841 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), idx
, ptr
);
843 } else if (tref_iscdata(idx
)) {
844 GCcdata
*cdk
= cdataV(&rd
->argv
[1]);
845 CType
*ctk
= ctype_raw(cts
, cdk
->ctypeid
);
846 IRType t
= crec_ct2irt(cts
, ctk
);
847 if (ctype_ispointer(ct
->info
) && t
>= IRT_I8
&& t
<= IRT_U64
) {
848 if (ctk
->size
== 8) {
849 idx
= emitir(IRT(IR_FLOAD
, t
), idx
, IRFL_CDATA_INT64
);
850 } else if (ctk
->size
== 4) {
851 idx
= emitir(IRT(IR_FLOAD
, t
), idx
, IRFL_CDATA_INT
);
853 idx
= emitir(IRT(IR_ADD
, IRT_PTR
), idx
,
854 lj_ir_kintp(J
, sizeof(GCcdata
)));
855 idx
= emitir(IRT(IR_XLOAD
, t
), idx
, 0);
857 if (LJ_64
&& ctk
->size
< sizeof(intptr_t) && !(ctk
->info
& CTF_UNSIGNED
))
858 idx
= emitconv(idx
, IRT_INTP
, IRT_INT
, IRCONV_SEXT
);
859 if (!LJ_64
&& ctk
->size
> sizeof(intptr_t)) {
860 idx
= emitconv(idx
, IRT_INTP
, t
, 0);
865 } else if (tref_isstr(idx
)) {
866 GCstr
*name
= strV(&rd
->argv
[1]);
867 if (cd
&& cd
->ctypeid
== CTID_CTYPEID
)
868 ct
= ctype_raw(cts
, crec_constructor(J
, cd
, ptr
));
869 if (ctype_isstruct(ct
->info
)) {
872 fct
= lj_ctype_getfield(cts
, ct
, name
, &fofs
);
874 ofs
+= (ptrdiff_t)fofs
;
875 /* Always specialize to the field name. */
876 emitir(IRTG(IR_EQ
, IRT_STR
), idx
, lj_ir_kstr(J
, name
));
877 if (ctype_isconstval(fct
->info
)) {
878 if (fct
->size
>= 0x80000000u
&&
879 (ctype_child(cts
, fct
)->info
& CTF_UNSIGNED
)) {
880 J
->base
[0] = lj_ir_knum(J
, (lua_Number
)(uint32_t)fct
->size
);
883 J
->base
[0] = lj_ir_kint(J
, (int32_t)fct
->size
);
884 return; /* Interpreter will throw for newindex. */
885 } else if (ctype_isbitfield(fct
->info
)) {
887 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), ptr
, lj_ir_kintp(J
, ofs
));
888 crec_index_bf(J
, rd
, ptr
, fct
->info
);
891 lj_assertJ(ctype_isfield(fct
->info
), "field expected");
892 sid
= ctype_cid(fct
->info
);
895 } else if (ctype_iscomplex(ct
->info
)) {
896 if (name
->len
== 2 &&
897 ((strdata(name
)[0] == 'r' && strdata(name
)[1] == 'e') ||
898 (strdata(name
)[0] == 'i' && strdata(name
)[1] == 'm'))) {
899 /* Always specialize to the field name. */
900 emitir(IRTG(IR_EQ
, IRT_STR
), idx
, lj_ir_kstr(J
, name
));
901 if (strdata(name
)[0] == 'i') ofs
+= (ct
->size
>> 1);
902 sid
= ctype_cid(ct
->info
);
907 if (ctype_isptr(ct
->info
)) { /* Automatically perform '->'. */
908 CType
*cct
= ctype_rawchild(cts
, ct
);
909 if (ctype_isstruct(cct
->info
)) {
912 if (tref_isstr(idx
)) goto again
;
915 crec_index_meta(J
, cts
, ct
, rd
);
920 ptr
= emitir(IRT(IR_ADD
, IRT_PTR
), ptr
, lj_ir_kintp(J
, ofs
));
922 /* Resolve reference for field. */
923 ct
= ctype_get(cts
, sid
);
924 if (ctype_isref(ct
->info
)) {
925 ptr
= emitir(IRT(IR_XLOAD
, IRT_PTR
), ptr
, 0);
926 sid
= ctype_cid(ct
->info
);
927 ct
= ctype_get(cts
, sid
);
930 while (ctype_isattrib(ct
->info
))
931 ct
= ctype_child(cts
, ct
); /* Skip attributes. */
933 if (rd
->data
== 0) { /* __index metamethod. */
934 J
->base
[0] = crec_tv_ct(J
, ct
, sid
, ptr
);
935 } else { /* __newindex metamethod. */
938 crec_ct_tv(J
, ct
, ptr
, J
->base
[2], &rd
->argv
[2]);
942 /* Record setting a finalizer. */
943 static void crec_finalizer(jit_State
*J
, TRef trcd
, TRef trfin
, cTValue
*fin
)
946 if (!trfin
) trfin
= lj_ir_kptr(J
, gcval(fin
));
947 } else if (tvisnil(fin
)) {
948 trfin
= lj_ir_kptr(J
, NULL
);
950 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
952 lj_ir_call(J
, IRCALL_lj_cdata_setfin
, trcd
,
953 trfin
, lj_ir_kint(J
, (int32_t)itype(fin
)));
957 /* Record cdata allocation. */
958 static void crec_alloc(jit_State
*J
, RecordFFData
*rd
, CTypeID id
)
960 CTState
*cts
= ctype_ctsG(J2G(J
));
962 CTInfo info
= lj_ctype_info(cts
, id
, &sz
);
963 CType
*d
= ctype_raw(cts
, id
);
964 TRef trcd
, trid
= lj_ir_kint(J
, id
);
966 /* Use special instruction to box pointer or 32/64 bit integer. */
967 if (ctype_isptr(info
) || (ctype_isinteger(info
) && (sz
== 4 || sz
== 8))) {
968 TRef sp
= J
->base
[1] ? crec_ct_tv(J
, d
, 0, J
->base
[1], &rd
->argv
[1]) :
969 ctype_isptr(info
) ? lj_ir_kptr(J
, NULL
) :
970 sz
== 4 ? lj_ir_kint(J
, 0) :
971 (lj_needsplit(J
), lj_ir_kint64(J
, 0));
972 J
->base
[0] = emitir(IRTG(IR_CNEWI
, IRT_CDATA
), trid
, sp
);
975 TRef trsz
= TREF_NIL
;
976 if ((info
& CTF_VLA
)) { /* Calculate VLA/VLS size at runtime. */
978 if (!J
->base
[1] || J
->base
[2])
979 lj_trace_err(J
, LJ_TRERR_NYICONV
); /* NYI: init VLA/VLS. */
980 trsz
= crec_ct_tv(J
, ctype_get(cts
, CTID_INT32
), 0,
981 J
->base
[1], &rd
->argv
[1]);
982 sz0
= lj_ctype_vlsize(cts
, d
, 0);
983 sz1
= lj_ctype_vlsize(cts
, d
, 1);
984 trsz
= emitir(IRTGI(IR_MULOV
), trsz
, lj_ir_kint(J
, (int32_t)(sz1
-sz0
)));
985 trsz
= emitir(IRTGI(IR_ADDOV
), trsz
, lj_ir_kint(J
, (int32_t)sz0
));
986 J
->base
[1] = 0; /* Simplify logic below. */
987 } else if (ctype_align(info
) > CT_MEMALIGN
) {
988 trsz
= lj_ir_kint(J
, sz
);
990 trcd
= emitir(IRTG(IR_CNEW
, IRT_CDATA
), trid
, trsz
);
991 if (sz
> 128 || (info
& CTF_VLA
)) {
994 special
: /* Only handle bulk zero-fill for large/VLA/VLS types. */
996 lj_trace_err(J
, LJ_TRERR_NYICONV
); /* NYI: init large/VLA/VLS types. */
997 dp
= emitir(IRT(IR_ADD
, IRT_PTR
), trcd
, lj_ir_kintp(J
, sizeof(GCcdata
)));
998 if (trsz
== TREF_NIL
) trsz
= lj_ir_kint(J
, sz
);
999 align
= ctype_align(info
);
1000 if (align
< CT_MEMALIGN
) align
= CT_MEMALIGN
;
1001 crec_fill(J
, dp
, trsz
, lj_ir_kint(J
, 0), (1u << align
));
1002 } else if (J
->base
[1] && !J
->base
[2] &&
1003 !lj_cconv_multi_init(cts
, d
, &rd
->argv
[1])) {
1005 } else if (ctype_isarray(d
->info
)) {
1006 CType
*dc
= ctype_rawchild(cts
, d
); /* Array element type. */
1007 CTSize ofs
, esize
= dc
->size
;
1013 if (!(ctype_isnum(dc
->info
) || ctype_isptr(dc
->info
)) ||
1014 esize
* CREC_FILL_MAXUNROLL
< sz
)
1016 for (i
= 1, ofs
= 0; ofs
< sz
; ofs
+= esize
) {
1017 TRef dp
= emitir(IRT(IR_ADD
, IRT_PTR
), trcd
,
1018 lj_ir_kintp(J
, ofs
+ sizeof(GCcdata
)));
1021 sval
= &rd
->argv
[i
];
1023 } else if (i
!= 2) {
1024 sp
= ctype_isnum(dc
->info
) ? lj_ir_kint(J
, 0) : TREF_NIL
;
1026 crec_ct_tv(J
, dc
, dp
, sp
, sval
);
1028 } else if (ctype_isstruct(d
->info
)) {
1031 if (!J
->base
[1]) { /* Handle zero-fill of struct-of-NYI. */
1034 CType
*df
= ctype_get(cts
, fid
);
1036 if (ctype_isfield(df
->info
)) {
1038 if (!gcref(df
->name
)) continue; /* Ignore unnamed fields. */
1039 dc
= ctype_rawchild(cts
, df
); /* Field type. */
1040 if (!(ctype_isnum(dc
->info
) || ctype_isptr(dc
->info
) ||
1041 ctype_isenum(dc
->info
)))
1043 } else if (!ctype_isconstval(df
->info
)) {
1050 CType
*df
= ctype_get(cts
, fid
);
1052 if (ctype_isfield(df
->info
)) {
1058 if (!gcref(df
->name
)) continue; /* Ignore unnamed fields. */
1059 dc
= ctype_rawchild(cts
, df
); /* Field type. */
1060 if (!(ctype_isnum(dc
->info
) || ctype_isptr(dc
->info
) ||
1061 ctype_isenum(dc
->info
)))
1062 lj_trace_err(J
, LJ_TRERR_NYICONV
); /* NYI: init aggregates. */
1065 sval
= &rd
->argv
[i
];
1068 sp
= ctype_isptr(dc
->info
) ? TREF_NIL
: lj_ir_kint(J
, 0);
1070 dp
= emitir(IRT(IR_ADD
, IRT_PTR
), trcd
,
1071 lj_ir_kintp(J
, df
->size
+ sizeof(GCcdata
)));
1072 crec_ct_tv(J
, dc
, dp
, sp
, sval
);
1073 if ((d
->info
& CTF_UNION
)) {
1074 if (d
->size
!= dc
->size
) /* NYI: partial init of union. */
1075 lj_trace_err(J
, LJ_TRERR_NYICONV
);
1078 } else if (!ctype_isconstval(df
->info
)) {
1079 /* NYI: init bitfields and sub-structures. */
1080 lj_trace_err(J
, LJ_TRERR_NYICONV
);
1086 dp
= emitir(IRT(IR_ADD
, IRT_PTR
), trcd
, lj_ir_kintp(J
, sizeof(GCcdata
)));
1088 crec_ct_tv(J
, d
, dp
, J
->base
[1], &rd
->argv
[1]);
1092 crec_ct_tv(J
, d
, dp
, lj_ir_kint(J
, 0), &tv
);
1097 /* Handle __gc metamethod. */
1098 fin
= lj_ctype_meta(cts
, id
, MM_gc
);
1100 crec_finalizer(J
, trcd
, 0, fin
);
1103 /* Record argument conversions. */
1104 static TRef
crec_call_args(jit_State
*J
, RecordFFData
*rd
,
1105 CTState
*cts
, CType
*ct
)
1107 TRef args
[CCI_NARGS_MAX
];
1114 TRef
*arg0
= NULL
, *arg1
= NULL
;
1117 if (ctype_cconv(ct
->info
) == CTCC_THISCALL
)
1119 else if (ctype_cconv(ct
->info
) == CTCC_FASTCALL
)
1121 #elif LJ_TARGET_ARM64 && LJ_TARGET_OSX
1122 int ngpr
= CCALL_NARG_GPR
;
1125 /* Skip initial attributes. */
1128 CType
*ctf
= ctype_get(cts
, fid
);
1129 if (!ctype_isattrib(ctf
->info
)) break;
1133 for (n
= 0, base
= J
->base
+1, o
= rd
->argv
+1; *base
; n
++, base
++, o
++) {
1137 if (n
>= CCI_NARGS_MAX
)
1138 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1140 if (fid
) { /* Get argument type from field. */
1141 CType
*ctf
= ctype_get(cts
, fid
);
1143 lj_assertJ(ctype_isfield(ctf
->info
), "field expected");
1144 did
= ctype_cid(ctf
->info
);
1146 if (!(ct
->info
& CTF_VARARG
))
1147 lj_trace_err(J
, LJ_TRERR_NYICALL
); /* Too many arguments. */
1148 #if LJ_TARGET_ARM64 && LJ_TARGET_OSX
1151 args
[n
++] = TREF_NIL
; /* Marker for start of varargs. */
1152 if (n
>= CCI_NARGS_MAX
)
1153 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1156 did
= lj_ccall_ctid_vararg(cts
, o
); /* Infer vararg type. */
1158 d
= ctype_raw(cts
, did
);
1159 if (!(ctype_isnum(d
->info
) || ctype_isptr(d
->info
) ||
1160 ctype_isenum(d
->info
)))
1161 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1162 tr
= crec_ct_tv(J
, d
, 0, *base
, o
);
1163 if (ctype_isinteger_or_bool(d
->info
)) {
1164 #if LJ_TARGET_ARM64 && LJ_TARGET_OSX
1166 /* Fixed args passed on the stack use their unpromoted size. */
1167 if (d
->size
!= lj_ir_type_size
[tref_type(tr
)]) {
1168 lj_assertJ(d
->size
== 1 || d
->size
==2, "unexpected size %d", d
->size
);
1169 tr
= emitconv(tr
, d
->size
==1 ? IRT_U8
: IRT_U16
, tref_type(tr
), 0);
1174 if ((d
->info
& CTF_UNSIGNED
))
1175 tr
= emitconv(tr
, IRT_INT
, d
->size
==1 ? IRT_U8
: IRT_U16
, 0);
1177 tr
= emitconv(tr
, IRT_INT
, d
->size
==1 ? IRT_I8
: IRT_I16
,IRCONV_SEXT
);
1179 } else if (LJ_SOFTFP32
&& ctype_isfp(d
->info
) && d
->size
> 4) {
1183 /* 64 bit args must not end up in registers for fastcall/thiscall. */
1185 if (!ctype_isfp(d
->info
)) {
1186 /* Sigh, the Windows/x86 ABI allows reordering across 64 bit args. */
1187 if (tref_typerange(tr
, IRT_I64
, IRT_U64
)) {
1189 arg0
= &args
[n
]; args
[n
++] = TREF_NIL
; ngpr
--;
1191 arg1
= &args
[n
]; args
[n
++] = TREF_NIL
; ngpr
--;
1195 if (arg0
) { *arg0
= tr
; arg0
= NULL
; n
--; continue; }
1196 if (arg1
) { *arg1
= tr
; arg1
= NULL
; n
--; continue; }
1201 if (!ctype_isfp(d
->info
) && ngpr
) {
1202 if (tref_typerange(tr
, IRT_I64
, IRT_U64
)) {
1203 /* No reordering for other x86 ABIs. Simply add alignment args. */
1204 do { args
[n
++] = TREF_NIL
; } while (--ngpr
);
1210 #elif LJ_TARGET_ARM64 && LJ_TARGET_OSX
1211 if (!ctype_isfp(d
->info
) && ngpr
) {
1218 for (i
= 1; i
< n
; i
++)
1219 tr
= emitir(IRT(IR_CARG
, IRT_NIL
), tr
, args
[i
]);
1223 /* Create a snapshot for the caller, simulating a 'false' return value. */
1224 static void crec_snap_caller(jit_State
*J
)
1226 lua_State
*L
= J
->L
;
1227 TValue
*base
= L
->base
, *top
= L
->top
;
1228 const BCIns
*pc
= J
->pc
;
1229 TRef ftr
= J
->base
[-1-LJ_FR2
];
1231 if (!frame_islua(base
-1) || J
->framedepth
<= 0)
1232 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1233 J
->pc
= frame_pc(base
-1); delta
= 1+LJ_FR2
+bc_a(J
->pc
[-1]);
1234 L
->top
= base
; L
->base
= base
- delta
;
1235 J
->base
[-1-LJ_FR2
] = TREF_FALSE
;
1236 J
->base
-= delta
; J
->baseslot
-= (BCReg
)delta
;
1237 J
->maxslot
= (BCReg
)delta
-LJ_FR2
; J
->framedepth
--;
1239 L
->base
= base
; L
->top
= top
;
1240 J
->framedepth
++; J
->maxslot
= 1;
1241 J
->base
+= delta
; J
->baseslot
+= (BCReg
)delta
;
1242 J
->base
[-1-LJ_FR2
] = ftr
; J
->pc
= pc
;
1245 /* Record function call. */
1246 static int crec_call(jit_State
*J
, RecordFFData
*rd
, GCcdata
*cd
)
1248 CTState
*cts
= ctype_ctsG(J2G(J
));
1249 CType
*ct
= ctype_raw(cts
, cd
->ctypeid
);
1250 IRType tp
= IRT_PTR
;
1251 if (ctype_isptr(ct
->info
)) {
1252 tp
= (LJ_64
&& ct
->size
== 8) ? IRT_P64
: IRT_P32
;
1253 ct
= ctype_rawchild(cts
, ct
);
1255 if (ctype_isfunc(ct
->info
)) {
1256 TRef func
= emitir(IRT(IR_FLOAD
, tp
), J
->base
[0], IRFL_CDATA_PTR
);
1257 CType
*ctr
= ctype_rawchild(cts
, ct
);
1258 IRType t
= crec_ct2irt(cts
, ctr
);
1261 /* Check for blacklisted C functions that might call a callback. */
1262 tv
.u64
= ((uintptr_t)cdata_getptr(cdataptr(cd
), (LJ_64
&& tp
== IRT_P64
) ? 8 : 4) >> 2) | U64x(800000000, 00000000);
1263 if (tvistrue(lj_tab_get(J
->L
, cts
->miscmap
, &tv
)))
1264 lj_trace_err(J
, LJ_TRERR_BLACKL
);
1265 if (ctype_isvoid(ctr
->info
)) {
1268 } else if (!(ctype_isnum(ctr
->info
) || ctype_isptr(ctr
->info
) ||
1269 ctype_isenum(ctr
->info
)) || t
== IRT_CDATA
) {
1270 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1272 if ((ct
->info
& CTF_VARARG
)
1274 || ctype_cconv(ct
->info
) != CTCC_CDECL
1277 func
= emitir(IRT(IR_CARG
, IRT_NIL
), func
,
1278 lj_ir_kint(J
, ctype_typeid(cts
, ct
)));
1279 tr
= emitir(IRT(IR_CALLXS
, t
), crec_call_args(J
, rd
, cts
, ct
), func
);
1280 if (ctype_isbool(ctr
->info
)) {
1281 if (frame_islua(J
->L
->base
-1) && bc_b(frame_pc(J
->L
->base
-1)[-1]) == 1) {
1282 /* Don't check result if ignored. */
1285 crec_snap_caller(J
);
1286 #if LJ_TARGET_X86ORX64
1287 /* Note: only the x86/x64 backend supports U8 and only for EQ(tr, 0). */
1288 lj_ir_set(J
, IRTG(IR_NE
, IRT_U8
), tr
, lj_ir_kint(J
, 0));
1290 lj_ir_set(J
, IRTGI(IR_NE
), tr
, lj_ir_kint(J
, 0));
1292 J
->postproc
= LJ_POST_FIXGUARDSNAP
;
1295 } else if (t
== IRT_PTR
|| (LJ_64
&& t
== IRT_P32
) ||
1296 t
== IRT_I64
|| t
== IRT_U64
|| ctype_isenum(ctr
->info
)) {
1297 TRef trid
= lj_ir_kint(J
, ctype_cid(ct
->info
));
1298 tr
= emitir(IRTG(IR_CNEWI
, IRT_CDATA
), trid
, tr
);
1299 if (t
== IRT_I64
|| t
== IRT_U64
) lj_needsplit(J
);
1300 } else if (t
== IRT_FLOAT
|| t
== IRT_U32
) {
1301 tr
= emitconv(tr
, IRT_NUM
, t
, 0);
1302 } else if (t
== IRT_I8
|| t
== IRT_I16
) {
1303 tr
= emitconv(tr
, IRT_INT
, t
, IRCONV_SEXT
);
1304 } else if (t
== IRT_U8
|| t
== IRT_U16
) {
1305 tr
= emitconv(tr
, IRT_INT
, t
, 0);
1314 void LJ_FASTCALL
recff_cdata_call(jit_State
*J
, RecordFFData
*rd
)
1316 CTState
*cts
= ctype_ctsG(J2G(J
));
1317 GCcdata
*cd
= argv2cdata(J
, J
->base
[0], &rd
->argv
[0]);
1318 CTypeID id
= cd
->ctypeid
;
1322 if (id
== CTID_CTYPEID
) {
1323 id
= crec_constructor(J
, cd
, J
->base
[0]);
1325 } else if (crec_call(J
, rd
, cd
)) {
1328 /* Record ctype __call/__new metamethod. */
1329 ct
= ctype_raw(cts
, id
);
1330 tv
= lj_ctype_meta(cts
, ctype_isptr(ct
->info
) ? ctype_cid(ct
->info
) : id
, mm
);
1333 crec_tailcall(J
, rd
, tv
);
1336 } else if (mm
== MM_new
) {
1337 crec_alloc(J
, rd
, id
);
1340 /* No metamethod or NYI: non-function metamethods. */
1341 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1344 static TRef
crec_arith_int64(jit_State
*J
, TRef
*sp
, CType
**s
, MMS mm
)
1346 if (sp
[0] && sp
[1] && ctype_isnum(s
[0]->info
) && ctype_isnum(s
[1]->info
)) {
1353 if (((s
[0]->info
& CTF_UNSIGNED
) && s
[0]->size
== 8) ||
1354 ((s
[1]->info
& CTF_UNSIGNED
) && s
[1]->size
== 8)) {
1355 dt
= IRT_U64
; id
= CTID_UINT64
;
1357 dt
= IRT_I64
; id
= CTID_INT64
;
1359 !((s
[0]->info
| s
[1]->info
) & CTF_FP
) &&
1360 s
[0]->size
== 4 && s
[1]->size
== 4) { /* Try to narrow comparison. */
1361 if (!((s
[0]->info
^ s
[1]->info
) & CTF_UNSIGNED
) ||
1362 (tref_isk(sp
[1]) && IR(tref_ref(sp
[1]))->i
>= 0)) {
1363 dt
= (s
[0]->info
& CTF_UNSIGNED
) ? IRT_U32
: IRT_INT
;
1365 } else if (tref_isk(sp
[0]) && IR(tref_ref(sp
[0]))->i
>= 0) {
1366 dt
= (s
[1]->info
& CTF_UNSIGNED
) ? IRT_U32
: IRT_INT
;
1371 for (i
= 0; i
< 2; i
++) {
1372 IRType st
= tref_type(sp
[i
]);
1373 if (st
== IRT_NUM
|| st
== IRT_FLOAT
)
1374 sp
[i
] = emitconv(sp
[i
], dt
, st
, IRCONV_ANY
);
1375 else if (!(st
== IRT_I64
|| st
== IRT_U64
))
1376 sp
[i
] = emitconv(sp
[i
], dt
, IRT_INT
,
1377 (s
[i
]->info
& CTF_UNSIGNED
) ? 0 : IRCONV_SEXT
);
1381 /* Assume true comparison. Fixup and emit pending guard later. */
1385 op
= mm
== MM_lt
? IR_LT
: IR_LE
;
1386 if (dt
== IRT_U32
|| dt
== IRT_U64
)
1387 op
+= (IR_ULT
-IR_LT
);
1389 lj_ir_set(J
, IRTG(op
, dt
), sp
[0], sp
[1]);
1390 J
->postproc
= LJ_POST_FIXGUARD
;
1393 tr
= emitir(IRT(mm
+(int)IR_ADD
-(int)MM_add
, dt
), sp
[0], sp
[1]);
1395 return emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, id
), tr
);
1400 static TRef
crec_arith_ptr(jit_State
*J
, TRef
*sp
, CType
**s
, MMS mm
)
1402 CTState
*cts
= ctype_ctsG(J2G(J
));
1404 if (!(sp
[0] && sp
[1])) return 0;
1405 if (ctype_isptr(ctp
->info
) || ctype_isrefarray(ctp
->info
)) {
1406 if ((mm
== MM_sub
|| mm
== MM_eq
|| mm
== MM_lt
|| mm
== MM_le
) &&
1407 (ctype_isptr(s
[1]->info
) || ctype_isrefarray(s
[1]->info
))) {
1408 if (mm
== MM_sub
) { /* Pointer difference. */
1410 CTSize sz
= lj_ctype_size(cts
, ctype_cid(ctp
->info
));
1411 if (sz
== 0 || (sz
& (sz
-1)) != 0)
1412 return 0; /* NYI: integer division. */
1413 tr
= emitir(IRT(IR_SUB
, IRT_INTP
), sp
[0], sp
[1]);
1414 tr
= emitir(IRT(IR_BSAR
, IRT_INTP
), tr
, lj_ir_kint(J
, lj_fls(sz
)));
1416 tr
= emitconv(tr
, IRT_NUM
, IRT_INTP
, 0);
1419 } else { /* Pointer comparison (unsigned). */
1420 /* Assume true comparison. Fixup and emit pending guard later. */
1421 IROp op
= mm
== MM_eq
? IR_EQ
: mm
== MM_lt
? IR_ULT
: IR_ULE
;
1422 lj_ir_set(J
, IRTG(op
, IRT_PTR
), sp
[0], sp
[1]);
1423 J
->postproc
= LJ_POST_FIXGUARD
;
1427 if (!((mm
== MM_add
|| mm
== MM_sub
) && ctype_isnum(s
[1]->info
)))
1429 } else if (mm
== MM_add
&& ctype_isnum(ctp
->info
) &&
1430 (ctype_isptr(s
[1]->info
) || ctype_isrefarray(s
[1]->info
))) {
1431 TRef tr
= sp
[0]; sp
[0] = sp
[1]; sp
[1] = tr
; /* Swap pointer and index. */
1438 IRType t
= tref_type(tr
);
1439 CTSize sz
= lj_ctype_size(cts
, ctype_cid(ctp
->info
));
1442 if (t
== IRT_NUM
|| t
== IRT_FLOAT
)
1443 tr
= emitconv(tr
, IRT_INTP
, t
, IRCONV_ANY
);
1444 else if (!(t
== IRT_I64
|| t
== IRT_U64
))
1445 tr
= emitconv(tr
, IRT_INTP
, IRT_INT
,
1446 ((t
- IRT_I8
) & 1) ? 0 : IRCONV_SEXT
);
1448 if (!tref_typerange(sp
[1], IRT_I8
, IRT_U32
)) {
1449 tr
= emitconv(tr
, IRT_INTP
, t
,
1450 (t
== IRT_NUM
|| t
== IRT_FLOAT
) ? IRCONV_ANY
: 0);
1453 tr
= emitir(IRT(IR_MUL
, IRT_INTP
), tr
, lj_ir_kintp(J
, sz
));
1454 tr
= emitir(IRT(mm
+(int)IR_ADD
-(int)MM_add
, IRT_PTR
), sp
[0], tr
);
1455 id
= lj_ctype_intern(cts
, CTINFO(CT_PTR
, CTALIGN_PTR
|ctype_cid(ctp
->info
)),
1457 return emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, id
), tr
);
1461 /* Record ctype arithmetic metamethods. */
1462 static TRef
crec_arith_meta(jit_State
*J
, TRef
*sp
, CType
**s
, CTState
*cts
,
1467 if (tviscdata(&rd
->argv
[0])) {
1468 CTypeID id
= argv2cdata(J
, J
->base
[0], &rd
->argv
[0])->ctypeid
;
1469 CType
*ct
= ctype_raw(cts
, id
);
1470 if (ctype_isptr(ct
->info
)) id
= ctype_cid(ct
->info
);
1471 tv
= lj_ctype_meta(cts
, id
, (MMS
)rd
->data
);
1473 if (!tv
&& J
->base
[1] && tviscdata(&rd
->argv
[1])) {
1474 CTypeID id
= argv2cdata(J
, J
->base
[1], &rd
->argv
[1])->ctypeid
;
1475 CType
*ct
= ctype_raw(cts
, id
);
1476 if (ctype_isptr(ct
->info
)) id
= ctype_cid(ct
->info
);
1477 tv
= lj_ctype_meta(cts
, id
, (MMS
)rd
->data
);
1482 crec_tailcall(J
, rd
, tv
);
1484 } /* NYI: non-function metamethods. */
1485 } else if ((MMS
)rd
->data
== MM_eq
) { /* Fallback cdata pointer comparison. */
1486 if (sp
[0] && sp
[1] && ctype_isnum(s
[0]->info
) == ctype_isnum(s
[1]->info
)) {
1487 /* Assume true comparison. Fixup and emit pending guard later. */
1488 lj_ir_set(J
, IRTG(IR_EQ
, IRT_PTR
), sp
[0], sp
[1]);
1489 J
->postproc
= LJ_POST_FIXGUARD
;
1495 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1499 void LJ_FASTCALL
recff_cdata_arith(jit_State
*J
, RecordFFData
*rd
)
1501 CTState
*cts
= ctype_ctsG(J2G(J
));
1505 for (i
= 0; i
< 2; i
++) {
1506 TRef tr
= J
->base
[i
];
1507 CType
*ct
= ctype_get(cts
, CTID_DOUBLE
);
1509 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1510 } else if (tref_iscdata(tr
)) {
1511 CTypeID id
= argv2cdata(J
, tr
, &rd
->argv
[i
])->ctypeid
;
1513 ct
= ctype_raw(cts
, id
);
1514 t
= crec_ct2irt(cts
, ct
);
1515 if (ctype_isptr(ct
->info
)) { /* Resolve pointer or reference. */
1516 tr
= emitir(IRT(IR_FLOAD
, t
), tr
, IRFL_CDATA_PTR
);
1517 if (ctype_isref(ct
->info
)) {
1518 ct
= ctype_rawchild(cts
, ct
);
1519 t
= crec_ct2irt(cts
, ct
);
1521 } else if (t
== IRT_I64
|| t
== IRT_U64
) {
1522 tr
= emitir(IRT(IR_FLOAD
, t
), tr
, IRFL_CDATA_INT64
);
1525 } else if (t
== IRT_INT
|| t
== IRT_U32
) {
1526 tr
= emitir(IRT(IR_FLOAD
, t
), tr
, IRFL_CDATA_INT
);
1527 if (ctype_isenum(ct
->info
)) ct
= ctype_child(cts
, ct
);
1529 } else if (ctype_isfunc(ct
->info
)) {
1530 CTypeID id0
= i
? ctype_typeid(cts
, s
[0]) : 0;
1531 tr
= emitir(IRT(IR_FLOAD
, IRT_PTR
), tr
, IRFL_CDATA_PTR
);
1533 lj_ctype_intern(cts
, CTINFO(CT_PTR
, CTALIGN_PTR
|id
), CTSIZE_PTR
));
1535 s
[0] = ctype_get(cts
, id0
); /* cts->tab may have been reallocated. */
1539 tr
= emitir(IRT(IR_ADD
, IRT_PTR
), tr
, lj_ir_kintp(J
, sizeof(GCcdata
)));
1541 if (ctype_isenum(ct
->info
)) ct
= ctype_child(cts
, ct
);
1542 if (ctype_isnum(ct
->info
)) {
1543 if (t
== IRT_CDATA
) {
1546 if (t
== IRT_I64
|| t
== IRT_U64
) lj_needsplit(J
);
1547 tr
= emitir(IRT(IR_XLOAD
, t
), tr
, 0);
1550 } else if (tref_isnil(tr
)) {
1551 tr
= lj_ir_kptr(J
, NULL
);
1552 ct
= ctype_get(cts
, CTID_P_VOID
);
1553 } else if (tref_isinteger(tr
)) {
1554 ct
= ctype_get(cts
, CTID_INT32
);
1555 } else if (tref_isstr(tr
)) {
1556 TRef tr2
= J
->base
[1-i
];
1557 CTypeID id
= argv2cdata(J
, tr2
, &rd
->argv
[1-i
])->ctypeid
;
1558 ct
= ctype_raw(cts
, id
);
1559 if (ctype_isenum(ct
->info
)) { /* Match string against enum constant. */
1560 GCstr
*str
= strV(&rd
->argv
[i
]);
1562 CType
*cct
= lj_ctype_getfield(cts
, ct
, str
, &ofs
);
1563 if (cct
&& ctype_isconstval(cct
->info
)) {
1564 /* Specialize to the name of the enum constant. */
1565 emitir(IRTG(IR_EQ
, IRT_STR
), tr
, lj_ir_kstr(J
, str
));
1566 ct
= ctype_child(cts
, cct
);
1567 tr
= lj_ir_kint(J
, (int32_t)ofs
);
1568 } else { /* Interpreter will throw or return false. */
1569 ct
= ctype_get(cts
, CTID_P_VOID
);
1571 } else if (ctype_isptr(ct
->info
)) {
1572 tr
= emitir(IRT(IR_ADD
, IRT_PTR
), tr
, lj_ir_kintp(J
, sizeof(GCstr
)));
1574 ct
= ctype_get(cts
, CTID_P_VOID
);
1576 } else if (!tref_isnum(tr
)) {
1578 ct
= ctype_get(cts
, CTID_P_VOID
);
1586 MMS mm
= (MMS
)rd
->data
;
1587 if ((mm
== MM_len
|| mm
== MM_concat
||
1588 (!(tr
= crec_arith_int64(J
, sp
, s
, mm
)) &&
1589 !(tr
= crec_arith_ptr(J
, sp
, s
, mm
)))) &&
1590 !(tr
= crec_arith_meta(J
, sp
, s
, cts
, rd
)))
1593 /* Fixup cdata comparisons, too. Avoids some cdata escapes. */
1594 if (J
->postproc
== LJ_POST_FIXGUARD
&& frame_iscont(J
->L
->base
-1) &&
1595 !irt_isguard(J
->guardemit
)) {
1596 const BCIns
*pc
= frame_contpc(J
->L
->base
-1) - 1;
1597 if (bc_op(*pc
) <= BC_ISNEP
) {
1598 J2G(J
)->tmptv
.u64
= (uint64_t)(uintptr_t)pc
;
1599 J
->postproc
= LJ_POST_FIXCOMP
;
1605 /* -- C library namespace metamethods ------------------------------------- */
1607 void LJ_FASTCALL
recff_clib_index(jit_State
*J
, RecordFFData
*rd
)
1609 CTState
*cts
= ctype_ctsG(J2G(J
));
1610 if (tref_isudata(J
->base
[0]) && tref_isstr(J
->base
[1]) &&
1611 udataV(&rd
->argv
[0])->udtype
== UDTYPE_FFI_CLIB
) {
1612 CLibrary
*cl
= (CLibrary
*)uddata(udataV(&rd
->argv
[0]));
1613 GCstr
*name
= strV(&rd
->argv
[1]);
1615 CTypeID id
= lj_ctype_getname(cts
, &ct
, name
, CLNS_INDEX
);
1616 cTValue
*tv
= lj_tab_getstr(cl
->cache
, name
);
1617 rd
->nres
= rd
->data
;
1618 if (id
&& tv
&& !tvisnil(tv
)) {
1619 /* Specialize to the symbol name and make the result a constant. */
1620 emitir(IRTG(IR_EQ
, IRT_STR
), J
->base
[1], lj_ir_kstr(J
, name
));
1621 if (ctype_isconstval(ct
->info
)) {
1622 if (ct
->size
>= 0x80000000u
&&
1623 (ctype_child(cts
, ct
)->info
& CTF_UNSIGNED
))
1624 J
->base
[0] = lj_ir_knum(J
, (lua_Number
)(uint32_t)ct
->size
);
1626 J
->base
[0] = lj_ir_kint(J
, (int32_t)ct
->size
);
1627 } else if (ctype_isextern(ct
->info
)) {
1628 CTypeID sid
= ctype_cid(ct
->info
);
1629 void *sp
= *(void **)cdataptr(cdataV(tv
));
1631 ct
= ctype_raw(cts
, sid
);
1632 if (LJ_64
&& !checkptr32(sp
))
1633 ptr
= lj_ir_kintp(J
, (uintptr_t)sp
);
1635 ptr
= lj_ir_kptr(J
, sp
);
1637 J
->base
[0] = crec_tv_ct(J
, ct
, sid
, ptr
);
1640 crec_ct_tv(J
, ct
, ptr
, J
->base
[2], &rd
->argv
[2]);
1643 J
->base
[0] = lj_ir_kgc(J
, obj2gco(cdataV(tv
)), IRT_CDATA
);
1646 lj_trace_err(J
, LJ_TRERR_NOCACHE
);
1648 } /* else: interpreter will throw. */
1651 /* -- FFI library functions ----------------------------------------------- */
1653 static TRef
crec_toint(jit_State
*J
, CTState
*cts
, TRef sp
, TValue
*sval
)
1655 return crec_ct_tv(J
, ctype_get(cts
, CTID_INT32
), 0, sp
, sval
);
1658 void LJ_FASTCALL
recff_ffi_new(jit_State
*J
, RecordFFData
*rd
)
1660 crec_alloc(J
, rd
, argv2ctype(J
, J
->base
[0], &rd
->argv
[0]));
1663 void LJ_FASTCALL
recff_ffi_errno(jit_State
*J
, RecordFFData
*rd
)
1667 lj_trace_err(J
, LJ_TRERR_NYICALL
);
1668 J
->base
[0] = lj_ir_call(J
, IRCALL_lj_vm_errno
);
1671 void LJ_FASTCALL
recff_ffi_string(jit_State
*J
, RecordFFData
*rd
)
1673 CTState
*cts
= ctype_ctsG(J2G(J
));
1674 TRef tr
= J
->base
[0];
1676 TRef trlen
= J
->base
[1];
1677 if (!tref_isnil(trlen
)) {
1678 trlen
= crec_toint(J
, cts
, trlen
, &rd
->argv
[1]);
1679 tr
= crec_ct_tv(J
, ctype_get(cts
, CTID_P_CVOID
), 0, tr
, &rd
->argv
[0]);
1681 tr
= crec_ct_tv(J
, ctype_get(cts
, CTID_P_CCHAR
), 0, tr
, &rd
->argv
[0]);
1682 trlen
= lj_ir_call(J
, IRCALL_strlen
, tr
);
1684 J
->base
[0] = emitir(IRT(IR_XSNEW
, IRT_STR
), tr
, trlen
);
1685 } /* else: interpreter will throw. */
1688 void LJ_FASTCALL
recff_ffi_copy(jit_State
*J
, RecordFFData
*rd
)
1690 CTState
*cts
= ctype_ctsG(J2G(J
));
1691 TRef trdst
= J
->base
[0], trsrc
= J
->base
[1], trlen
= J
->base
[2];
1692 if (trdst
&& trsrc
&& (trlen
|| tref_isstr(trsrc
))) {
1693 trdst
= crec_ct_tv(J
, ctype_get(cts
, CTID_P_VOID
), 0, trdst
, &rd
->argv
[0]);
1694 trsrc
= crec_ct_tv(J
, ctype_get(cts
, CTID_P_CVOID
), 0, trsrc
, &rd
->argv
[1]);
1696 trlen
= crec_toint(J
, cts
, trlen
, &rd
->argv
[2]);
1698 trlen
= emitir(IRTI(IR_FLOAD
), J
->base
[1], IRFL_STR_LEN
);
1699 trlen
= emitir(IRTI(IR_ADD
), trlen
, lj_ir_kint(J
, 1));
1702 crec_copy(J
, trdst
, trsrc
, trlen
, NULL
);
1703 } /* else: interpreter will throw. */
1706 void LJ_FASTCALL
recff_ffi_fill(jit_State
*J
, RecordFFData
*rd
)
1708 CTState
*cts
= ctype_ctsG(J2G(J
));
1709 TRef trdst
= J
->base
[0], trlen
= J
->base
[1], trfill
= J
->base
[2];
1710 if (trdst
&& trlen
) {
1712 if (tviscdata(&rd
->argv
[0])) { /* Get alignment of original destination. */
1714 CType
*ct
= ctype_raw(cts
, cdataV(&rd
->argv
[0])->ctypeid
);
1715 if (ctype_isptr(ct
->info
))
1716 ct
= ctype_rawchild(cts
, ct
);
1717 step
= (1u<<ctype_align(lj_ctype_info(cts
, ctype_typeid(cts
, ct
), &sz
)));
1719 trdst
= crec_ct_tv(J
, ctype_get(cts
, CTID_P_VOID
), 0, trdst
, &rd
->argv
[0]);
1720 trlen
= crec_toint(J
, cts
, trlen
, &rd
->argv
[1]);
1722 trfill
= crec_toint(J
, cts
, trfill
, &rd
->argv
[2]);
1724 trfill
= lj_ir_kint(J
, 0);
1726 crec_fill(J
, trdst
, trlen
, trfill
, step
);
1727 } /* else: interpreter will throw. */
1730 void LJ_FASTCALL
recff_ffi_typeof(jit_State
*J
, RecordFFData
*rd
)
1732 if (tref_iscdata(J
->base
[0])) {
1733 TRef trid
= lj_ir_kint(J
, argv2ctype(J
, J
->base
[0], &rd
->argv
[0]));
1734 J
->base
[0] = emitir(IRTG(IR_CNEWI
, IRT_CDATA
),
1735 lj_ir_kint(J
, CTID_CTYPEID
), trid
);
1737 setfuncV(J
->L
, &J
->errinfo
, J
->fn
);
1738 lj_trace_err_info(J
, LJ_TRERR_NYIFFU
);
1742 void LJ_FASTCALL
recff_ffi_istype(jit_State
*J
, RecordFFData
*rd
)
1744 argv2ctype(J
, J
->base
[0], &rd
->argv
[0]);
1745 if (tref_iscdata(J
->base
[1])) {
1746 argv2ctype(J
, J
->base
[1], &rd
->argv
[1]);
1747 J
->postproc
= LJ_POST_FIXBOOL
;
1748 J
->base
[0] = TREF_TRUE
;
1750 J
->base
[0] = TREF_FALSE
;
1754 void LJ_FASTCALL
recff_ffi_abi(jit_State
*J
, RecordFFData
*rd
)
1756 if (tref_isstr(J
->base
[0])) {
1757 /* Specialize to the ABI string to make the boolean result a constant. */
1758 emitir(IRTG(IR_EQ
, IRT_STR
), J
->base
[0], lj_ir_kstr(J
, strV(&rd
->argv
[0])));
1759 J
->postproc
= LJ_POST_FIXBOOL
;
1760 J
->base
[0] = TREF_TRUE
;
1762 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1766 /* Record ffi.sizeof(), ffi.alignof(), ffi.offsetof(). */
1767 void LJ_FASTCALL
recff_ffi_xof(jit_State
*J
, RecordFFData
*rd
)
1769 CTypeID id
= argv2ctype(J
, J
->base
[0], &rd
->argv
[0]);
1770 if (rd
->data
== FF_ffi_sizeof
) {
1771 CType
*ct
= lj_ctype_rawref(ctype_ctsG(J2G(J
)), id
);
1772 if (ctype_isvltype(ct
->info
))
1773 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1774 } else if (rd
->data
== FF_ffi_offsetof
) { /* Specialize to the field name. */
1775 if (!tref_isstr(J
->base
[1]))
1776 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1777 emitir(IRTG(IR_EQ
, IRT_STR
), J
->base
[1], lj_ir_kstr(J
, strV(&rd
->argv
[1])));
1778 rd
->nres
= 3; /* Just in case. */
1780 J
->postproc
= LJ_POST_FIXCONST
;
1781 J
->base
[0] = J
->base
[1] = J
->base
[2] = TREF_NIL
;
1784 void LJ_FASTCALL
recff_ffi_gc(jit_State
*J
, RecordFFData
*rd
)
1786 argv2cdata(J
, J
->base
[0], &rd
->argv
[0]);
1788 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1789 crec_finalizer(J
, J
->base
[0], J
->base
[1], &rd
->argv
[1]);
1792 /* -- 64 bit bit.* library functions -------------------------------------- */
1794 /* Determine bit operation type from argument type. */
1795 static CTypeID
crec_bit64_type(CTState
*cts
, cTValue
*tv
)
1797 if (tviscdata(tv
)) {
1798 CType
*ct
= lj_ctype_rawref(cts
, cdataV(tv
)->ctypeid
);
1799 if (ctype_isenum(ct
->info
)) ct
= ctype_child(cts
, ct
);
1800 if ((ct
->info
& (CTMASK_NUM
|CTF_BOOL
|CTF_FP
|CTF_UNSIGNED
)) ==
1801 CTINFO(CT_NUM
, CTF_UNSIGNED
) && ct
->size
== 8)
1802 return CTID_UINT64
; /* Use uint64_t, since it has the highest rank. */
1803 return CTID_INT64
; /* Otherwise use int64_t. */
1805 return 0; /* Use regular 32 bit ops. */
1808 void LJ_FASTCALL
recff_bit64_tobit(jit_State
*J
, RecordFFData
*rd
)
1810 CTState
*cts
= ctype_ctsG(J2G(J
));
1811 TRef tr
= crec_ct_tv(J
, ctype_get(cts
, CTID_INT64
), 0,
1812 J
->base
[0], &rd
->argv
[0]);
1813 if (!tref_isinteger(tr
))
1814 tr
= emitconv(tr
, IRT_INT
, tref_type(tr
), 0);
1818 int LJ_FASTCALL
recff_bit64_unary(jit_State
*J
, RecordFFData
*rd
)
1820 CTState
*cts
= ctype_ctsG(J2G(J
));
1821 CTypeID id
= crec_bit64_type(cts
, &rd
->argv
[0]);
1823 TRef tr
= crec_ct_tv(J
, ctype_get(cts
, id
), 0, J
->base
[0], &rd
->argv
[0]);
1824 tr
= emitir(IRT(rd
->data
, id
-CTID_INT64
+IRT_I64
), tr
, 0);
1825 J
->base
[0] = emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, id
), tr
);
1831 int LJ_FASTCALL
recff_bit64_nary(jit_State
*J
, RecordFFData
*rd
)
1833 CTState
*cts
= ctype_ctsG(J2G(J
));
1836 for (i
= 0; J
->base
[i
] != 0; i
++) {
1837 CTypeID aid
= crec_bit64_type(cts
, &rd
->argv
[i
]);
1838 if (id
< aid
) id
= aid
; /* Determine highest type rank of all arguments. */
1841 CType
*ct
= ctype_get(cts
, id
);
1842 uint32_t ot
= IRT(rd
->data
, id
-CTID_INT64
+IRT_I64
);
1843 TRef tr
= crec_ct_tv(J
, ct
, 0, J
->base
[0], &rd
->argv
[0]);
1844 for (i
= 1; J
->base
[i
] != 0; i
++) {
1845 TRef tr2
= crec_ct_tv(J
, ct
, 0, J
->base
[i
], &rd
->argv
[i
]);
1846 tr
= emitir(ot
, tr
, tr2
);
1848 J
->base
[0] = emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, id
), tr
);
1854 int LJ_FASTCALL
recff_bit64_shift(jit_State
*J
, RecordFFData
*rd
)
1856 CTState
*cts
= ctype_ctsG(J2G(J
));
1859 if (J
->base
[0] && tref_iscdata(J
->base
[1])) {
1860 tsh
= crec_ct_tv(J
, ctype_get(cts
, CTID_INT64
), 0,
1861 J
->base
[1], &rd
->argv
[1]);
1862 if (!tref_isinteger(tsh
))
1863 tsh
= emitconv(tsh
, IRT_INT
, tref_type(tsh
), 0);
1866 id
= crec_bit64_type(cts
, &rd
->argv
[0]);
1868 TRef tr
= crec_ct_tv(J
, ctype_get(cts
, id
), 0, J
->base
[0], &rd
->argv
[0]);
1869 uint32_t op
= rd
->data
;
1870 if (!tsh
) tsh
= lj_opt_narrow_tobit(J
, J
->base
[1]);
1871 if (!(op
< IR_BROL
? LJ_TARGET_MASKSHIFT
: LJ_TARGET_MASKROT
) &&
1873 tsh
= emitir(IRTI(IR_BAND
), tsh
, lj_ir_kint(J
, 63));
1874 #ifdef LJ_TARGET_UNIFYROT
1875 if (op
== (LJ_TARGET_UNIFYROT
== 1 ? IR_BROR
: IR_BROL
)) {
1876 op
= LJ_TARGET_UNIFYROT
== 1 ? IR_BROL
: IR_BROR
;
1877 tsh
= emitir(IRTI(IR_NEG
), tsh
, tsh
);
1880 tr
= emitir(IRT(op
, id
-CTID_INT64
+IRT_I64
), tr
, tsh
);
1881 J
->base
[0] = emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, id
), tr
);
1887 TRef
recff_bit64_tohex(jit_State
*J
, RecordFFData
*rd
, TRef hdr
)
1889 CTState
*cts
= ctype_ctsG(J2G(J
));
1890 CTypeID id
= crec_bit64_type(cts
, &rd
->argv
[0]);
1891 TRef tr
, trsf
= J
->base
[1];
1892 SFormat sf
= (STRFMT_UINT
|STRFMT_T_HEX
);
1896 n
= (int32_t)lj_carith_check64(J
->L
, 2, &id2
);
1898 trsf
= crec_ct_tv(J
, ctype_get(cts
, CTID_INT32
), 0, trsf
, &rd
->argv
[1]);
1900 trsf
= lj_opt_narrow_tobit(J
, trsf
);
1901 emitir(IRTGI(IR_EQ
), trsf
, lj_ir_kint(J
, n
)); /* Specialize to n. */
1905 if (n
< 0) { n
= (int32_t)(~n
+1u); sf
|= STRFMT_F_UPPER
; }
1906 if ((uint32_t)n
> 254) n
= 254;
1907 sf
|= ((SFormat
)((n
+1)&255) << STRFMT_SH_PREC
);
1909 tr
= crec_ct_tv(J
, ctype_get(cts
, id
), 0, J
->base
[0], &rd
->argv
[0]);
1911 tr
= emitir(IRT(IR_BAND
, IRT_U64
), tr
,
1912 lj_ir_kint64(J
, ((uint64_t)1 << 4*n
)-1));
1914 tr
= lj_opt_narrow_tobit(J
, J
->base
[0]);
1916 tr
= emitir(IRTI(IR_BAND
), tr
, lj_ir_kint(J
, (int32_t)((1u << 4*n
)-1)));
1917 tr
= emitconv(tr
, IRT_U64
, IRT_INT
, 0); /* No sign-extension. */
1920 return lj_ir_call(J
, IRCALL_lj_strfmt_putfxint
, hdr
, lj_ir_kint(J
, sf
), tr
);
1923 /* -- Miscellaneous library functions ------------------------------------- */
1925 void LJ_FASTCALL
lj_crecord_tonumber(jit_State
*J
, RecordFFData
*rd
)
1927 CTState
*cts
= ctype_ctsG(J2G(J
));
1928 CType
*d
, *ct
= lj_ctype_rawref(cts
, cdataV(&rd
->argv
[0])->ctypeid
);
1929 if (ctype_isenum(ct
->info
)) ct
= ctype_child(cts
, ct
);
1930 if (ctype_isnum(ct
->info
) || ctype_iscomplex(ct
->info
)) {
1931 if (ctype_isinteger_or_bool(ct
->info
) && ct
->size
<= 4 &&
1932 !(ct
->size
== 4 && (ct
->info
& CTF_UNSIGNED
)))
1933 d
= ctype_get(cts
, CTID_INT32
);
1935 d
= ctype_get(cts
, CTID_DOUBLE
);
1936 J
->base
[0] = crec_ct_tv(J
, d
, 0, J
->base
[0], &rd
->argv
[0]);
1938 /* Specialize to the ctype that couldn't be converted. */
1939 argv2cdata(J
, J
->base
[0], &rd
->argv
[0]);
1940 J
->base
[0] = TREF_NIL
;
1944 TRef
lj_crecord_loadiu64(jit_State
*J
, TRef tr
, cTValue
*o
)
1946 CTypeID id
= argv2cdata(J
, tr
, o
)->ctypeid
;
1947 if (!(id
== CTID_INT64
|| id
== CTID_UINT64
))
1948 lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1950 return emitir(IRT(IR_FLOAD
, id
== CTID_INT64
? IRT_I64
: IRT_U64
), tr
,
1955 TRef
lj_crecord_topcvoid(jit_State
*J
, TRef tr
, cTValue
*o
)
1957 CTState
*cts
= ctype_ctsG(J2G(J
));
1958 if (!tref_iscdata(tr
)) lj_trace_err(J
, LJ_TRERR_BADTYPE
);
1959 return crec_ct_tv(J
, ctype_get(cts
, CTID_P_CVOID
), 0, tr
, o
);
1962 TRef
lj_crecord_topuint8(jit_State
*J
, TRef tr
)
1964 return emitir(IRTG(IR_CNEWI
, IRT_CDATA
), lj_ir_kint(J
, CTID_P_UINT8
), tr
);