2 ** Common header for IR emitter and optimizations.
3 ** Copyright (C) 2005-2023 Mike Pall. See Copyright Notice in luajit.h
16 LJ_FUNC
void LJ_FASTCALL
lj_ir_growtop(jit_State
*J
);
17 LJ_FUNC TRef LJ_FASTCALL
lj_ir_emit(jit_State
*J
);
19 /* Save current IR in J->fold.ins, but do not emit it (yet). */
20 static LJ_AINLINE
void lj_ir_set_(jit_State
*J
, uint16_t ot
, IRRef1 a
, IRRef1 b
)
22 J
->fold
.ins
.ot
= ot
; J
->fold
.ins
.op1
= a
; J
->fold
.ins
.op2
= b
;
25 #define lj_ir_set(J, ot, a, b) \
26 lj_ir_set_(J, (uint16_t)(ot), (IRRef1)(a), (IRRef1)(b))
28 /* Get ref of next IR instruction and optionally grow IR.
29 ** Note: this may invalidate all IRIns*!
31 static LJ_AINLINE IRRef
lj_ir_nextins(jit_State
*J
)
33 IRRef ref
= J
->cur
.nins
;
34 if (LJ_UNLIKELY(ref
>= J
->irtoplim
)) lj_ir_growtop(J
);
35 J
->cur
.nins
= ref
+ 1;
39 LJ_FUNC TRef
lj_ir_ggfload(jit_State
*J
, IRType t
, uintptr_t ofs
);
41 /* Interning of constants. */
42 LJ_FUNC TRef LJ_FASTCALL
lj_ir_kint(jit_State
*J
, int32_t k
);
43 LJ_FUNC TRef
lj_ir_k64(jit_State
*J
, IROp op
, uint64_t u64
);
44 LJ_FUNC TRef
lj_ir_knum_u64(jit_State
*J
, uint64_t u64
);
45 LJ_FUNC TRef
lj_ir_knumint(jit_State
*J
, lua_Number n
);
46 LJ_FUNC TRef
lj_ir_kint64(jit_State
*J
, uint64_t u64
);
47 LJ_FUNC TRef
lj_ir_kgc(jit_State
*J
, GCobj
*o
, IRType t
);
48 LJ_FUNC TRef
lj_ir_kptr_(jit_State
*J
, IROp op
, void *ptr
);
49 LJ_FUNC TRef
lj_ir_knull(jit_State
*J
, IRType t
);
50 LJ_FUNC TRef
lj_ir_kslot(jit_State
*J
, TRef key
, IRRef slot
);
51 LJ_FUNC TRef
lj_ir_ktrace(jit_State
*J
);
/* Intern a pointer-sized integer constant. */
#if LJ_64
#define lj_ir_kintp(J, k) lj_ir_kint64(J, (uint64_t)(k))
#else
#define lj_ir_kintp(J, k) lj_ir_kint(J, (int32_t)(k))
#endif

/* Intern a GC-pointer-sized integer constant. */
#if LJ_GC64
#define lj_ir_kintpgc lj_ir_kintp
#else
#define lj_ir_kintpgc lj_ir_kint
#endif
65 static LJ_AINLINE TRef
lj_ir_knum(jit_State
*J
, lua_Number n
)
69 return lj_ir_knum_u64(J
, tv
.u64
);
/* Interning of GC object and pointer constants (typed lj_ir_kgc/lj_ir_kptr_
** wrappers).
*/
#define lj_ir_kstr(J, str) lj_ir_kgc(J, obj2gco((str)), IRT_STR)
#define lj_ir_ktab(J, tab) lj_ir_kgc(J, obj2gco((tab)), IRT_TAB)
#define lj_ir_kfunc(J, func) lj_ir_kgc(J, obj2gco((func)), IRT_FUNC)
#define lj_ir_kptr(J, ptr) lj_ir_kptr_(J, IR_KPTR, (ptr))
#define lj_ir_kkptr(J, ptr) lj_ir_kptr_(J, IR_KKPTR, (ptr))

/* Special FP constants (raw IEEE-754 double bit patterns). */
#define lj_ir_knum_zero(J) lj_ir_knum_u64(J, U64x(00000000,00000000))
#define lj_ir_knum_one(J) lj_ir_knum_u64(J, U64x(3ff00000,00000000))
#define lj_ir_knum_tobit(J) lj_ir_knum_u64(J, U64x(43380000,00000000))

/* Special 128 bit SIMD constants, loaded by offset from the GG state. */
#define lj_ir_ksimd(J, idx) \
  lj_ir_ggfload(J, IRT_NUM, (uintptr_t)LJ_KSIMD(J, idx) - (uintptr_t)J2GG(J))
87 /* Access to constants. */
88 LJ_FUNC
void lj_ir_kvalue(lua_State
*L
, TValue
*tv
, const IRIns
*ir
);
90 /* Convert IR operand types. */
91 LJ_FUNC TRef LJ_FASTCALL
lj_ir_tonumber(jit_State
*J
, TRef tr
);
92 LJ_FUNC TRef LJ_FASTCALL
lj_ir_tonum(jit_State
*J
, TRef tr
);
93 LJ_FUNC TRef LJ_FASTCALL
lj_ir_tostr(jit_State
*J
, TRef tr
);
95 /* Miscellaneous IR ops. */
96 LJ_FUNC
int lj_ir_numcmp(lua_Number a
, lua_Number b
, IROp op
);
97 LJ_FUNC
int lj_ir_strcmp(GCstr
*a
, GCstr
*b
, IROp op
);
98 LJ_FUNC
void lj_ir_rollback(jit_State
*J
, IRRef ref
);
100 /* Emit IR instructions with on-the-fly optimizations. */
101 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fold(jit_State
*J
);
102 LJ_FUNC TRef LJ_FASTCALL
lj_opt_cse(jit_State
*J
);
103 LJ_FUNC TRef LJ_FASTCALL
lj_opt_cselim(jit_State
*J
, IRRef lim
);
105 /* Special return values for the fold functions. */
107 NEXTFOLD
, /* Couldn't fold, pass on. */
108 RETRYFOLD
, /* Retry fold with modified fins. */
109 KINTFOLD
, /* Return ref for int constant in fins->i. */
110 FAILFOLD
, /* Guard would always fail. */
111 DROPFOLD
, /* Guard eliminated. */
/* Helper macros used by fold functions to produce their result. */
#define INTFOLD(k) ((J->fold.ins.i = (k)), (TRef)KINTFOLD)
#define INT64FOLD(k) (lj_ir_kint64(J, (k)))
/* cond ? DROPFOLD : FAILFOLD (DROPFOLD directly follows FAILFOLD). */
#define CONDFOLD(cond) ((TRef)FAILFOLD + (TRef)(cond))
#define LEFTFOLD (J->fold.ins.op1)
#define RIGHTFOLD (J->fold.ins.op2)
#define CSEFOLD (lj_opt_cse(J))
#define EMITFOLD (lj_ir_emit(J))
123 /* Load/store forwarding. */
124 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_aload(jit_State
*J
);
125 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_hload(jit_State
*J
);
126 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_uload(jit_State
*J
);
127 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_fload(jit_State
*J
);
128 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_xload(jit_State
*J
);
129 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_alen(jit_State
*J
);
130 LJ_FUNC TRef LJ_FASTCALL
lj_opt_fwd_hrefk(jit_State
*J
);
131 LJ_FUNC
int LJ_FASTCALL
lj_opt_fwd_href_nokey(jit_State
*J
);
132 LJ_FUNC
int LJ_FASTCALL
lj_opt_fwd_tptr(jit_State
*J
, IRRef lim
);
133 LJ_FUNC
int LJ_FASTCALL
lj_opt_fwd_sbuf(jit_State
*J
, IRRef lim
);
134 LJ_FUNC
int lj_opt_fwd_wasnonnil(jit_State
*J
, IROpT loadop
, IRRef xref
);
136 /* Dead-store elimination. */
137 LJ_FUNC TRef LJ_FASTCALL
lj_opt_dse_ahstore(jit_State
*J
);
138 LJ_FUNC TRef LJ_FASTCALL
lj_opt_dse_ustore(jit_State
*J
);
139 LJ_FUNC TRef LJ_FASTCALL
lj_opt_dse_fstore(jit_State
*J
);
140 LJ_FUNC TRef LJ_FASTCALL
lj_opt_dse_xstore(jit_State
*J
);
143 LJ_FUNC TRef LJ_FASTCALL
lj_opt_narrow_convert(jit_State
*J
);
144 LJ_FUNC TRef LJ_FASTCALL
lj_opt_narrow_index(jit_State
*J
, TRef key
);
145 LJ_FUNC TRef LJ_FASTCALL
lj_opt_narrow_toint(jit_State
*J
, TRef tr
);
146 LJ_FUNC TRef LJ_FASTCALL
lj_opt_narrow_tobit(jit_State
*J
, TRef tr
);
148 LJ_FUNC TRef LJ_FASTCALL
lj_opt_narrow_cindex(jit_State
*J
, TRef key
);
150 LJ_FUNC TRef
lj_opt_narrow_arith(jit_State
*J
, TRef rb
, TRef rc
,
151 TValue
*vb
, TValue
*vc
, IROp op
);
152 LJ_FUNC TRef
lj_opt_narrow_unm(jit_State
*J
, TRef rc
, TValue
*vc
);
153 LJ_FUNC TRef
lj_opt_narrow_mod(jit_State
*J
, TRef rb
, TRef rc
, TValue
*vb
, TValue
*vc
);
154 LJ_FUNC IRType
lj_opt_narrow_forl(jit_State
*J
, cTValue
*forbase
);
156 /* Optimization passes. */
157 LJ_FUNC
void lj_opt_dce(jit_State
*J
);
158 LJ_FUNC
int lj_opt_loop(jit_State
*J
);
159 #if LJ_SOFTFP32 || (LJ_32 && LJ_HASFFI)
160 LJ_FUNC
void lj_opt_split(jit_State
*J
);
162 #define lj_opt_split(J) UNUSED(J)
164 LJ_FUNC
void lj_opt_sink(jit_State
*J
);