/*
** Common header for IR emitter and optimizations.
** Copyright (C) 2005-2012 Mike Pall. See Copyright Notice in luajit.h
*/
/* IR emitter. */
LJ_FUNC void LJ_FASTCALL lj_ir_growtop(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_ir_emit(jit_State *J);
/* Save current IR in J->fold.ins, but do not emit it (yet). */
static LJ_AINLINE void lj_ir_set_(jit_State *J, uint16_t ot, IRRef1 a, IRRef1 b)
{
  J->fold.ins.ot = ot; J->fold.ins.op1 = a; J->fold.ins.op2 = b;
}

#define lj_ir_set(J, ot, a, b) \
  lj_ir_set_(J, (uint16_t)(ot), (IRRef1)(a), (IRRef1)(b))
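
/* Usage sketch (illustrative; the emitir name is how lj_record.c wraps these
** calls and is not part of this header): the recorder stages an instruction
** with lj_ir_set() and immediately runs the fold engine on it:
**
**   #define emitir(ot, a, b)  (lj_ir_set(J, (ot), (a), (b)), lj_opt_fold(J))
**
** The staged instruction only reaches the IR buffer if lj_opt_fold() decides
** to emit it.
*/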

/* Get ref of next IR instruction and optionally grow IR.
** Note: this may invalidate all IRIns*!
*/
static LJ_AINLINE IRRef lj_ir_nextins(jit_State *J)
{
  IRRef ref = J->cur.nins;
  if (LJ_UNLIKELY(ref >= J->irtoplim)) lj_ir_growtop(J);
  J->cur.nins = ref + 1;
  return ref;
}
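
/* Usage sketch (a minimal sketch of the intended calling pattern): reserve the
** slot first, then take the IRIns pointer, because growing the buffer may move
** all IRIns:
**
**   IRRef ref = lj_ir_nextins(J);      -- may call lj_ir_growtop()
**   IRIns *ir = &J->cur.ir[ref];       -- derive the pointer after the call
**   ir->ot = J->fold.ins.ot;
**
** Any IRIns * held across this call must be re-derived from its IRRef.
*/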

/* Interning of constants. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_kint(jit_State *J, int32_t k);
LJ_FUNC void lj_ir_k64_freeall(jit_State *J);
LJ_FUNC TRef lj_ir_k64(jit_State *J, IROp op, cTValue *tv);
LJ_FUNC cTValue *lj_ir_k64_find(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_knum_u64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_knumint(jit_State *J, lua_Number n);
LJ_FUNC TRef lj_ir_kint64(jit_State *J, uint64_t u64);
LJ_FUNC TRef lj_ir_kgc(jit_State *J, GCobj *o, IRType t);
LJ_FUNC TRef lj_ir_kptr_(jit_State *J, IROp op, void *ptr);
LJ_FUNC TRef lj_ir_knull(jit_State *J, IRType t);
LJ_FUNC TRef lj_ir_kslot(jit_State *J, TRef key, IRRef slot);

#if LJ_64
#define lj_ir_kintp(J, k)	lj_ir_kint64(J, (uint64_t)(k))
#else
#define lj_ir_kintp(J, k)	lj_ir_kint(J, (int32_t)(k))
#endif

static LJ_AINLINE TRef lj_ir_knum(jit_State *J, lua_Number n)
{
  TValue tv;
  tv.n = n;
  return lj_ir_knum_u64(J, tv.u64);
}

#define lj_ir_kstr(J, str)	lj_ir_kgc(J, obj2gco((str)), IRT_STR)
#define lj_ir_ktab(J, tab)	lj_ir_kgc(J, obj2gco((tab)), IRT_TAB)
#define lj_ir_kfunc(J, func)	lj_ir_kgc(J, obj2gco((func)), IRT_FUNC)
#define lj_ir_kptr(J, ptr)	lj_ir_kptr_(J, IR_KPTR, (ptr))
#define lj_ir_kkptr(J, ptr)	lj_ir_kptr_(J, IR_KKPTR, (ptr))
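
/* Usage sketch (illustrative): constants are interned, so requesting the same
** value twice yields the same reference:
**
**   TRef tr1 = lj_ir_kint(J, 42);
**   TRef tr2 = lj_ir_kint(J, 42);    -- same TRef as tr1
**   TRef trs = lj_ir_kstr(J, s);     -- s is a GCstr *
**
** The GC-object variants reference their operand from the IR, which keeps it
** from being collected while the trace exists.
*/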

/* Special FP constants. */
#define lj_ir_knum_zero(J)	lj_ir_knum_u64(J, U64x(00000000,00000000))
#define lj_ir_knum_one(J)	lj_ir_knum_u64(J, U64x(3ff00000,00000000))
#define lj_ir_knum_tobit(J)	lj_ir_knum_u64(J, U64x(43380000,00000000))
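
/* Note: the U64x arguments are raw IEEE-754 double bit patterns:
** 0x0000000000000000 is +0.0, 0x3ff0000000000000 is 1.0 and
** 0x4338000000000000 is 2^52+2^51, the bias constant added to a double so its
** low mantissa bits hold the 32 bit integer result of a TOBIT conversion.
*/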

/* Special 128 bit SIMD constants. */
#define lj_ir_knum_abs(J)	lj_ir_k64(J, IR_KNUM, LJ_KSIMD(J, LJ_KSIMD_ABS))
#define lj_ir_knum_neg(J)	lj_ir_k64(J, IR_KNUM, LJ_KSIMD(J, LJ_KSIMD_NEG))

/* Access to constants. */
LJ_FUNC void lj_ir_kvalue(lua_State *L, TValue *tv, const IRIns *ir);

/* Convert IR operand types. */
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonumber(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tonum(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_ir_tostr(jit_State *J, TRef tr);

/* Miscellaneous IR ops. */
LJ_FUNC int lj_ir_numcmp(lua_Number a, lua_Number b, IROp op);
LJ_FUNC int lj_ir_strcmp(GCstr *a, GCstr *b, IROp op);
LJ_FUNC void lj_ir_rollback(jit_State *J, IRRef ref);

/* Emit IR instructions with on-the-fly optimizations. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fold(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cse(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_cselim(jit_State *J, IRRef lim);

/* Special return values for the fold functions. */
enum {
  NEXTFOLD,		/* Couldn't fold, pass on. */
  RETRYFOLD,		/* Retry fold with modified fins. */
  KINTFOLD,		/* Return ref for int constant in fins->i. */
  FAILFOLD,		/* Guard would always fail. */
  DROPFOLD		/* Guard eliminated. */
};

#define INTFOLD(k)	((J->fold.ins.i = (k)), (TRef)KINTFOLD)
#define INT64FOLD(k)	(lj_ir_kint64(J, (k)))
#define CONDFOLD(cond)	((TRef)FAILFOLD + (TRef)(cond))
#define LEFTFOLD	(J->fold.ins.op1)
#define RIGHTFOLD	(J->fold.ins.op2)
#define CSEFOLD		(lj_opt_cse(J))
#define EMITFOLD	(lj_ir_emit(J))
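
/* Usage sketch (a simplified fold rule in the style of lj_opt_fold.c; the
** LJFOLD/LJFOLDF markers and the fleft/fright shorthands belong to that file,
** and kfold_add_kint is a made-up name for illustration):
**
**   LJFOLD(ADD KINT KINT)
**   LJFOLDF(kfold_add_kint)
**   {
**     return INTFOLD(fleft->i + fright->i);
**   }
**
** A rule returns one of the special values above, a constant via INTFOLD()/
** INT64FOLD(), an existing operand via LEFTFOLD/RIGHTFOLD, or uses CSEFOLD/
** EMITFOLD to CSE or emit the instruction.
*/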

/* Load/store forwarding. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_aload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_hload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_xload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
LJ_FUNC int LJ_FASTCALL lj_opt_fwd_tptr(jit_State *J, IRRef lim);
LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);

/* Dead-store elimination. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_ustore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_fstore(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_dse_xstore(jit_State *J);

/* Narrowing of numbers. */
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_convert(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_index(jit_State *J, TRef key);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_toint(jit_State *J, TRef tr);
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_tobit(jit_State *J, TRef tr);
#if LJ_HASFFI
LJ_FUNC TRef LJ_FASTCALL lj_opt_narrow_cindex(jit_State *J, TRef key);
#endif
LJ_FUNC TRef lj_opt_narrow_arith(jit_State *J, TRef rb, TRef rc,
				 TValue *vb, TValue *vc, IROp op);
LJ_FUNC TRef lj_opt_narrow_unm(jit_State *J, TRef rc, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_mod(jit_State *J, TRef rb, TRef rc, TValue *vc);
LJ_FUNC TRef lj_opt_narrow_pow(jit_State *J, TRef rb, TRef rc, TValue *vc);
LJ_FUNC IRType lj_opt_narrow_forl(jit_State *J, cTValue *forbase);

/* Optimization passes. */
LJ_FUNC void lj_opt_dce(jit_State *J);
LJ_FUNC int lj_opt_loop(jit_State *J);
#if LJ_SOFTFP || (LJ_32 && LJ_HASFFI)
LJ_FUNC void lj_opt_split(jit_State *J);
#else
#define lj_opt_split(J)	UNUSED(J)
#endif