2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #ifndef incl_HPHP_VM_CG_H_
18 #define incl_HPHP_VM_CG_H_
21 #include "hphp/runtime/vm/jit/ir.h"
22 #include "hphp/runtime/vm/jit/ir-unit.h"
23 #include "hphp/runtime/vm/jit/reg-alloc.h"
24 #include "hphp/runtime/base/rds.h"
25 #include "hphp/runtime/vm/jit/arg-group.h"
26 #include "hphp/runtime/vm/jit/code-gen-helpers.h"
27 #include "hphp/runtime/vm/jit/translator-x64.h"
28 #include "hphp/runtime/vm/jit/state-vector.h"
30 namespace HPHP
{ namespace JIT
{
// Options controlling how a native call records a sync point (how the VM
// state can be reconstructed at that call site).
// NOTE(review): this extract is missing lines — at least kSyncPoint (used as
// a default argument for recordSyncPoint later in this header) is not
// visible here; confirm the full enumerator list against the original file.
32 enum class SyncOptions
{
36 kSmashableAndSyncPoint
,
39 // Returned information from cgCallHelper
// NOTE(review): the struct body (members and closing brace) is not visible
// in this extract — presumably it carries the call's return address; confirm
// against the full header.
40 struct CallHelperInfo
{
44 // Information about where code was generated, for pretty-printing.
46 explicit AsmInfo(const IRUnit
& unit
)
47 : instRanges(unit
, TcaRange(nullptr, nullptr))
48 , asmRanges(unit
, TcaRange(nullptr, nullptr))
49 , astubRanges(unit
, TcaRange(nullptr, nullptr))
52 // Asm address info for each instruction and block
53 StateVector
<IRInstruction
,TcaRange
> instRanges
;
54 StateVector
<Block
,TcaRange
> asmRanges
;
55 StateVector
<Block
,TcaRange
> astubRanges
;
57 void updateForInstruction(IRInstruction
* inst
, TCA start
, TCA end
);
60 typedef StateVector
<IRInstruction
, RegSet
> LiveRegs
;
62 // Stuff we need to preserve between blocks while generating code,
63 // and address information produced during codegen.
// NOTE(review): the struct header ("struct CodegenState {"), several
// initializer-list entries (presumably regs, liveRegs, asmInfo), the ctor
// body, and some members are missing from this extract — confirm against
// the full header.
65 CodegenState(const IRUnit
& unit
, const RegAllocInfo
& regs
,
66 const LiveRegs
& liveRegs
, AsmInfo
* asmInfo
)
67 : patches(unit
, nullptr)
68 , addresses(unit
, nullptr)
72 , catches(unit
, CatchInfo())
75 // Each block has a list of addresses to patch, and an address if
76 // it's already been emitted.
77 StateVector
<Block
,void*> patches
;
78 StateVector
<Block
,TCA
> addresses
;
80 // True if this block's terminal Jmp has a destination equal to the
81 // next block in the same assembler.
// NOTE(review): the member this comment documents is not visible here.
84 // output from register allocator
85 const RegAllocInfo
& regs
;
87 // for each instruction, holds the RegSet of registers that must be
88 // preserved across that instruction. This is for push/pop of caller-saved
// registers (sentence appears truncated in this extract).
90 const LiveRegs
& liveRegs
;
92 // Output: start/end ranges of machine code addresses of each instruction.
// NOTE(review): the AsmInfo* member this comment refers to is not visible.
95 // Used to pass information about the state of the world at native
96 // calls between cgCallHelper and cgBeginCatch.
97 StateVector
<Block
, CatchInfo
> catches
;
100 constexpr Reg64
rCgGP (reg::r11
);
101 constexpr RegXMM
rCgXMM0(reg::xmm0
);
102 constexpr RegXMM
rCgXMM1(reg::xmm1
);
// Emits x64 machine code for one IRUnit, instruction by instruction.
104 struct CodeGenerator
{
// Convenience alias for the x64 assembler used throughout this class.
105 typedef JIT::X64Assembler Asm
;
// Bind the generator to an IR unit, the main/stubs code areas, the
// translator, and shared per-unit codegen state.
// NOTE(review): several initializer-list entries and the ctor body appear
// to be missing from this extract (e.g. m_unit, m_as, m_tx64, m_state) —
// confirm against the full header.
107 CodeGenerator(const IRUnit
& unit
, CodeBlock
& mainCode
, CodeBlock
& stubsCode
,
108 JIT::TranslatorX64
* tx64
, CodegenState
& state
)
110 , m_mainCode(mainCode
)
111 , m_stubsCode(stubsCode
)
113 , m_astubs(stubsCode
)
116 , m_rScratch(InvalidReg
)
// Generate code for every instruction in `block'; bcMap (when non-null,
// presumably) collects bytecode-to-machine-code mapping entries.
121 void cgBlock(Block
* block
, std::vector
<TransBCMapping
>* bcMap
);
// Generate code for a single IR instruction.
124 Address
cgInst(IRInstruction
* inst
);
// Physical location assigned to the i-th source of the current instruction.
126 const PhysLoc
srcLoc(unsigned i
) const {
127 return (*m_instRegs
).src(i
);
// Physical location assigned to the i-th destination of the current
// instruction.
129 const PhysLoc
dstLoc(unsigned i
) const {
130 return (*m_instRegs
).dst(i
);
// Build an ArgGroup for the current instruction using its register map.
132 ArgGroup
argGroup() const {
133 return ArgGroup(m_curInst
, *m_instRegs
);
136 // Autogenerate function declarations for each IR instruction in ir.h
// NOTE(review): the IR_OPCODES(O) expansion and the matching #undef O are
// not visible in this extract.
137 #define O(name, dsts, srcs, flags) void cg##name(IRInstruction* inst);
// Emit a call to the native helper implementing the given instruction.
141 void cgCallNative(Asm
& a
, IRInstruction
* inst
);
// Descriptors for where a native call's return value(s) should be placed.
143 CallDest
callDest(PhysReg reg0
, PhysReg reg1
= InvalidReg
) const;
144 CallDest
callDest(const IRInstruction
*) const;
145 CallDest
callDestTV(const IRInstruction
*) const;
146 CallDest
callDest2(const IRInstruction
*) const;
// Emit a call to a C++ helper, routing the result per dstInfo.
// NOTE(review): both overloads' parameter lists are truncated in this
// extract (the callee, sync option, and argument group are not visible).
149 CallHelperInfo
cgCallHelper(Asm
& a
,
151 const CallDest
& dstInfo
,
155 // Overload to make the toSave RegSet optional:
156 CallHelperInfo
cgCallHelper(Asm
& a
,
158 const CallDest
& dstInfo
,
// Shared implementation for the InterpOne opcodes.
161 void cgInterpOneCommon(IRInstruction
* inst
);
// Whether a store writes just the value or the full TypedValue (type+data).
163 enum class Width
{ Value
, Full
};
164 template<class MemRef
>
165 void cgStore(MemRef dst
, SSATmp
* src
, PhysLoc src_loc
, Width
);
166 template<class MemRef
>
167 void cgStoreTypedValue(MemRef dst
, SSATmp
* src
, PhysLoc src_loc
);
169 // helpers to load a value in dst. When label is not null a type check
170 // is performed against value to ensure it is of the type expected by dst
171 template<class BaseRef
>
172 void cgLoad(SSATmp
* dst
, PhysLoc dstLoc
, BaseRef value
,
173 Block
* label
= nullptr);
174 template<class BaseRef
>
175 void cgLoadTypedValue(SSATmp
* dst
, PhysLoc dstLoc
, BaseRef base
,
176 Block
* label
= nullptr);
178 // internal helpers to manage register conflicts from a source to a PhysReg
180 // If the conflict cannot be resolved the out param isResolved is set to
181 // false and the caller should take proper action
// NOTE(review): both overloads below are truncated in this extract — the
// trailing parameters (including the isResolved out-param mentioned above)
// are not visible.
182 IndexedMemoryRef
resolveRegCollision(PhysReg dst
,
183 IndexedMemoryRef value
,
185 MemoryRef
resolveRegCollision(PhysReg dst
,
// Emit a type test of `type' against typeSrc/dataSrc, invoking doJcc to
// branch on the resulting condition.
189 template<class Loc1
, class Loc2
, class JmpFn
>
190 void emitTypeTest(Type type
, Loc1 typeSrc
, Loc2 dataSrc
, JmpFn doJcc
,
191 OptType prevType
= folly::none
);
193 template<class DataLoc
, class JmpFn
>
194 void emitSpecializedTypeTest(Type type
, DataLoc data
, JmpFn doJcc
);
// Type check that branches to `taken' on failure.
197 void emitTypeCheck(Type type
, Loc typeSrc
, Loc dataSrc
, Block
* taken
,
198 OptType prevType
= folly::none
);
200 void emitTypeGuard(Type type
, Loc typeLoc
, Loc dataLoc
);
// Reference-count manipulation helpers.
202 void cgIncRefWork(Type type
, SSATmp
* src
, PhysLoc srcLoc
);
203 void cgDecRefWork(IRInstruction
* inst
, bool genZeroCheck
);
// Generic unary integer op applied to src, producing dst.
205 template<class OpInstr
, class Oper
>
206 void cgUnaryIntOp(PhysLoc dst
, SSATmp
* src
, PhysLoc src_loc
, OpInstr
, Oper
);
208 enum Commutativity
{ Commutative
, NonCommutative
};
210 void cgRoundCommon(IRInstruction
* inst
, RoundDirection dir
);
// Generic binary integer op, parameterized by the assembler member
// functions for immediate-register, register-register, and move forms.
// NOTE(review): the parameter list is truncated in this extract (e.g. an
// Oper functor / commutativity argument is not visible).
212 template<class Oper
, class RegType
>
213 void cgBinaryIntOp(IRInstruction
*,
214 void (Asm::*intImm
)(Immed
, RegType
),
215 void (Asm::*intRR
)(RegType
, RegType
),
216 void (Asm::*mov
)(RegType
, RegType
),
218 RegType (*conv
)(PhysReg
),
// Generic binary double op using the given XMM register-register insn.
220 void cgBinaryDblOp(IRInstruction
*,
221 void (Asm::*fpRR
)(RegXMM
, RegXMM
));
// Shared implementation for shift instructions, parameterized by the
// immediate and register forms of the shift.
// NOTE(review): trailing parameters are missing in this extract.
224 void cgShiftCommon(IRInstruction
* inst
,
225 void (Asm::*instrIR
)(Immed
, Reg64
),
226 void (Asm::*instrR
)(Reg64
),
229 void cgVerifyClsWork(IRInstruction
* inst
);
// Forwarded-call context helpers (both truncated in this extract).
231 void emitGetCtxFwdCallWithThis(PhysReg ctxReg
,
234 void emitGetCtxFwdCallWithThisDyn(PhysReg destCtxReg
,
// Conditional-jump codegen helpers.
238 void cgJcc(IRInstruction
* inst
); // helper
239 void cgReqBindJcc(IRInstruction
* inst
); // helper
240 void cgExitJcc(IRInstruction
* inst
); // helper
241 void cgJccInt(IRInstruction
* inst
); // helper
242 void cgReqBindJccInt(IRInstruction
* inst
); // helper
243 void cgExitJccInt(IRInstruction
* inst
); // helper
244 void emitCmpInt(IRInstruction
* inst
, ConditionCode
);
// Comparison helper parameterized by per-type primitive comparison
// functions; the boolean result is materialized via `setter'.
245 void cgCmpHelper(IRInstruction
* inst
,
246 void (Asm::*setter
)(Reg8
),
247 int64_t (*str_cmp_str
)(StringData
*, StringData
*),
248 int64_t (*str_cmp_int
)(StringData
*, int64_t),
249 int64_t (*str_cmp_obj
)(StringData
*, ObjectData
*),
250 int64_t (*obj_cmp_obj
)(ObjectData
*, ObjectData
*),
251 int64_t (*obj_cmp_int
)(ObjectData
*, int64_t),
252 int64_t (*arr_cmp_arr
)(ArrayData
*, ArrayData
*));
// Guard that side-exits to bytecode offset `taken' on type mismatch.
255 void emitSideExitGuard(Type type
, Loc typeLoc
,
256 Loc dataLoc
, Offset taken
,
257 OptType prevType
= folly::none
);
258 void emitReqBindJcc(ConditionCode cc
, const ReqBindJccData
*);
260 void emitCompare(IRInstruction
* inst
);
261 void emitCompareInt(IRInstruction
* inst
);
262 void emitTestZero(SSATmp
*, PhysLoc
);
// Emit an inc/dec if the operand pattern permits; returns whether code was
// emitted.
263 bool emitIncDecHelper(PhysLoc dst
, SSATmp
* src1
, PhysLoc loc1
,
264 SSATmp
* src2
, PhysLoc loc2
,
265 void(Asm::*emitFunc
)(Reg64
));
// Scratch-register selection and immediate/XMM materialization helpers.
268 PhysReg
selectScratchReg(IRInstruction
* inst
);
269 void emitLoadImm(Asm
& as
, int64_t val
, PhysReg dstReg
);
// NOTE(review): prepXMMReg's parameter list is truncated in this extract.
270 PhysReg
prepXMMReg(const SSATmp
* tmp
,
274 void emitSetCc(IRInstruction
*, ConditionCode
);
275 template<class JmpFn
>
276 void emitIsTypeTest(IRInstruction
* inst
, JmpFn doJcc
);
277 void doubleCmp(Asm
& a
, RegXMM xmmReg0
, RegXMM xmmReg1
);
// Shared implementations for the IsType family of opcodes.
278 void cgIsTypeCommon(IRInstruction
* inst
, bool negate
);
279 void cgJmpIsTypeCommon(IRInstruction
* inst
, bool negate
);
280 void cgIsTypeMemCommon(IRInstruction
*, bool negate
);
281 void emitInstanceBitmaskCheck(IRInstruction
*);
282 void emitTraceRet(Asm
& as
);
283 void emitInitObjProps(PhysReg dstReg
, const Class
* cls
, size_t nProps
);
// DecRef machinery: static-bit checks and typed/untyped decref variants.
// NOTE(review): most of the declarations below are truncated in this
// extract (trailing parameters missing).
285 template <typename F
>
286 Address
cgCheckStaticBitAndDecRef(Type type
,
289 Address
cgCheckStaticBitAndDecRef(Type type
,
291 Address
cgCheckRefCountedType(PhysReg typeReg
);
292 Address
cgCheckRefCountedType(PhysReg baseReg
,
294 void cgDecRefStaticType(Type type
,
297 void cgDecRefDynamicType(PhysReg typeReg
,
300 void cgDecRefDynamicTypeMem(PhysReg baseReg
,
302 void cgDecRefMem(Type type
,
// Shared implementations for iterator and cached-lookup opcodes.
306 void cgIterNextCommon(IRInstruction
* inst
);
307 void cgIterInitCommon(IRInstruction
* inst
);
308 void cgMIterNextCommon(IRInstruction
* inst
);
309 void cgMIterInitCommon(IRInstruction
* inst
);
310 void cgLdFuncCachedCommon(IRInstruction
* inst
);
311 void cgLookupCnsCommon(IRInstruction
* inst
);
312 RDS::Handle
cgLdClsCachedCommon(IRInstruction
* inst
);
// Emit a forward conditional jump to `target'.
313 void emitFwdJcc(ConditionCode cc
, Block
* target
);
314 void emitFwdJcc(Asm
& a
, ConditionCode cc
, Block
* target
);
// Current translation context accessors.
315 const Func
* curFunc() const;
316 Class
* curClass() const { return curFunc()->cls(); }
317 const Unit
* curUnit() const { return curFunc()->unit(); }
// Record a sync point at the current emit position.
318 void recordSyncPoint(Asm
& as
, SyncOptions sync
= SyncOptions::kSyncPoint
);
// Frame offset of iterator `id' (or of the iterator id held in tmp).
319 int iterOffset(SSATmp
* tmp
) { return iterOffset(tmp
->getValInt()); }
320 int iterOffset(uint32_t id
);
321 void emitReqBindAddr(const Func
* func
, TCA
& dest
, Offset offset
);
323 void emitAdjustSp(PhysReg spReg
, PhysReg dstReg
, int64_t adjustment
);
324 void emitConvBoolOrIntToDbl(IRInstruction
* inst
);
325 void cgLdClsMethodCacheCommon(IRInstruction
* inst
, Offset offset
);
// NOTE(review): the /* opener of the doc comment below is missing in this
// extract, as are several lines of the template function bodies (Label
// declarations in some arms, the unlikely()/thenBlock()/elseBlock()
// invocations, and closing braces).
328 * Generate an if-block that branches around some unlikely code, handling
329 * the cases when a == astubs and a != astubs. cc is the branch condition
330 * to run the unlikely block.
332 * Passes the proper assembler to use to the unlikely function.
334 template <class Block
>
335 void unlikelyIfBlock(ConditionCode cc
, Block unlikely
) {
336 if (m_as
.base() == m_astubs
.base()) {
338 m_as
.jcc(ccNegate(cc
), done
);
340 asm_label(m_as
, done
);
342 Label unlikelyLabel
, done
;
343 m_as
.jcc(cc
, unlikelyLabel
);
344 asm_label(m_astubs
, unlikelyLabel
);
347 asm_label(m_as
, done
);
// Generate an if-block: run thenBlock only when cc holds.
351 template <class Then
>
352 void ifBlock(ConditionCode cc
, Then thenBlock
) {
354 m_as
.jcc8(ccNegate(cc
), done
);
356 asm_label(m_as
, done
);
359 // Generate an if-then-else block
360 template <class Then
, class Else
>
361 void ifThenElse(Asm
& a
, ConditionCode cc
, Then thenBlock
, Else elseBlock
) {
362 Label elseLabel
, done
;
363 a
.jcc8(ccNegate(cc
), elseLabel
);
366 asm_label(a
, elseLabel
);
371 // Generate an if-then-else block into m_as.
372 template <class Then
, class Else
>
373 void ifThenElse(ConditionCode cc
, Then thenBlock
, Else elseBlock
) {
374 ifThenElse(m_as
, cc
, thenBlock
, elseBlock
);
// NOTE(review): the /* opener of the following doc comment is also missing.
378 * Same as ifThenElse except the first block is off in astubs
380 template <class Then
, class Else
>
381 void unlikelyIfThenElse(ConditionCode cc
, Then unlikely
, Else elseBlock
) {
382 if (m_as
.base() == m_astubs
.base()) {
383 Label elseLabel
, done
;
384 m_as
.jcc8(ccNegate(cc
), elseLabel
);
387 asm_label(m_as
, elseLabel
);
389 asm_label(m_as
, done
);
391 Label unlikelyLabel
, done
;
392 m_as
.jcc(cc
, unlikelyLabel
);
394 asm_label(m_astubs
, unlikelyLabel
);
397 asm_label(m_as
, done
);
401 // This is for printing partially-generated traces when debugging
// NOTE(review): the debug-print member this comment documents is not
// visible in this extract; access specifiers (e.g. private:) also appear
// to be missing.
// Data members: the unit being translated, target code areas, assemblers,
// and per-instruction codegen state.
405 const IRUnit
& m_unit
;
406 CodeBlock
& m_mainCode
;
407 CodeBlock
& m_stubsCode
;
408 Asm m_as
; // current "main" assembler
409 Asm m_astubs
; // for stubs and other cold code
410 TranslatorX64
* m_tx64
;
411 CodegenState
& m_state
;
412 Reg64 m_rScratch
; // currently selected GP scratch reg
413 IRInstruction
* m_curInst
; // current instruction being generated
414 const RegAllocInfo::RegMap
* m_instRegs
; // registers for current m_curInst.
// Helpers whose names suggest use by object/closure construction codegen —
// confirm against the implementation file.
417 const Func
* loadClassCtor(Class
* cls
);
419 ObjectData
* createClHelper(Class
*, int, ActRec
*, TypedValue
*);
// Top-level entry point: generate machine code into the given main and
// stubs code blocks.
// NOTE(review): this declaration is truncated in this extract — parameters
// between stubsCode and bcMap (presumably the IRUnit and translator) are
// missing; confirm against the full header.
421 void genCode(CodeBlock
& mainCode
,
422 CodeBlock
& stubsCode
,
424 std::vector
<TransBCMapping
>* bcMap
,
426 const RegAllocInfo
& regs
);
428 // Helpers to compute a reference to a TypedValue type and data
429 inline MemoryRef
refTVType(PhysReg reg
) {
430 return reg
[TVOFF(m_type
)];
433 inline MemoryRef
refTVData(PhysReg reg
) {
434 return reg
[TVOFF(m_data
)];
437 inline MemoryRef
refTVType(MemoryRef ref
) {
438 return *(ref
.r
+ TVOFF(m_type
));
441 inline MemoryRef
refTVData(MemoryRef ref
) {
442 return *(ref
.r
+ TVOFF(m_data
));
445 inline IndexedMemoryRef
refTVType(IndexedMemoryRef ref
) {
446 return *(ref
.r
+ TVOFF(m_type
));
449 inline IndexedMemoryRef
refTVData(IndexedMemoryRef ref
) {
450 return *(ref
.r
+ TVOFF(m_data
));