Remove JSOP_BEGIN and fix tracer integration issues (bug 603044, r=luke+dmandelin).
[mozilla-central.git] js/src/methodjit/Compiler.cpp
blob 23fba7995c3913885f9c0fe16b9afa6881bde40f
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * David Anderson <danderson@mozilla.com>
25 * David Mandelin <dmandelin@mozilla.com>
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
41 #include "MethodJIT.h"
42 #include "jsnum.h"
43 #include "jsbool.h"
44 #include "jsiter.h"
45 #include "Compiler.h"
46 #include "StubCalls.h"
47 #include "MonoIC.h"
48 #include "PolyIC.h"
49 #include "Retcon.h"
50 #include "assembler/jit/ExecutableAllocator.h"
51 #include "assembler/assembler/LinkBuffer.h"
52 #include "FrameState-inl.h"
53 #include "jsobjinlines.h"
54 #include "jsscriptinlines.h"
55 #include "InlineFrameAssembler.h"
56 #include "jscompartment.h"
57 #include "jsobjinlines.h"
58 #include "jsopcodeinlines.h"
60 #include "jsautooplen.h"
62 using namespace js;
63 using namespace js::mjit;
64 #if defined JS_POLYIC
65 using namespace js::mjit::ic;
66 #endif
68 #define ADD_CALLSITE(stub) if (debugMode) addCallSite(__LINE__, (stub))
70 #if defined(JS_METHODJIT_SPEW)
71 static const char *OpcodeNames[] = {
72 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
73 # include "jsopcode.tbl"
74 # undef OPDEF
76 #endif
78 mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp)
79 : BaseCompiler(cx),
80 fp(fp),
81 script(fp->script()),
82 scopeChain(&fp->scopeChain()),
83 globalObj(scopeChain->getGlobal()),
84 fun(fp->isFunctionFrame() && !fp->isEvalFrame()
85 ? fp->fun()
86 : NULL),
87 isConstructing(fp->isConstructing()),
88 analysis(cx, script), jumpMap(NULL), frame(cx, script, masm),
89 branchPatches(ContextAllocPolicy(cx)),
90 #if defined JS_MONOIC
91 mics(ContextAllocPolicy(cx)),
92 callICs(ContextAllocPolicy(cx)),
93 #endif
94 #if defined JS_POLYIC
95 pics(ContextAllocPolicy(cx)),
96 #endif
97 callPatches(ContextAllocPolicy(cx)),
98 callSites(ContextAllocPolicy(cx)),
99 doubleList(ContextAllocPolicy(cx)),
100 stubcc(cx, *this, frame, script),
101 debugMode(cx->compartment->debugMode)
102 #if defined JS_TRACER
103 ,addTraceHints(cx->traceJitEnabled)
104 #endif
108 CompileStatus
109 mjit::Compiler::compile()
111 JS_ASSERT(!script->isEmpty());
112 JS_ASSERT_IF(isConstructing, !script->jitCtor);
113 JS_ASSERT_IF(!isConstructing, !script->jitNormal);
115 JITScript **jit = isConstructing ? &script->jitCtor : &script->jitNormal;
116 void **checkAddr = isConstructing
117 ? &script->jitArityCheckCtor
118 : &script->jitArityCheckNormal;
120 CompileStatus status = performCompilation(jit);
121 if (status == Compile_Okay) {
122 // Global scripts don't have an arity check entry. That's okay, we
123 // just need a pointer so the VM can quickly decide whether this
124 // method can be JIT'd or not. Global scripts cannot be IC'd, since
125 // they have no functions, so there is no danger.
126 *checkAddr = (*jit)->arityCheckEntry
127 ? (*jit)->arityCheckEntry
128 : (*jit)->invokeEntry;
129 } else {
130 *checkAddr = JS_UNJITTABLE_SCRIPT;
133 return status;
136 #define CHECK_STATUS(expr) \
137 JS_BEGIN_MACRO \
138 CompileStatus status_ = (expr); \
139 if (status_ != Compile_Okay) \
140 return status_; \
141 JS_END_MACRO
143 CompileStatus
144 mjit::Compiler::performCompilation(JITScript **jitp)
146 JaegerSpew(JSpew_Scripts, "compiling script (file \"%s\") (line \"%d\") (length \"%d\")\n",
147 script->filename, script->lineno, script->length);
149 /* Perform bytecode analysis. */
150 if (!analysis.analyze()) {
151 if (analysis.OOM())
152 return Compile_Error;
153 JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
154 return Compile_Abort;
157 uint32 nargs = fun ? fun->nargs : 0;
158 if (!frame.init(nargs) || !stubcc.init(nargs))
159 return Compile_Abort;
161 jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
162 if (!jumpMap)
163 return Compile_Error;
164 #ifdef DEBUG
165 for (uint32 i = 0; i < script->length; i++)
166 jumpMap[i] = Label();
167 #endif
169 #ifdef JS_METHODJIT_SPEW
170 Profiler prof;
171 prof.start();
172 #endif
174 /* Initialize PC early so stub calls in the prologue can be fallible. */
175 PC = script->code;
177 #ifdef JS_METHODJIT
178 script->debugMode = debugMode;
179 #endif
181 for (uint32 i = 0; i < script->nClosedVars; i++)
182 frame.setClosedVar(script->getClosedVar(i));
184 CHECK_STATUS(generatePrologue());
185 CHECK_STATUS(generateMethod());
186 CHECK_STATUS(generateEpilogue());
187 CHECK_STATUS(finishThisUp(jitp));
189 #ifdef JS_METHODJIT_SPEW
190 prof.stop();
191 JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
192 #endif
194 JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%ld\")\n",
195 (*jitp)->code.m_code.executableAddress(), (*jitp)->code.m_size);
197 return Compile_Okay;
200 #undef CHECK_STATUS
202 mjit::Compiler::~Compiler()
204 cx->free(jumpMap);
207 CompileStatus JS_NEVER_INLINE
208 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
210 JS_ASSERT(cx->fp() == fp);
212 Compiler cc(cx, fp);
214 return cc.compile();
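/*
 * Emit the function prologue. For function frames this lays down the entry
 * points described below (normal invoke, the fast path reached after the
 * arity check, and the out-of-line arity check itself), guards that enough
 * stack space remains, initializes locals to undefined, creates the call
 * object for heavyweight functions, and loads the scope chain if the script
 * uses it.
 */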
217 CompileStatus
218 mjit::Compiler::generatePrologue()
220 invokeLabel = masm.label();
223 * If there is no function, then this can only be called via JaegerShot(),
224 * which expects an existing frame to be initialized like the interpreter.
226 if (fun) {
227 Jump j = masm.jump();
230 * Entry point #2: The caller has partially constructed a frame, and
231 * either argc >= nargs or the arity check has corrected the frame.
233 invokeLabel = masm.label();
235 Label fastPath = masm.label();
237 /* Store this early on so slow paths can access it. */
238 masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
242 * Entry point #3: The caller has partially constructed a frame,
243 * but argc might be != nargs, so an arity check might be called.
245 * This loops back to entry point #2.
247 arityLabel = stubcc.masm.label();
248 Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
249 Imm32(fun->nargs));
250 stubcc.crossJump(argMatch, fastPath);
252 if (JSParamReg_Argc != Registers::ArgReg1)
253 stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
255 /* Slow path - call the arity check function. Returns new fp. */
256 stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
257 stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
258 stubcc.call(stubs::FixupArity);
259 stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
260 stubcc.crossJump(stubcc.masm.jump(), fastPath);
264 * Guard that there is enough stack space. Note we include the size of
265 * a second frame, to ensure we can create a frame from call sites.
267 masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
268 JSFrameReg,
269 Registers::ReturnReg);
270 Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
271 FrameAddress(offsetof(VMFrame, stackLimit)));
273 /* If the stack check fails... */
275 stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
276 stubcc.call(stubs::HitStackQuota);
277 stubcc.crossJump(stubcc.masm.jump(), masm.label());
280 /* Set locals to undefined, as in initCallFrameLatePrologue */
281 for (uint32 i = 0; i < script->nfixed; i++) {
282 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
283 masm.storeValue(UndefinedValue(), local);
286 /* Create the call object. */
287 if (fun->isHeavyweight()) {
288 prepareStubCall(Uses(0));
289 stubCall(stubs::GetCallObject);
292 j.linkTo(masm.label(), &masm);
294 if (analysis.usesScopeChain() && !fun->isHeavyweight()) {
296 * Load the scope chain into the frame if necessary. The scope chain
297 * is always set for global and eval frames, and will have been set by
298 * GetCallObject for heavyweight function frames.
300 RegisterID t0 = Registers::ReturnReg;
301 Jump hasScope = masm.branchTest32(Assembler::NonZero,
302 FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
303 masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
304 masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
305 masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
306 hasScope.linkTo(masm.label(), &masm);
310 if (isConstructing)
311 constructThis();
313 if (debugMode)
314 stubCall(stubs::EnterScript);
316 return Compile_Okay;
319 CompileStatus
320 mjit::Compiler::generateEpilogue()
322 return Compile_Okay;
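/*
 * Finish compilation: link pending branch patches, copy the fast (masm) and
 * slow (stubcc) paths into one executable allocation, and build the
 * JITScript -- the pc -> ncode map, MIC/call IC/PIC descriptors and the call
 * site table -- then patch double constants and mark the code executable.
 */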
325 CompileStatus
326 mjit::Compiler::finishThisUp(JITScript **jitp)
328 for (size_t i = 0; i < branchPatches.length(); i++) {
329 Label label = labelOf(branchPatches[i].pc);
330 branchPatches[i].jump.linkTo(label, &masm);
333 #ifdef JS_CPU_ARM
334 masm.forceFlushConstantPool();
335 stubcc.masm.forceFlushConstantPool();
336 #endif
337 JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %u, Slow code (stubcc) size = %u.\n", masm.size(), stubcc.size());
339 size_t totalSize = masm.size() +
340 stubcc.size() +
341 doubleList.length() * sizeof(double);
343 JSC::ExecutablePool *execPool = getExecPool(totalSize);
344 if (!execPool)
345 return Compile_Abort;
347 uint8 *result = (uint8 *)execPool->alloc(totalSize);
348 JSC::ExecutableAllocator::makeWritable(result, totalSize);
349 masm.executableCopy(result);
350 stubcc.masm.executableCopy(result + masm.size());
352 JSC::LinkBuffer fullCode(result, totalSize);
353 JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
355 size_t totalBytes = sizeof(JITScript) +
356 sizeof(void *) * script->length +
357 #if defined JS_MONOIC
358 sizeof(ic::MICInfo) * mics.length() +
359 sizeof(ic::CallICInfo) * callICs.length() +
360 #endif
361 #if defined JS_POLYIC
362 sizeof(ic::PICInfo) * pics.length() +
363 #endif
364 sizeof(CallSite) * callSites.length();
366 uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
367 if (!cursor) {
368 execPool->release();
369 return Compile_Error;
372 JITScript *jit = (JITScript *)cursor;
373 cursor += sizeof(JITScript);
375 jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
376 jit->nCallSites = callSites.length();
377 jit->invokeEntry = result;
379 /* Build the pc -> ncode mapping. */
380 void **nmap = (void **)cursor;
381 cursor += sizeof(void *) * script->length;
383 for (size_t i = 0; i < script->length; i++) {
384 Label L = jumpMap[i];
385 if (analysis[i].safePoint) {
386 JS_ASSERT(L.isValid());
387 nmap[i] = (uint8 *)(result + masm.distanceOf(L));
391 if (fun) {
392 jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
393 jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
396 #if defined JS_MONOIC
397 jit->nMICs = mics.length();
398 if (mics.length()) {
399 jit->mics = (ic::MICInfo *)cursor;
400 cursor += sizeof(ic::MICInfo) * mics.length();
401 } else {
402 jit->mics = NULL;
405 if (ic::MICInfo *scriptMICs = jit->mics) {
406 for (size_t i = 0; i < mics.length(); i++) {
407 scriptMICs[i].kind = mics[i].kind;
408 scriptMICs[i].entry = fullCode.locationOf(mics[i].entry);
409 switch (mics[i].kind) {
410 case ic::MICInfo::GET:
411 case ic::MICInfo::SET:
412 scriptMICs[i].load = fullCode.locationOf(mics[i].load);
413 scriptMICs[i].shape = fullCode.locationOf(mics[i].shape);
414 scriptMICs[i].stubCall = stubCode.locationOf(mics[i].call);
415 scriptMICs[i].stubEntry = stubCode.locationOf(mics[i].stubEntry);
416 scriptMICs[i].u.name.typeConst = mics[i].u.name.typeConst;
417 scriptMICs[i].u.name.dataConst = mics[i].u.name.dataConst;
418 #if defined JS_PUNBOX64
419 scriptMICs[i].patchValueOffset = mics[i].patchValueOffset;
420 #endif
421 break;
422 case ic::MICInfo::TRACER: {
423 uint32 offs = uint32(mics[i].jumpTarget - script->code);
424 JS_ASSERT(jumpMap[offs].isValid());
425 scriptMICs[i].traceHint = fullCode.locationOf(mics[i].traceHint);
426 scriptMICs[i].load = fullCode.locationOf(jumpMap[offs]);
427 scriptMICs[i].u.hints.hasSlowTraceHintOne = mics[i].slowTraceHintOne.isSet();
428 if (mics[i].slowTraceHintOne.isSet())
429 scriptMICs[i].slowTraceHintOne = stubCode.locationOf(mics[i].slowTraceHintOne.get());
430 scriptMICs[i].u.hints.hasSlowTraceHintTwo = mics[i].slowTraceHintTwo.isSet();
431 if (mics[i].slowTraceHintTwo.isSet())
432 scriptMICs[i].slowTraceHintTwo = stubCode.locationOf(mics[i].slowTraceHintTwo.get());
433 break;
435 default:
436 JS_NOT_REACHED("Bad MIC kind");
438 stubCode.patch(mics[i].addrLabel, &scriptMICs[i]);
442 jit->nCallICs = callICs.length();
443 if (callICs.length()) {
444 jit->callICs = (ic::CallICInfo *)cursor;
445 cursor += sizeof(ic::CallICInfo) * callICs.length();
446 } else {
447 jit->callICs = NULL;
450 if (ic::CallICInfo *cics = jit->callICs) {
451 for (size_t i = 0; i < callICs.length(); i++) {
452 cics[i].reset();
453 cics[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
454 cics[i].funJump = fullCode.locationOf(callICs[i].funJump);
455 cics[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
457 /* Compute the hot call offset. */
458 uint32 offset = fullCode.locationOf(callICs[i].hotJump) -
459 fullCode.locationOf(callICs[i].funGuard);
460 cics[i].hotJumpOffset = offset;
461 JS_ASSERT(cics[i].hotJumpOffset == offset);
463 /* Compute the join point offset. */
464 offset = fullCode.locationOf(callICs[i].joinPoint) -
465 fullCode.locationOf(callICs[i].funGuard);
466 cics[i].joinPointOffset = offset;
467 JS_ASSERT(cics[i].joinPointOffset == offset);
469 /* Compute the OOL call offset. */
470 offset = stubCode.locationOf(callICs[i].oolCall) -
471 stubCode.locationOf(callICs[i].slowPathStart);
472 cics[i].oolCallOffset = offset;
473 JS_ASSERT(cics[i].oolCallOffset == offset);
475 /* Compute the OOL jump offset. */
476 offset = stubCode.locationOf(callICs[i].oolJump) -
477 stubCode.locationOf(callICs[i].slowPathStart);
478 cics[i].oolJumpOffset = offset;
479 JS_ASSERT(cics[i].oolJumpOffset == offset);
481 /* Compute the slow join point offset. */
482 offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
483 stubCode.locationOf(callICs[i].slowPathStart);
484 cics[i].slowJoinOffset = offset;
485 JS_ASSERT(cics[i].slowJoinOffset == offset);
487 /* Compute the join point offset for continuing on the hot path. */
488 offset = stubCode.locationOf(callICs[i].hotPathLabel) -
489 stubCode.locationOf(callICs[i].funGuard);
490 cics[i].hotPathOffset = offset;
491 JS_ASSERT(cics[i].hotPathOffset == offset);
493 cics[i].pc = callICs[i].pc;
494 cics[i].argc = callICs[i].argc;
495 cics[i].funObjReg = callICs[i].funObjReg;
496 cics[i].funPtrReg = callICs[i].funPtrReg;
497 cics[i].frameDepth = callICs[i].frameDepth;
498 stubCode.patch(callICs[i].addrLabel1, &cics[i]);
499 stubCode.patch(callICs[i].addrLabel2, &cics[i]);
502 #endif /* JS_MONOIC */
504 for (size_t i = 0; i < callPatches.length(); i++) {
505 CallPatchInfo &patch = callPatches[i];
507 fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
508 if (patch.hasSlowNcode)
509 stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
512 #if defined JS_POLYIC
513 jit->nPICs = pics.length();
514 if (pics.length()) {
515 jit->pics = (ic::PICInfo *)cursor;
516 cursor += sizeof(ic::PICInfo) * pics.length();
517 } else {
518 jit->pics = NULL;
521 if (ic::PICInfo *scriptPICs = jit->pics) {
522 for (size_t i = 0; i < pics.length(); i++) {
523 pics[i].copySimpleMembersTo(scriptPICs[i]);
524 scriptPICs[i].fastPathStart = fullCode.locationOf(pics[i].fastPathStart);
525 scriptPICs[i].storeBack = fullCode.locationOf(pics[i].storeBack);
526 scriptPICs[i].slowPathStart = stubCode.locationOf(pics[i].slowPathStart);
527 scriptPICs[i].callReturn = uint16((uint8*)stubCode.locationOf(pics[i].callReturn).executableAddress() -
528 (uint8*)scriptPICs[i].slowPathStart.executableAddress());
529 scriptPICs[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
530 masm.distanceOf(pics[i].fastPathStart);
531 JS_ASSERT(scriptPICs[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
532 masm.distanceOf(pics[i].fastPathStart));
533 scriptPICs[i].shapeRegHasBaseShape = true;
535 # if defined JS_CPU_X64
536 memcpy(&scriptPICs[i].labels, &pics[i].labels, sizeof(PICLabels));
537 # endif
539 if (pics[i].kind == ic::PICInfo::SET ||
540 pics[i].kind == ic::PICInfo::SETMETHOD) {
541 scriptPICs[i].u.vr = pics[i].vr;
542 } else if (pics[i].kind != ic::PICInfo::NAME) {
543 if (pics[i].hasTypeCheck) {
544 int32 distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
545 stubcc.masm.distanceOf(pics[i].slowPathStart);
546 JS_ASSERT(distance <= 0);
547 scriptPICs[i].u.get.typeCheckOffset = distance;
550 new (&scriptPICs[i].execPools) ic::PICInfo::ExecPoolVector(SystemAllocPolicy());
551 scriptPICs[i].reset();
552 stubCode.patch(pics[i].addrLabel, &scriptPICs[i]);
555 #endif /* JS_POLYIC */
557 /* Link fast and slow paths together. */
558 stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
560 /* Patch all double references. */
561 size_t doubleOffset = masm.size() + stubcc.size();
562 double *doubleVec = (double *)(result + doubleOffset);
563 for (size_t i = 0; i < doubleList.length(); i++) {
564 DoublePatch &patch = doubleList[i];
565 doubleVec[i] = patch.d;
566 if (patch.ool)
567 stubCode.patch(patch.label, &doubleVec[i]);
568 else
569 fullCode.patch(patch.label, &doubleVec[i]);
572 /* Patch all outgoing calls. */
573 masm.finalize(fullCode);
574 stubcc.masm.finalize(stubCode);
576 JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
577 JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
579 /* Build the table of call sites. */
580 jit->nCallSites = callSites.length();
581 if (callSites.length()) {
582 CallSite *callSiteList = (CallSite *)cursor;
583 cursor += sizeof(CallSite) * callSites.length();
585 for (size_t i = 0; i < callSites.length(); i++) {
586 if (callSites[i].stub)
587 callSiteList[i].codeOffset = masm.size() + stubcc.masm.distanceOf(callSites[i].location);
588 else
589 callSiteList[i].codeOffset = masm.distanceOf(callSites[i].location);
590 callSiteList[i].pcOffset = callSites[i].pc - script->code;
591 callSiteList[i].id = callSites[i].id;
593 jit->callSites = callSiteList;
594 } else {
595 jit->callSites = NULL;
598 JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);
600 jit->nmap = nmap;
601 *jitp = jit;
603 return Compile_Okay;
606 #ifdef DEBUG
607 #define SPEW_OPCODE() \
608 JS_BEGIN_MACRO \
609 if (IsJaegerSpewChannelActive(JSpew_JSOps)) { \
610 JaegerSpew(JSpew_JSOps, " %2d ", frame.stackDepth()); \
611 js_Disassemble1(cx, script, PC, PC - script->code, \
612 JS_TRUE, stdout); \
614 JS_END_MACRO;
615 #else
616 #define SPEW_OPCODE()
617 #endif /* DEBUG */
619 #define BEGIN_CASE(name) case name:
620 #define END_CASE(name) \
621 JS_BEGIN_MACRO \
622 PC += name##_LENGTH; \
623 JS_END_MACRO; \
624 break;
626 CompileStatus
627 mjit::Compiler::generateMethod()
629 mjit::AutoScriptRetrapper trapper(cx, script);
631 for (;;) {
632 JSOp op = JSOp(*PC);
634 OpcodeStatus &opinfo = analysis[PC];
635 frame.setInTryBlock(opinfo.inTryBlock);
636 if (opinfo.nincoming || opinfo.trap) {
637 frame.syncAndForgetEverything(opinfo.stackDepth);
638 opinfo.safePoint = true;
640 jumpMap[uint32(PC - script->code)] = masm.label();
642 if (opinfo.trap) {
643 if (!trapper.untrap(PC))
644 return Compile_Error;
645 op = JSOp(*PC);
648 if (!opinfo.visited) {
649 if (op == JSOP_STOP)
650 break;
651 if (js_CodeSpec[op].length != -1)
652 PC += js_CodeSpec[op].length;
653 else
654 PC += js_GetVariableBytecodeLength(PC);
655 continue;
658 SPEW_OPCODE();
659 JS_ASSERT(frame.stackDepth() == opinfo.stackDepth);
661 if (opinfo.trap) {
662 prepareStubCall(Uses(0));
663 masm.move(ImmPtr(PC), Registers::ArgReg1);
664 stubCall(stubs::Trap);
666 #if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
 667 // When there is no fast call and we change the return address,
 668 // we need to adjust esp by 8. For a normal call, esp must not
 669 // be changed.
670 else {
671 masm.subPtr(Imm32(8), Registers::StackPointer);
672 masm.callLabel = masm.label();
673 masm.addPtr(Imm32(8), Registers::StackPointer);
675 #elif defined(_WIN64)
 676 // For the Win64 ABI, the stub caller makes 32 bytes of space on the stack.
677 else {
678 masm.subPtr(Imm32(32), Registers::StackPointer);
679 masm.callLabel = masm.label();
680 masm.addPtr(Imm32(32), Registers::StackPointer);
682 #endif
683 ADD_CALLSITE(false);
685 /**********************
686 * BEGIN COMPILER OPS *
687 **********************/
689 switch (op) {
690 BEGIN_CASE(JSOP_NOP)
691 END_CASE(JSOP_NOP)
693 BEGIN_CASE(JSOP_PUSH)
694 frame.push(UndefinedValue());
695 END_CASE(JSOP_PUSH)
697 BEGIN_CASE(JSOP_POPV)
698 BEGIN_CASE(JSOP_SETRVAL)
700 RegisterID reg = frame.allocReg();
701 masm.load32(FrameFlagsAddress(), reg);
702 masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
703 masm.store32(reg, FrameFlagsAddress());
704 frame.freeReg(reg);
706 FrameEntry *fe = frame.peek(-1);
707 frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
708 frame.pop();
710 END_CASE(JSOP_POPV)
712 BEGIN_CASE(JSOP_RETURN)
713 emitReturn(frame.peek(-1));
714 END_CASE(JSOP_RETURN)
716 BEGIN_CASE(JSOP_GOTO)
718 /* :XXX: this isn't really necessary if we follow the branch. */
719 frame.syncAndForgetEverything();
720 Jump j = masm.jump();
721 jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC));
723 END_CASE(JSOP_GOTO)
725 BEGIN_CASE(JSOP_IFEQ)
726 BEGIN_CASE(JSOP_IFNE)
727 jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC));
728 END_CASE(JSOP_IFNE)
730 BEGIN_CASE(JSOP_ARGUMENTS)
731 prepareStubCall(Uses(0));
732 stubCall(stubs::Arguments);
733 frame.pushSynced();
734 END_CASE(JSOP_ARGUMENTS)
736 BEGIN_CASE(JSOP_FORLOCAL)
737 iterNext();
738 frame.storeLocal(GET_SLOTNO(PC), true);
739 frame.pop();
740 END_CASE(JSOP_FORLOCAL)
742 BEGIN_CASE(JSOP_DUP)
743 frame.dup();
744 END_CASE(JSOP_DUP)
746 BEGIN_CASE(JSOP_DUP2)
747 frame.dup2();
748 END_CASE(JSOP_DUP2)
750 BEGIN_CASE(JSOP_BITOR)
751 BEGIN_CASE(JSOP_BITXOR)
752 BEGIN_CASE(JSOP_BITAND)
753 jsop_bitop(op);
754 END_CASE(JSOP_BITAND)
756 BEGIN_CASE(JSOP_LT)
757 BEGIN_CASE(JSOP_LE)
758 BEGIN_CASE(JSOP_GT)
759 BEGIN_CASE(JSOP_GE)
760 BEGIN_CASE(JSOP_EQ)
761 BEGIN_CASE(JSOP_NE)
763 /* Detect fusions. */
764 jsbytecode *next = &PC[JSOP_GE_LENGTH];
765 JSOp fused = JSOp(*next);
766 if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis[next].nincoming)
767 fused = JSOP_NOP;
769 /* Get jump target, if any. */
770 jsbytecode *target = NULL;
771 if (fused != JSOP_NOP)
772 target = next + GET_JUMP_OFFSET(next);
774 BoolStub stub = NULL;
775 switch (op) {
776 case JSOP_LT:
777 stub = stubs::LessThan;
778 break;
779 case JSOP_LE:
780 stub = stubs::LessEqual;
781 break;
782 case JSOP_GT:
783 stub = stubs::GreaterThan;
784 break;
785 case JSOP_GE:
786 stub = stubs::GreaterEqual;
787 break;
788 case JSOP_EQ:
789 stub = stubs::Equal;
790 break;
791 case JSOP_NE:
792 stub = stubs::NotEqual;
793 break;
794 default:
795 JS_NOT_REACHED("WAT");
796 break;
799 FrameEntry *rhs = frame.peek(-1);
800 FrameEntry *lhs = frame.peek(-2);
802 /* Check for easy cases that the parser does not constant fold. */
803 if (lhs->isConstant() && rhs->isConstant()) {
804 /* Primitives can be trivially constant folded. */
805 const Value &lv = lhs->getValue();
806 const Value &rv = rhs->getValue();
808 if (lv.isPrimitive() && rv.isPrimitive()) {
809 bool result = compareTwoValues(cx, op, lv, rv);
811 frame.pop();
812 frame.pop();
814 if (!target) {
815 frame.push(Value(BooleanValue(result)));
816 } else {
817 if (fused == JSOP_IFEQ)
818 result = !result;
820 /* Branch is never taken, don't bother doing anything. */
821 if (result) {
822 frame.syncAndForgetEverything();
823 Jump j = masm.jump();
824 jumpAndTrace(j, target);
827 } else {
828 emitStubCmpOp(stub, target, fused);
830 } else {
831 /* Anything else should go through the fast path generator. */
832 jsop_relational(op, stub, target, fused);
835 /* Advance PC manually. */
836 JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
837 JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
838 JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
839 JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
840 JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
842 PC += JSOP_GE_LENGTH;
843 if (fused != JSOP_NOP) {
844 SPEW_OPCODE();
845 PC += JSOP_IFNE_LENGTH;
847 break;
849 END_CASE(JSOP_GE)
851 BEGIN_CASE(JSOP_LSH)
852 jsop_bitop(op);
853 END_CASE(JSOP_LSH)
855 BEGIN_CASE(JSOP_RSH)
856 jsop_rsh();
857 END_CASE(JSOP_RSH)
859 BEGIN_CASE(JSOP_URSH)
860 jsop_bitop(op);
861 END_CASE(JSOP_URSH)
863 BEGIN_CASE(JSOP_ADD)
864 jsop_binary(op, stubs::Add);
865 END_CASE(JSOP_ADD)
867 BEGIN_CASE(JSOP_SUB)
868 jsop_binary(op, stubs::Sub);
869 END_CASE(JSOP_SUB)
871 BEGIN_CASE(JSOP_MUL)
872 jsop_binary(op, stubs::Mul);
873 END_CASE(JSOP_MUL)
875 BEGIN_CASE(JSOP_DIV)
876 jsop_binary(op, stubs::Div);
877 END_CASE(JSOP_DIV)
879 BEGIN_CASE(JSOP_MOD)
880 jsop_mod();
881 END_CASE(JSOP_MOD)
883 BEGIN_CASE(JSOP_NOT)
884 jsop_not();
885 END_CASE(JSOP_NOT)
887 BEGIN_CASE(JSOP_BITNOT)
889 FrameEntry *top = frame.peek(-1);
890 if (top->isConstant() && top->getValue().isPrimitive()) {
891 int32_t i;
892 ValueToECMAInt32(cx, top->getValue(), &i);
893 i = ~i;
894 frame.pop();
895 frame.push(Int32Value(i));
896 } else {
897 jsop_bitnot();
900 END_CASE(JSOP_BITNOT)
902 BEGIN_CASE(JSOP_NEG)
904 FrameEntry *top = frame.peek(-1);
905 if (top->isConstant() && top->getValue().isPrimitive()) {
906 double d;
907 ValueToNumber(cx, top->getValue(), &d);
908 d = -d;
909 frame.pop();
910 frame.push(NumberValue(d));
911 } else {
912 jsop_neg();
915 END_CASE(JSOP_NEG)
917 BEGIN_CASE(JSOP_POS)
918 jsop_pos();
919 END_CASE(JSOP_POS)
921 BEGIN_CASE(JSOP_DELNAME)
923 uint32 index = fullAtomIndex(PC);
924 JSAtom *atom = script->getAtom(index);
926 prepareStubCall(Uses(0));
927 masm.move(ImmPtr(atom), Registers::ArgReg1);
928 stubCall(stubs::DelName);
929 frame.pushSynced();
931 END_CASE(JSOP_DELNAME)
933 BEGIN_CASE(JSOP_DELPROP)
935 uint32 index = fullAtomIndex(PC);
936 JSAtom *atom = script->getAtom(index);
938 prepareStubCall(Uses(1));
939 masm.move(ImmPtr(atom), Registers::ArgReg1);
940 stubCall(STRICT_VARIANT(stubs::DelProp));
941 frame.pop();
942 frame.pushSynced();
944 END_CASE(JSOP_DELPROP)
946 BEGIN_CASE(JSOP_DELELEM)
947 prepareStubCall(Uses(2));
948 stubCall(STRICT_VARIANT(stubs::DelElem));
949 frame.popn(2);
950 frame.pushSynced();
951 END_CASE(JSOP_DELELEM)
953 BEGIN_CASE(JSOP_TYPEOF)
954 BEGIN_CASE(JSOP_TYPEOFEXPR)
955 jsop_typeof();
956 END_CASE(JSOP_TYPEOF)
958 BEGIN_CASE(JSOP_VOID)
959 frame.pop();
960 frame.push(UndefinedValue());
961 END_CASE(JSOP_VOID)
963 BEGIN_CASE(JSOP_INCNAME)
964 jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC));
965 break;
966 END_CASE(JSOP_INCNAME)
968 BEGIN_CASE(JSOP_INCGNAME)
969 jsop_gnameinc(op, STRICT_VARIANT(stubs::IncGlobalName), fullAtomIndex(PC));
970 break;
971 END_CASE(JSOP_INCGNAME)
973 BEGIN_CASE(JSOP_INCPROP)
974 jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC));
975 break;
976 END_CASE(JSOP_INCPROP)
978 BEGIN_CASE(JSOP_INCELEM)
979 jsop_eleminc(op, STRICT_VARIANT(stubs::IncElem));
980 END_CASE(JSOP_INCELEM)
982 BEGIN_CASE(JSOP_DECNAME)
983 jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC));
984 break;
985 END_CASE(JSOP_DECNAME)
987 BEGIN_CASE(JSOP_DECGNAME)
988 jsop_gnameinc(op, STRICT_VARIANT(stubs::DecGlobalName), fullAtomIndex(PC));
989 break;
990 END_CASE(JSOP_DECGNAME)
992 BEGIN_CASE(JSOP_DECPROP)
993 jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC));
994 break;
995 END_CASE(JSOP_DECPROP)
997 BEGIN_CASE(JSOP_DECELEM)
998 jsop_eleminc(op, STRICT_VARIANT(stubs::DecElem));
999 END_CASE(JSOP_DECELEM)
1001 BEGIN_CASE(JSOP_NAMEINC)
1002 jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC));
1003 break;
1004 END_CASE(JSOP_NAMEINC)
1006 BEGIN_CASE(JSOP_GNAMEINC)
1007 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameInc), fullAtomIndex(PC));
1008 break;
1009 END_CASE(JSOP_GNAMEINC)
1011 BEGIN_CASE(JSOP_PROPINC)
1012 jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC));
1013 break;
1014 END_CASE(JSOP_PROPINC)
1016 BEGIN_CASE(JSOP_ELEMINC)
1017 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemInc));
1018 END_CASE(JSOP_ELEMINC)
1020 BEGIN_CASE(JSOP_NAMEDEC)
1021 jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC));
1022 break;
1023 END_CASE(JSOP_NAMEDEC)
1025 BEGIN_CASE(JSOP_GNAMEDEC)
1026 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameDec), fullAtomIndex(PC));
1027 break;
1028 END_CASE(JSOP_GNAMEDEC)
1030 BEGIN_CASE(JSOP_PROPDEC)
1031 jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC));
1032 break;
1033 END_CASE(JSOP_PROPDEC)
1035 BEGIN_CASE(JSOP_ELEMDEC)
1036 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemDec));
1037 END_CASE(JSOP_ELEMDEC)
1039 BEGIN_CASE(JSOP_GETTHISPROP)
1040 /* Push thisv onto stack. */
1041 jsop_this();
1042 jsop_getprop(script->getAtom(fullAtomIndex(PC)));
1043 END_CASE(JSOP_GETTHISPROP);
1045 BEGIN_CASE(JSOP_GETARGPROP)
1046 /* Push arg onto stack. */
1047 jsop_getarg(GET_SLOTNO(PC));
1048 jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN])));
1049 END_CASE(JSOP_GETARGPROP)
1051 BEGIN_CASE(JSOP_GETLOCALPROP)
1052 frame.pushLocal(GET_SLOTNO(PC));
1053 jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN])));
1054 END_CASE(JSOP_GETLOCALPROP)
1056 BEGIN_CASE(JSOP_GETPROP)
1057 jsop_getprop(script->getAtom(fullAtomIndex(PC)));
1058 END_CASE(JSOP_GETPROP)
1060 BEGIN_CASE(JSOP_LENGTH)
1061 jsop_length();
1062 END_CASE(JSOP_LENGTH)
1064 BEGIN_CASE(JSOP_GETELEM)
1065 jsop_getelem();
1066 END_CASE(JSOP_GETELEM)
1068 BEGIN_CASE(JSOP_SETELEM)
1069 jsop_setelem();
1070 END_CASE(JSOP_SETELEM);
1072 BEGIN_CASE(JSOP_CALLNAME)
1073 prepareStubCall(Uses(0));
1074 masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
1075 stubCall(stubs::CallName);
1076 frame.pushSynced();
1077 frame.pushSynced();
1078 END_CASE(JSOP_CALLNAME)
1080 BEGIN_CASE(JSOP_CALL)
1081 BEGIN_CASE(JSOP_EVAL)
1082 BEGIN_CASE(JSOP_APPLY)
1084 JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
1085 inlineCallHelper(GET_ARGC(PC), false);
1086 JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
1088 END_CASE(JSOP_CALL)
1090 BEGIN_CASE(JSOP_NAME)
1091 jsop_name(script->getAtom(fullAtomIndex(PC)));
1092 END_CASE(JSOP_NAME)
1094 BEGIN_CASE(JSOP_DOUBLE)
1096 uint32 index = fullAtomIndex(PC);
1097 double d = script->getConst(index).toDouble();
1098 frame.push(Value(DoubleValue(d)));
1100 END_CASE(JSOP_DOUBLE)
1102 BEGIN_CASE(JSOP_STRING)
1104 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1105 JSString *str = ATOM_TO_STRING(atom);
1106 frame.push(Value(StringValue(str)));
1108 END_CASE(JSOP_STRING)
1110 BEGIN_CASE(JSOP_ZERO)
1111 frame.push(Valueify(JSVAL_ZERO));
1112 END_CASE(JSOP_ZERO)
1114 BEGIN_CASE(JSOP_ONE)
1115 frame.push(Valueify(JSVAL_ONE));
1116 END_CASE(JSOP_ONE)
1118 BEGIN_CASE(JSOP_NULL)
1119 frame.push(NullValue());
1120 END_CASE(JSOP_NULL)
1122 BEGIN_CASE(JSOP_THIS)
1123 jsop_this();
1124 END_CASE(JSOP_THIS)
1126 BEGIN_CASE(JSOP_FALSE)
1127 frame.push(Value(BooleanValue(false)));
1128 END_CASE(JSOP_FALSE)
1130 BEGIN_CASE(JSOP_TRUE)
1131 frame.push(Value(BooleanValue(true)));
1132 END_CASE(JSOP_TRUE)
1134 BEGIN_CASE(JSOP_OR)
1135 BEGIN_CASE(JSOP_AND)
1136 jsop_andor(op, PC + GET_JUMP_OFFSET(PC));
1137 END_CASE(JSOP_AND)
1139 BEGIN_CASE(JSOP_TABLESWITCH)
1140 frame.syncAndForgetEverything();
1141 masm.move(ImmPtr(PC), Registers::ArgReg1);
1143 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1144 stubCall(stubs::TableSwitch);
1145 frame.pop();
1147 masm.jump(Registers::ReturnReg);
1148 PC += js_GetVariableBytecodeLength(PC);
1149 break;
1150 END_CASE(JSOP_TABLESWITCH)
1152 BEGIN_CASE(JSOP_LOOKUPSWITCH)
1153 frame.syncAndForgetEverything();
1154 masm.move(ImmPtr(PC), Registers::ArgReg1);
1156 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1157 stubCall(stubs::LookupSwitch);
1158 frame.pop();
1160 masm.jump(Registers::ReturnReg);
1161 PC += js_GetVariableBytecodeLength(PC);
1162 break;
1163 END_CASE(JSOP_LOOKUPSWITCH)
1165 BEGIN_CASE(JSOP_STRICTEQ)
1166 jsop_stricteq(op);
1167 END_CASE(JSOP_STRICTEQ)
1169 BEGIN_CASE(JSOP_STRICTNE)
1170 jsop_stricteq(op);
1171 END_CASE(JSOP_STRICTNE)
1173 BEGIN_CASE(JSOP_ITER)
1174 # if defined JS_CPU_X64
1175 prepareStubCall(Uses(1));
1176 masm.move(Imm32(PC[1]), Registers::ArgReg1);
1177 stubCall(stubs::Iter);
1178 frame.pop();
1179 frame.pushSynced();
1180 #else
1181 iter(PC[1]);
1182 #endif
1183 END_CASE(JSOP_ITER)
1185 BEGIN_CASE(JSOP_MOREITER)
1186 /* This MUST be fused with IFNE or IFNEX. */
1187 iterMore();
1188 break;
1189 END_CASE(JSOP_MOREITER)
1191 BEGIN_CASE(JSOP_ENDITER)
1192 # if defined JS_CPU_X64
1193 prepareStubCall(Uses(1));
1194 stubCall(stubs::EndIter);
1195 frame.pop();
1196 #else
1197 iterEnd();
1198 #endif
1199 END_CASE(JSOP_ENDITER)
1201 BEGIN_CASE(JSOP_POP)
1202 frame.pop();
1203 END_CASE(JSOP_POP)
1205 BEGIN_CASE(JSOP_NEW)
1207 JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
1208 inlineCallHelper(GET_ARGC(PC), true);
1209 JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
1211 END_CASE(JSOP_NEW)
1213 BEGIN_CASE(JSOP_GETARG)
1214 BEGIN_CASE(JSOP_CALLARG)
1216 jsop_getarg(GET_SLOTNO(PC));
1217 if (op == JSOP_CALLARG)
1218 frame.push(UndefinedValue());
1220 END_CASE(JSOP_GETARG)
1222 BEGIN_CASE(JSOP_BINDGNAME)
1223 jsop_bindgname();
1224 END_CASE(JSOP_BINDGNAME)
1226 BEGIN_CASE(JSOP_SETARG)
1228 uint32 slot = GET_SLOTNO(PC);
1229 FrameEntry *top = frame.peek(-1);
1231 bool popped = PC[JSOP_SETARG_LENGTH] == JSOP_POP;
1233 RegisterID reg = frame.allocReg();
1234 Address address = Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot));
1235 frame.storeTo(top, address, popped);
1236 frame.freeReg(reg);
1238 END_CASE(JSOP_SETARG)
1240 BEGIN_CASE(JSOP_GETLOCAL)
1242 uint32 slot = GET_SLOTNO(PC);
1243 frame.pushLocal(slot);
1245 END_CASE(JSOP_GETLOCAL)
1247 BEGIN_CASE(JSOP_SETLOCAL)
1249 jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1250 bool pop = JSOp(*next) == JSOP_POP && !analysis[next].nincoming;
1251 frame.storeLocal(GET_SLOTNO(PC), pop);
1252 if (pop) {
1253 frame.pop();
1254 PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
1255 break;
1258 END_CASE(JSOP_SETLOCAL)
1260 BEGIN_CASE(JSOP_SETLOCALPOP)
1261 frame.storeLocal(GET_SLOTNO(PC), true);
1262 frame.pop();
1263 END_CASE(JSOP_SETLOCALPOP)
1265 BEGIN_CASE(JSOP_UINT16)
1266 frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
1267 END_CASE(JSOP_UINT16)
1269 BEGIN_CASE(JSOP_NEWINIT)
1271 jsint i = GET_UINT16(PC);
1272 uint32 count = GET_UINT16(PC + UINT16_LEN);
1274 JS_ASSERT(i == JSProto_Array || i == JSProto_Object);
1276 prepareStubCall(Uses(0));
1277 masm.move(Imm32(count), Registers::ArgReg1);
1278 if (i == JSProto_Array)
1279 stubCall(stubs::NewInitArray);
1280 else
1281 stubCall(stubs::NewInitObject);
1282 frame.takeReg(Registers::ReturnReg);
1283 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1285 END_CASE(JSOP_NEWINIT)
1287 BEGIN_CASE(JSOP_ENDINIT)
1288 END_CASE(JSOP_ENDINIT)
1290 BEGIN_CASE(JSOP_INITPROP)
1292 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1293 prepareStubCall(Uses(2));
1294 masm.move(ImmPtr(atom), Registers::ArgReg1);
1295 stubCall(stubs::InitProp);
1296 frame.pop();
1298 END_CASE(JSOP_INITPROP)
1300 BEGIN_CASE(JSOP_INITELEM)
1302 JSOp next = JSOp(PC[JSOP_INITELEM_LENGTH]);
1303 prepareStubCall(Uses(3));
1304 masm.move(Imm32(next == JSOP_ENDINIT ? 1 : 0), Registers::ArgReg1);
1305 stubCall(stubs::InitElem);
1306 frame.popn(2);
1308 END_CASE(JSOP_INITELEM)
1310 BEGIN_CASE(JSOP_INCARG)
1311 BEGIN_CASE(JSOP_DECARG)
1312 BEGIN_CASE(JSOP_ARGINC)
1313 BEGIN_CASE(JSOP_ARGDEC)
1315 jsbytecode *next = &PC[JSOP_ARGINC_LENGTH];
1316 bool popped = false;
1317 if (JSOp(*next) == JSOP_POP && !analysis[next].nincoming)
1318 popped = true;
1319 jsop_arginc(op, GET_SLOTNO(PC), popped);
1320 PC += JSOP_ARGINC_LENGTH;
1321 if (popped)
1322 PC += JSOP_POP_LENGTH;
1323 break;
1325 END_CASE(JSOP_ARGDEC)
1327 BEGIN_CASE(JSOP_FORNAME)
1328 prepareStubCall(Uses(1));
1329 masm.move(ImmPtr(script->getAtom(fullAtomIndex(PC))), Registers::ArgReg1);
1330 stubCall(STRICT_VARIANT(stubs::ForName));
1331 END_CASE(JSOP_FORNAME)
1333 BEGIN_CASE(JSOP_INCLOCAL)
1334 BEGIN_CASE(JSOP_DECLOCAL)
1335 BEGIN_CASE(JSOP_LOCALINC)
1336 BEGIN_CASE(JSOP_LOCALDEC)
1338 jsbytecode *next = &PC[JSOP_LOCALINC_LENGTH];
1339 bool popped = false;
1340 if (JSOp(*next) == JSOP_POP && !analysis[next].nincoming)
1341 popped = true;
1342 /* These manually advance the PC. */
1343 jsop_localinc(op, GET_SLOTNO(PC), popped);
1344 PC += JSOP_LOCALINC_LENGTH;
1345 if (popped)
1346 PC += JSOP_POP_LENGTH;
1347 break;
1349 END_CASE(JSOP_LOCALDEC)
1351 BEGIN_CASE(JSOP_BINDNAME)
1352 jsop_bindname(fullAtomIndex(PC), true);
1353 END_CASE(JSOP_BINDNAME)
1355 BEGIN_CASE(JSOP_SETPROP)
1356 jsop_setprop(script->getAtom(fullAtomIndex(PC)), true);
1357 END_CASE(JSOP_SETPROP)
1359 BEGIN_CASE(JSOP_SETNAME)
1360 BEGIN_CASE(JSOP_SETMETHOD)
1361 jsop_setprop(script->getAtom(fullAtomIndex(PC)), true);
1362 END_CASE(JSOP_SETNAME)
1364 BEGIN_CASE(JSOP_THROW)
1365 prepareStubCall(Uses(1));
1366 stubCall(stubs::Throw);
1367 frame.pop();
1368 END_CASE(JSOP_THROW)
1370 BEGIN_CASE(JSOP_IN)
1371 prepareStubCall(Uses(2));
1372 stubCall(stubs::In);
1373 frame.popn(2);
1374 frame.takeReg(Registers::ReturnReg);
1375 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
1376 END_CASE(JSOP_IN)
1378 BEGIN_CASE(JSOP_INSTANCEOF)
1379 jsop_instanceof();
1380 END_CASE(JSOP_INSTANCEOF)
1382 BEGIN_CASE(JSOP_EXCEPTION)
1384 JS_STATIC_ASSERT(sizeof(cx->throwing) == 4);
1385 RegisterID reg = frame.allocReg();
1386 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
1387 masm.store32(Imm32(JS_FALSE), Address(reg, offsetof(JSContext, throwing)));
1389 Address excn(reg, offsetof(JSContext, exception));
1390 frame.freeReg(reg);
1391 frame.push(excn);
1393 END_CASE(JSOP_EXCEPTION)
1395 BEGIN_CASE(JSOP_LINENO)
1396 END_CASE(JSOP_LINENO)
1398 BEGIN_CASE(JSOP_BLOCKCHAIN)
1399 END_CASE(JSOP_BLOCKCHAIN)
1401 BEGIN_CASE(JSOP_NULLBLOCKCHAIN)
1402 END_CASE(JSOP_NULLBLOCKCHAIN)
1404 BEGIN_CASE(JSOP_CONDSWITCH)
1405 /* No-op for the decompiler. */
1406 END_CASE(JSOP_CONDSWITCH)
1408 BEGIN_CASE(JSOP_DEFFUN)
1410 uint32 index = fullAtomIndex(PC);
1411 JSFunction *inner = script->getFunction(index);
1413 if (fun) {
1414 JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL);
1415 if (localKind != JSLOCAL_NONE)
1416 frame.syncAndForgetEverything();
1419 prepareStubCall(Uses(0));
1420 masm.move(ImmPtr(inner), Registers::ArgReg1);
1421 stubCall(STRICT_VARIANT(stubs::DefFun));
1423 END_CASE(JSOP_DEFFUN)
1425 BEGIN_CASE(JSOP_DEFVAR)
1427 uint32 index = fullAtomIndex(PC);
1428 JSAtom *atom = script->getAtom(index);
1430 prepareStubCall(Uses(0));
1431 masm.move(ImmPtr(atom), Registers::ArgReg1);
1432 stubCall(stubs::DefVar);
1434 END_CASE(JSOP_DEFVAR)
1436 BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
1438 uint32 slot = GET_SLOTNO(PC);
1439 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1440 prepareStubCall(Uses(frame.frameDepth()));
1441 masm.move(ImmPtr(fun), Registers::ArgReg1);
1442 stubCall(stubs::DefLocalFun_FC);
1443 frame.takeReg(Registers::ReturnReg);
1444 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1445 frame.storeLocal(slot, true);
1446 frame.pop();
1448 END_CASE(JSOP_DEFLOCALFUN_FC)
1450 BEGIN_CASE(JSOP_LAMBDA)
1452 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1454 JSObjStubFun stub = stubs::Lambda;
1455 uint32 uses = 0;
1457 jsbytecode *pc2 = js_AdvanceOverBlockchain(PC + JSOP_LAMBDA_LENGTH);
1458 JSOp next = JSOp(*pc2);
1460 if (next == JSOP_INITMETHOD) {
1461 stub = stubs::LambdaForInit;
1462 } else if (next == JSOP_SETMETHOD) {
1463 stub = stubs::LambdaForSet;
1464 uses = 1;
1465 } else if (fun->joinable()) {
1466 if (next == JSOP_CALL) {
1467 stub = stubs::LambdaJoinableForCall;
1468 uses = frame.frameDepth();
1469 } else if (next == JSOP_NULL) {
1470 stub = stubs::LambdaJoinableForNull;
1474 prepareStubCall(Uses(uses));
1475 masm.move(ImmPtr(fun), Registers::ArgReg1);
1477 if (stub == stubs::Lambda) {
1478 stubCall(stub);
1479 } else {
1480 jsbytecode *savedPC = PC;
1481 PC = pc2;
1482 stubCall(stub);
1483 PC = savedPC;
1486 frame.takeReg(Registers::ReturnReg);
1487 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1489 END_CASE(JSOP_LAMBDA)
1491 BEGIN_CASE(JSOP_TRY)
1492 frame.syncAndForgetEverything();
1493 END_CASE(JSOP_TRY)
1495 BEGIN_CASE(JSOP_GETFCSLOT)
1496 BEGIN_CASE(JSOP_CALLFCSLOT)
1498 uintN index = GET_UINT16(PC);
1499 // JSObject *obj = &fp->argv[-2].toObject();
1500 RegisterID reg = frame.allocReg();
1501 masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), reg);
1502 // obj->getFlatClosureUpvars()
1503 masm.loadPtr(Address(reg, offsetof(JSObject, slots)), reg);
1504 Address upvarAddress(reg, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS * sizeof(Value));
1505 masm.loadPrivate(upvarAddress, reg);
1506 // push ((Value *) reg)[index]
1507 frame.freeReg(reg);
1508 frame.push(Address(reg, index * sizeof(Value)));
1509 if (op == JSOP_CALLFCSLOT)
1510 frame.push(UndefinedValue());
1512 END_CASE(JSOP_CALLFCSLOT)
1514 BEGIN_CASE(JSOP_ARGSUB)
1515 prepareStubCall(Uses(0));
1516 masm.move(Imm32(GET_ARGNO(PC)), Registers::ArgReg1);
1517 stubCall(stubs::ArgSub);
1518 frame.pushSynced();
1519 END_CASE(JSOP_ARGSUB)
1521 BEGIN_CASE(JSOP_ARGCNT)
1522 prepareStubCall(Uses(0));
1523 stubCall(stubs::ArgCnt);
1524 frame.pushSynced();
1525 END_CASE(JSOP_ARGCNT)
1527 BEGIN_CASE(JSOP_DEFLOCALFUN)
1529 uint32 slot = GET_SLOTNO(PC);
1530 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1531 prepareStubCall(Uses(0));
1532 masm.move(ImmPtr(fun), Registers::ArgReg1);
1533 stubCall(stubs::DefLocalFun);
1534 frame.takeReg(Registers::ReturnReg);
1535 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1536 frame.storeLocal(slot, true);
1537 frame.pop();
1539 END_CASE(JSOP_DEFLOCALFUN)
1541 BEGIN_CASE(JSOP_RETRVAL)
1542 emitReturn(NULL);
1543 END_CASE(JSOP_RETRVAL)
1545 BEGIN_CASE(JSOP_GETGNAME)
1546 BEGIN_CASE(JSOP_CALLGNAME)
1547 jsop_getgname(fullAtomIndex(PC));
1548 if (op == JSOP_CALLGNAME)
1549 frame.push(UndefinedValue());
1550 END_CASE(JSOP_GETGNAME)
1552 BEGIN_CASE(JSOP_SETGNAME)
1553 jsop_setgname(fullAtomIndex(PC));
1554 END_CASE(JSOP_SETGNAME)
1556 BEGIN_CASE(JSOP_REGEXP)
1558 JSObject *regex = script->getRegExp(fullAtomIndex(PC));
1559 prepareStubCall(Uses(0));
1560 masm.move(ImmPtr(regex), Registers::ArgReg1);
1561 stubCall(stubs::RegExp);
1562 frame.takeReg(Registers::ReturnReg);
1563 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1565 END_CASE(JSOP_REGEXP)
1567 BEGIN_CASE(JSOP_CALLPROP)
1568 if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
1569 return Compile_Error;
1570 END_CASE(JSOP_CALLPROP)
1572 BEGIN_CASE(JSOP_GETUPVAR)
1573 BEGIN_CASE(JSOP_CALLUPVAR)
1575 uint32 index = GET_UINT16(PC);
1576 JSUpvarArray *uva = script->upvars();
1577 JS_ASSERT(index < uva->length);
1579 prepareStubCall(Uses(0));
1580 masm.move(Imm32(uva->vector[index].asInteger()), Registers::ArgReg1);
1581 stubCall(stubs::GetUpvar);
1582 frame.pushSynced();
1583 if (op == JSOP_CALLUPVAR)
1584 frame.push(UndefinedValue());
1586 END_CASE(JSOP_CALLUPVAR)
1588 BEGIN_CASE(JSOP_UINT24)
1589 frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
1590 END_CASE(JSOP_UINT24)
1592 BEGIN_CASE(JSOP_CALLELEM)
1593 prepareStubCall(Uses(2));
1594 stubCall(stubs::CallElem);
1595 frame.popn(2);
1596 frame.pushSynced();
1597 frame.pushSynced();
1598 END_CASE(JSOP_CALLELEM)
1600 BEGIN_CASE(JSOP_STOP)
1601 /* Safe point! */
1602 emitReturn(NULL);
1603 goto done;
1604 END_CASE(JSOP_STOP)
1606 BEGIN_CASE(JSOP_GETXPROP)
1607 jsop_xname(script->getAtom(fullAtomIndex(PC)));
1608 END_CASE(JSOP_GETXPROP)
1610 BEGIN_CASE(JSOP_ENTERBLOCK)
1611 enterBlock(script->getObject(fullAtomIndex(PC)));
1612 END_CASE(JSOP_ENTERBLOCK);
1614 BEGIN_CASE(JSOP_LEAVEBLOCK)
1615 leaveBlock();
1616 END_CASE(JSOP_LEAVEBLOCK)
1618 BEGIN_CASE(JSOP_CALLLOCAL)
1619 frame.pushLocal(GET_SLOTNO(PC));
1620 frame.push(UndefinedValue());
1621 END_CASE(JSOP_CALLLOCAL)
1623 BEGIN_CASE(JSOP_INT8)
1624 frame.push(Value(Int32Value(GET_INT8(PC))));
1625 END_CASE(JSOP_INT8)
1627 BEGIN_CASE(JSOP_INT32)
1628 frame.push(Value(Int32Value(GET_INT32(PC))));
1629 END_CASE(JSOP_INT32)
1631 BEGIN_CASE(JSOP_NEWARRAY)
1633 uint32 len = GET_UINT16(PC);
1634 prepareStubCall(Uses(len));
1635 masm.move(Imm32(len), Registers::ArgReg1);
1636 stubCall(stubs::NewArray);
1637 frame.popn(len);
1638 frame.takeReg(Registers::ReturnReg);
1639 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1641 END_CASE(JSOP_NEWARRAY)
1643 BEGIN_CASE(JSOP_HOLE)
1644 frame.push(MagicValue(JS_ARRAY_HOLE));
1645 END_CASE(JSOP_HOLE)
1647 BEGIN_CASE(JSOP_LAMBDA_FC)
1649 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1650 prepareStubCall(Uses(frame.frameDepth()));
1651 masm.move(ImmPtr(fun), Registers::ArgReg1);
1652 stubCall(stubs::FlatLambda);
1653 frame.takeReg(Registers::ReturnReg);
1654 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1656 END_CASE(JSOP_LAMBDA_FC)
1658 BEGIN_CASE(JSOP_TRACE)
1660 if (analysis[PC].nincoming > 0)
1661 interruptCheckHelper();
1663 END_CASE(JSOP_TRACE)
1665 BEGIN_CASE(JSOP_DEBUGGER)
1666 prepareStubCall(Uses(0));
1667 masm.move(ImmPtr(PC), Registers::ArgReg1);
1668 stubCall(stubs::Debugger);
1669 END_CASE(JSOP_DEBUGGER)
1671 BEGIN_CASE(JSOP_INITMETHOD)
1673 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1674 prepareStubCall(Uses(2));
1675 masm.move(ImmPtr(atom), Registers::ArgReg1);
1676 stubCall(stubs::InitMethod);
1677 frame.pop();
1679 END_CASE(JSOP_INITMETHOD)
1681 BEGIN_CASE(JSOP_UNBRAND)
1682 jsop_unbrand();
1683 END_CASE(JSOP_UNBRAND)
1685 BEGIN_CASE(JSOP_UNBRANDTHIS)
1686 jsop_this();
1687 jsop_unbrand();
1688 frame.pop();
1689 END_CASE(JSOP_UNBRANDTHIS)
1691 BEGIN_CASE(JSOP_GETGLOBAL)
1692 BEGIN_CASE(JSOP_CALLGLOBAL)
1693 jsop_getglobal(GET_SLOTNO(PC));
1694 if (op == JSOP_CALLGLOBAL)
1695 frame.push(UndefinedValue());
1696 END_CASE(JSOP_GETGLOBAL)
1698 BEGIN_CASE(JSOP_SETGLOBAL)
1699 jsop_setglobal(GET_SLOTNO(PC));
1700 END_CASE(JSOP_SETGLOBAL)
1702 BEGIN_CASE(JSOP_INCGLOBAL)
1703 BEGIN_CASE(JSOP_DECGLOBAL)
1704 BEGIN_CASE(JSOP_GLOBALINC)
1705 BEGIN_CASE(JSOP_GLOBALDEC)
1706 /* Advances PC automatically. */
1707 jsop_globalinc(op, GET_SLOTNO(PC));
1708 break;
1709 END_CASE(JSOP_GLOBALINC)
1711 default:
1712 /* Sorry, this opcode isn't implemented yet. */
1713 #ifdef JS_METHODJIT_SPEW
1714 JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
1715 script->filename, js_PCToLineNumber(cx, script, PC));
1716 #endif
1717 return Compile_Abort;
1720 /**********************
1721 * END COMPILER OPS *
1722 **********************/
1724 #ifdef DEBUG
1725 frame.assertValidRegisterState();
1726 #endif
1729 done:
1730 return Compile_Okay;
1733 #undef END_CASE
1734 #undef BEGIN_CASE
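/*
 * Return the native-code label recorded in jumpMap for |pc|. Only valid for
 * targets generateMethod has already visited and marked.
 */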
1736 JSC::MacroAssembler::Label
1737 mjit::Compiler::labelOf(jsbytecode *pc)
1739 uint32 offs = uint32(pc - script->code);
1740 JS_ASSERT(jumpMap[offs].isValid());
1741 return jumpMap[offs];
1744 uint32
1745 mjit::Compiler::fullAtomIndex(jsbytecode *pc)
1747 return GET_SLOTNO(pc);
1749 /* If we ever enable INDEXBASE garbage, use this below. */
1750 #if 0
1751 return GET_SLOTNO(pc) + (atoms - script->atomMap.vector);
1752 #endif
1755 bool
1756 mjit::Compiler::knownJump(jsbytecode *pc)
1758 return pc < PC;
1761 void *
1762 mjit::Compiler::findCallSite(const CallSite &callSite)
1764 JS_ASSERT(callSite.pcOffset < script->length);
1766 JITScript *jit = script->getJIT(fp->isConstructing());
1767 uint8* ilPath = (uint8 *)jit->code.m_code.executableAddress();
1768 uint8* oolPath = ilPath + masm.size();
1770 for (uint32 i = 0; i < callSites.length(); i++) {
1771 if (callSites[i].pc == script->code + callSite.pcOffset &&
1772 callSites[i].id == callSite.id) {
1773 if (callSites[i].stub) {
1774 return oolPath + stubcc.masm.distanceOf(callSites[i].location);
1776 return ilPath + masm.distanceOf(callSites[i].location);
1780 /* We have no idea where to patch up to. */
1781 JS_NOT_REACHED("Call site vanished.");
1782 return NULL;
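/*
 * Link a jump to a bytecode target: backward jumps are linked immediately,
 * forward jumps are queued as BranchPatch entries and resolved at the start
 * of finishThisUp().
 */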
1785 void
1786 mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
1788 JS_ASSERT(pc >= script->code && uint32(pc - script->code) < script->length);
1790 /* :TODO: OOM failure possible here. */
1792 if (pc < PC)
1793 j.linkTo(jumpMap[uint32(pc - script->code)], &masm);
1794 else
1795 branchPatches.append(BranchPatch(j, pc));
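/*
 * Store the value at the top of the stack into the global object's slot for
 * |index|. The flag passed to storeTo records whether the result is
 * immediately popped (the next op is JSOP_POP).
 */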
1798 void
1799 mjit::Compiler::jsop_setglobal(uint32 index)
1801 JS_ASSERT(globalObj);
1802 uint32 slot = script->getGlobalSlot(index);
1804 FrameEntry *fe = frame.peek(-1);
1805 bool popped = PC[JSOP_SETGLOBAL_LENGTH] == JSOP_POP;
1807 RegisterID reg = frame.allocReg();
1808 Address address = masm.objSlotRef(globalObj, reg, slot);
1809 frame.storeTo(fe, address, popped);
1810 frame.freeReg(reg);
1813 void
1814 mjit::Compiler::jsop_getglobal(uint32 index)
1816 JS_ASSERT(globalObj);
1817 uint32 slot = script->getGlobalSlot(index);
1819 RegisterID reg = frame.allocReg();
1820 Address address = masm.objSlotRef(globalObj, reg, slot);
1821 frame.freeReg(reg);
1822 frame.push(address);
1825 void
1826 mjit::Compiler::emitFinalReturn(Assembler &masm)
1828 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
1829 masm.jump(Registers::ReturnReg);
1832 // Emits code to load a return value of the frame into the scripted-ABI
1833 // type & data register pair. If the return value is in fp->rval, then |fe|
1834 // is NULL. Otherwise, |fe| contains the return value.
1836 // If reading from fp->rval, |undefined| is loaded optimistically, before
1837 // checking if fp->rval is set in the frame flags and loading that instead.
1839 // Otherwise, if |masm| is the inline path, it is loaded as efficiently as
1840 // the FrameState can manage. If |masm| is the OOL path, the value is simply
1841 // loaded from its slot in the frame, since the caller has guaranteed it's
1842 // been synced.
1844 void
1845 mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
1847 RegisterID typeReg = JSReturnReg_Type;
1848 RegisterID dataReg = JSReturnReg_Data;
1850 if (fe) {
1851 // If using the OOL assembler, the caller signifies that the |fe| is
1852 // synced, but not to rely on its register state.
1853 if (masm != &this->masm) {
1854 if (fe->isConstant()) {
1855 stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
1856 } else {
1857 Address rval(frame.addressOf(fe));
1858 if (fe->isTypeKnown()) {
1859 stubcc.masm.loadPayload(rval, dataReg);
1860 stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
1861 } else {
1862 stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
1865 } else {
1866 frame.loadTo(fe, typeReg, dataReg, Registers::ReturnReg);
1868 } else {
1869 // Load a return value from POPV or SETRVAL into the return registers,
1870 // otherwise return undefined.
1871 masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
1872 if (analysis.usesReturnValue()) {
1873 Jump rvalClear = masm->branchTest32(Assembler::Zero,
1874 FrameFlagsAddress(),
1875 Imm32(JSFRAME_HAS_RVAL));
1876 Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
1877 masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
1878 rvalClear.linkTo(masm->label(), masm);
1883 // This ensures that constructor return values are an object. If a non-object
1884 // is returned, either explicitly or implicitly, the newly created object is
1885 // loaded out of the frame. Otherwise, the explicitly returned object is kept.
1887 void
1888 mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
1890 JS_ASSERT(isConstructing);
1892 Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));
1894 // Easy cases - no return value, or known primitive, so just return thisv.
1895 if (!fe || (fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT)) {
1896 masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
1897 return;
1900 // If the type is known to be an object, just load the return value as normal.
1901 if (fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
1902 loadReturnValue(masm, fe);
1903 return;
1906 // There's a return value, and its type is unknown. Test the type and load
1907 // |thisv| if necessary.
1908 loadReturnValue(masm, fe);
1909 Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
1910 masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
1911 j.linkTo(masm->label(), masm);
1914 // Loads the return value into the scripted ABI register pair, such that JS
1915 // semantics in constructors are preserved.
1917 void
1918 mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
1920 if (isConstructing)
1921 fixPrimitiveReturn(masm, fe);
1922 else
1923 loadReturnValue(masm, fe);
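/*
 * Emit the return path: notify the debugger via LeaveScript if needed, put
 * the activation objects for function frames, load the return value into the
 * scripted-ABI registers and jump through the frame's saved ncode.
 */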
1926 void
1927 mjit::Compiler::emitReturn(FrameEntry *fe)
1929 JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);
1931 /* Only the top of the stack can be returned. */
1932 JS_ASSERT_IF(fe, fe == frame.peek(-1));
1934 if (debugMode) {
1935 prepareStubCall(Uses(0));
1936 stubCall(stubs::LeaveScript);
1940 * If there's a function object, deal with the fact that it can escape.
1941 * Note that after we've placed the call object, all tracked state can
1942 * be thrown away. This will happen anyway because the next live opcode
1943 * (if any) must have an incoming edge.
1945 * However, it's an optimization to throw it away early - the tracker
1946 * won't be spilled on further exits or join points.
1948 if (fun) {
1949 if (fun->isHeavyweight()) {
1950 /* There will always be a call object. */
1951 prepareStubCall(Uses(fe ? 1 : 0));
1952 stubCall(stubs::PutActivationObjects);
1954 if (fe) {
1955 emitReturnValue(&masm, fe);
1956 emitFinalReturn(masm);
1957 frame.discardFrame();
1958 return;
1960 } else {
1961 /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
1962 Jump putObjs = masm.branchTest32(Assembler::NonZero,
1963 Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
1964 Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
1965 stubcc.linkExit(putObjs, Uses(frame.frameDepth()));
1967 stubcc.leave();
1968 stubcc.call(stubs::PutActivationObjects);
1970 emitReturnValue(&stubcc.masm, fe);
1971 emitFinalReturn(stubcc.masm);
1975 emitReturnValue(&masm, fe);
1976 emitFinalReturn(masm);
1977 frame.discardFrame();
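/*
 * prepareStubCall() syncs and kills the temporary registers before calling
 * out to a stub; stubCall() then emits the call itself, passing the current
 * PC and the total frame depth (fixed slots plus stack).
 */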
1980 void
1981 mjit::Compiler::prepareStubCall(Uses uses)
1983 JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
1984 frame.syncAndKill(Registers(Registers::TempRegs), uses);
1985 JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
1988 JSC::MacroAssembler::Call
1989 mjit::Compiler::stubCall(void *ptr)
1991 JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
1992 Call cl = masm.stubCall(ptr, PC, frame.stackDepth() + script->nfixed);
1993 JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
1994 return cl;
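/*
 * Poll the runtime's interrupt counter inline (or the thread's interrupt
 * flags in non-threadsafe builds) and call stubs::Interrupt out-of-line when
 * it is set. Used at loop heads (JSOP_TRACE) and before calls.
 */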
1997 void
1998 mjit::Compiler::interruptCheckHelper()
2000 RegisterID reg = frame.allocReg();
2003 * Bake in and test the address of the interrupt counter for the runtime.
2004 * This is faster than doing two additional loads for the context's
2005 * thread data, but will cause this thread to run slower if there are
2006 * pending interrupts on some other thread. For non-JS_THREADSAFE builds
2007 * we can skip this, as there is only one flag to poll.
2009 #ifdef JS_THREADSAFE
2010 void *interrupt = (void*) &cx->runtime->interruptCounter;
2011 #else
2012 void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
2013 #endif
2015 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
2016 Jump jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
2017 #else
2018 /* Handle processors that can't load from absolute addresses. */
2019 masm.move(ImmPtr(interrupt), reg);
2020 Jump jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
2021 #endif
2023 stubcc.linkExitDirect(jump, stubcc.masm.label());
2025 #ifdef JS_THREADSAFE
2027 * Do a slightly slower check for an interrupt on this thread.
2028 * We don't want this thread to slow down excessively if the pending
2029 * interrupt is on another thread.
2031 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
2032 stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
2033 Address flag(reg, offsetof(JSThread, data.interruptFlags));
2034 Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
2035 #endif
2037 frame.freeReg(reg);
2039 frame.sync(stubcc.masm, Uses(0));
2040 stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
2041 stubcc.call(stubs::Interrupt);
2042 ADD_CALLSITE(true);
2043 stubcc.rejoin(Changes(0));
2045 #ifdef JS_THREADSAFE
2046 stubcc.linkRejoin(noInterrupt);
2047 #endif
2050 void
2051 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
2053 CallPatchInfo callPatch;
2054 callPatch.hasSlowNcode = false;
2056 RegisterID r0 = Registers::ReturnReg;
2057 VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
2059 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
2060 prepareStubCall(Uses(argc + 2));
2061 masm.move(Imm32(argc), Registers::ArgReg1);
2062 stubCall(stub);
2063 ADD_CALLSITE(false);
2065 Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
2067 masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2068 callPatch.fastNcodePatch =
2069 masm.storePtrWithPatch(ImmPtr(NULL),
2070 Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2072 masm.jump(r0);
2074 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
2075 masm.callLabel = masm.label();
2076 #endif
2077 ADD_CALLSITE(false);
2079 callPatch.joinPoint = masm.label();
2080 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
2082 frame.popn(argc + 2);
2083 frame.takeReg(JSReturnReg_Type);
2084 frame.takeReg(JSReturnReg_Data);
2085 frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2087 stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
2088 stubcc.rejoin(Changes(0));
2089 callPatches.append(callPatch);
2092 /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
2093 void
2094 mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
2096 /* Check for interrupts on function call */
2097 interruptCheckHelper();
2099 // |thisv| does not need to be synced for constructing.
2100 if (callingNew)
2101 frame.discardFe(frame.peek(-int(argc + 1)));
2103 FrameEntry *fe = frame.peek(-int(argc + 2));
2105 /* Currently, we don't support constant functions. */
2106 if (fe->isConstant() || fe->isNotType(JSVAL_TYPE_OBJECT) || debugMode) {
2107 emitUncachedCall(argc, callingNew);
2108 return;
2111 #ifdef JS_MONOIC
2112 CallGenInfo callIC(argc);
2113 CallPatchInfo callPatch;
2116 * Save constant |this| to optimize thisv stores for common call cases
2117 * like CALL[LOCAL, GLOBAL, ARG] which push NULL.
2119 callIC.pc = PC;
2120 callIC.frameDepth = frame.frameDepth();
2122 /* Grab type and data registers up-front. */
2123 MaybeRegisterID typeReg;
2124 frame.ensureFullRegs(fe);
2126 if (!fe->isTypeKnown()) {
2127 typeReg = frame.tempRegForType(fe);
2128 frame.pinReg(typeReg.reg());
2130 RegisterID dataReg = frame.tempRegForData(fe);
2131 frame.pinReg(dataReg);
2134 * We rely on the fact that syncAndKill() is not allowed to touch the
2135 * registers we've preserved.
2137 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
2138 frame.unpinKilledReg(dataReg);
2139 if (typeReg.isSet())
2140 frame.unpinKilledReg(typeReg.reg());
2142 Registers tempRegs;
2144 /* Test the type if necessary. Failing this always takes a really slow path. */
2145 MaybeJump notObjectJump;
2146 if (typeReg.isSet())
2147 notObjectJump = masm.testObject(Assembler::NotEqual, typeReg.reg());
2149 tempRegs.takeReg(dataReg);
2150 RegisterID t0 = tempRegs.takeAnyReg();
2151 RegisterID t1 = tempRegs.takeAnyReg();
2154 * Guard on the callee identity. This misses on the first run. If the
2155 * callee is scripted, compiled/compilable, and argc == nargs, then this
2156 * guard is patched, and the compiled code address is baked in.
2158 Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, dataReg, callIC.funGuard);
2159 callIC.funJump = j;
2161 Jump rejoin1, rejoin2;
2163 stubcc.linkExitDirect(j, stubcc.masm.label());
2164 callIC.slowPathStart = stubcc.masm.label();
2167 * Test if the callee is even a function. If this doesn't match, we
2168 * take a _really_ slow path later.
2170 Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, dataReg);
2172 /* Test if the function is scripted. */
2173 stubcc.masm.loadFunctionPrivate(dataReg, t0);
2174 stubcc.masm.load16(Address(t0, offsetof(JSFunction, flags)), t1);
2175 stubcc.masm.and32(Imm32(JSFUN_KINDMASK), t1);
2176 Jump isNative = stubcc.masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
2179 * No-op jump that gets re-patched. This is so ArgReg1 won't be
2180 * clobbered, with the added bonus that the generated stub doesn't
2181 * need to pop its own return address.
2183 Jump toPatch = stubcc.masm.jump();
2184 toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
2185 callIC.oolJump = toPatch;
2187 /* At this point the function is definitely scripted. Call the link routine. */
2188 callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2189 callIC.oolCall = stubcc.call(callingNew ? ic::New : ic::Call);
2191 callIC.funObjReg = dataReg;
2192 callIC.funPtrReg = t0;
2195 * The IC call either returns NULL, meaning call completed, or a
2196 * function pointer to jump to. Caveat: Must restore JSFrameReg
2197 * because a new frame has been pushed.
2199 * This function only executes once. If hit, it will generate a stub
2200 * to compile and execute calls on demand.
2202 rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
2203 Registers::ReturnReg);
2204 stubcc.masm.move(Imm32(argc), JSParamReg_Argc);
2205 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2206 callPatch.hasSlowNcode = true;
2207 callPatch.slowNcodePatch =
2208 stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
2209 Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2210 stubcc.masm.jump(Registers::ReturnReg);
2212 /* Catch-all case, for natives this will turn into a MIC. */
2213 if (notObjectJump.isSet())
2214 stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
2215 notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
2216 isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
2218 callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2219 stubcc.call(callingNew ? ic::NativeNew : ic::NativeCall);
2221 rejoin2 = stubcc.masm.jump();
2225 * If the call site goes to a closure over the same function, it will
2226 * generate an out-of-line stub that joins back here.
2228 callIC.hotPathLabel = masm.label();
2230 uint32 flags = 0;
2231 if (callingNew)
2232 flags |= JSFRAME_CONSTRUCTING;
2234 InlineFrameAssembler inlFrame(masm, callIC, flags);
2235 callPatch.fastNcodePatch = inlFrame.assemble(NULL);
2237 callIC.hotJump = masm.jump();
2238 callIC.joinPoint = callPatch.joinPoint = masm.label();
2239 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
2241 frame.popn(argc + 2);
2242 frame.takeReg(JSReturnReg_Type);
2243 frame.takeReg(JSReturnReg_Data);
2244 frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2246 callIC.slowJoinPoint = stubcc.masm.label();
2247 rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2248 rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2249 stubcc.rejoin(Changes(0));
2251 callICs.append(callIC);
2252 callPatches.append(callPatch);
2253 #else
2254 emitUncachedCall(argc, callingNew);
2255 #endif
2259 * This function must be called immediately after any instruction which could
2260 * cause a new JSStackFrame to be pushed and could lead to a new debug trap
2261 * being set. This includes any API callbacks and any scripted or native call.
2263 void
2264 mjit::Compiler::addCallSite(uint32 id, bool stub)
2266 InternalCallSite site;
2267 site.stub = stub;
2268 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
2269 site.location = stub ? stubcc.masm.callLabel : masm.callLabel;
2270 #else
2271 site.location = stub ? stubcc.masm.label() : masm.label();
2272 #endif
2274 site.pc = PC;
2275 site.id = id;
2276 callSites.append(site);
2279 void
2280 mjit::Compiler::restoreFrameRegs(Assembler &masm)
2282 masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
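/* Evaluate a comparison of two known primitive values at compile time so the result can be folded into the generated code. */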
2285 bool
2286 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
2288 JS_ASSERT(lhs.isPrimitive());
2289 JS_ASSERT(rhs.isPrimitive());
2291 if (lhs.isString() && rhs.isString()) {
2292 int cmp = js_CompareStrings(lhs.toString(), rhs.toString());
2293 switch (op) {
2294 case JSOP_LT:
2295 return cmp < 0;
2296 case JSOP_LE:
2297 return cmp <= 0;
2298 case JSOP_GT:
2299 return cmp > 0;
2300 case JSOP_GE:
2301 return cmp >= 0;
2302 case JSOP_EQ:
2303 return cmp == 0;
2304 case JSOP_NE:
2305 return cmp != 0;
2306 default:
2307 JS_NOT_REACHED("NYI");
2309 } else {
2310 double ld, rd;
2312 /* These should be infallible w/ primitives. */
2313 ValueToNumber(cx, lhs, &ld);
2314 ValueToNumber(cx, rhs, &rd);
2315         switch (op) {
2316 case JSOP_LT:
2317 return ld < rd;
2318 case JSOP_LE:
2319 return ld <= rd;
2320 case JSOP_GT:
2321 return ld > rd;
2322 case JSOP_GE:
2323 return ld >= rd;
2324 case JSOP_EQ: /* fall through */
2325 case JSOP_NE:
2326 /* Special case null/undefined/void comparisons. */
2327 if (lhs.isNullOrUndefined()) {
2328 if (rhs.isNullOrUndefined())
2329 return op == JSOP_EQ;
2330 return op == JSOP_NE;
2332 if (rhs.isNullOrUndefined())
2333 return op == JSOP_NE;
2335 /* Normal return. */
2336 return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
2337 default:
2338 JS_NOT_REACHED("NYI");
2342 JS_NOT_REACHED("NYI");
2343 return false;
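/* Emit a comparison through a stub call. When fused with IFEQ/IFNE, branch directly on the stub's boolean result via jumpAndTrace. */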
2346 void
2347 mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
2349 prepareStubCall(Uses(2));
2350 stubCall(stub);
2351 frame.pop();
2352 frame.pop();
2354 if (!target) {
2355 frame.takeReg(Registers::ReturnReg);
2356 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
2357 } else {
2358 JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
2360 frame.syncAndForgetEverything();
2361 Assembler::Condition cond = (fused == JSOP_IFEQ)
2362 ? Assembler::Zero
2363 : Assembler::NonZero;
2364 Jump j = masm.branchTest32(cond, Registers::ReturnReg,
2365 Registers::ReturnReg);
2366 jumpAndTrace(j, target);
2370 void
2371 mjit::Compiler::jsop_setprop_slow(JSAtom *atom, bool usePropCache)
2373 prepareStubCall(Uses(2));
2374 masm.move(ImmPtr(atom), Registers::ArgReg1);
2375 if (usePropCache)
2376 stubCall(STRICT_VARIANT(stubs::SetName));
2377 else
2378 stubCall(STRICT_VARIANT(stubs::SetPropNoCache));
2379 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
2380 frame.shimmy(1);
2383 void
2384 mjit::Compiler::jsop_getprop_slow(JSAtom *atom, bool usePropCache)
2386 prepareStubCall(Uses(1));
2387 if (usePropCache) {
2388 stubCall(stubs::GetProp);
2389 } else {
2390 masm.move(ImmPtr(atom), Registers::ArgReg1);
2391 stubCall(stubs::GetPropNoCache);
2393 frame.pop();
2394 frame.pushSynced();
2397 bool
2398 mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
2400 prepareStubCall(Uses(1));
2401 masm.move(ImmPtr(atom), Registers::ArgReg1);
2402 stubCall(stubs::CallProp);
2403 frame.pop();
2404 frame.pushSynced();
2405 frame.pushSynced();
2406 return true;
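/* JSOP_LENGTH: fold constant string lengths, load other string lengths inline, and otherwise defer to the getprop path (or the Length stub without PICs). */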
2409 void
2410 mjit::Compiler::jsop_length()
2412 FrameEntry *top = frame.peek(-1);
2414 if (top->isTypeKnown() && top->getKnownType() == JSVAL_TYPE_STRING) {
2415 if (top->isConstant()) {
2416 JSString *str = top->getValue().toString();
2417 Value v;
2418 v.setNumber(uint32(str->length()));
2419 frame.pop();
2420 frame.push(v);
2421 } else {
2422 RegisterID str = frame.ownRegForData(top);
2423 masm.loadPtr(Address(str, offsetof(JSString, mLengthAndFlags)), str);
2424 masm.rshiftPtr(Imm32(JSString::FLAGS_LENGTH_SHIFT), str);
2425 frame.pop();
2426 frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
2428 return;
2431 #if defined JS_POLYIC
2432 jsop_getprop(cx->runtime->atomState.lengthAtom);
2433 #else
2434 prepareStubCall(Uses(1));
2435 stubCall(stubs::Length);
2436 frame.pop();
2437 frame.pushSynced();
2438 #endif
2441 #ifdef JS_MONOIC
2442 void
2443 mjit::Compiler::passMICAddress(MICGenInfo &mic)
2445 mic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2447 #endif
2449 #if defined JS_POLYIC
2450 void
2451 mjit::Compiler::passPICAddress(PICGenInfo &pic)
2453 pic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2456 void
2457 mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck, bool usePropCache)
2459 FrameEntry *top = frame.peek(-1);
2461 /* If the incoming type will never PIC, take slow path. */
2462 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
2463 JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
2464 top->getKnownType() != JSVAL_TYPE_STRING);
2465 jsop_getprop_slow(atom, usePropCache);
2466 return;
2470 * These two must be loaded first. The objReg because the string path
2471 * wants to read it, and the shapeReg because it could cause a spill that
2472 * the string path wouldn't sink back.
2474 RegisterID objReg = Registers::ReturnReg;
2475 RegisterID shapeReg = Registers::ReturnReg;
2476 if (atom == cx->runtime->atomState.lengthAtom) {
2477 objReg = frame.copyDataIntoReg(top);
2478 shapeReg = frame.allocReg();
2481 PICGenInfo pic(ic::PICInfo::GET, usePropCache);
2483 /* Guard that the type is an object. */
2484 Jump typeCheck;
2485 if (doTypeCheck && !top->isTypeKnown()) {
2486 RegisterID reg = frame.tempRegForType(top);
2487 pic.typeReg = reg;
2489 /* Start the hot path where it's easy to patch it. */
2490 pic.fastPathStart = masm.label();
2491 Jump j = masm.testObject(Assembler::NotEqual, reg);
2493 /* GETPROP_INLINE_TYPE_GUARD is used to patch the jmp, not cmp. */
2494 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, masm.label()) == GETPROP_INLINE_TYPE_GUARD);
2496 pic.typeCheck = stubcc.linkExit(j, Uses(1));
2497 pic.hasTypeCheck = true;
2498 } else {
2499 pic.fastPathStart = masm.label();
2500 pic.hasTypeCheck = false;
2501 pic.typeReg = Registers::ReturnReg;
2504 if (atom != cx->runtime->atomState.lengthAtom) {
2505 objReg = frame.copyDataIntoReg(top);
2506 shapeReg = frame.allocReg();
2509 pic.shapeReg = shapeReg;
2510 pic.atom = atom;
2511 pic.objRemat = frame.dataRematInfo(top);
2513 /* Guard on shape. */
2514 masm.loadShape(objReg, shapeReg);
2515 pic.shapeGuard = masm.label();
2517 DataLabel32 inlineShapeLabel;
2518 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
2519 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
2520 inlineShapeLabel);
2521 DBGLABEL(dbgInlineShapeJump);
2523 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
2525 stubcc.leave();
2526 passPICAddress(pic);
2527 pic.callReturn = stubcc.call(ic::GetProp);
2529 /* Load dslots. */
2530 #if defined JS_NUNBOX32
2531 DBGLABEL(dbgDslotsLoad);
2532 #elif defined JS_PUNBOX64
2533 Label dslotsLoadLabel = masm.label();
2534 #endif
2535 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
2537 /* Copy the slot value to the expression stack. */
2538 Address slot(objReg, 1 << 24);
2539 frame.pop();
2541 #if defined JS_NUNBOX32
2542 masm.loadTypeTag(slot, shapeReg);
2543 DBGLABEL(dbgTypeLoad);
2545 masm.loadPayload(slot, objReg);
2546 DBGLABEL(dbgDataLoad);
2547 #elif defined JS_PUNBOX64
2548 Label inlineValueLoadLabel =
2549 masm.loadValueAsComponents(slot, shapeReg, objReg);
2550 #endif
2551 pic.storeBack = masm.label();
2554 /* Assert correctness of hardcoded offsets. */
2555 #if defined JS_NUNBOX32
2556 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
2557 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
2558 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
2559 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
2560 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2561 #elif defined JS_PUNBOX64
2562 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
2563 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
2565 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
2566 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
2568 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
2569 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
2571 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2572 #endif
2573 /* GETPROP_INLINE_TYPE_GUARD's validity is asserted above. */
2575 pic.objReg = objReg;
2576 frame.pushRegs(shapeReg, objReg);
2578 stubcc.rejoin(Changes(1));
2580 pics.append(pic);
2583 #ifdef JS_POLYIC
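/* GETELEM PIC fast path: guard on the object's shape and the id's identity before the inline slot load; both guards exit to the same slow path. */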
2584 void
2585 mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID objReg,
2586 RegisterID idReg, RegisterID shapeReg)
2588 PICGenInfo pic(ic::PICInfo::GETELEM, true);
2590 pic.objRemat = frame.dataRematInfo(obj);
2591 pic.idRemat = frame.dataRematInfo(id);
2592 pic.shapeReg = shapeReg;
2593 pic.hasTypeCheck = false;
2595 pic.fastPathStart = masm.label();
2597 /* Guard on shape. */
2598 masm.loadShape(objReg, shapeReg);
2599 pic.shapeGuard = masm.label();
2601 DataLabel32 inlineShapeOffsetLabel;
2602 Jump jmpShapeGuard = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
2603 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
2604 inlineShapeOffsetLabel);
2605 DBGLABEL(dbgInlineShapeJump);
2607 /* Guard on id identity. */
2608 #if defined JS_NUNBOX32
2609 static const void *BOGUS_ATOM = (void *)0xdeadbeef;
2610 #elif defined JS_PUNBOX64
2611 static const void *BOGUS_ATOM = (void *)0xfeedfacedeadbeef;
2612 #endif
2614 DataLabelPtr inlineAtomOffsetLabel;
2615 Jump idGuard = masm.branchPtrWithPatch(Assembler::NotEqual, idReg,
2616 inlineAtomOffsetLabel, ImmPtr(BOGUS_ATOM));
2617 DBGLABEL(dbgInlineAtomJump);
2620      * The state between these two exits is identical, so this is safe. The
2621 * GETELEM PIC repatches both jumps to the slowPathStart on reset.
2623 stubcc.linkExit(idGuard, Uses(2));
2624 pic.slowPathStart = stubcc.linkExit(jmpShapeGuard, Uses(2));
2626 stubcc.leave();
2627 passPICAddress(pic);
2628 pic.callReturn = stubcc.call(ic::GetElem);
2630 /* Load dslots. */
2631 #if defined JS_NUNBOX32
2632 DBGLABEL(dbgDslotsLoad);
2633 #elif defined JS_PUNBOX64
2634 Label dslotsLoadLabel = masm.label();
2635 #endif
2636 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
2638 /* Copy the slot value to the expression stack. */
2639 Address slot(objReg, 1 << 24);
2640 #if defined JS_NUNBOX32
2641 masm.loadTypeTag(slot, shapeReg);
2642 DBGLABEL(dbgTypeLoad);
2643 masm.loadPayload(slot, objReg);
2644 DBGLABEL(dbgDataLoad);
2645 #elif defined JS_PUNBOX64
2646 Label inlineValueOffsetLabel =
2647 masm.loadValueAsComponents(slot, shapeReg, objReg);
2648 #endif
2649 pic.storeBack = masm.label();
2651 pic.objReg = objReg;
2652 pic.idReg = idReg;
2654 #if defined JS_NUNBOX32
2655 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETELEM_DSLOTS_LOAD);
2656 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETELEM_TYPE_LOAD);
2657 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETELEM_DATA_LOAD);
2658 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel) == GETELEM_INLINE_ATOM_OFFSET);
2659 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineAtomJump) == GETELEM_INLINE_ATOM_JUMP);
2660 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel) == GETELEM_INLINE_SHAPE_OFFSET);
2661 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETELEM_INLINE_SHAPE_JUMP);
2662 #elif defined JS_PUNBOX64
2663 pic.labels.getelem.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
2664 JS_ASSERT(pic.labels.getelem.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
2666 pic.labels.getelem.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
2667 JS_ASSERT(pic.labels.getelem.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel));
2669 pic.labels.getelem.inlineAtomOffset = masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel);
2670 JS_ASSERT(pic.labels.getelem.inlineAtomOffset == masm.differenceBetween(pic.shapeGuard, inlineAtomOffsetLabel));
2672 pic.labels.getelem.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueOffsetLabel);
2673 JS_ASSERT(pic.labels.getelem.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueOffsetLabel));
2675 JS_ASSERT(masm.differenceBetween(inlineShapeOffsetLabel, dbgInlineShapeJump) == GETELEM_INLINE_SHAPE_JUMP);
2676 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineAtomJump) ==
2677 pic.labels.getelem.inlineAtomOffset + GETELEM_INLINE_ATOM_JUMP);
2678 #endif
2680 JS_ASSERT(pic.idReg != pic.objReg);
2681 JS_ASSERT(pic.idReg != pic.shapeReg);
2682 JS_ASSERT(pic.objReg != pic.shapeReg);
2684 pics.append(pic);
2686 #endif
2688 bool
2689 mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
2691 FrameEntry *top = frame.peek(-1);
2694 * These two must be loaded first. The objReg because the string path
2695 * wants to read it, and the shapeReg because it could cause a spill that
2696 * the string path wouldn't sink back.
2698 RegisterID objReg = frame.copyDataIntoReg(top);
2699 RegisterID shapeReg = frame.allocReg();
2701 PICGenInfo pic(ic::PICInfo::CALL, true);
2703 /* Guard that the type is an object. */
2704 pic.typeReg = frame.copyTypeIntoReg(top);
2706 /* Start the hot path where it's easy to patch it. */
2707 pic.fastPathStart = masm.label();
2710 * Guard that the value is an object. This part needs some extra gunk
2711 * because the leave() after the shape guard will emit a jump from this
2712 * path to the final call. We need a label in between that jump, which
2713 * will be the target of patched jumps in the PIC.
2715 Jump typeCheck = masm.testObject(Assembler::NotEqual, pic.typeReg);
2716 DBGLABEL(dbgInlineTypeGuard);
2718 pic.typeCheck = stubcc.linkExit(typeCheck, Uses(1));
2719 pic.hasTypeCheck = true;
2720 pic.objReg = objReg;
2721 pic.shapeReg = shapeReg;
2722 pic.atom = atom;
2723 pic.objRemat = frame.dataRematInfo(top);
2726 * Store the type and object back. Don't bother keeping them in registers,
2727 * since a sync will be needed for the upcoming call.
2729 uint32 thisvSlot = frame.frameDepth();
2730 Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
2731 #if defined JS_NUNBOX32
2732 masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
2733 #elif defined JS_PUNBOX64
2734 masm.orPtr(pic.objReg, pic.typeReg);
2735 masm.storePtr(pic.typeReg, thisv);
2736 #endif
2737 frame.freeReg(pic.typeReg);
2739 /* Guard on shape. */
2740 masm.loadShape(objReg, shapeReg);
2741 pic.shapeGuard = masm.label();
2743 DataLabel32 inlineShapeLabel;
2744 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
2745 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
2746 inlineShapeLabel);
2747 DBGLABEL(dbgInlineShapeJump);
2749 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
2751 /* Slow path. */
2752 stubcc.leave();
2753 passPICAddress(pic);
2754 pic.callReturn = stubcc.call(ic::CallProp);
2756 /* Adjust the frame. None of this will generate code. */
2757 frame.pop();
2758 frame.pushRegs(shapeReg, objReg);
2759 frame.pushSynced();
2761 /* Load dslots. */
2762 #if defined JS_NUNBOX32
2763 DBGLABEL(dbgDslotsLoad);
2764 #elif defined JS_PUNBOX64
2765 Label dslotsLoadLabel = masm.label();
2766 #endif
2767 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
2769 /* Copy the slot value to the expression stack. */
2770 Address slot(objReg, 1 << 24);
2772 #if defined JS_NUNBOX32
2773 masm.loadTypeTag(slot, shapeReg);
2774 DBGLABEL(dbgTypeLoad);
2776 masm.loadPayload(slot, objReg);
2777 DBGLABEL(dbgDataLoad);
2778 #elif defined JS_PUNBOX64
2779 Label inlineValueLoadLabel =
2780 masm.loadValueAsComponents(slot, shapeReg, objReg);
2781 #endif
2782 pic.storeBack = masm.label();
2784 /* Assert correctness of hardcoded offsets. */
2785 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgInlineTypeGuard) == GETPROP_INLINE_TYPE_GUARD);
2786 #if defined JS_NUNBOX32
2787 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
2788 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
2789 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
2790 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
2791 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2792 #elif defined JS_PUNBOX64
2793 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
2794 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
2796 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
2797 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
2799 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
2800 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
2802 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2803 #endif
2805 stubcc.rejoin(Changes(2));
2806 pics.append(pic);
2808 return true;
2811 bool
2812 mjit::Compiler::jsop_callprop_str(JSAtom *atom)
2814 if (!script->compileAndGo) {
2815 jsop_callprop_slow(atom);
2816 return true;
2819 /* Bake in String.prototype. Is this safe? */
2820 JSObject *obj;
2821 if (!js_GetClassPrototype(cx, NULL, JSProto_String, &obj))
2822 return false;
2824 /* Force into a register because getprop won't expect a constant. */
2825 RegisterID reg = frame.allocReg();
2827 masm.move(ImmPtr(obj), reg);
2828 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
2830 /* Get the property. */
2831 jsop_getprop(atom);
2833 /* Perform a swap. */
2834 frame.dup2();
2835 frame.shift(-3);
2836 frame.shift(-1);
2838     /* Test if the function can take a primitive. */
2839 #ifdef DEBUG
2840 FrameEntry *funFe = frame.peek(-2);
2841 #endif
2842 JS_ASSERT(!funFe->isTypeKnown());
2845 * See bug 584579 - need to forget string type, since wrapping could
2846 * create an object. forgetType() alone is not valid because it cannot be
2847 * used on copies or constants.
2849 RegisterID strReg;
2850 FrameEntry *strFe = frame.peek(-1);
2851 if (strFe->isConstant()) {
2852 strReg = frame.allocReg();
2853 masm.move(ImmPtr(strFe->getValue().toString()), strReg);
2854 } else {
2855 strReg = frame.ownRegForData(strFe);
2857 frame.pop();
2858 frame.pushTypedPayload(JSVAL_TYPE_STRING, strReg);
2859 frame.forgetType(frame.peek(-1));
2861 return true;
2864 bool
2865 mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
2867 FrameEntry *top = frame.peek(-1);
2869 PICGenInfo pic(ic::PICInfo::CALL, true);
2871 JS_ASSERT(top->isTypeKnown());
2872 JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);
2874 pic.fastPathStart = masm.label();
2875 pic.hasTypeCheck = false;
2876 pic.typeReg = Registers::ReturnReg;
2878 RegisterID objReg = frame.copyDataIntoReg(top);
2879 RegisterID shapeReg = frame.allocReg();
2881 pic.shapeReg = shapeReg;
2882 pic.atom = atom;
2883 pic.objRemat = frame.dataRematInfo(top);
2885 /* Guard on shape. */
2886 masm.loadShape(objReg, shapeReg);
2887 pic.shapeGuard = masm.label();
2889 DataLabel32 inlineShapeLabel;
2890 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
2891 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
2892 inlineShapeLabel);
2893 DBGLABEL(dbgInlineShapeJump);
2895 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
2897 stubcc.leave();
2898 passPICAddress(pic);
2899 pic.callReturn = stubcc.call(ic::CallProp);
2901 /* Load dslots. */
2902 #if defined JS_NUNBOX32
2903 DBGLABEL(dbgDslotsLoad);
2904 #elif defined JS_PUNBOX64
2905 Label dslotsLoadLabel = masm.label();
2906 #endif
2907 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
2909 /* Copy the slot value to the expression stack. */
2910 Address slot(objReg, 1 << 24);
2912 #if defined JS_NUNBOX32
2913 masm.loadTypeTag(slot, shapeReg);
2914 DBGLABEL(dbgTypeLoad);
2916 masm.loadPayload(slot, objReg);
2917 DBGLABEL(dbgDataLoad);
2918 #elif defined JS_PUNBOX64
2919 Label inlineValueLoadLabel =
2920 masm.loadValueAsComponents(slot, shapeReg, objReg);
2921 #endif
2923 pic.storeBack = masm.label();
2924 pic.objReg = objReg;
2927 * 1) Dup the |this| object.
2928 * 2) Push the property value onto the stack.
2929 * 3) Move the value below the dup'd |this|, uncopying it. This could
2930 * generate code, thus the storeBack label being prior. This is safe
2931 * as a stack transition, because JSOP_CALLPROP has JOF_TMPSLOT. It is
2932 * also safe for correctness, because if we know the LHS is an object, it
2933 * is the resulting vp[1].
2935 frame.dup();
2936 frame.pushRegs(shapeReg, objReg);
2937 frame.shift(-2);
2940 * Assert correctness of hardcoded offsets.
2941 * No type guard: type is asserted.
2943 #if defined JS_NUNBOX32
2944 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
2945 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
2946 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDataLoad) == GETPROP_DATA_LOAD);
2947 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
2948 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2949 #elif defined JS_PUNBOX64
2950 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
2951 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.storeBack, dslotsLoadLabel));
2953 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
2954 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
2956 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.storeBack, inlineValueLoadLabel);
2957 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.storeBack, inlineValueLoadLabel));
2959 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
2960 #endif
2962 stubcc.rejoin(Changes(2));
2963 pics.append(pic);
2965 return true;
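/* Dispatch JSOP_CALLPROP on what is known about the receiver: string receivers, known-object receivers, and the generic PIC path. */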
2968 bool
2969 mjit::Compiler::jsop_callprop(JSAtom *atom)
2971 FrameEntry *top = frame.peek(-1);
2973 /* If the incoming type will never PIC, take slow path. */
2974 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
2975 if (top->getKnownType() == JSVAL_TYPE_STRING)
2976 return jsop_callprop_str(atom);
2977 return jsop_callprop_slow(atom);
2980 if (top->isTypeKnown())
2981 return jsop_callprop_obj(atom);
2982 return jsop_callprop_generic(atom);
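/* SETPROP/SETNAME/SETMETHOD through a PIC: guard the receiver's type and shape inline and store the RHS into its slot; shape misses call ic::SetProp. */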
2985 void
2986 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
2988 FrameEntry *lhs = frame.peek(-2);
2989 FrameEntry *rhs = frame.peek(-1);
2991 /* If the incoming type will never PIC, take slow path. */
2992 if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
2993 jsop_setprop_slow(atom, usePropCache);
2994 return;
2997 JSOp op = JSOp(*PC);
2999 PICGenInfo pic(op == JSOP_SETMETHOD ? ic::PICInfo::SETMETHOD : ic::PICInfo::SET, usePropCache);
3000 pic.atom = atom;
3002 /* Guard that the type is an object. */
3003 Jump typeCheck;
3004 if (!lhs->isTypeKnown()) {
3005 RegisterID reg = frame.tempRegForType(lhs);
3006 pic.typeReg = reg;
3008 /* Start the hot path where it's easy to patch it. */
3009 pic.fastPathStart = masm.label();
3010 Jump j = masm.testObject(Assembler::NotEqual, reg);
3012 pic.typeCheck = stubcc.linkExit(j, Uses(2));
3013 stubcc.leave();
3015 stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
3016 if (usePropCache)
3017 stubcc.call(STRICT_VARIANT(stubs::SetName));
3018 else
3019 stubcc.call(STRICT_VARIANT(stubs::SetPropNoCache));
3020 typeCheck = stubcc.masm.jump();
3021 pic.hasTypeCheck = true;
3022 } else {
3023 pic.fastPathStart = masm.label();
3024 pic.hasTypeCheck = false;
3025 pic.typeReg = Registers::ReturnReg;
3028 /* Get the object into a mutable register. */
3029 RegisterID objReg = frame.copyDataIntoReg(lhs);
3030 pic.objReg = objReg;
3032 /* Get info about the RHS and pin it. */
3033 ValueRemat vr;
3034 frame.pinEntry(rhs, vr);
3035 pic.vr = vr;
3037 RegisterID shapeReg = frame.allocReg();
3038 pic.shapeReg = shapeReg;
3039 pic.objRemat = frame.dataRematInfo(lhs);
3041 frame.unpinEntry(vr);
3043 /* Guard on shape. */
3044 masm.loadShape(objReg, shapeReg);
3045 pic.shapeGuard = masm.label();
3046 DataLabel32 inlineShapeOffsetLabel;
3047 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3048 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3049 inlineShapeOffsetLabel);
3050 DBGLABEL(dbgInlineShapeJump);
3052 /* Slow path. */
3054 pic.slowPathStart = stubcc.linkExit(j, Uses(2));
3056 stubcc.leave();
3057 passPICAddress(pic);
3058 pic.callReturn = stubcc.call(ic::SetProp);
3061 /* Load dslots. */
3062 #if defined JS_NUNBOX32
3063 DBGLABEL(dbgDslots);
3064 #elif defined JS_PUNBOX64
3065 Label dslotsLoadLabel = masm.label();
3066 #endif
3067 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
3069 /* Store RHS into object slot. */
3070 Address slot(objReg, 1 << 24);
3071 #if defined JS_NUNBOX32
3072 Label dbgInlineStoreType = masm.storeValue(vr, slot);
3073 #elif defined JS_PUNBOX64
3074 masm.storeValue(vr, slot);
3075 #endif
3076 DBGLABEL(dbgAfterValueStore);
3077 pic.storeBack = masm.label();
3079 frame.freeReg(objReg);
3080 frame.freeReg(shapeReg);
3082 /* "Pop under", taking out object (LHS) and leaving RHS. */
3083 frame.shimmy(1);
3085 /* Finish slow path. */
3087 if (pic.hasTypeCheck)
3088 typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
3089 stubcc.rejoin(Changes(1));
3092 #if defined JS_PUNBOX64
3093 pic.labels.setprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
3094 pic.labels.setprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
3095 JS_ASSERT(masm.differenceBetween(inlineShapeOffsetLabel, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
3096 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_VALUE);
3097 #elif defined JS_NUNBOX32
3098 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel) == SETPROP_INLINE_SHAPE_OFFSET);
3099 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
3100 if (vr.isConstant) {
3101 /* Constants are offset inside the opcode by 4. */
3102 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_CONST_TYPE);
3103 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore)-4 == SETPROP_INLINE_STORE_CONST_DATA);
3104 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_CONSTANT);
3105 } else if (vr.u.s.isTypeKnown) {
3106 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_KTYPE_TYPE);
3107 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_KTYPE_DATA);
3108 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_KTYPE);
3109 } else {
3110 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgInlineStoreType) == SETPROP_INLINE_STORE_DYN_TYPE);
3111 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgAfterValueStore) == SETPROP_INLINE_STORE_DYN_DATA);
3112 JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslots) == SETPROP_DSLOTS_BEFORE_DYNAMIC);
3114 #endif
3116 pics.append(pic);
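/* NAME through a PIC: the inline path begins as a bare jump to ic::Name, which can later patch in a fast scope-chain lookup. */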
3119 void
3120 mjit::Compiler::jsop_name(JSAtom *atom)
3122 PICGenInfo pic(ic::PICInfo::NAME, true);
3124 pic.shapeReg = frame.allocReg();
3125 pic.objReg = frame.allocReg();
3126 pic.typeReg = Registers::ReturnReg;
3127 pic.atom = atom;
3128 pic.hasTypeCheck = false;
3129 pic.fastPathStart = masm.label();
3131 pic.shapeGuard = masm.label();
3132 Jump j = masm.jump();
3133 DBGLABEL(dbgJumpOffset);
3135 pic.slowPathStart = stubcc.linkExit(j, Uses(0));
3136 stubcc.leave();
3137 passPICAddress(pic);
3138 pic.callReturn = stubcc.call(ic::Name);
3141 pic.storeBack = masm.label();
3142 frame.pushRegs(pic.shapeReg, pic.objReg);
3144 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);
3146 stubcc.rejoin(Changes(1));
3148 pics.append(pic);
3151 void
3152 mjit::Compiler::jsop_xname(JSAtom *atom)
3154 PICGenInfo pic(ic::PICInfo::XNAME, true);
3156 FrameEntry *fe = frame.peek(-1);
3157 if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
3158 jsop_getprop(atom);
3159 return;
3162 if (!fe->isTypeKnown()) {
3163 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
3164 stubcc.linkExit(notObject, Uses(1));
3167 pic.shapeReg = frame.allocReg();
3168 pic.objReg = frame.copyDataIntoReg(fe);
3169 pic.typeReg = Registers::ReturnReg;
3170 pic.atom = atom;
3171 pic.hasTypeCheck = false;
3172 pic.fastPathStart = masm.label();
3174 pic.shapeGuard = masm.label();
3175 Jump j = masm.jump();
3176 DBGLABEL(dbgJumpOffset);
3178 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3179 stubcc.leave();
3180 passPICAddress(pic);
3181 pic.callReturn = stubcc.call(ic::XName);
3184 pic.storeBack = masm.label();
3185 frame.pop();
3186 frame.pushRegs(pic.shapeReg, pic.objReg);
3188 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);
3190 stubcc.rejoin(Changes(1));
3192 pics.append(pic);
3195 void
3196 mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
3198 PICGenInfo pic(ic::PICInfo::BIND, usePropCache);
3200 pic.shapeReg = frame.allocReg();
3201 pic.objReg = frame.allocReg();
3202 pic.typeReg = Registers::ReturnReg;
3203 pic.atom = script->getAtom(index);
3204 pic.hasTypeCheck = false;
3205 pic.fastPathStart = masm.label();
3207 Address parent(pic.objReg, offsetof(JSObject, parent));
3208 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
3210 pic.shapeGuard = masm.label();
3211 #if defined JS_NUNBOX32
3212 Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(parent), ImmPtr(0));
3213 DBGLABEL(inlineJumpOffset);
3214 #elif defined JS_PUNBOX64
3215 masm.loadPayload(parent, Registers::ValueReg);
3216 Jump j = masm.branchPtr(Assembler::NotEqual, Registers::ValueReg, ImmPtr(0));
3217 Label inlineJumpOffset = masm.label();
3218 #endif
3220 pic.slowPathStart = stubcc.linkExit(j, Uses(0));
3221 stubcc.leave();
3222 passPICAddress(pic);
3223 pic.callReturn = stubcc.call(ic::BindName);
3226 pic.storeBack = masm.label();
3227 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
3228 frame.freeReg(pic.shapeReg);
3230 #if defined JS_NUNBOX32
3231 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineJumpOffset) == BINDNAME_INLINE_JUMP_OFFSET);
3232 #elif defined JS_PUNBOX64
3233 pic.labels.bindname.inlineJumpOffset = masm.differenceBetween(pic.shapeGuard, inlineJumpOffset);
3234 JS_ASSERT(pic.labels.bindname.inlineJumpOffset == masm.differenceBetween(pic.shapeGuard, inlineJumpOffset));
3235 #endif
3237 stubcc.rejoin(Changes(1));
3239 pics.append(pic);
3242 #else /* JS_POLYIC */
3244 void
3245 mjit::Compiler::jsop_name(JSAtom *atom)
3247 prepareStubCall(Uses(0));
3248 stubCall(stubs::Name);
3249 frame.pushSynced();
3252 void
3253 mjit::Compiler::jsop_xname(JSAtom *atom)
3255 jsop_getprop(atom);
3258 void
3259 mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck, bool usePropCache)
3261 jsop_getprop_slow(atom, usePropCache);
3264 bool
3265 mjit::Compiler::jsop_callprop(JSAtom *atom)
3267 return jsop_callprop_slow(atom);
3270 void
3271 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
3273 jsop_setprop_slow(atom, usePropCache);
3276 void
3277 mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
3279 RegisterID reg = frame.allocReg();
3280 Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
3281 masm.loadPtr(scopeChain, reg);
3283 Address address(reg, offsetof(JSObject, parent));
3285 Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));
3287 stubcc.linkExit(j, Uses(0));
3288 stubcc.leave();
3289 if (usePropCache) {
3290 stubcc.call(stubs::BindName);
3291 } else {
3292 masm.move(ImmPtr(script->getAtom(index)), Registers::ArgReg1);
3293 stubcc.call(stubs::BindNameNoCache);
3296 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3298 stubcc.rejoin(Changes(1));
3300 #endif
3302 void
3303 mjit::Compiler::jsop_getarg(uint32 index)
3305 frame.push(Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, index)));
3308 void
3309 mjit::Compiler::jsop_this()
3311 Address thisvAddr(JSFrameReg, JSStackFrame::offsetOfThis(fun));
3312 frame.push(thisvAddr);
3314 * In strict mode code, we don't wrap 'this'.
3315 * In direct-call eval code, we wrapped 'this' before entering the eval.
3316 * In global code, 'this' is always an object.
3318 if (fun && !script->strictModeCode) {
3319 Jump notObj = frame.testObject(Assembler::NotEqual, frame.peek(-1));
3320 stubcc.linkExit(notObj, Uses(1));
3321 stubcc.leave();
3322 stubcc.call(stubs::This);
3323 stubcc.rejoin(Changes(1));
3327 void
3328 mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
3330 #if defined JS_MONOIC
3331 jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
3332 bool pop = (JSOp(*next) == JSOP_POP) && !analysis[next].nincoming;
3333 int amt = (op == JSOP_GNAMEINC || op == JSOP_INCGNAME) ? -1 : 1;
3335 if (pop || (op == JSOP_INCGNAME || op == JSOP_DECGNAME)) {
3336 /* These cases are easy, the original value is not observed. */
3338 jsop_getgname(index);
3339 // V
3341 frame.push(Int32Value(amt));
3342 // V 1
3344 /* Use sub since it calls ValueToNumber instead of string concat. */
3345 jsop_binary(JSOP_SUB, stubs::Sub);
3346 // N+1
3348 jsop_bindgname();
3349 // V+1 OBJ
3351 frame.dup2();
3352 // V+1 OBJ V+1 OBJ
3354 frame.shift(-3);
3355 // OBJ OBJ V+1
3357 frame.shift(-1);
3358 // OBJ V+1
3360 jsop_setgname(index);
3361 // V+1
3363 if (pop)
3364 frame.pop();
3365 } else {
3366 /* The pre-value is observed, making this more tricky. */
3368 jsop_getgname(index);
3369 // V
3371 jsop_pos();
3372 // N
3374 frame.dup();
3375 // N N
3377 frame.push(Int32Value(-amt));
3378 // N N 1
3380 jsop_binary(JSOP_ADD, stubs::Add);
3381 // N N+1
3383 jsop_bindgname();
3384 // N N+1 OBJ
3386 frame.dup2();
3387 // N N+1 OBJ N+1 OBJ
3389 frame.shift(-3);
3390 // N OBJ OBJ N+1
3392 frame.shift(-1);
3393 // N OBJ N+1
3395 jsop_setgname(index);
3396 // N N+1
3398 frame.pop();
3399 // N
3402 if (pop)
3403 PC += JSOP_POP_LENGTH;
3404 #else
3405 JSAtom *atom = script->getAtom(index);
3406 prepareStubCall(Uses(0));
3407 masm.move(ImmPtr(atom), Registers::ArgReg1);
3408 stubCall(stub);
3409 frame.pushSynced();
3410 #endif
3412 PC += JSOP_GNAMEINC_LENGTH;
3415 void
3416 mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
3418 JSAtom *atom = script->getAtom(index);
3419 #if defined JS_POLYIC
3420 jsbytecode *next = &PC[JSOP_NAMEINC_LENGTH];
3421 bool pop = (JSOp(*next) == JSOP_POP) && !analysis[next].nincoming;
3422 int amt = (op == JSOP_NAMEINC || op == JSOP_INCNAME) ? -1 : 1;
3424 if (pop || (op == JSOP_INCNAME || op == JSOP_DECNAME)) {
3425 /* These cases are easy, the original value is not observed. */
3427 jsop_name(atom);
3428 // V
3430 frame.push(Int32Value(amt));
3431 // V 1
3433 /* Use sub since it calls ValueToNumber instead of string concat. */
3434 jsop_binary(JSOP_SUB, stubs::Sub);
3435 // N+1
3437 jsop_bindname(index, false);
3438 // V+1 OBJ
3440 frame.dup2();
3441 // V+1 OBJ V+1 OBJ
3443 frame.shift(-3);
3444 // OBJ OBJ V+1
3446 frame.shift(-1);
3447 // OBJ V+1
3449 jsop_setprop(atom, false);
3450 // V+1
3452 if (pop)
3453 frame.pop();
3454 } else {
3455 /* The pre-value is observed, making this more tricky. */
3457 jsop_name(atom);
3458 // V
3460 jsop_pos();
3461 // N
3463 frame.dup();
3464 // N N
3466 frame.push(Int32Value(-amt));
3467 // N N 1
3469 jsop_binary(JSOP_ADD, stubs::Add);
3470 // N N+1
3472 jsop_bindname(index, false);
3473 // N N+1 OBJ
3475 frame.dup2();
3476 // N N+1 OBJ N+1 OBJ
3478 frame.shift(-3);
3479 // N OBJ OBJ N+1
3481 frame.shift(-1);
3482 // N OBJ N+1
3484 jsop_setprop(atom, false);
3485 // N N+1
3487 frame.pop();
3488 // N
3491 if (pop)
3492 PC += JSOP_POP_LENGTH;
3493 #else
3494 prepareStubCall(Uses(0));
3495 masm.move(ImmPtr(atom), Registers::ArgReg1);
3496 stubCall(stub);
3497 frame.pushSynced();
3498 #endif
3500 PC += JSOP_NAMEINC_LENGTH;
3503 void
3504 mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
3506 JSAtom *atom = script->getAtom(index);
3507 #if defined JS_POLYIC
3508 FrameEntry *objFe = frame.peek(-1);
3509 if (!objFe->isTypeKnown() || objFe->getKnownType() == JSVAL_TYPE_OBJECT) {
3510 jsbytecode *next = &PC[JSOP_PROPINC_LENGTH];
3511 bool pop = (JSOp(*next) == JSOP_POP) && !analysis[next].nincoming;
3512 int amt = (op == JSOP_PROPINC || op == JSOP_INCPROP) ? -1 : 1;
3514 if (pop || (op == JSOP_INCPROP || op == JSOP_DECPROP)) {
3515 /* These cases are easy, the original value is not observed. */
3517 frame.dup();
3518 // OBJ OBJ
3520 jsop_getprop(atom);
3521 // OBJ V
3523 frame.push(Int32Value(amt));
3524 // OBJ V 1
3526 /* Use sub since it calls ValueToNumber instead of string concat. */
3527 jsop_binary(JSOP_SUB, stubs::Sub);
3528 // OBJ V+1
3530 jsop_setprop(atom, false);
3531 // V+1
3533 if (pop)
3534 frame.pop();
3535 } else {
3536 /* The pre-value is observed, making this more tricky. */
3538 frame.dup();
3539 // OBJ OBJ
3541 jsop_getprop(atom);
3542 // OBJ V
3544 jsop_pos();
3545 // OBJ N
3547 frame.dup();
3548 // OBJ N N
3550 frame.push(Int32Value(-amt));
3551 // OBJ N N 1
3553 jsop_binary(JSOP_ADD, stubs::Add);
3554 // OBJ N N+1
3556 frame.dupAt(-3);
3557 // OBJ N N+1 OBJ
3559 frame.dupAt(-2);
3560 // OBJ N N+1 OBJ N+1
3562 jsop_setprop(atom, false);
3563 // OBJ N N+1 N+1
3565 frame.popn(2);
3566 // OBJ N
3568 frame.shimmy(1);
3569 // N
3571 if (pop)
3572 PC += JSOP_POP_LENGTH;
3573 } else
3574 #endif
3576 prepareStubCall(Uses(1));
3577 masm.move(ImmPtr(atom), Registers::ArgReg1);
3578 stubCall(stub);
3579 frame.pop();
3580 frame.pushSynced();
3583 PC += JSOP_PROPINC_LENGTH;
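/* JSOP_ITER fast path for simple for-in loops: try to reuse the most recently used native iterator when its cached shapes still match the object and its prototype. */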
3586 void
3587 mjit::Compiler::iter(uintN flags)
3589 FrameEntry *fe = frame.peek(-1);
3592 * Stub the call if this is not a simple 'for in' loop or if the iterated
3593 * value is known to not be an object.
3595 if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
3596 prepareStubCall(Uses(1));
3597 masm.move(Imm32(flags), Registers::ArgReg1);
3598 stubCall(stubs::Iter);
3599 frame.pop();
3600 frame.pushSynced();
3601 return;
3604 if (!fe->isTypeKnown()) {
3605 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
3606 stubcc.linkExit(notObject, Uses(1));
3609 RegisterID reg = frame.tempRegForData(fe);
3611 frame.pinReg(reg);
3612 RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
3613 RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
3614 RegisterID T1 = frame.allocReg();
3615 RegisterID T2 = frame.allocReg();
3616 frame.unpinReg(reg);
3619 * Fetch the most recent iterator. TODO: bake this pointer in when
3620 * iterator caches become per-compartment.
3622 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
3623 #ifdef JS_THREADSAFE
3624 masm.loadPtr(Address(T1, offsetof(JSContext, thread)), T1);
3625 masm.loadPtr(Address(T1, offsetof(JSThread, data.lastNativeIterator)), ioreg);
3626 #else
3627 masm.loadPtr(Address(T1, offsetof(JSContext, runtime)), T1);
3628 masm.loadPtr(Address(T1, offsetof(JSRuntime, threadData.lastNativeIterator)), ioreg);
3629 #endif
3631 /* Test for NULL. */
3632 Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
3633 stubcc.linkExit(nullIterator, Uses(1));
3635 /* Get NativeIterator from iter obj. :FIXME: X64, also most of this function */
3636 masm.loadPtr(Address(ioreg, offsetof(JSObject, privateData)), nireg);
3638 /* Test for active iterator. */
3639 Address flagsAddr(nireg, offsetof(NativeIterator, flags));
3640 masm.load32(flagsAddr, T1);
3641 Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1, Imm32(JSITER_ACTIVE));
3642 stubcc.linkExit(activeIterator, Uses(1));
3644 /* Compare shape of object with iterator. */
3645 masm.loadShape(reg, T1);
3646 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
3647 masm.load32(Address(T2, 0), T2);
3648 Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
3649 stubcc.linkExit(mismatchedObject, Uses(1));
3651 /* Compare shape of object's prototype with iterator. */
3652 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
3653 masm.loadShape(T1, T1);
3654 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
3655 masm.load32(Address(T2, sizeof(uint32)), T2);
3656 Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
3657 stubcc.linkExit(mismatchedProto, Uses(1));
3660 * Compare object's prototype's prototype with NULL. The last native
3661 * iterator will always have a prototype chain length of one
3662 * (i.e. it must be a plain object), so we do not need to generate
3663 * a loop here.
3665 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
3666 masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
3667 Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
3668 stubcc.linkExit(overlongChain, Uses(1));
3670 /* Found a match with the most recent iterator. Hooray! */
3672 /* Mark iterator as active. */
3673 masm.load32(flagsAddr, T1);
3674 masm.or32(Imm32(JSITER_ACTIVE), T1);
3675 masm.store32(T1, flagsAddr);
3677 /* Chain onto the active iterator stack. */
3678 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
3679 masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
3680 masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
3681 masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));
3683 frame.freeReg(nireg);
3684 frame.freeReg(T1);
3685 frame.freeReg(T2);
3687 stubcc.leave();
3688 stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
3689 stubcc.call(stubs::Iter);
3691 /* Push the iterator object. */
3692 frame.pop();
3693 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);
3695 stubcc.rejoin(Changes(1));
3699 * This big nasty function emits a fast-path for native iterators, producing
3700  * a temporary value on the stack for the FORLOCAL, FORARG, FORGLOBAL, etc. ops to use.
3702 void
3703 mjit::Compiler::iterNext()
3705 FrameEntry *fe = frame.peek(-1);
3706 RegisterID reg = frame.tempRegForData(fe);
3708     /* Is it worth trying to pin this longer? Probably not. */
3709 frame.pinReg(reg);
3710 RegisterID T1 = frame.allocReg();
3711 frame.unpinReg(reg);
3713 /* Test clasp */
3714 masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
3715 Jump notFast = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
3716 stubcc.linkExit(notFast, Uses(1));
3718 /* Get private from iter obj. */
3719 masm.loadFunctionPrivate(reg, T1);
3721 RegisterID T3 = frame.allocReg();
3722 RegisterID T4 = frame.allocReg();
3724 /* Test if for-each. */
3725 masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
3726 notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
3727 stubcc.linkExit(notFast, Uses(1));
3729 RegisterID T2 = frame.allocReg();
3731 /* Get cursor. */
3732 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
3734 /* Test if the jsid is a string. */
3735 masm.loadPtr(T2, T3);
3736 masm.move(T3, T4);
3737 masm.andPtr(Imm32(JSID_TYPE_MASK), T4);
3738 notFast = masm.branchTestPtr(Assembler::NonZero, T4, T4);
3739 stubcc.linkExit(notFast, Uses(1));
3741 /* It's safe to increase the cursor now. */
3742 masm.addPtr(Imm32(sizeof(jsid)), T2, T4);
3743 masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));
3745 frame.freeReg(T4);
3746 frame.freeReg(T1);
3747 frame.freeReg(T2);
3749 stubcc.leave();
3750 stubcc.call(stubs::IterNext);
3752 frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);
3754 /* Join with the stub call. */
3755 stubcc.rejoin(Changes(1));
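/* JSOP_MOREITER: test whether the native iterator's cursor has reached props_end, then fuse the result with the following IFNE/IFNEX branch. */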
3758 void
3759 mjit::Compiler::iterMore()
3761     FrameEntry *fe = frame.peek(-1);
3762 RegisterID reg = frame.tempRegForData(fe);
3764 frame.pinReg(reg);
3765 RegisterID T1 = frame.allocReg();
3766 frame.unpinReg(reg);
3768 /* Test clasp */
3769 masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
3770 Jump notFast = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
3771 stubcc.linkExitForBranch(notFast);
3773 /* Get private from iter obj. */
3774 masm.loadFunctionPrivate(reg, T1);
3776 /* Get props_cursor, test */
3777 RegisterID T2 = frame.allocReg();
3778 frame.syncAndForgetEverything();
3779 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
3780 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
3781 Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
3783 jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
3784 JSOp next = JSOp(*target);
3785 JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);
3787 target += (next == JSOP_IFNE)
3788 ? GET_JUMP_OFFSET(target)
3789 : GET_JUMPX_OFFSET(target);
3791 stubcc.leave();
3792 stubcc.call(stubs::IterMore);
3793 Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
3794 Registers::ReturnReg);
3796 PC += JSOP_MOREITER_LENGTH;
3797 PC += js_CodeSpec[next].length;
3799 stubcc.rejoin(Changes(1));
3801 jumpAndTrace(jFast, target, &j);
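/* JSOP_ENDITER fast path: clear the iterator's active flag, reset its property cursor, and unlink it from cx->enumerators. */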
3804 void
3805 mjit::Compiler::iterEnd()
3807     FrameEntry *fe = frame.peek(-1);
3808 RegisterID reg = frame.tempRegForData(fe);
3810 frame.pinReg(reg);
3811 RegisterID T1 = frame.allocReg();
3812 frame.unpinReg(reg);
3814 /* Test clasp */
3815 masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
3816 Jump notIterator = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
3817 stubcc.linkExit(notIterator, Uses(1));
3819 /* Get private from iter obj. :FIXME: X64 */
3820 masm.loadPtr(Address(reg, offsetof(JSObject, privateData)), T1);
3822 RegisterID T2 = frame.allocReg();
3824 /* Load flags. */
3825 Address flagAddr(T1, offsetof(NativeIterator, flags));
3826 masm.loadPtr(flagAddr, T2);
3828 /* Test for (flags == ENUMERATE | ACTIVE). */
3829 Jump notEnumerate = masm.branch32(Assembler::NotEqual, T2,
3830 Imm32(JSITER_ENUMERATE | JSITER_ACTIVE));
3831 stubcc.linkExit(notEnumerate, Uses(1));
3833 /* Clear active bit. */
3834 masm.and32(Imm32(~JSITER_ACTIVE), T2);
3835 masm.storePtr(T2, flagAddr);
3837 /* Reset property cursor. */
3838 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
3839 masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
3841 /* Advance enumerators list. */
3842 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
3843 masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
3844 masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
3846 frame.freeReg(T1);
3847 frame.freeReg(T2);
3849 stubcc.leave();
3850 stubcc.call(stubs::EndIter);
3852 frame.pop();
3854 stubcc.rejoin(Changes(1));
3857 void
3858 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
3860 prepareStubCall(Uses(2));
3861 stubCall(stub);
3862 frame.popn(2);
3863 frame.pushSynced();
3866 void
3867 mjit::Compiler::jsop_getgname_slow(uint32 index)
3869 prepareStubCall(Uses(0));
3870 stubCall(stubs::GetGlobalName);
3871 frame.pushSynced();
3874 void
3875 mjit::Compiler::jsop_bindgname()
3877 if (script->compileAndGo && globalObj) {
3878 frame.push(ObjectValue(*globalObj));
3879 return;
3882 /* :TODO: this is slower than it needs to be. */
3883 prepareStubCall(Uses(0));
3884 stubCall(stubs::BindGlobalName);
3885 frame.takeReg(Registers::ReturnReg);
3886 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
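/*
 * jsop_getgname()/jsop_setgname() below use a MIC: an inline shape guard
 * against a patchable constant (initially INVALID_SHAPE, so the guard always
 * fails at first) plus a load/store at a placeholder slot offset.
 * ic::GetGlobalName / ic::SetGlobalName presumably patch in the global's real
 * shape and slot once the property has been resolved on the slow path.
 */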
3889 void
3890 mjit::Compiler::jsop_getgname(uint32 index)
3892 #if defined JS_MONOIC
3893 jsop_bindgname();
3895 FrameEntry *fe = frame.peek(-1);
3896 JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);
3898 MICGenInfo mic(ic::MICInfo::GET);
3899 RegisterID objReg;
3900 Jump shapeGuard;
3902 mic.entry = masm.label();
3903 if (fe->isConstant()) {
3904 JSObject *obj = &fe->getValue().toObject();
3905 frame.pop();
3906 JS_ASSERT(obj->isNative());
3908 objReg = frame.allocReg();
3910 masm.load32FromImm(&obj->objShape, objReg);
3911 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
3912 Imm32(int32(JSObjectMap::INVALID_SHAPE)), mic.shape);
3913 masm.move(ImmPtr(obj), objReg);
3914 } else {
3915 objReg = frame.ownRegForData(fe);
3916 frame.pop();
3917 RegisterID reg = frame.allocReg();
3919 masm.loadShape(objReg, reg);
3920 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
3921 Imm32(int32(JSObjectMap::INVALID_SHAPE)), mic.shape);
3922 frame.freeReg(reg);
3924 stubcc.linkExit(shapeGuard, Uses(0));
3926 stubcc.leave();
3927 passMICAddress(mic);
3928 mic.stubEntry = stubcc.masm.label();
3929 mic.call = stubcc.call(ic::GetGlobalName);
3931 /* Placeholder slot value; the IC patches in the real offset. */
3932 uint32 slot = 1 << 24;
3934 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
3935 Address address(objReg, slot);
3937 /*
3938 * On x86_64, the length of the movq instruction used is variable
3939 * depending on the registers used. For example, 'movq $0x5(%r12), %r12'
3940 * is one byte larger than 'movq $0x5(%r14), %r14'. This means that
3941 * the constant '0x5' that we want to write is at a variable position.
3942 *
3943 * x86_64 only performs a single load. The constant offset is always
3944 * at the end of the bytecode. Knowing the start and end of the move
3945 * bytecode is sufficient for patching.
3946 */
3948 /* Allocate any register other than objReg. */
3949 RegisterID dreg = frame.allocReg();
3950 /* After dreg is loaded, it's safe to clobber objReg. */
3951 RegisterID treg = objReg;
3953 mic.load = masm.label();
3954 # if defined JS_NUNBOX32
3955 # if defined JS_CPU_ARM
3956 DataLabel32 offsetAddress = masm.load64WithAddressOffsetPatch(address, treg, dreg);
3957 JS_ASSERT(masm.differenceBetween(mic.load, offsetAddress) == 0);
3958 # else
3959 masm.loadPayload(address, dreg);
3960 masm.loadTypeTag(address, treg);
3961 # endif
3962 # elif defined JS_PUNBOX64
3963 Label inlineValueLoadLabel =
3964 masm.loadValueAsComponents(address, treg, dreg);
3965 mic.patchValueOffset = masm.differenceBetween(mic.load, inlineValueLoadLabel);
3966 JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, inlineValueLoadLabel));
3967 # endif
3969 frame.pushRegs(treg, dreg);
3971 stubcc.rejoin(Changes(1));
3972 mics.append(mic);
3974 #else
3975 jsop_getgname_slow(index);
3976 #endif
3979 void
3980 mjit::Compiler::jsop_setgname_slow(uint32 index)
3982 JSAtom *atom = script->getAtom(index);
3983 prepareStubCall(Uses(2));
3984 masm.move(ImmPtr(atom), Registers::ArgReg1);
3985 stubCall(STRICT_VARIANT(stubs::SetGlobalName));
3986 frame.popn(2);
3987 frame.pushSynced();
3990 void
3991 mjit::Compiler::jsop_setgname(uint32 index)
3993 #if defined JS_MONOIC
3994 FrameEntry *objFe = frame.peek(-2);
3995 JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);
3997 MICGenInfo mic(ic::MICInfo::SET);
3998 RegisterID objReg;
3999 Jump shapeGuard;
4001 mic.entry = masm.label();
4002 if (objFe->isConstant()) {
4003 JSObject *obj = &objFe->getValue().toObject();
4004 JS_ASSERT(obj->isNative());
4006 objReg = frame.allocReg();
4008 masm.load32FromImm(&obj->objShape, objReg);
4009 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
4010 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
4011 mic.shape);
4012 masm.move(ImmPtr(obj), objReg);
4013 } else {
4014 objReg = frame.copyDataIntoReg(objFe);
4015 RegisterID reg = frame.allocReg();
4017 masm.loadShape(objReg, reg);
4018 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
4019 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
4020 mic.shape);
4021 frame.freeReg(reg);
4023 stubcc.linkExit(shapeGuard, Uses(2));
4025 stubcc.leave();
4026 passMICAddress(mic);
4027 mic.stubEntry = stubcc.masm.label();
4028 mic.call = stubcc.call(ic::SetGlobalName);
4030 /* Placeholder slot value; the IC patches in the real offset. */
4031 uint32 slot = 1 << 24;
4033 /* Get both type and reg into registers. */
4034 FrameEntry *fe = frame.peek(-1);
4036 Value v;
4037 RegisterID typeReg = Registers::ReturnReg;
4038 RegisterID dataReg = Registers::ReturnReg;
4039 JSValueType typeTag = JSVAL_TYPE_INT32;
4041 mic.u.name.typeConst = fe->isTypeKnown();
4042 mic.u.name.dataConst = fe->isConstant();
4044 if (!mic.u.name.dataConst) {
4045 dataReg = frame.ownRegForData(fe);
4046 if (!mic.u.name.typeConst)
4047 typeReg = frame.ownRegForType(fe);
4048 else
4049 typeTag = fe->getKnownType();
4050 } else {
4051 v = fe->getValue();
4054 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
4055 Address address(objReg, slot);
4057 mic.load = masm.label();
4059 #if defined JS_CPU_ARM
4060 DataLabel32 offsetAddress;
4061 if (mic.u.name.dataConst) {
4062 offsetAddress = masm.moveWithPatch(Imm32(address.offset), JSC::ARMRegisters::S0);
4063 masm.add32(address.base, JSC::ARMRegisters::S0);
4064 masm.storeValue(v, Address(JSC::ARMRegisters::S0, 0));
4065 } else {
4066 if (mic.u.name.typeConst) {
4067 offsetAddress = masm.store64WithAddressOffsetPatch(ImmType(typeTag), dataReg, address);
4068 } else {
4069 offsetAddress = masm.store64WithAddressOffsetPatch(typeReg, dataReg, address);
4072 JS_ASSERT(masm.differenceBetween(mic.load, offsetAddress) == 0);
4073 #else
4074 if (mic.u.name.dataConst) {
4075 masm.storeValue(v, address);
4076 } else if (mic.u.name.typeConst) {
4077 masm.storeValueFromComponents(ImmType(typeTag), dataReg, address);
4078 } else {
4079 masm.storeValueFromComponents(typeReg, dataReg, address);
4081 #endif
4083 #if defined JS_PUNBOX64
4084 /*
4085 * Instructions on x86_64 can vary in size based on registers
4086 * used. Since we only need to patch the last instruction in
4087 * both paths above, remember the distance between the
4088 * load label and after the instruction to be patched.
4089 */
4090 mic.patchValueOffset = masm.differenceBetween(mic.load, masm.label());
4091 JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label()));
4092 #endif
4094 frame.freeReg(objReg);
4095 frame.popn(2);
4096 if (mic.u.name.dataConst) {
4097 frame.push(v);
4098 } else {
4099 if (mic.u.name.typeConst)
4100 frame.pushTypedPayload(typeTag, dataReg);
4101 else
4102 frame.pushRegs(typeReg, dataReg);
4105 stubcc.rejoin(Changes(1));
4107 mics.append(mic);
4108 #else
4109 jsop_setgname_slow(index);
4110 #endif
4113 void
4114 mjit::Compiler::jsop_setelem_slow()
4116 prepareStubCall(Uses(3));
4117 stubCall(STRICT_VARIANT(stubs::SetElem));
4118 frame.popn(3);
4119 frame.pushSynced();
4122 void
4123 mjit::Compiler::jsop_getelem_slow()
4125 prepareStubCall(Uses(2));
4126 stubCall(stubs::GetElem);
4127 frame.popn(2);
4128 frame.pushSynced();
4131 void
4132 mjit::Compiler::jsop_unbrand()
4134 prepareStubCall(Uses(1));
4135 stubCall(stubs::Unbrand);
4138 void
4139 mjit::Compiler::jsop_instanceof()
4141 FrameEntry *lhs = frame.peek(-2);
4142 FrameEntry *rhs = frame.peek(-1);
4144 // The fast path applies only when both operands are objects.
4145 if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
4146 prepareStubCall(Uses(2));
4147 stubCall(stubs::InstanceOf);
4148 frame.popn(2);
4149 frame.takeReg(Registers::ReturnReg);
4150 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
4151 return;
4154 MaybeJump firstSlow;
4155 if (!rhs->isTypeKnown()) {
4156 Jump j = frame.testObject(Assembler::NotEqual, rhs);
4157 stubcc.linkExit(j, Uses(2));
4158 RegisterID reg = frame.tempRegForData(rhs);
4159 j = masm.testFunction(Assembler::NotEqual, reg);
4160 stubcc.linkExit(j, Uses(2));
4163 /* Test for bound functions. */
4164 RegisterID obj = frame.tempRegForData(rhs);
4165 Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
4166 Imm32(JSObject::BOUND_FUNCTION));
4168 stubcc.linkExit(isBound, Uses(2));
4169 stubcc.leave();
4170 stubcc.call(stubs::InstanceOf);
4171 firstSlow = stubcc.masm.jump();
4175 /* This is sadly necessary because the error case needs the object. */
4176 frame.dup();
4178 jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false);
4180 /* Primitive prototypes are invalid. */
4181 rhs = frame.peek(-1);
4182 Jump j = frame.testPrimitive(Assembler::Equal, rhs);
4183 stubcc.linkExit(j, Uses(3));
4185 /* Allocate registers up front, because of branchiness. */
4186 obj = frame.copyDataIntoReg(lhs);
4187 RegisterID proto = frame.copyDataIntoReg(rhs);
4188 RegisterID temp = frame.allocReg();
4190 MaybeJump isFalse;
4191 if (!lhs->isTypeKnown())
4192 isFalse = frame.testPrimitive(Assembler::Equal, lhs);
4194 /* Quick test to avoid wrapped objects. */
4195 masm.loadPtr(Address(obj, offsetof(JSObject, clasp)), temp);
4196 masm.loadPtr(Address(temp, offsetof(Class, ext) +
4197 offsetof(ClassExtension, wrappedObject)), temp);
4198 j = masm.branchTestPtr(Assembler::NonZero, temp, temp);
4199 stubcc.linkExit(j, Uses(3));
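/*
 * Inline prototype-chain walk: keep loading lhs's proto; reaching NULL means
 * 'instanceof' is false, reaching rhs.prototype means it is true. The result
 * is materialized as 0 or 1 in 'temp' and pushed as a boolean below.
 */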
4201 Address protoAddr(obj, offsetof(JSObject, proto));
4202 Label loop = masm.label();
4204 /* Walk prototype chain, break out on NULL or hit. */
4205 masm.loadPayload(protoAddr, obj);
4206 Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
4207 Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
4208 isTrue.linkTo(loop, &masm);
4209 masm.move(Imm32(1), temp);
4210 isTrue = masm.jump();
4212 if (isFalse.isSet())
4213 isFalse.getJump().linkTo(masm.label(), &masm);
4214 isFalse2.linkTo(masm.label(), &masm);
4215 masm.move(Imm32(0), temp);
4216 isTrue.linkTo(masm.label(), &masm);
4218 frame.freeReg(proto);
4219 frame.freeReg(obj);
4221 stubcc.leave();
4222 stubcc.call(stubs::FastInstanceOf);
4224 frame.popn(3);
4225 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);
4227 if (firstSlow.isSet())
4228 firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
4229 stubcc.rejoin(Changes(1));
4232 /*
4233 * Note: This function emits tracer hooks into the OOL path. This means if
4234 * it is used in the middle of an in-progress slow path, the stream will be
4235 * hopelessly corrupted. Take care to only call this before linkExits() and
4236 * after rejoin()s.
4237 */
4238 void
4239 mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slowOne, Jump *slowTwo)
4241 #ifndef JS_TRACER
4242 jumpInScript(j, target);
4243 if (slowOne)
4244 stubcc.jumpInScript(*slowOne, target);
4245 if (slowTwo)
4246 stubcc.jumpInScript(*slowTwo, target);
4247 #else
4248 if (!addTraceHints || target >= PC || JSOp(*target) != JSOP_TRACE) {
4249 jumpInScript(j, target);
4250 if (slowOne)
4251 stubcc.jumpInScript(*slowOne, target);
4252 if (slowTwo)
4253 stubcc.jumpInScript(*slowTwo, target);
4254 return;
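/*
 * Otherwise a trace hint is emitted: the fast-path jump (and any slow-path
 * jumps) are redirected into an OOL call to stubs::InvokeTracer at this loop
 * edge, and the jumps are recorded in a TRACER MIC, presumably so the hint
 * can be patched out later if recording is not worthwhile.
 */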
4257 # if JS_MONOIC
4258 MICGenInfo mic(ic::MICInfo::TRACER);
4260 mic.entry = masm.label();
4261 mic.jumpTarget = target;
4262 mic.traceHint = j;
4263 if (slowOne)
4264 mic.slowTraceHintOne = *slowOne;
4265 if (slowTwo)
4266 mic.slowTraceHintTwo = *slowTwo;
4267 # endif
4269 Label traceStart = stubcc.masm.label();
4271 stubcc.linkExitDirect(j, traceStart);
4272 if (slowOne)
4273 slowOne->linkTo(traceStart, &stubcc.masm);
4274 if (slowTwo)
4275 slowTwo->linkTo(traceStart, &stubcc.masm);
4276 # if JS_MONOIC
4277 passMICAddress(mic);
4278 # endif
4280 /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
4282 jsbytecode* pc = PC;
4283 PC = target;
4285 stubcc.call(stubs::InvokeTracer);
4287 PC = pc;
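/*
 * InvokeTracer's return value is tested below: a non-zero return is taken to
 * be a native code address and is jumped to directly after restoring the
 * frame registers, while zero means no trace was entered and we simply branch
 * to the loop target in this script's methodjit code.
 */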
4290 Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4291 Registers::ReturnReg);
4292 restoreFrameRegs(stubcc.masm);
4293 stubcc.masm.jump(Registers::ReturnReg);
4294 no.linkTo(stubcc.masm.label(), &stubcc.masm);
4295 stubcc.jumpInScript(stubcc.masm.jump(), target);
4297 # if JS_MONOIC
4298 mics.append(mic);
4299 # endif
4300 #endif
4303 void
4304 mjit::Compiler::enterBlock(JSObject *obj)
4306 // If this is an exception entry point, then js_InternalThrow has set
4307 // VMFrame::fp to the correct fp for the entry point. We need to copy
4308 // that value here to FpReg so that FpReg also has the correct sp.
4309 // Otherwise, we would simply be using a stale FpReg value.
4310 if (analysis[PC].exceptionEntry)
4311 restoreFrameRegs(masm);
4313 uint32 oldFrameDepth = frame.frameDepth();
4315 /* For now, don't bother generating a fast path for this opcode. */
4316 frame.syncAndForgetEverything();
4317 masm.move(ImmPtr(obj), Registers::ArgReg1);
4318 uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
4319 stubCall(stubs::EnterBlock);
4320 frame.enterBlock(n);
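/*
 * The loop below inspects the block object's reserved slots: a slot whose
 * initial value is 'true' appears to mark a let-binding that is closed over,
 * so the corresponding frame slot is flagged with setClosedVar() and kept
 * synced to memory.
 */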
4322 uintN base = JSSLOT_FREE(&js_BlockClass);
4323 uintN count = OBJ_BLOCK_COUNT(cx, obj);
4324 uintN limit = base + count;
4325 for (uintN slot = base, i = 0; slot < limit; slot++, i++) {
4326 const Value &v = obj->getSlotRef(slot);
4327 if (v.isBoolean() && v.toBoolean())
4328 frame.setClosedVar(oldFrameDepth + i);
4332 void
4333 mjit::Compiler::leaveBlock()
4335 /*
4336 * Note: After bug 535912, we can pass the block obj directly, inline
4337 * PutBlockObject, and do away with the muckiness in PutBlockObject.
4338 */
4339 uint32 n = js_GetVariableStackUses(JSOP_LEAVEBLOCK, PC);
4340 JSObject *obj = script->getObject(fullAtomIndex(PC + UINT16_LEN));
4341 prepareStubCall(Uses(n));
4342 masm.move(ImmPtr(obj), Registers::ArgReg1);
4343 stubCall(stubs::LeaveBlock);
4344 frame.leaveBlock(n);
4347 // Creates the new object expected for constructors, and places it in |thisv|.
4348 // It is broken down into the following operations:
4349 // CALLEE
4350 // GETPROP "prototype"
4351 // IFPRIMTOP:
4352 // NULL
4353 // call js_CreateThisFromFunctionWithProto(...)
4355 void
4356 mjit::Compiler::constructThis()
4358 JS_ASSERT(isConstructing);
4360 // Load the callee.
4361 Address callee(JSFrameReg, JSStackFrame::offsetOfCallee(fun));
4362 RegisterID calleeReg = frame.allocReg();
4363 masm.loadPayload(callee, calleeReg);
4364 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, calleeReg);
4366 // Get callee.prototype.
4367 jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false);
4369 // Reach into the proto Value and grab a register for its data.
4370 FrameEntry *protoFe = frame.peek(-1);
4371 RegisterID protoReg = frame.ownRegForData(protoFe);
4373 // Now, get the type. If it's not an object, set protoReg to NULL.
4374 Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
4375 stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
4376 stubcc.masm.move(ImmPtr(NULL), protoReg);
4377 stubcc.crossJump(stubcc.masm.jump(), masm.label());
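/*
 * If callee.prototype was not an object, the OOL path above nulled protoReg,
 * and stubs::CreateThis presumably falls back to the default prototype when
 * it is passed NULL.
 */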
4379 // Done with the protoFe.
4380 frame.pop();
4382 prepareStubCall(Uses(0));
4383 if (protoReg != Registers::ArgReg1)
4384 masm.move(protoReg, Registers::ArgReg1);
4385 stubCall(stubs::CreateThis);
4386 frame.freeReg(protoReg);