IC for JSOP_CALLELEM, re-landed (bug 604031, r=dmandelin).
[mozilla-central.git] / js / src / methodjit / Compiler.cpp
blob375b97aca72e395aae88244a9ae2090c653a5972
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * David Anderson <danderson@mozilla.com>
25 * David Mandelin <dmandelin@mozilla.com>
26 * Jan de Mooij <jandemooij@gmail.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "MethodJIT.h"
43 #include "jsnum.h"
44 #include "jsbool.h"
45 #include "jsiter.h"
46 #include "Compiler.h"
47 #include "StubCalls.h"
48 #include "MonoIC.h"
49 #include "PolyIC.h"
50 #include "Retcon.h"
51 #include "assembler/jit/ExecutableAllocator.h"
52 #include "assembler/assembler/LinkBuffer.h"
53 #include "FrameState-inl.h"
54 #include "jsobjinlines.h"
55 #include "jsscriptinlines.h"
56 #include "InlineFrameAssembler.h"
57 #include "jscompartment.h"
58 #include "jsobjinlines.h"
59 #include "jsopcodeinlines.h"
61 #include "jsautooplen.h"
63 using namespace js;
64 using namespace js::mjit;
65 #if defined(JS_POLYIC) || defined(JS_MONOIC)
66 using namespace js::mjit::ic;
67 #endif
/* This macro should be used after stub calls (which automatically set callLabel). */
#define ADD_CALLSITE(stub) \
    if (debugMode) addCallSite(__LINE__, (stub))

/*
 * For custom calls/jumps, this macro sets callLabel before adding the callsite.
 * Note: the missing semicolon after the first label() assignment was a syntax
 * error (an 'else' may not follow an unterminated expression statement).
 */
#if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
# define ADD_NON_STUB_CALLSITE(stub) \
    if (stub) \
        stubcc.masm.callLabel = stubcc.masm.label(); \
    else \
        masm.callLabel = masm.label(); \
    ADD_CALLSITE(stub)
#else
# define ADD_NON_STUB_CALLSITE(stub) \
    ADD_CALLSITE(stub)
#endif
/*
 * Abort the current compilation pass if either assembler or any of the
 * compiler's vectors hit OOM; reports the error on cx before returning.
 */
#define RETURN_IF_OOM(retval) \
    JS_BEGIN_MACRO \
        if (oomInVector || masm.oom() || stubcc.masm.oom()) { \
            js_ReportOutOfMemory(cx); \
            return retval; \
        } \
    JS_END_MACRO
#if defined(JS_METHODJIT_SPEW)
/* Opcode names, indexed by JSOp; used only by the debug spew channels. */
static const char *OpcodeNames[] = {
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
# include "jsopcode.tbl"
# undef OPDEF
};
#endif
102 mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp)
103 : BaseCompiler(cx),
104 fp(fp),
105 script(fp->script()),
106 scopeChain(&fp->scopeChain()),
107 globalObj(scopeChain->getGlobal()),
108 fun(fp->isFunctionFrame() && !fp->isEvalFrame()
109 ? fp->fun()
110 : NULL),
111 isConstructing(fp->isConstructing()),
112 analysis(NULL), jumpMap(NULL), frame(cx, script, masm),
113 branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
114 #if defined JS_MONOIC
115 mics(CompilerAllocPolicy(cx, *thisFromCtor())),
116 callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
117 equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
118 traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
119 #endif
120 #if defined JS_POLYIC
121 pics(CompilerAllocPolicy(cx, *thisFromCtor())),
122 getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
123 setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
124 #endif
125 callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
126 callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
127 doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
128 stubcc(cx, *thisFromCtor(), frame, script),
129 debugMode(cx->compartment->debugMode),
130 #if defined JS_TRACER
131 addTraceHints(cx->traceJitEnabled),
132 #endif
133 oomInVector(false),
134 applyTricks(NoApplyTricks)
138 CompileStatus
139 mjit::Compiler::compile()
141 JS_ASSERT(!script->isEmpty());
142 JS_ASSERT_IF(isConstructing, !script->jitCtor);
143 JS_ASSERT_IF(!isConstructing, !script->jitNormal);
145 JITScript **jit = isConstructing ? &script->jitCtor : &script->jitNormal;
146 void **checkAddr = isConstructing
147 ? &script->jitArityCheckCtor
148 : &script->jitArityCheckNormal;
150 CompileStatus status = performCompilation(jit);
151 if (status == Compile_Okay) {
152 // Global scripts don't have an arity check entry. That's okay, we
153 // just need a pointer so the VM can quickly decide whether this
154 // method can be JIT'd or not. Global scripts cannot be IC'd, since
155 // they have no functions, so there is no danger.
156 *checkAddr = (*jit)->arityCheckEntry
157 ? (*jit)->arityCheckEntry
158 : (*jit)->invokeEntry;
159 } else {
160 *checkAddr = JS_UNJITTABLE_SCRIPT;
163 return status;
/* Propagate any non-OK status from a compilation phase to the caller. */
#define CHECK_STATUS(expr) \
    JS_BEGIN_MACRO \
        CompileStatus status_ = (expr); \
        if (status_ != Compile_Okay) \
            return status_; \
    JS_END_MACRO
173 CompileStatus
174 mjit::Compiler::performCompilation(JITScript **jitp)
176 JaegerSpew(JSpew_Scripts, "compiling script (file \"%s\") (line \"%d\") (length \"%d\")\n",
177 script->filename, script->lineno, script->length);
179 analyze::Script analysis;
180 PodZero(&analysis);
182 analysis.analyze(cx, script);
184 if (analysis.OOM())
185 return Compile_Error;
186 if (analysis.failed()) {
187 JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
188 return Compile_Abort;
191 this->analysis = &analysis;
193 uint32 nargs = fun ? fun->nargs : 0;
194 if (!frame.init(nargs) || !stubcc.init(nargs))
195 return Compile_Abort;
197 jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
198 if (!jumpMap)
199 return Compile_Error;
200 #ifdef DEBUG
201 for (uint32 i = 0; i < script->length; i++)
202 jumpMap[i] = Label();
203 #endif
205 #ifdef JS_METHODJIT_SPEW
206 Profiler prof;
207 prof.start();
208 #endif
210 /* Initialize PC early so stub calls in the prologue can be fallible. */
211 PC = script->code;
213 #ifdef JS_METHODJIT
214 script->debugMode = debugMode;
215 #endif
217 for (uint32 i = 0; i < script->nClosedVars; i++)
218 frame.setClosedVar(script->getClosedVar(i));
220 CHECK_STATUS(generatePrologue());
221 CHECK_STATUS(generateMethod());
222 CHECK_STATUS(generateEpilogue());
223 CHECK_STATUS(finishThisUp(jitp));
225 #ifdef JS_METHODJIT_SPEW
226 prof.stop();
227 JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
228 #endif
230 JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%ld\")\n",
231 (*jitp)->code.m_code.executableAddress(), (*jitp)->code.m_size);
233 return Compile_Okay;
236 #undef CHECK_STATUS
238 mjit::Compiler::~Compiler()
240 cx->free(jumpMap);
243 CompileStatus JS_NEVER_INLINE
244 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
246 JS_ASSERT(cx->fp() == fp);
248 #if JS_HAS_SHARP_VARS
249 if (fp->script()->hasSharps)
250 return Compile_Abort;
251 #endif
253 // Ensure that constructors have at least one slot.
254 if (fp->isConstructing() && !fp->script()->nslots)
255 fp->script()->nslots++;
257 Compiler cc(cx, fp);
259 return cc.compile();
262 CompileStatus
263 mjit::Compiler::generatePrologue()
265 invokeLabel = masm.label();
268 * If there is no function, then this can only be called via JaegerShot(),
269 * which expects an existing frame to be initialized like the interpreter.
271 if (fun) {
272 Jump j = masm.jump();
275 * Entry point #2: The caller has partially constructed a frame, and
276 * either argc >= nargs or the arity check has corrected the frame.
278 invokeLabel = masm.label();
280 Label fastPath = masm.label();
282 /* Store this early on so slow paths can access it. */
283 masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
287 * Entry point #3: The caller has partially constructed a frame,
288 * but argc might be != nargs, so an arity check might be called.
290 * This loops back to entry point #2.
292 arityLabel = stubcc.masm.label();
293 Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
294 Imm32(fun->nargs));
295 stubcc.crossJump(argMatch, fastPath);
297 if (JSParamReg_Argc != Registers::ArgReg1)
298 stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
300 /* Slow path - call the arity check function. Returns new fp. */
301 stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
302 stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
303 stubcc.call(stubs::FixupArity);
304 stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
305 stubcc.crossJump(stubcc.masm.jump(), fastPath);
309 * Guard that there is enough stack space. Note we include the size of
310 * a second frame, to ensure we can create a frame from call sites.
312 masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
313 JSFrameReg,
314 Registers::ReturnReg);
315 Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
316 FrameAddress(offsetof(VMFrame, stackLimit)));
318 /* If the stack check fails... */
320 stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
321 stubcc.call(stubs::HitStackQuota);
322 stubcc.crossJump(stubcc.masm.jump(), masm.label());
326 * Set locals to undefined, as in initCallFrameLatePrologue.
327 * Skip locals which aren't closed and are known to be defined before used,
328 * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
330 for (uint32 i = 0; i < script->nfixed; i++) {
331 if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
332 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
333 masm.storeValue(UndefinedValue(), local);
337 /* Create the call object. */
338 if (fun->isHeavyweight()) {
339 prepareStubCall(Uses(0));
340 stubCall(stubs::GetCallObject);
343 j.linkTo(masm.label(), &masm);
345 if (analysis->usesScopeChain() && !fun->isHeavyweight()) {
347 * Load the scope chain into the frame if necessary. The scope chain
348 * is always set for global and eval frames, and will have been set by
349 * GetCallObject for heavyweight function frames.
351 RegisterID t0 = Registers::ReturnReg;
352 Jump hasScope = masm.branchTest32(Assembler::NonZero,
353 FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
354 masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
355 masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
356 masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
357 hasScope.linkTo(masm.label(), &masm);
361 if (isConstructing)
362 constructThis();
364 if (debugMode)
365 stubCall(stubs::EnterScript);
367 return Compile_Okay;
370 CompileStatus
371 mjit::Compiler::generateEpilogue()
373 return Compile_Okay;
376 CompileStatus
377 mjit::Compiler::finishThisUp(JITScript **jitp)
379 RETURN_IF_OOM(Compile_Error);
381 for (size_t i = 0; i < branchPatches.length(); i++) {
382 Label label = labelOf(branchPatches[i].pc);
383 branchPatches[i].jump.linkTo(label, &masm);
386 #ifdef JS_CPU_ARM
387 masm.forceFlushConstantPool();
388 stubcc.masm.forceFlushConstantPool();
389 #endif
390 JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %u, Slow code (stubcc) size = %u.\n", masm.size(), stubcc.size());
392 size_t totalSize = masm.size() +
393 stubcc.size() +
394 doubleList.length() * sizeof(double);
396 JSC::ExecutablePool *execPool = getExecPool(totalSize);
397 if (!execPool)
398 return Compile_Abort;
400 uint8 *result = (uint8 *)execPool->alloc(totalSize);
401 JSC::ExecutableAllocator::makeWritable(result, totalSize);
402 masm.executableCopy(result);
403 stubcc.masm.executableCopy(result + masm.size());
405 JSC::LinkBuffer fullCode(result, totalSize);
406 JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
408 size_t totalBytes = sizeof(JITScript) +
409 sizeof(void *) * script->length +
410 #if defined JS_MONOIC
411 sizeof(ic::MICInfo) * mics.length() +
412 sizeof(ic::CallICInfo) * callICs.length() +
413 sizeof(ic::EqualityICInfo) * equalityICs.length() +
414 sizeof(ic::TraceICInfo) * traceICs.length() +
415 #endif
416 #if defined JS_POLYIC
417 sizeof(ic::PICInfo) * pics.length() +
418 sizeof(ic::GetElementIC) * getElemICs.length() +
419 sizeof(ic::SetElementIC) * setElemICs.length() +
420 #endif
421 sizeof(CallSite) * callSites.length();
423 uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
424 if (!cursor) {
425 execPool->release();
426 return Compile_Error;
429 JITScript *jit = (JITScript *)cursor;
430 cursor += sizeof(JITScript);
432 jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
433 jit->nCallSites = callSites.length();
434 jit->invokeEntry = result;
436 /* Build the pc -> ncode mapping. */
437 void **nmap = (void **)cursor;
438 cursor += sizeof(void *) * script->length;
440 for (size_t i = 0; i < script->length; i++) {
441 Label L = jumpMap[i];
442 analyze::Bytecode *opinfo = analysis->maybeCode(i);
443 if (opinfo && opinfo->safePoint) {
444 JS_ASSERT(L.isValid());
445 nmap[i] = (uint8 *)(result + masm.distanceOf(L));
449 if (fun) {
450 jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
451 jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
454 #if defined JS_MONOIC
455 jit->nMICs = mics.length();
456 if (mics.length()) {
457 jit->mics = (ic::MICInfo *)cursor;
458 cursor += sizeof(ic::MICInfo) * mics.length();
459 } else {
460 jit->mics = NULL;
463 if (ic::MICInfo *scriptMICs = jit->mics) {
464 for (size_t i = 0; i < mics.length(); i++) {
465 scriptMICs[i].kind = mics[i].kind;
466 scriptMICs[i].entry = fullCode.locationOf(mics[i].entry);
467 switch (mics[i].kind) {
468 case ic::MICInfo::GET:
469 case ic::MICInfo::SET:
470 scriptMICs[i].load = fullCode.locationOf(mics[i].load);
471 scriptMICs[i].shape = fullCode.locationOf(mics[i].shape);
472 scriptMICs[i].stubCall = stubCode.locationOf(mics[i].call);
473 scriptMICs[i].stubEntry = stubCode.locationOf(mics[i].stubEntry);
474 scriptMICs[i].u.name.typeConst = mics[i].u.name.typeConst;
475 scriptMICs[i].u.name.dataConst = mics[i].u.name.dataConst;
476 #if defined JS_PUNBOX64
477 scriptMICs[i].patchValueOffset = mics[i].patchValueOffset;
478 #endif
479 break;
480 default:
481 JS_NOT_REACHED("Bad MIC kind");
483 stubCode.patch(mics[i].addrLabel, &scriptMICs[i]);
487 jit->nCallICs = callICs.length();
488 if (callICs.length()) {
489 jit->callICs = (ic::CallICInfo *)cursor;
490 cursor += sizeof(ic::CallICInfo) * callICs.length();
491 } else {
492 jit->callICs = NULL;
495 if (ic::CallICInfo *cics = jit->callICs) {
496 for (size_t i = 0; i < callICs.length(); i++) {
497 cics[i].reset();
498 cics[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
499 cics[i].funJump = fullCode.locationOf(callICs[i].funJump);
500 cics[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
502 /* Compute the hot call offset. */
503 uint32 offset = fullCode.locationOf(callICs[i].hotJump) -
504 fullCode.locationOf(callICs[i].funGuard);
505 cics[i].hotJumpOffset = offset;
506 JS_ASSERT(cics[i].hotJumpOffset == offset);
508 /* Compute the join point offset. */
509 offset = fullCode.locationOf(callICs[i].joinPoint) -
510 fullCode.locationOf(callICs[i].funGuard);
511 cics[i].joinPointOffset = offset;
512 JS_ASSERT(cics[i].joinPointOffset == offset);
514 /* Compute the OOL call offset. */
515 offset = stubCode.locationOf(callICs[i].oolCall) -
516 stubCode.locationOf(callICs[i].slowPathStart);
517 cics[i].oolCallOffset = offset;
518 JS_ASSERT(cics[i].oolCallOffset == offset);
520 /* Compute the OOL jump offset. */
521 offset = stubCode.locationOf(callICs[i].oolJump) -
522 stubCode.locationOf(callICs[i].slowPathStart);
523 cics[i].oolJumpOffset = offset;
524 JS_ASSERT(cics[i].oolJumpOffset == offset);
526 /* Compute the slow join point offset. */
527 offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
528 stubCode.locationOf(callICs[i].slowPathStart);
529 cics[i].slowJoinOffset = offset;
530 JS_ASSERT(cics[i].slowJoinOffset == offset);
532 /* Compute the join point offset for continuing on the hot path. */
533 offset = stubCode.locationOf(callICs[i].hotPathLabel) -
534 stubCode.locationOf(callICs[i].funGuard);
535 cics[i].hotPathOffset = offset;
536 JS_ASSERT(cics[i].hotPathOffset == offset);
538 cics[i].pc = callICs[i].pc;
539 cics[i].frameSize = callICs[i].frameSize;
540 cics[i].funObjReg = callICs[i].funObjReg;
541 cics[i].funPtrReg = callICs[i].funPtrReg;
542 stubCode.patch(callICs[i].addrLabel1, &cics[i]);
543 stubCode.patch(callICs[i].addrLabel2, &cics[i]);
547 jit->nEqualityICs = equalityICs.length();
548 if (equalityICs.length()) {
549 jit->equalityICs = (ic::EqualityICInfo *)cursor;
550 cursor += sizeof(ic::EqualityICInfo) * equalityICs.length();
551 } else {
552 jit->equalityICs = NULL;
555 if (ic::EqualityICInfo *scriptEICs = jit->equalityICs) {
556 for (size_t i = 0; i < equalityICs.length(); i++) {
557 uint32 offs = uint32(equalityICs[i].jumpTarget - script->code);
558 JS_ASSERT(jumpMap[offs].isValid());
559 scriptEICs[i].target = fullCode.locationOf(jumpMap[offs]);
560 scriptEICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
561 scriptEICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
562 scriptEICs[i].stub = equalityICs[i].stub;
563 scriptEICs[i].lvr = equalityICs[i].lvr;
564 scriptEICs[i].rvr = equalityICs[i].rvr;
565 scriptEICs[i].tempReg = equalityICs[i].tempReg;
566 scriptEICs[i].cond = equalityICs[i].cond;
567 if (equalityICs[i].jumpToStub.isSet())
568 scriptEICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
569 scriptEICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
571 stubCode.patch(equalityICs[i].addrLabel, &scriptEICs[i]);
575 jit->nTraceICs = traceICs.length();
576 if (traceICs.length()) {
577 jit->traceICs = (ic::TraceICInfo *)cursor;
578 cursor += sizeof(ic::TraceICInfo) * traceICs.length();
579 } else {
580 jit->traceICs = NULL;
583 if (ic::TraceICInfo *scriptTICs = jit->traceICs) {
584 for (size_t i = 0; i < traceICs.length(); i++) {
585 if (!traceICs[i].initialized)
586 continue;
588 uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
589 JS_ASSERT(jumpMap[offs].isValid());
590 scriptTICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
591 scriptTICs[i].jumpTarget = fullCode.locationOf(jumpMap[offs]);
592 scriptTICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
593 scriptTICs[i].traceData = NULL;
594 #ifdef DEBUG
595 scriptTICs[i].jumpTargetPC = traceICs[i].jumpTarget;
596 #endif
597 scriptTICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
598 if (traceICs[i].slowTraceHint.isSet())
599 scriptTICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
601 stubCode.patch(traceICs[i].addrLabel, &scriptTICs[i]);
604 #endif /* JS_MONOIC */
606 for (size_t i = 0; i < callPatches.length(); i++) {
607 CallPatchInfo &patch = callPatches[i];
609 if (patch.hasFastNcode)
610 fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
611 if (patch.hasSlowNcode)
612 stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
615 #if defined JS_POLYIC
616 jit->nGetElems = getElemICs.length();
617 if (getElemICs.length()) {
618 jit->getElems = (ic::GetElementIC *)cursor;
619 cursor += sizeof(ic::GetElementIC) * getElemICs.length();
620 } else {
621 jit->getElems = NULL;
624 for (size_t i = 0; i < getElemICs.length(); i++) {
625 ic::GetElementIC &to = jit->getElems[i];
626 GetElementICInfo &from = getElemICs[i];
628 new (&to) ic::GetElementIC();
629 from.copyTo(to, fullCode, stubCode);
631 to.typeReg = from.typeReg;
632 to.objReg = from.objReg;
633 to.idRemat = from.id;
635 if (from.typeGuard.isSet()) {
636 int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
637 fullCode.locationOf(from.fastPathStart);
638 to.inlineTypeGuard = inlineTypeGuard;
639 JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
641 int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
642 fullCode.locationOf(from.fastPathStart);
643 to.inlineClaspGuard = inlineClaspGuard;
644 JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);
646 stubCode.patch(from.paramAddr, &to);
649 jit->nSetElems = setElemICs.length();
650 if (setElemICs.length()) {
651 jit->setElems = (ic::SetElementIC *)cursor;
652 cursor += sizeof(ic::SetElementIC) * setElemICs.length();
653 } else {
654 jit->setElems = NULL;
657 for (size_t i = 0; i < setElemICs.length(); i++) {
658 ic::SetElementIC &to = jit->setElems[i];
659 SetElementICInfo &from = setElemICs[i];
661 new (&to) ic::SetElementIC();
662 from.copyTo(to, fullCode, stubCode);
664 to.strictMode = script->strictModeCode;
665 to.vr = from.vr;
666 to.objReg = from.objReg;
667 to.objRemat = from.objRemat.toInt32();
668 JS_ASSERT(to.objRemat == from.objRemat.toInt32());
670 to.hasConstantKey = from.key.isConstant();
671 if (from.key.isConstant())
672 to.keyValue = from.key.index();
673 else
674 to.keyReg = from.key.reg();
676 int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
677 fullCode.locationOf(from.fastPathStart);
678 to.inlineClaspGuard = inlineClaspGuard;
679 JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);
681 int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
682 fullCode.locationOf(from.fastPathStart);
683 to.inlineHoleGuard = inlineHoleGuard;
684 JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);
686 stubCode.patch(from.paramAddr, &to);
689 jit->nPICs = pics.length();
690 if (pics.length()) {
691 jit->pics = (ic::PICInfo *)cursor;
692 cursor += sizeof(ic::PICInfo) * pics.length();
693 } else {
694 jit->pics = NULL;
697 if (ic::PICInfo *scriptPICs = jit->pics) {
698 for (size_t i = 0; i < pics.length(); i++) {
699 new (&scriptPICs[i]) ic::PICInfo();
700 pics[i].copyTo(scriptPICs[i], fullCode, stubCode);
701 pics[i].copySimpleMembersTo(scriptPICs[i]);
703 scriptPICs[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
704 masm.distanceOf(pics[i].fastPathStart);
705 JS_ASSERT(scriptPICs[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
706 masm.distanceOf(pics[i].fastPathStart));
707 scriptPICs[i].shapeRegHasBaseShape = true;
708 scriptPICs[i].pc = pics[i].pc;
710 # if defined JS_CPU_X64
711 memcpy(&scriptPICs[i].labels, &pics[i].labels, sizeof(PICLabels));
712 # endif
714 if (pics[i].kind == ic::PICInfo::SET ||
715 pics[i].kind == ic::PICInfo::SETMETHOD) {
716 scriptPICs[i].u.vr = pics[i].vr;
717 } else if (pics[i].kind != ic::PICInfo::NAME) {
718 if (pics[i].hasTypeCheck) {
719 int32 distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
720 stubcc.masm.distanceOf(pics[i].slowPathStart);
721 JS_ASSERT(distance <= 0);
722 scriptPICs[i].u.get.typeCheckOffset = distance;
725 stubCode.patch(pics[i].paramAddr, &scriptPICs[i]);
728 #endif /* JS_POLYIC */
730 /* Link fast and slow paths together. */
731 stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
733 /* Patch all double references. */
734 size_t doubleOffset = masm.size() + stubcc.size();
735 double *doubleVec = (double *)(result + doubleOffset);
736 for (size_t i = 0; i < doubleList.length(); i++) {
737 DoublePatch &patch = doubleList[i];
738 doubleVec[i] = patch.d;
739 if (patch.ool)
740 stubCode.patch(patch.label, &doubleVec[i]);
741 else
742 fullCode.patch(patch.label, &doubleVec[i]);
745 /* Patch all outgoing calls. */
746 masm.finalize(fullCode);
747 stubcc.masm.finalize(stubCode);
749 JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
750 JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
752 /* Build the table of call sites. */
753 jit->nCallSites = callSites.length();
754 if (callSites.length()) {
755 CallSite *callSiteList = (CallSite *)cursor;
756 cursor += sizeof(CallSite) * callSites.length();
758 for (size_t i = 0; i < callSites.length(); i++) {
759 if (callSites[i].stub)
760 callSiteList[i].codeOffset = masm.size() + stubcc.masm.distanceOf(callSites[i].location);
761 else
762 callSiteList[i].codeOffset = masm.distanceOf(callSites[i].location);
763 callSiteList[i].pcOffset = callSites[i].pc - script->code;
764 callSiteList[i].id = callSites[i].id;
766 jit->callSites = callSiteList;
767 } else {
768 jit->callSites = NULL;
771 JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);
773 jit->nmap = nmap;
774 *jitp = jit;
776 return Compile_Okay;
#ifdef DEBUG
/* Disassemble the current opcode (with stack depth) to the JSOps channel. */
#define SPEW_OPCODE() \
    JS_BEGIN_MACRO \
        if (IsJaegerSpewChannelActive(JSpew_JSOps)) { \
            JaegerSpew(JSpew_JSOps, "    %2d ", frame.stackDepth()); \
            js_Disassemble1(cx, script, PC, PC - script->code, \
                            JS_TRUE, stdout); \
        } \
    JS_END_MACRO;
#else
#define SPEW_OPCODE()
#endif /* DEBUG */
/* Opcode dispatch helpers: END_CASE advances PC by the opcode's fixed length. */
#define BEGIN_CASE(name) case name:
#define END_CASE(name) \
    JS_BEGIN_MACRO \
        PC += name##_LENGTH; \
    JS_END_MACRO; \
    break;
799 CompileStatus
800 mjit::Compiler::generateMethod()
802 mjit::AutoScriptRetrapper trapper(cx, script);
804 for (;;) {
805 JSOp op = JSOp(*PC);
806 bool trap = (op == JSOP_TRAP);
808 if (trap) {
809 if (!trapper.untrap(PC))
810 return Compile_Error;
811 op = JSOp(*PC);
814 analyze::Bytecode *opinfo = analysis->maybeCode(PC);
816 if (!opinfo) {
817 if (op == JSOP_STOP)
818 break;
819 if (js_CodeSpec[op].length != -1)
820 PC += js_CodeSpec[op].length;
821 else
822 PC += js_GetVariableBytecodeLength(PC);
823 continue;
826 frame.setInTryBlock(opinfo->inTryBlock);
827 if (opinfo->jumpTarget || trap) {
828 frame.syncAndForgetEverything(opinfo->stackDepth);
829 opinfo->safePoint = true;
831 jumpMap[uint32(PC - script->code)] = masm.label();
833 SPEW_OPCODE();
834 JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
836 if (trap) {
837 prepareStubCall(Uses(0));
838 masm.move(ImmPtr(PC), Registers::ArgReg1);
839 stubCall(stubs::Trap);
841 #if defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)
842 // In case of no fast call, when we change the return address,
843 // we need to make sure add esp by 8. For normal call, we need
844 // to make sure the esp is not changed.
845 else {
846 masm.subPtr(Imm32(8), Registers::StackPointer);
847 masm.callLabel = masm.label();
848 masm.addPtr(Imm32(8), Registers::StackPointer);
850 #elif defined(_WIN64)
851 // In case of Win64 ABI, stub caller make 32-bytes spcae on stack
852 else {
853 masm.subPtr(Imm32(32), Registers::StackPointer);
854 masm.callLabel = masm.label();
855 masm.addPtr(Imm32(32), Registers::StackPointer);
857 #endif
858 ADD_CALLSITE(false);
860 /**********************
861 * BEGIN COMPILER OPS *
862 **********************/
864 switch (op) {
865 BEGIN_CASE(JSOP_NOP)
866 END_CASE(JSOP_NOP)
868 BEGIN_CASE(JSOP_PUSH)
869 frame.push(UndefinedValue());
870 END_CASE(JSOP_PUSH)
872 BEGIN_CASE(JSOP_POPV)
873 BEGIN_CASE(JSOP_SETRVAL)
875 RegisterID reg = frame.allocReg();
876 masm.load32(FrameFlagsAddress(), reg);
877 masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
878 masm.store32(reg, FrameFlagsAddress());
879 frame.freeReg(reg);
881 FrameEntry *fe = frame.peek(-1);
882 frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
883 frame.pop();
885 END_CASE(JSOP_POPV)
887 BEGIN_CASE(JSOP_RETURN)
888 emitReturn(frame.peek(-1));
889 END_CASE(JSOP_RETURN)
891 BEGIN_CASE(JSOP_GOTO)
893 /* :XXX: this isn't really necessary if we follow the branch. */
894 frame.syncAndForgetEverything();
895 Jump j = masm.jump();
896 if (!jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC)))
897 return Compile_Error;
899 END_CASE(JSOP_GOTO)
901 BEGIN_CASE(JSOP_IFEQ)
902 BEGIN_CASE(JSOP_IFNE)
903 if (!jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC)))
904 return Compile_Error;
905 END_CASE(JSOP_IFNE)
907 BEGIN_CASE(JSOP_ARGUMENTS)
909 * For calls of the form 'f.apply(x, arguments)' we can avoid
910 * creating an args object by having ic::SplatApplyArgs pull
911 * directly from the stack. To do this, we speculate here that
912 * 'apply' actually refers to js_fun_apply. If this is not true,
913 * the slow path in JSOP_FUNAPPLY will create the args object.
915 if (canUseApplyTricks())
916 applyTricks = LazyArgsObj;
917 else
918 jsop_arguments();
919 frame.pushSynced();
920 END_CASE(JSOP_ARGUMENTS)
922 BEGIN_CASE(JSOP_FORARG)
923 iterNext();
924 jsop_setarg(GET_SLOTNO(PC), true);
925 frame.pop();
926 END_CASE(JSOP_FORARG)
928 BEGIN_CASE(JSOP_FORLOCAL)
929 iterNext();
930 frame.storeLocal(GET_SLOTNO(PC), true);
931 frame.pop();
932 END_CASE(JSOP_FORLOCAL)
934 BEGIN_CASE(JSOP_DUP)
935 frame.dup();
936 END_CASE(JSOP_DUP)
938 BEGIN_CASE(JSOP_DUP2)
939 frame.dup2();
940 END_CASE(JSOP_DUP2)
942 BEGIN_CASE(JSOP_BITOR)
943 BEGIN_CASE(JSOP_BITXOR)
944 BEGIN_CASE(JSOP_BITAND)
945 jsop_bitop(op);
946 END_CASE(JSOP_BITAND)
948 BEGIN_CASE(JSOP_LT)
949 BEGIN_CASE(JSOP_LE)
950 BEGIN_CASE(JSOP_GT)
951 BEGIN_CASE(JSOP_GE)
952 BEGIN_CASE(JSOP_EQ)
953 BEGIN_CASE(JSOP_NE)
955 /* Detect fusions. */
956 jsbytecode *next = &PC[JSOP_GE_LENGTH];
957 JSOp fused = JSOp(*next);
958 if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
959 fused = JSOP_NOP;
961 /* Get jump target, if any. */
962 jsbytecode *target = NULL;
963 if (fused != JSOP_NOP)
964 target = next + GET_JUMP_OFFSET(next);
966 BoolStub stub = NULL;
967 switch (op) {
968 case JSOP_LT:
969 stub = stubs::LessThan;
970 break;
971 case JSOP_LE:
972 stub = stubs::LessEqual;
973 break;
974 case JSOP_GT:
975 stub = stubs::GreaterThan;
976 break;
977 case JSOP_GE:
978 stub = stubs::GreaterEqual;
979 break;
980 case JSOP_EQ:
981 stub = stubs::Equal;
982 break;
983 case JSOP_NE:
984 stub = stubs::NotEqual;
985 break;
986 default:
987 JS_NOT_REACHED("WAT");
988 break;
991 FrameEntry *rhs = frame.peek(-1);
992 FrameEntry *lhs = frame.peek(-2);
994 /* Check for easy cases that the parser does not constant fold. */
995 if (lhs->isConstant() && rhs->isConstant()) {
996 /* Primitives can be trivially constant folded. */
997 const Value &lv = lhs->getValue();
998 const Value &rv = rhs->getValue();
1000 if (lv.isPrimitive() && rv.isPrimitive()) {
1001 bool result = compareTwoValues(cx, op, lv, rv);
1003 frame.pop();
1004 frame.pop();
1006 if (!target) {
1007 frame.push(Value(BooleanValue(result)));
1008 } else {
1009 if (fused == JSOP_IFEQ)
1010 result = !result;
1012 /* Branch is never taken, don't bother doing anything. */
1013 if (result) {
1014 frame.syncAndForgetEverything();
1015 Jump j = masm.jump();
1016 if (!jumpAndTrace(j, target))
1017 return Compile_Error;
1020 } else {
1021 if (!emitStubCmpOp(stub, target, fused))
1022 return Compile_Error;
1024 } else {
1025 /* Anything else should go through the fast path generator. */
1026 if (!jsop_relational(op, stub, target, fused))
1027 return Compile_Error;
1030 /* Advance PC manually. */
1031 JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
1032 JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
1033 JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
1034 JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
1035 JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
1037 PC += JSOP_GE_LENGTH;
1038 if (fused != JSOP_NOP) {
1039 SPEW_OPCODE();
1040 PC += JSOP_IFNE_LENGTH;
1042 break;
1044 END_CASE(JSOP_GE)
1046 BEGIN_CASE(JSOP_LSH)
1047 jsop_bitop(op);
1048 END_CASE(JSOP_LSH)
1050 BEGIN_CASE(JSOP_RSH)
1051 jsop_rsh();
1052 END_CASE(JSOP_RSH)
1054 BEGIN_CASE(JSOP_URSH)
1055 jsop_bitop(op);
1056 END_CASE(JSOP_URSH)
1058 BEGIN_CASE(JSOP_ADD)
1059 jsop_binary(op, stubs::Add);
1060 END_CASE(JSOP_ADD)
1062 BEGIN_CASE(JSOP_SUB)
1063 jsop_binary(op, stubs::Sub);
1064 END_CASE(JSOP_SUB)
1066 BEGIN_CASE(JSOP_MUL)
1067 jsop_binary(op, stubs::Mul);
1068 END_CASE(JSOP_MUL)
1070 BEGIN_CASE(JSOP_DIV)
1071 jsop_binary(op, stubs::Div);
1072 END_CASE(JSOP_DIV)
1074 BEGIN_CASE(JSOP_MOD)
1075 jsop_mod();
1076 END_CASE(JSOP_MOD)
1078 BEGIN_CASE(JSOP_NOT)
1079 jsop_not();
1080 END_CASE(JSOP_NOT)
1082 BEGIN_CASE(JSOP_BITNOT)
1084 FrameEntry *top = frame.peek(-1);
1085 if (top->isConstant() && top->getValue().isPrimitive()) {
1086 int32_t i;
1087 ValueToECMAInt32(cx, top->getValue(), &i);
1088 i = ~i;
1089 frame.pop();
1090 frame.push(Int32Value(i));
1091 } else {
1092 jsop_bitnot();
1095 END_CASE(JSOP_BITNOT)
1097 BEGIN_CASE(JSOP_NEG)
1099 FrameEntry *top = frame.peek(-1);
1100 if (top->isConstant() && top->getValue().isPrimitive()) {
1101 double d;
1102 ValueToNumber(cx, top->getValue(), &d);
1103 d = -d;
1104 frame.pop();
1105 frame.push(NumberValue(d));
1106 } else {
1107 jsop_neg();
1110 END_CASE(JSOP_NEG)
1112 BEGIN_CASE(JSOP_POS)
1113 jsop_pos();
1114 END_CASE(JSOP_POS)
1116 BEGIN_CASE(JSOP_DELNAME)
1118 uint32 index = fullAtomIndex(PC);
1119 JSAtom *atom = script->getAtom(index);
1121 prepareStubCall(Uses(0));
1122 masm.move(ImmPtr(atom), Registers::ArgReg1);
1123 stubCall(stubs::DelName);
1124 frame.pushSynced();
1126 END_CASE(JSOP_DELNAME)
1128 BEGIN_CASE(JSOP_DELPROP)
1130 uint32 index = fullAtomIndex(PC);
1131 JSAtom *atom = script->getAtom(index);
1133 prepareStubCall(Uses(1));
1134 masm.move(ImmPtr(atom), Registers::ArgReg1);
1135 stubCall(STRICT_VARIANT(stubs::DelProp));
1136 frame.pop();
1137 frame.pushSynced();
1139 END_CASE(JSOP_DELPROP)
1141 BEGIN_CASE(JSOP_DELELEM)
1142 prepareStubCall(Uses(2));
1143 stubCall(STRICT_VARIANT(stubs::DelElem));
1144 frame.popn(2);
1145 frame.pushSynced();
1146 END_CASE(JSOP_DELELEM)
1148 BEGIN_CASE(JSOP_TYPEOF)
1149 BEGIN_CASE(JSOP_TYPEOFEXPR)
1150 jsop_typeof();
1151 END_CASE(JSOP_TYPEOF)
1153 BEGIN_CASE(JSOP_VOID)
1154 frame.pop();
1155 frame.push(UndefinedValue());
1156 END_CASE(JSOP_VOID)
1158 BEGIN_CASE(JSOP_INCNAME)
1159 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC)))
1160 return Compile_Error;
1161 break;
1162 END_CASE(JSOP_INCNAME)
1164 BEGIN_CASE(JSOP_INCGNAME)
1165 jsop_gnameinc(op, STRICT_VARIANT(stubs::IncGlobalName), fullAtomIndex(PC));
1166 break;
1167 END_CASE(JSOP_INCGNAME)
1169 BEGIN_CASE(JSOP_INCPROP)
1170 if (!jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC)))
1171 return Compile_Error;
1172 break;
1173 END_CASE(JSOP_INCPROP)
1175 BEGIN_CASE(JSOP_INCELEM)
1176 jsop_eleminc(op, STRICT_VARIANT(stubs::IncElem));
1177 END_CASE(JSOP_INCELEM)
1179 BEGIN_CASE(JSOP_DECNAME)
1180 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC)))
1181 return Compile_Error;
1182 break;
1183 END_CASE(JSOP_DECNAME)
1185 BEGIN_CASE(JSOP_DECGNAME)
1186 jsop_gnameinc(op, STRICT_VARIANT(stubs::DecGlobalName), fullAtomIndex(PC));
1187 break;
1188 END_CASE(JSOP_DECGNAME)
1190 BEGIN_CASE(JSOP_DECPROP)
1191 if (!jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC)))
1192 return Compile_Error;
1193 break;
1194 END_CASE(JSOP_DECPROP)
1196 BEGIN_CASE(JSOP_DECELEM)
1197 jsop_eleminc(op, STRICT_VARIANT(stubs::DecElem));
1198 END_CASE(JSOP_DECELEM)
1200 BEGIN_CASE(JSOP_NAMEINC)
1201 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC)))
1202 return Compile_Error;
1203 break;
1204 END_CASE(JSOP_NAMEINC)
1206 BEGIN_CASE(JSOP_GNAMEINC)
1207 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameInc), fullAtomIndex(PC));
1208 break;
1209 END_CASE(JSOP_GNAMEINC)
1211 BEGIN_CASE(JSOP_PROPINC)
1212 if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC)))
1213 return Compile_Error;
1214 break;
1215 END_CASE(JSOP_PROPINC)
1217 BEGIN_CASE(JSOP_ELEMINC)
1218 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemInc));
1219 END_CASE(JSOP_ELEMINC)
1221 BEGIN_CASE(JSOP_NAMEDEC)
1222 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC)))
1223 return Compile_Error;
1224 break;
1225 END_CASE(JSOP_NAMEDEC)
1227 BEGIN_CASE(JSOP_GNAMEDEC)
1228 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameDec), fullAtomIndex(PC));
1229 break;
1230 END_CASE(JSOP_GNAMEDEC)
1232 BEGIN_CASE(JSOP_PROPDEC)
1233 if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC)))
1234 return Compile_Error;
1235 break;
1236 END_CASE(JSOP_PROPDEC)
1238 BEGIN_CASE(JSOP_ELEMDEC)
1239 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemDec));
1240 END_CASE(JSOP_ELEMDEC)
1242 BEGIN_CASE(JSOP_GETTHISPROP)
1243 /* Push thisv onto stack. */
1244 jsop_this();
1245 if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1246 return Compile_Error;
1247 END_CASE(JSOP_GETTHISPROP);
1249 BEGIN_CASE(JSOP_GETARGPROP)
1250 /* Push arg onto stack. */
1251 jsop_getarg(GET_SLOTNO(PC));
1252 if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN]))))
1253 return Compile_Error;
1254 END_CASE(JSOP_GETARGPROP)
1256 BEGIN_CASE(JSOP_GETLOCALPROP)
1257 frame.pushLocal(GET_SLOTNO(PC));
1258 if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN]))))
1259 return Compile_Error;
1260 END_CASE(JSOP_GETLOCALPROP)
1262 BEGIN_CASE(JSOP_GETPROP)
1263 if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1264 return Compile_Error;
1265 END_CASE(JSOP_GETPROP)
1267 BEGIN_CASE(JSOP_LENGTH)
1268 if (!jsop_length())
1269 return Compile_Error;
1270 END_CASE(JSOP_LENGTH)
1272 BEGIN_CASE(JSOP_GETELEM)
1273 if (!jsop_getelem(false))
1274 return Compile_Error;
1275 END_CASE(JSOP_GETELEM)
1277 BEGIN_CASE(JSOP_SETELEM)
1278 if (!jsop_setelem())
1279 return Compile_Error;
1280 END_CASE(JSOP_SETELEM);
1282 BEGIN_CASE(JSOP_CALLNAME)
1283 prepareStubCall(Uses(0));
1284 masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
1285 stubCall(stubs::CallName);
1286 frame.pushSynced();
1287 frame.pushSynced();
1288 END_CASE(JSOP_CALLNAME)
1290 BEGIN_CASE(JSOP_EVAL)
1292 JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
1293 emitEval(GET_ARGC(PC));
1294 JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
1296 END_CASE(JSOP_EVAL)
1298 BEGIN_CASE(JSOP_CALL)
1299 BEGIN_CASE(JSOP_FUNAPPLY)
1300 BEGIN_CASE(JSOP_FUNCALL)
1302 JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
1303 inlineCallHelper(GET_ARGC(PC), false);
1304 JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
1306 END_CASE(JSOP_CALL)
1308 BEGIN_CASE(JSOP_NAME)
1309 jsop_name(script->getAtom(fullAtomIndex(PC)));
1310 END_CASE(JSOP_NAME)
1312 BEGIN_CASE(JSOP_DOUBLE)
1314 uint32 index = fullAtomIndex(PC);
1315 double d = script->getConst(index).toDouble();
1316 frame.push(Value(DoubleValue(d)));
1318 END_CASE(JSOP_DOUBLE)
1320 BEGIN_CASE(JSOP_STRING)
1322 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1323 JSString *str = ATOM_TO_STRING(atom);
1324 frame.push(Value(StringValue(str)));
1326 END_CASE(JSOP_STRING)
1328 BEGIN_CASE(JSOP_ZERO)
1329 frame.push(Valueify(JSVAL_ZERO));
1330 END_CASE(JSOP_ZERO)
1332 BEGIN_CASE(JSOP_ONE)
1333 frame.push(Valueify(JSVAL_ONE));
1334 END_CASE(JSOP_ONE)
1336 BEGIN_CASE(JSOP_NULL)
1337 frame.push(NullValue());
1338 END_CASE(JSOP_NULL)
1340 BEGIN_CASE(JSOP_THIS)
1341 jsop_this();
1342 END_CASE(JSOP_THIS)
1344 BEGIN_CASE(JSOP_FALSE)
1345 frame.push(Value(BooleanValue(false)));
1346 END_CASE(JSOP_FALSE)
1348 BEGIN_CASE(JSOP_TRUE)
1349 frame.push(Value(BooleanValue(true)));
1350 END_CASE(JSOP_TRUE)
1352 BEGIN_CASE(JSOP_OR)
1353 BEGIN_CASE(JSOP_AND)
1354 if (!jsop_andor(op, PC + GET_JUMP_OFFSET(PC)))
1355 return Compile_Error;
1356 END_CASE(JSOP_AND)
1358 BEGIN_CASE(JSOP_TABLESWITCH)
1359 frame.syncAndForgetEverything();
1360 masm.move(ImmPtr(PC), Registers::ArgReg1);
1362 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1363 stubCall(stubs::TableSwitch);
1364 frame.pop();
1366 masm.jump(Registers::ReturnReg);
1367 PC += js_GetVariableBytecodeLength(PC);
1368 break;
1369 END_CASE(JSOP_TABLESWITCH)
1371 BEGIN_CASE(JSOP_LOOKUPSWITCH)
1372 frame.syncAndForgetEverything();
1373 masm.move(ImmPtr(PC), Registers::ArgReg1);
1375 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1376 stubCall(stubs::LookupSwitch);
1377 frame.pop();
1379 masm.jump(Registers::ReturnReg);
1380 PC += js_GetVariableBytecodeLength(PC);
1381 break;
1382 END_CASE(JSOP_LOOKUPSWITCH)
1384 BEGIN_CASE(JSOP_STRICTEQ)
1385 jsop_stricteq(op);
1386 END_CASE(JSOP_STRICTEQ)
1388 BEGIN_CASE(JSOP_STRICTNE)
1389 jsop_stricteq(op);
1390 END_CASE(JSOP_STRICTNE)
1392 BEGIN_CASE(JSOP_ITER)
1393 # if defined JS_CPU_X64
1394 prepareStubCall(Uses(1));
1395 masm.move(Imm32(PC[1]), Registers::ArgReg1);
1396 stubCall(stubs::Iter);
1397 frame.pop();
1398 frame.pushSynced();
1399 #else
1400 iter(PC[1]);
1401 #endif
1402 END_CASE(JSOP_ITER)
1404 BEGIN_CASE(JSOP_MOREITER)
1405 /* This MUST be fused with IFNE or IFNEX. */
1406 iterMore();
1407 break;
1408 END_CASE(JSOP_MOREITER)
1410 BEGIN_CASE(JSOP_ENDITER)
1411 # if defined JS_CPU_X64
1412 prepareStubCall(Uses(1));
1413 stubCall(stubs::EndIter);
1414 frame.pop();
1415 #else
1416 iterEnd();
1417 #endif
1418 END_CASE(JSOP_ENDITER)
1420 BEGIN_CASE(JSOP_POP)
1421 frame.pop();
1422 END_CASE(JSOP_POP)
1424 BEGIN_CASE(JSOP_NEW)
1426 JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
1427 inlineCallHelper(GET_ARGC(PC), true);
1428 JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
1430 END_CASE(JSOP_NEW)
1432 BEGIN_CASE(JSOP_GETARG)
1433 BEGIN_CASE(JSOP_CALLARG)
1435 jsop_getarg(GET_SLOTNO(PC));
1436 if (op == JSOP_CALLARG)
1437 frame.push(UndefinedValue());
1439 END_CASE(JSOP_GETARG)
1441 BEGIN_CASE(JSOP_BINDGNAME)
1442 jsop_bindgname();
1443 END_CASE(JSOP_BINDGNAME)
1445 BEGIN_CASE(JSOP_SETARG)
1446 jsop_setarg(GET_SLOTNO(PC), JSOp(PC[JSOP_SETARG_LENGTH]) == JSOP_POP);
1447 END_CASE(JSOP_SETARG)
1449 BEGIN_CASE(JSOP_GETLOCAL)
1451 uint32 slot = GET_SLOTNO(PC);
1452 frame.pushLocal(slot);
1454 END_CASE(JSOP_GETLOCAL)
1456 BEGIN_CASE(JSOP_SETLOCAL)
1458 jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1459 bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
1460 frame.storeLocal(GET_SLOTNO(PC), pop);
1461 if (pop) {
1462 frame.pop();
1463 PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
1464 break;
1467 END_CASE(JSOP_SETLOCAL)
1469 BEGIN_CASE(JSOP_SETLOCALPOP)
1470 frame.storeLocal(GET_SLOTNO(PC), true);
1471 frame.pop();
1472 END_CASE(JSOP_SETLOCALPOP)
1474 BEGIN_CASE(JSOP_UINT16)
1475 frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
1476 END_CASE(JSOP_UINT16)
1478 BEGIN_CASE(JSOP_NEWINIT)
1480 jsint i = GET_UINT16(PC);
1481 uint32 count = GET_UINT16(PC + UINT16_LEN);
1483 JS_ASSERT(i == JSProto_Array || i == JSProto_Object);
1485 prepareStubCall(Uses(0));
1486 masm.move(Imm32(count), Registers::ArgReg1);
1487 if (i == JSProto_Array)
1488 stubCall(stubs::NewInitArray);
1489 else
1490 stubCall(stubs::NewInitObject);
1491 frame.takeReg(Registers::ReturnReg);
1492 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1494 END_CASE(JSOP_NEWINIT)
1496 BEGIN_CASE(JSOP_ENDINIT)
1497 END_CASE(JSOP_ENDINIT)
1499 BEGIN_CASE(JSOP_INITPROP)
1501 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1502 prepareStubCall(Uses(2));
1503 masm.move(ImmPtr(atom), Registers::ArgReg1);
1504 stubCall(stubs::InitProp);
1505 frame.pop();
1507 END_CASE(JSOP_INITPROP)
1509 BEGIN_CASE(JSOP_INITELEM)
1511 JSOp next = JSOp(PC[JSOP_INITELEM_LENGTH]);
1512 prepareStubCall(Uses(3));
1513 masm.move(Imm32(next == JSOP_ENDINIT ? 1 : 0), Registers::ArgReg1);
1514 stubCall(stubs::InitElem);
1515 frame.popn(2);
1517 END_CASE(JSOP_INITELEM)
1519 BEGIN_CASE(JSOP_INCARG)
1520 BEGIN_CASE(JSOP_DECARG)
1521 BEGIN_CASE(JSOP_ARGINC)
1522 BEGIN_CASE(JSOP_ARGDEC)
1524 jsbytecode *next = &PC[JSOP_ARGINC_LENGTH];
1525 bool popped = false;
1526 if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1527 popped = true;
1528 jsop_arginc(op, GET_SLOTNO(PC), popped);
1529 PC += JSOP_ARGINC_LENGTH;
1530 if (popped)
1531 PC += JSOP_POP_LENGTH;
1532 break;
1534 END_CASE(JSOP_ARGDEC)
1536 BEGIN_CASE(JSOP_INCLOCAL)
1537 BEGIN_CASE(JSOP_DECLOCAL)
1538 BEGIN_CASE(JSOP_LOCALINC)
1539 BEGIN_CASE(JSOP_LOCALDEC)
1541 jsbytecode *next = &PC[JSOP_LOCALINC_LENGTH];
1542 bool popped = false;
1543 if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1544 popped = true;
1545 /* These manually advance the PC. */
1546 jsop_localinc(op, GET_SLOTNO(PC), popped);
1547 PC += JSOP_LOCALINC_LENGTH;
1548 if (popped)
1549 PC += JSOP_POP_LENGTH;
1550 break;
1552 END_CASE(JSOP_LOCALDEC)
1554 BEGIN_CASE(JSOP_FORNAME)
1555 // Before: ITER
1556 // After: ITER SCOPEOBJ
1557 jsop_bindname(fullAtomIndex(PC), false);
1559 // Fall through to FORPROP.
1561 BEGIN_CASE(JSOP_FORPROP)
1562 // Before: ITER OBJ
1563 // After: ITER OBJ ITER
1564 frame.dupAt(-2);
1566 // Before: ITER OBJ ITER
1567 // After: ITER OBJ ITER VALUE
1568 iterNext();
1570 // Before: ITER OBJ ITER VALUE
1571 // After: ITER OBJ VALUE
1572 frame.shimmy(1);
1574 // Before: ITER OBJ VALUE
1575 // After: ITER VALUE
1576 jsop_setprop(script->getAtom(fullAtomIndex(PC)), false);
1578 // Before: ITER VALUE
1579 // After: ITER
1580 frame.pop();
1581 END_CASE(JSOP_FORPROP)
1583 BEGIN_CASE(JSOP_FORELEM)
1584 // This opcode is for the decompiler; it is succeeded by an
1585 // ENUMELEM, which performs the actual array store.
1586 iterNext();
1587 END_CASE(JSOP_FORELEM)
1589 BEGIN_CASE(JSOP_BINDNAME)
1590 jsop_bindname(fullAtomIndex(PC), true);
1591 END_CASE(JSOP_BINDNAME)
1593 BEGIN_CASE(JSOP_SETPROP)
1594 if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1595 return Compile_Error;
1596 END_CASE(JSOP_SETPROP)
1598 BEGIN_CASE(JSOP_SETNAME)
1599 BEGIN_CASE(JSOP_SETMETHOD)
1600 if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1601 return Compile_Error;
1602 END_CASE(JSOP_SETNAME)
1604 BEGIN_CASE(JSOP_THROW)
1605 prepareStubCall(Uses(1));
1606 stubCall(stubs::Throw);
1607 frame.pop();
1608 END_CASE(JSOP_THROW)
1610 BEGIN_CASE(JSOP_IN)
1611 prepareStubCall(Uses(2));
1612 stubCall(stubs::In);
1613 frame.popn(2);
1614 frame.takeReg(Registers::ReturnReg);
1615 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
1616 END_CASE(JSOP_IN)
1618 BEGIN_CASE(JSOP_INSTANCEOF)
1619 if (!jsop_instanceof())
1620 return Compile_Error;
1621 END_CASE(JSOP_INSTANCEOF)
1623 BEGIN_CASE(JSOP_EXCEPTION)
1625 JS_STATIC_ASSERT(sizeof(cx->throwing) == 4);
1626 RegisterID reg = frame.allocReg();
1627 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
1628 masm.store32(Imm32(JS_FALSE), Address(reg, offsetof(JSContext, throwing)));
1630 Address excn(reg, offsetof(JSContext, exception));
1631 frame.freeReg(reg);
1632 frame.push(excn);
1634 END_CASE(JSOP_EXCEPTION)
1636 BEGIN_CASE(JSOP_LINENO)
1637 END_CASE(JSOP_LINENO)
1639 BEGIN_CASE(JSOP_ENUMELEM)
1640 // Normally, SETELEM transforms the stack
1641 // from: OBJ ID VALUE
1642 // to: VALUE
1644 // Here, the stack transition is
1645 // from: VALUE OBJ ID
1646 // to:
1647 // So we make the stack look like a SETELEM, and re-use it.
1649 // Before: VALUE OBJ ID
1650 // After: VALUE OBJ ID VALUE
1651 frame.dupAt(-3);
1653 // Before: VALUE OBJ ID VALUE
1654 // After: VALUE VALUE
1655 if (!jsop_setelem())
1656 return Compile_Error;
1658 // Before: VALUE VALUE
1659 // After:
1660 frame.popn(2);
1661 END_CASE(JSOP_ENUMELEM)
1663 BEGIN_CASE(JSOP_BLOCKCHAIN)
1664 END_CASE(JSOP_BLOCKCHAIN)
1666 BEGIN_CASE(JSOP_NULLBLOCKCHAIN)
1667 END_CASE(JSOP_NULLBLOCKCHAIN)
1669 BEGIN_CASE(JSOP_CONDSWITCH)
1670 /* No-op for the decompiler. */
1671 END_CASE(JSOP_CONDSWITCH)
1673 BEGIN_CASE(JSOP_DEFFUN)
1675 uint32 index = fullAtomIndex(PC);
1676 JSFunction *inner = script->getFunction(index);
1678 if (fun) {
1679 JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL);
1680 if (localKind != JSLOCAL_NONE)
1681 frame.syncAndForgetEverything();
1684 prepareStubCall(Uses(0));
1685 masm.move(ImmPtr(inner), Registers::ArgReg1);
1686 stubCall(STRICT_VARIANT(stubs::DefFun));
1688 END_CASE(JSOP_DEFFUN)
1690 BEGIN_CASE(JSOP_DEFVAR)
1692 uint32 index = fullAtomIndex(PC);
1693 JSAtom *atom = script->getAtom(index);
1695 prepareStubCall(Uses(0));
1696 masm.move(ImmPtr(atom), Registers::ArgReg1);
1697 stubCall(stubs::DefVar);
1699 END_CASE(JSOP_DEFVAR)
1701 BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
1703 uint32 slot = GET_SLOTNO(PC);
1704 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1705 prepareStubCall(Uses(frame.frameDepth()));
1706 masm.move(ImmPtr(fun), Registers::ArgReg1);
1707 stubCall(stubs::DefLocalFun_FC);
1708 frame.takeReg(Registers::ReturnReg);
1709 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1710 frame.storeLocal(slot, true);
1711 frame.pop();
1713 END_CASE(JSOP_DEFLOCALFUN_FC)
1715 BEGIN_CASE(JSOP_LAMBDA)
1717 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1719 JSObjStubFun stub = stubs::Lambda;
1720 uint32 uses = 0;
1722 jsbytecode *pc2 = js_AdvanceOverBlockchain(PC + JSOP_LAMBDA_LENGTH);
1723 JSOp next = JSOp(*pc2);
1725 if (next == JSOP_INITMETHOD) {
1726 stub = stubs::LambdaForInit;
1727 } else if (next == JSOP_SETMETHOD) {
1728 stub = stubs::LambdaForSet;
1729 uses = 1;
1730 } else if (fun->joinable()) {
1731 if (next == JSOP_CALL) {
1732 stub = stubs::LambdaJoinableForCall;
1733 uses = frame.frameDepth();
1734 } else if (next == JSOP_NULL) {
1735 stub = stubs::LambdaJoinableForNull;
1739 prepareStubCall(Uses(uses));
1740 masm.move(ImmPtr(fun), Registers::ArgReg1);
1742 if (stub == stubs::Lambda) {
1743 stubCall(stub);
1744 } else {
1745 jsbytecode *savedPC = PC;
1746 PC = pc2;
1747 stubCall(stub);
1748 PC = savedPC;
1751 frame.takeReg(Registers::ReturnReg);
1752 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1754 END_CASE(JSOP_LAMBDA)
1756 BEGIN_CASE(JSOP_TRY)
1757 frame.syncAndForgetEverything();
1758 END_CASE(JSOP_TRY)
1760 BEGIN_CASE(JSOP_GETFCSLOT)
1761 BEGIN_CASE(JSOP_CALLFCSLOT)
1763 uintN index = GET_UINT16(PC);
1764 // JSObject *obj = &fp->argv[-2].toObject();
1765 RegisterID reg = frame.allocReg();
1766 masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), reg);
1767 // obj->getFlatClosureUpvars()
1768 masm.loadPtr(Address(reg, offsetof(JSObject, slots)), reg);
1769 Address upvarAddress(reg, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS * sizeof(Value));
1770 masm.loadPrivate(upvarAddress, reg);
1771 // push ((Value *) reg)[index]
1772 frame.freeReg(reg);
1773 frame.push(Address(reg, index * sizeof(Value)));
1774 if (op == JSOP_CALLFCSLOT)
1775 frame.push(UndefinedValue());
1777 END_CASE(JSOP_CALLFCSLOT)
1779 BEGIN_CASE(JSOP_ARGSUB)
1780 prepareStubCall(Uses(0));
1781 masm.move(Imm32(GET_ARGNO(PC)), Registers::ArgReg1);
1782 stubCall(stubs::ArgSub);
1783 frame.pushSynced();
1784 END_CASE(JSOP_ARGSUB)
1786 BEGIN_CASE(JSOP_ARGCNT)
1787 prepareStubCall(Uses(0));
1788 stubCall(stubs::ArgCnt);
1789 frame.pushSynced();
1790 END_CASE(JSOP_ARGCNT)
1792 BEGIN_CASE(JSOP_DEFLOCALFUN)
1794 uint32 slot = GET_SLOTNO(PC);
1795 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1796 prepareStubCall(Uses(0));
1797 masm.move(ImmPtr(fun), Registers::ArgReg1);
1798 stubCall(stubs::DefLocalFun);
1799 frame.takeReg(Registers::ReturnReg);
1800 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1801 frame.storeLocal(slot, true);
1802 frame.pop();
1804 END_CASE(JSOP_DEFLOCALFUN)
1806 BEGIN_CASE(JSOP_RETRVAL)
1807 emitReturn(NULL);
1808 END_CASE(JSOP_RETRVAL)
1810 BEGIN_CASE(JSOP_GETGNAME)
1811 BEGIN_CASE(JSOP_CALLGNAME)
1812 jsop_getgname(fullAtomIndex(PC));
1813 if (op == JSOP_CALLGNAME)
1814 frame.push(UndefinedValue());
1815 END_CASE(JSOP_GETGNAME)
1817 BEGIN_CASE(JSOP_SETGNAME)
1818 jsop_setgname(fullAtomIndex(PC));
1819 END_CASE(JSOP_SETGNAME)
1821 BEGIN_CASE(JSOP_REGEXP)
1823 JSObject *regex = script->getRegExp(fullAtomIndex(PC));
1824 prepareStubCall(Uses(0));
1825 masm.move(ImmPtr(regex), Registers::ArgReg1);
1826 stubCall(stubs::RegExp);
1827 frame.takeReg(Registers::ReturnReg);
1828 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1830 END_CASE(JSOP_REGEXP)
1832 BEGIN_CASE(JSOP_CALLPROP)
1833 if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
1834 return Compile_Error;
1835 END_CASE(JSOP_CALLPROP)
1837 BEGIN_CASE(JSOP_GETUPVAR)
1838 BEGIN_CASE(JSOP_CALLUPVAR)
1840 uint32 index = GET_UINT16(PC);
1841 JSUpvarArray *uva = script->upvars();
1842 JS_ASSERT(index < uva->length);
1844 prepareStubCall(Uses(0));
1845 masm.move(Imm32(uva->vector[index].asInteger()), Registers::ArgReg1);
1846 stubCall(stubs::GetUpvar);
1847 frame.pushSynced();
1848 if (op == JSOP_CALLUPVAR)
1849 frame.push(UndefinedValue());
1851 END_CASE(JSOP_CALLUPVAR)
1853 BEGIN_CASE(JSOP_UINT24)
1854 frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
1855 END_CASE(JSOP_UINT24)
1857 BEGIN_CASE(JSOP_CALLELEM)
1858 jsop_getelem(true);
1859 END_CASE(JSOP_CALLELEM)
1861 BEGIN_CASE(JSOP_STOP)
1862 /* Safe point! */
1863 emitReturn(NULL);
1864 goto done;
1865 END_CASE(JSOP_STOP)
1867 BEGIN_CASE(JSOP_GETXPROP)
1868 if (!jsop_xname(script->getAtom(fullAtomIndex(PC))))
1869 return Compile_Error;
1870 END_CASE(JSOP_GETXPROP)
1872 BEGIN_CASE(JSOP_ENTERBLOCK)
1873 enterBlock(script->getObject(fullAtomIndex(PC)));
1874 END_CASE(JSOP_ENTERBLOCK);
1876 BEGIN_CASE(JSOP_LEAVEBLOCK)
1877 leaveBlock();
1878 END_CASE(JSOP_LEAVEBLOCK)
1880 BEGIN_CASE(JSOP_CALLLOCAL)
1881 frame.pushLocal(GET_SLOTNO(PC));
1882 frame.push(UndefinedValue());
1883 END_CASE(JSOP_CALLLOCAL)
1885 BEGIN_CASE(JSOP_INT8)
1886 frame.push(Value(Int32Value(GET_INT8(PC))));
1887 END_CASE(JSOP_INT8)
1889 BEGIN_CASE(JSOP_INT32)
1890 frame.push(Value(Int32Value(GET_INT32(PC))));
1891 END_CASE(JSOP_INT32)
1893 BEGIN_CASE(JSOP_NEWARRAY)
1895 uint32 len = GET_UINT16(PC);
1896 prepareStubCall(Uses(len));
1897 masm.move(Imm32(len), Registers::ArgReg1);
1898 stubCall(stubs::NewArray);
1899 frame.popn(len);
1900 frame.takeReg(Registers::ReturnReg);
1901 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1903 END_CASE(JSOP_NEWARRAY)
1905 BEGIN_CASE(JSOP_HOLE)
1906 frame.push(MagicValue(JS_ARRAY_HOLE));
1907 END_CASE(JSOP_HOLE)
1909 BEGIN_CASE(JSOP_LAMBDA_FC)
1911 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1912 prepareStubCall(Uses(frame.frameDepth()));
1913 masm.move(ImmPtr(fun), Registers::ArgReg1);
1914 stubCall(stubs::FlatLambda);
1915 frame.takeReg(Registers::ReturnReg);
1916 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1918 END_CASE(JSOP_LAMBDA_FC)
1920 BEGIN_CASE(JSOP_TRACE)
1921 BEGIN_CASE(JSOP_NOTRACE)
1923 if (analysis->jumpTarget(PC))
1924 interruptCheckHelper();
1926 END_CASE(JSOP_TRACE)
1928 BEGIN_CASE(JSOP_DEBUGGER)
1929 prepareStubCall(Uses(0));
1930 masm.move(ImmPtr(PC), Registers::ArgReg1);
1931 stubCall(stubs::Debugger);
1932 END_CASE(JSOP_DEBUGGER)
1934 BEGIN_CASE(JSOP_INITMETHOD)
1936 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1937 prepareStubCall(Uses(2));
1938 masm.move(ImmPtr(atom), Registers::ArgReg1);
1939 stubCall(stubs::InitMethod);
1940 frame.pop();
1942 END_CASE(JSOP_INITMETHOD)
1944 BEGIN_CASE(JSOP_UNBRAND)
1945 jsop_unbrand();
1946 END_CASE(JSOP_UNBRAND)
1948 BEGIN_CASE(JSOP_UNBRANDTHIS)
1949 jsop_this();
1950 jsop_unbrand();
1951 frame.pop();
1952 END_CASE(JSOP_UNBRANDTHIS)
1954 BEGIN_CASE(JSOP_GETGLOBAL)
1955 BEGIN_CASE(JSOP_CALLGLOBAL)
1956 jsop_getglobal(GET_SLOTNO(PC));
1957 if (op == JSOP_CALLGLOBAL)
1958 frame.push(UndefinedValue());
1959 END_CASE(JSOP_GETGLOBAL)
1961 BEGIN_CASE(JSOP_SETGLOBAL)
1962 jsop_setglobal(GET_SLOTNO(PC));
1963 END_CASE(JSOP_SETGLOBAL)
1965 BEGIN_CASE(JSOP_INCGLOBAL)
1966 BEGIN_CASE(JSOP_DECGLOBAL)
1967 BEGIN_CASE(JSOP_GLOBALINC)
1968 BEGIN_CASE(JSOP_GLOBALDEC)
1969 /* Advances PC automatically. */
1970 jsop_globalinc(op, GET_SLOTNO(PC));
1971 break;
1972 END_CASE(JSOP_GLOBALINC)
1974 BEGIN_CASE(JSOP_FORGLOBAL)
1975 iterNext();
1976 jsop_setglobal(GET_SLOTNO(PC));
1977 frame.pop();
1978 END_CASE(JSOP_FORGLOBAL)
1980 default:
1981 /* Sorry, this opcode isn't implemented yet. */
1982 #ifdef JS_METHODJIT_SPEW
1983 JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
1984 script->filename, js_PCToLineNumber(cx, script, PC));
1985 #endif
1986 return Compile_Abort;
1989 /**********************
1990 * END COMPILER OPS *
1991 **********************/
1993 #ifdef DEBUG
1994 frame.assertValidRegisterState();
1995 #endif
1998 done:
1999 return Compile_Okay;
2002 #undef END_CASE
2003 #undef BEGIN_CASE
2005 JSC::MacroAssembler::Label
2006 mjit::Compiler::labelOf(jsbytecode *pc)
2008 uint32 offs = uint32(pc - script->code);
2009 JS_ASSERT(jumpMap[offs].isValid());
2010 return jumpMap[offs];
/*
 * Compute the full atom index for the atom-based opcode at |pc|.
 * The method JIT does not run with JSOP_INDEXBASE segment prefixes, so the
 * raw slot number in the bytecode is already the complete index.
 */
uint32
mjit::Compiler::fullAtomIndex(jsbytecode *pc)
{
    return GET_SLOTNO(pc);

    /* If we ever enable INDEXBASE garbage, use this below. */
#if 0
    return GET_SLOTNO(pc) + (atoms - script->atomMap.vector);
#endif
}
2024 bool
2025 mjit::Compiler::knownJump(jsbytecode *pc)
2027 return pc < PC;
2030 void *
2031 mjit::Compiler::findCallSite(const CallSite &callSite)
2033 JS_ASSERT(callSite.pcOffset < script->length);
2035 JITScript *jit = script->getJIT(fp->isConstructing());
2036 uint8* ilPath = (uint8 *)jit->code.m_code.executableAddress();
2037 uint8* oolPath = ilPath + masm.size();
2039 for (uint32 i = 0; i < callSites.length(); i++) {
2040 if (callSites[i].pc == script->code + callSite.pcOffset &&
2041 callSites[i].id == callSite.id) {
2042 if (callSites[i].stub) {
2043 return oolPath + stubcc.masm.distanceOf(callSites[i].location);
2045 return ilPath + masm.distanceOf(callSites[i].location);
2049 /* We have no idea where to patch up to. */
2050 JS_NOT_REACHED("Call site vanished.");
2051 return NULL;
2054 bool
2055 mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
2057 JS_ASSERT(pc >= script->code && uint32(pc - script->code) < script->length);
2059 if (pc < PC) {
2060 j.linkTo(jumpMap[uint32(pc - script->code)], &masm);
2061 return true;
2063 return branchPatches.append(BranchPatch(j, pc));
2066 void
2067 mjit::Compiler::jsop_setglobal(uint32 index)
2069 JS_ASSERT(globalObj);
2070 uint32 slot = script->getGlobalSlot(index);
2072 FrameEntry *fe = frame.peek(-1);
2073 bool popped = PC[JSOP_SETGLOBAL_LENGTH] == JSOP_POP;
2075 RegisterID reg = frame.allocReg();
2076 Address address = masm.objSlotRef(globalObj, reg, slot);
2077 frame.storeTo(fe, address, popped);
2078 frame.freeReg(reg);
2081 void
2082 mjit::Compiler::jsop_getglobal(uint32 index)
2084 JS_ASSERT(globalObj);
2085 uint32 slot = script->getGlobalSlot(index);
2087 RegisterID reg = frame.allocReg();
2088 Address address = masm.objSlotRef(globalObj, reg, slot);
2089 frame.freeReg(reg);
2090 frame.push(address);
2093 void
2094 mjit::Compiler::emitFinalReturn(Assembler &masm)
2096 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
2097 masm.jump(Registers::ReturnReg);
// Emits code to load a return value of the frame into the scripted-ABI
// type & data register pair. If the return value is in fp->rval, then |fe|
// is NULL. Otherwise, |fe| contains the return value.
//
// If reading from fp->rval, |undefined| is loaded optimistically, before
// checking if fp->rval is set in the frame flags and loading that instead.
//
// Otherwise, if |masm| is the inline path, it is loaded as efficiently as
// the FrameState can manage. If |masm| is the OOL path, the value is simply
// loaded from its slot in the frame, since the caller has guaranteed it's
// been synced.
//
void
mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
{
    RegisterID typeReg = JSReturnReg_Type;
    RegisterID dataReg = JSReturnReg_Data;

    if (fe) {
        // If using the OOL assembler, the caller signifies that the |fe| is
        // synced, but not to rely on its register state.
        if (masm != &this->masm) {
            if (fe->isConstant()) {
                // Constant: materialize both components directly.
                stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
            } else {
                // Non-constant: read from the entry's synced frame slot.
                Address rval(frame.addressOf(fe));
                if (fe->isTypeKnown()) {
                    // Type is static; only the payload needs a memory load.
                    stubcc.masm.loadPayload(rval, dataReg);
                    stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
                } else {
                    stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
                }
            }
        } else {
            // Inline path: let the FrameState pick the cheapest load.
            frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
        }
    } else {
        // Load a return value from POPV or SETRVAL into the return registers,
        // otherwise return undefined.
        masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
        if (analysis->usesReturnValue()) {
            // Overwrite the optimistic |undefined| with fp->rval only when
            // the JSFRAME_HAS_RVAL flag is set.
            Jump rvalClear = masm->branchTest32(Assembler::Zero,
                                                FrameFlagsAddress(),
                                                Imm32(JSFRAME_HAS_RVAL));
            Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
            masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
            rvalClear.linkTo(masm->label(), masm);
        }
    }
}
// This ensures that constructor return values are an object. If a non-object
// is returned, either explicitly or implicitly, the newly created object is
// loaded out of the frame. Otherwise, the explicitly returned object is kept.
//
void
mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
{
    // Only meaningful when compiling a constructor invocation.
    JS_ASSERT(isConstructing);

    // |thisv| holds the newly created object for this constructor frame.
    Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));

    // Easy cases - no return value, or known primitive, so just return thisv.
    if (!fe || (fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT)) {
        masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
        return;
    }

    // If the type is known to be an object, just load the return value as normal.
    if (fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
        loadReturnValue(masm, fe);
        return;
    }

    // There's a return value, and its type is unknown. Test the type and load
    // |thisv| if necessary.
    loadReturnValue(masm, fe);
    Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
    masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
    j.linkTo(masm->label(), masm);
}
2182 // Loads the return value into the scripted ABI register pair, such that JS
2183 // semantics in constructors are preserved.
2185 void
2186 mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
2188 if (isConstructing)
2189 fixPrimitiveReturn(masm, fe);
2190 else
2191 loadReturnValue(masm, fe);
// Emits the full return sequence for the current script. |fe| is the frame
// entry being returned (must be the top of stack), or NULL to return
// fp->rval/undefined (JSOP_RETRVAL, JSOP_STOP).
//
void
mjit::Compiler::emitReturn(FrameEntry *fe)
{
    JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);

    /* Only the top of the stack can be returned. */
    JS_ASSERT_IF(fe, fe == frame.peek(-1));

    // In debug mode, notify the debugger hooks that the script is exiting.
    if (debugMode) {
        prepareStubCall(Uses(0));
        stubCall(stubs::LeaveScript);
    }

    /*
     * If there's a function object, deal with the fact that it can escape.
     * Note that after we've placed the call object, all tracked state can
     * be thrown away. This will happen anyway because the next live opcode
     * (if any) must have an incoming edge.
     *
     * However, it's an optimization to throw it away early - the tracker
     * won't be spilled on further exits or join points.
     */
    if (fun) {
        if (fun->isHeavyweight()) {
            /* There will always be a call object. */
            prepareStubCall(Uses(fe ? 1 : 0));
            stubCall(stubs::PutActivationObjects);
        } else {
            /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
            Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                             Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
                                             Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
            stubcc.linkExit(putObjs, Uses(frame.frameDepth()));

            stubcc.leave();
            stubcc.call(stubs::PutActivationObjects);

            // The OOL path must emit its own return sequence since it
            // does not rejoin the inline path below.
            emitReturnValue(&stubcc.masm, fe);
            emitFinalReturn(stubcc.masm);
        }
    }

    emitReturnValue(&masm, fe);
    emitFinalReturn(masm);
    // All frame state is dead after the return; discard the tracker.
    frame.discardFrame();
}
2241 void
2242 mjit::Compiler::prepareStubCall(Uses uses)
// Sync the frame state to memory and kill temporary registers so an upcoming
// stub (C++) call can safely inspect and mutate the interpreter frame.
// |uses| describes the stack slots the stub call consumes.
2244 JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
2245 frame.syncAndKill(Registers(Registers::TempRegs), uses);
2246 JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
2249 JSC::MacroAssembler::Call
2250 mjit::Compiler::stubCall(void *ptr)
2252 JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
2253 Call cl = masm.stubCall(ptr, PC, frame.stackDepth() + script->nfixed);
2254 JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
2255 return cl;
2258 void
2259 mjit::Compiler::interruptCheckHelper()
// Emit an inline poll for pending interrupts, with an out-of-line call to
// stubs::Interrupt when one is found. Used at call sites so long-running
// scripts remain interruptible.
2261 RegisterID reg = frame.allocReg();
2264 * Bake in and test the address of the interrupt counter for the runtime.
2265 * This is faster than doing two additional loads for the context's
2266 * thread data, but will cause this thread to run slower if there are
2267 * pending interrupts on some other thread. For non-JS_THREADSAFE builds
2268 * we can skip this, as there is only one flag to poll.
2270 #ifdef JS_THREADSAFE
2271 void *interrupt = (void*) &cx->runtime->interruptCounter;
2272 #else
2273 void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
2274 #endif
2276 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
2277 Jump jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
2278 #else
2279 /* Handle processors that can't load from absolute addresses. */
2280 masm.move(ImmPtr(interrupt), reg);
2281 Jump jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
2282 #endif
2284 stubcc.linkExitDirect(jump, stubcc.masm.label());
2286 #ifdef JS_THREADSAFE
2288 * Do a slightly slower check for an interrupt on this thread.
2289 * We don't want this thread to slow down excessively if the pending
2290 * interrupt is on another thread.
2292 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
2293 stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
2294 Address flag(reg, offsetof(JSThread, data.interruptFlags));
2295 Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
2296 #endif
2298 frame.sync(stubcc.masm, Uses(0));
2299 stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
2300 stubcc.call(stubs::Interrupt);
2301 ADD_CALLSITE(true);
2302 stubcc.rejoin(Changes(0));
2304 #ifdef JS_THREADSAFE
// The runtime counter fired for some other thread: skip the stub call.
2305 stubcc.linkRejoin(noInterrupt);
2306 #endif
2308 frame.freeReg(reg);
2311 void
2312 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
// Emit a call that always goes through the UncachedNew/UncachedCall stub
// rather than a call IC. Used when the callee cannot be usefully speculated
// on (constants, known non-objects, or debug mode).
2314 CallPatchInfo callPatch;
2316 RegisterID r0 = Registers::ReturnReg;
2317 VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
2319 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
2320 prepareStubCall(Uses(argc + 2));
2321 masm.move(Imm32(argc), Registers::ArgReg1);
2322 stubCall(stub);
2323 ADD_CALLSITE(false);
// The stub leaves NULL in the return register if the call completed in the
// interpreter, or a code pointer to jump into for a compiled callee.
2325 Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
2327 masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2328 callPatch.hasFastNcode = true;
2329 callPatch.fastNcodePatch =
2330 masm.storePtrWithPatch(ImmPtr(NULL),
2331 Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2333 masm.jump(r0);
2334 ADD_NON_STUB_CALLSITE(false);
2336 callPatch.joinPoint = masm.label();
2337 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
// Pop callee/this/args and push the return value, which the callee leaves
// in the scripted ABI return register pair.
2339 frame.popn(argc + 2);
2340 frame.takeReg(JSReturnReg_Type);
2341 frame.takeReg(JSReturnReg_Data);
2342 frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2344 stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
2345 stubcc.rejoin(Changes(0));
2346 callPatches.append(callPatch);
2349 static bool
2350 IsLowerableFunCallOrApply(jsbytecode *pc)
2352 #ifdef JS_MONOIC
2353 return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
2354 (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
2355 #else
2356 return false;
2357 #endif
2360 void
2361 mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedArgc,
2362 FrameEntry *origCallee, FrameEntry *origThis,
2363 MaybeRegisterID origCalleeType, RegisterID origCalleeData,
2364 MaybeRegisterID origThisType, RegisterID origThisData,
2365 Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
// Guards the speculation that the callee of this JSOP_FUNCALL/JSOP_FUNAPPLY
// really is js_fun_call/js_fun_apply. If any guard fails, emits an
// out-of-line uncached call, since the call IC is compiled on the assumption
// that speculation succeeds.
2367 JS_ASSERT(IsLowerableFunCallOrApply(PC));
2370 * if (origCallee.isObject() &&
2371 * origCallee.toObject().isFunction &&
2372 * origCallee.toObject().getFunctionPrivate() == js_fun_{call,apply})
2374 MaybeJump isObj;
2375 if (origCalleeType.isSet())
2376 isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
2377 Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData);
2378 masm.loadFunctionPrivate(origCalleeData, origCalleeData);
2379 Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
2380 Jump isNative = masm.branchPtr(Assembler::NotEqual,
2381 Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
2382 ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
2385 * If speculation fails, we can't use the ic, since it is compiled on the
2386 * assumption that speculation succeeds. Instead, just do an uncached call.
2389 if (isObj.isSet())
2390 stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
2391 stubcc.linkExitDirect(isFun, stubcc.masm.label());
2392 stubcc.linkExitDirect(isNative, stubcc.masm.label());
2394 int32 frameDepthAdjust;
// LazyArgsObj means JSOP_ARGUMENTS was elided on the fast path; the slow
// path must materialize the arguments object, growing the frame by one slot.
2395 if (applyTricks == LazyArgsObj) {
2396 stubcc.call(stubs::Arguments);
2397 frameDepthAdjust = +1;
2398 } else {
2399 frameDepthAdjust = 0;
2402 stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
2403 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
2404 stubcc.masm.stubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
2405 PC, frame.frameDepth() + frameDepthAdjust);
2406 JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
2407 ADD_CALLSITE(true);
// NULL return means the call completed in the VM; non-NULL is compiled code
// to jump into (same contract as emitUncachedCall).
2409 RegisterID r0 = Registers::ReturnReg;
2410 Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
2412 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2413 Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
2414 uncachedCallPatch->hasSlowNcode = true;
2415 uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);
2417 stubcc.masm.jump(r0);
2418 ADD_NON_STUB_CALLSITE(true);
2420 notCompiled.linkTo(stubcc.masm.label(), &stubcc.masm);
2423 * inlineCallHelper will link uncachedCallSlowRejoin to the join point
2424 * at the end of the ic. At that join point, the return value of the
2425 * call is assumed to be in registers, so load them before jumping.
2427 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
2428 Address rval = frame.addressOf(origCallee); /* vp[0] == rval */
2429 stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
2430 *uncachedCallSlowRejoin = stubcc.masm.jump();
2431 JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
2435 * For simplicity, we don't statically specialize calls to
2436 * ic::SplatApplyArgs based on applyTricks. Rather, this state is
2437 * communicated dynamically through the VMFrame.
2439 if (*PC == JSOP_FUNAPPLY) {
2440 masm.store32(Imm32(applyTricks == LazyArgsObj),
2441 FrameAddress(offsetof(VMFrame, u.call.lazyArgsObj)));
2445 /* This predicate must be called before the current op mutates the FrameState. */
2446 bool
2447 mjit::Compiler::canUseApplyTricks()
2449 JS_ASSERT(*PC == JSOP_ARGUMENTS);
2450 jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
2451 return *nextpc == JSOP_FUNAPPLY &&
2452 IsLowerableFunCallOrApply(nextpc) &&
2453 !debugMode;
2456 /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
2457 void
2458 mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
// Compile a call site (JSOP_CALL/JSOP_NEW and friends) using a monomorphic
// call IC when possible, with fast-path speculation for f.call()/f.apply().
// Falls back to emitUncachedCall when the callee cannot be speculated on.
2460 /* Check for interrupts on function call */
2461 interruptCheckHelper();
2463 int32 speculatedArgc;
// LazyArgsObj: the preceding JSOP_ARGUMENTS pushed nothing real; pop its
// placeholder and treat the apply as a single-argument call.
2464 if (applyTricks == LazyArgsObj) {
2465 frame.pop();
2466 speculatedArgc = 1;
2467 } else {
2468 speculatedArgc = callImmArgc;
2471 FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
2472 FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));
2474 /* 'this' does not need to be synced for constructing. */
2475 if (callingNew)
2476 frame.discardFe(origThis);
2479 * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
2480 * going to call js_fun_{call,apply}. Normally, this call would go through
2481 * js::Invoke to ultimately call 'this'. We can do much better by having
2482 * the callIC cache and call 'this' directly. However, if it turns out that
2483 * we are not actually calling js_fun_call, the callIC must act as normal.
2485 bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
2488 * Currently, constant values are not functions, so don't even try to
2489 * optimize. This lets us assume that callee/this have regs below.
2491 #ifdef JS_MONOIC
2492 if (debugMode ||
2493 origCallee->isConstant() || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
2494 (lowerFunCallOrApply &&
2495 (origThis->isConstant() || origThis->isNotType(JSVAL_TYPE_OBJECT)))) {
2496 #endif
2497 if (applyTricks == LazyArgsObj) {
2498 /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
2499 jsop_arguments();
2500 frame.pushSynced();
2502 emitUncachedCall(callImmArgc, callingNew);
2503 return;
2504 #ifdef JS_MONOIC
2507 /* Initialized by both branches below. */
2508 CallGenInfo callIC(PC);
2509 CallPatchInfo callPatch;
2510 MaybeRegisterID icCalleeType; /* type to test for function-ness */
2511 RegisterID icCalleeData; /* data to call */
2512 Address icRvalAddr; /* return slot on slow-path rejoin */
2514 /* Initialized only on lowerFunCallOrApply branch. */
2515 Jump uncachedCallSlowRejoin;
2516 CallPatchInfo uncachedCallPatch;
2519 MaybeRegisterID origCalleeType, maybeOrigCalleeData;
2520 RegisterID origCalleeData;
2522 /* Get the callee in registers. */
2523 frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
2524 origCalleeData = maybeOrigCalleeData.reg();
2525 PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);
2527 if (lowerFunCallOrApply) {
2528 MaybeRegisterID origThisType, maybeOrigThisData;
2529 RegisterID origThisData;
2531 /* Get thisv in registers. */
2532 frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
2533 origThisData = maybeOrigThisData.reg();
2534 PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);
2536 /* Leaves pinned regs untouched. */
2537 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
2540 checkCallApplySpeculation(callImmArgc, speculatedArgc,
2541 origCallee, origThis,
2542 origCalleeType, origCalleeData,
2543 origThisType, origThisData,
2544 &uncachedCallSlowRejoin, &uncachedCallPatch);
// On this branch the IC targets 'this', which is what js_fun_{call,apply}
// will ultimately invoke.
2546 icCalleeType = origThisType;
2547 icCalleeData = origThisData;
2548 icRvalAddr = frame.addressOf(origThis);
2551 * For f.call(), since we compile the ic under the (checked)
2552 * assumption that call == js_fun_call, we still have a static
2553 * frame size. For f.apply(), the frame size depends on the dynamic
2554 * length of the array passed to apply.
2556 if (*PC == JSOP_FUNCALL)
2557 callIC.frameSize.initStatic(frame.frameDepth(), speculatedArgc - 1);
2558 else
2559 callIC.frameSize.initDynamic();
2560 } else {
2561 /* Leaves pinned regs untouched. */
2562 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
2564 icCalleeType = origCalleeType;
2565 icCalleeData = origCalleeData;
2566 icRvalAddr = frame.addressOf(origCallee);
2567 callIC.frameSize.initStatic(frame.frameDepth(), speculatedArgc);
2571 /* Test the type if necessary. Failing this always takes a really slow path. */
2572 MaybeJump notObjectJump;
2573 if (icCalleeType.isSet())
2574 notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
2577 * For an optimized apply, keep icCalleeData and funPtrReg in a
2578 * callee-saved registers for the subsequent ic::SplatApplyArgs call.
2580 Registers tempRegs;
2581 if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
2582 RegisterID x = tempRegs.takeRegInMask(Registers::SavedRegs);
2583 masm.move(icCalleeData, x);
2584 icCalleeData = x;
2585 } else {
2586 tempRegs.takeReg(icCalleeData);
2588 RegisterID funPtrReg = tempRegs.takeRegInMask(Registers::SavedRegs);
2591 * Guard on the callee identity. This misses on the first run. If the
2592 * callee is scripted, compiled/compilable, and argc == nargs, then this
2593 * guard is patched, and the compiled code address is baked in.
2595 Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
2596 callIC.funJump = j;
2598 Jump rejoin1, rejoin2;
2600 stubcc.linkExitDirect(j, stubcc.masm.label());
2601 callIC.slowPathStart = stubcc.masm.label();
2604 * Test if the callee is even a function. If this doesn't match, we
2605 * take a _really_ slow path later.
2607 Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData);
2609 /* Test if the function is scripted. */
2610 RegisterID tmp = tempRegs.takeAnyReg();
2611 stubcc.masm.loadFunctionPrivate(icCalleeData, funPtrReg);
2612 stubcc.masm.load16(Address(funPtrReg, offsetof(JSFunction, flags)), tmp);
2613 stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
2614 Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
2615 tempRegs.putReg(tmp);
2618 * N.B. After this call, the frame will have a dynamic frame size.
2619 * Check after the function is known not to be a native so that the
2620 * catch-all/native path has a static depth.
2622 if (callIC.frameSize.isDynamic())
2623 stubcc.call(ic::SplatApplyArgs);
2626 * No-op jump that gets patched by ic::New/Call to the stub generated
2627 * by generateFullCallStub.
2629 Jump toPatch = stubcc.masm.jump();
2630 toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
2631 callIC.oolJump = toPatch;
2634 * At this point the function is definitely scripted, so we try to
2635 * compile it and patch either funGuard/funJump or oolJump. This code
2636 * is only executed once.
2638 callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2639 void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
2640 if (callIC.frameSize.isStatic())
2641 callIC.oolCall = stubcc.masm.stubCall(icFunPtr, PC, frame.frameDepth());
2642 else
2643 callIC.oolCall = stubcc.masm.stubCallWithDynamicDepth(icFunPtr, PC);
2645 callIC.funObjReg = icCalleeData;
2646 callIC.funPtrReg = funPtrReg;
2649 * The IC call either returns NULL, meaning call completed, or a
2650 * function pointer to jump to. Caveat: Must restore JSFrameReg
2651 * because a new frame has been pushed.
2653 rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
2654 Registers::ReturnReg);
2655 if (callIC.frameSize.isStatic())
2656 stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
2657 else
2658 stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
2659 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2660 callPatch.hasSlowNcode = true;
2661 callPatch.slowNcodePatch =
2662 stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
2663 Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2664 stubcc.masm.jump(Registers::ReturnReg);
2667 * This ool path is the catch-all for everything but scripted function
2668 * callees. For native functions, ic::NativeNew/NativeCall will repatch
2669 * funGuard/funJump with a fast call stub. All other cases
2670 * (non-function callable objects and invalid callees) take the slow
2671 * path through js::Invoke.
2673 if (notObjectJump.isSet())
2674 stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
2675 notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
2676 isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
2678 callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2679 stubcc.call(callingNew ? ic::NativeNew : ic::NativeCall);
2681 rejoin2 = stubcc.masm.jump();
2685 * If the call site goes to a closure over the same function, it will
2686 * generate an out-of-line stub that joins back here.
2688 callIC.hotPathLabel = masm.label();
2690 uint32 flags = 0;
2691 if (callingNew)
2692 flags |= JSFRAME_CONSTRUCTING;
// Fast path: build the inline frame and jump straight into the callee
// (the target address is patched in later via fastNcodePatch/hotJump).
2694 InlineFrameAssembler inlFrame(masm, callIC, flags);
2695 callPatch.hasFastNcode = true;
2696 callPatch.fastNcodePatch = inlFrame.assemble(NULL);
2698 callIC.hotJump = masm.jump();
2699 callIC.joinPoint = callPatch.joinPoint = masm.label();
2700 if (lowerFunCallOrApply)
2701 uncachedCallPatch.joinPoint = callIC.joinPoint;
2702 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
2704 frame.popn(speculatedArgc + 2);
2705 frame.takeReg(JSReturnReg_Type);
2706 frame.takeReg(JSReturnReg_Data);
2707 frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2710 * Now that the frame state is set, generate the rejoin path. Note that, if
2711 * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
2712 * value has been placed at vp[1] which is not the stack address associated
2713 * with frame.peek(-1).
2715 callIC.slowJoinPoint = stubcc.masm.label();
2716 rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2717 rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2718 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
2719 stubcc.masm.loadValueAsComponents(icRvalAddr, JSReturnReg_Type, JSReturnReg_Data);
2720 stubcc.crossJump(stubcc.masm.jump(), masm.label());
2721 JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
2723 if (lowerFunCallOrApply)
2724 stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
2726 callICs.append(callIC);
2727 callPatches.append(callPatch);
2728 if (lowerFunCallOrApply)
2729 callPatches.append(uncachedCallPatch);
2731 applyTricks = NoApplyTricks;
2732 #endif
2736 * This function must be called immediately after any instruction which could
2737 * cause a new JSStackFrame to be pushed and could lead to a new debug trap
2738 * being set. This includes any API callbacks and any scripted or native call.
2740 void
2741 mjit::Compiler::addCallSite(uint32 id, bool stub)
// Record the current assembler location as a call site so native return
// addresses can later be mapped back to bytecode (site.pc) and site id.
2743 InternalCallSite site;
2744 site.stub = stub;
2745 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
// On these targets the assembler captures the call label explicitly.
2746 site.location = stub ? stubcc.masm.callLabel : masm.callLabel;
2747 #else
2748 site.location = stub ? stubcc.masm.label() : masm.label();
2749 #endif
2751 site.pc = PC;
2752 site.id = id;
2753 callSites.append(site);
2756 void
2757 mjit::Compiler::restoreFrameRegs(Assembler &masm)
// Reload JSFrameReg from the VMFrame; needed after any call that may have
// pushed or popped a JSStackFrame.
2759 masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2762 bool
2763 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
2765 JS_ASSERT(lhs.isPrimitive());
2766 JS_ASSERT(rhs.isPrimitive());
2768 if (lhs.isString() && rhs.isString()) {
2769 int cmp = js_CompareStrings(lhs.toString(), rhs.toString());
2770 switch (op) {
2771 case JSOP_LT:
2772 return cmp < 0;
2773 case JSOP_LE:
2774 return cmp <= 0;
2775 case JSOP_GT:
2776 return cmp > 0;
2777 case JSOP_GE:
2778 return cmp >= 0;
2779 case JSOP_EQ:
2780 return cmp == 0;
2781 case JSOP_NE:
2782 return cmp != 0;
2783 default:
2784 JS_NOT_REACHED("NYI");
2786 } else {
2787 double ld, rd;
2789 /* These should be infallible w/ primitives. */
2790 ValueToNumber(cx, lhs, &ld);
2791 ValueToNumber(cx, rhs, &rd);
2792 switch(op) {
2793 case JSOP_LT:
2794 return ld < rd;
2795 case JSOP_LE:
2796 return ld <= rd;
2797 case JSOP_GT:
2798 return ld > rd;
2799 case JSOP_GE:
2800 return ld >= rd;
2801 case JSOP_EQ: /* fall through */
2802 case JSOP_NE:
2803 /* Special case null/undefined/void comparisons. */
2804 if (lhs.isNullOrUndefined()) {
2805 if (rhs.isNullOrUndefined())
2806 return op == JSOP_EQ;
2807 return op == JSOP_NE;
2809 if (rhs.isNullOrUndefined())
2810 return op == JSOP_NE;
2812 /* Normal return. */
2813 return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
2814 default:
2815 JS_NOT_REACHED("NYI");
2819 JS_NOT_REACHED("NYI");
2820 return false;
2823 bool
2824 mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
2826 prepareStubCall(Uses(2));
2827 stubCall(stub);
2828 frame.pop();
2829 frame.pop();
2831 if (!target) {
2832 frame.takeReg(Registers::ReturnReg);
2833 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
2834 return true;
2837 JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
2838 frame.syncAndForgetEverything();
2839 Assembler::Condition cond = (fused == JSOP_IFEQ)
2840 ? Assembler::Zero
2841 : Assembler::NonZero;
2842 Jump j = masm.branchTest32(cond, Registers::ReturnReg,
2843 Registers::ReturnReg);
2844 return jumpAndTrace(j, target);
2847 void
2848 mjit::Compiler::jsop_setprop_slow(JSAtom *atom, bool usePropCache)
// Out-of-line SETPROP/SETNAME: call the (strict-mode-variant) stub directly.
// |usePropCache| selects the property-cache-enabled SetName stub over the
// uncached SetPropNoCache stub.
2850 prepareStubCall(Uses(2));
2851 masm.move(ImmPtr(atom), Registers::ArgReg1);
2852 if (usePropCache)
2853 stubCall(STRICT_VARIANT(stubs::SetName));
2854 else
2855 stubCall(STRICT_VARIANT(stubs::SetPropNoCache));
2856 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
// Collapse the stack so the assigned value remains as the op's result.
2857 frame.shimmy(1);
2860 void
2861 mjit::Compiler::jsop_getprop_slow(JSAtom *atom, bool usePropCache)
// Out-of-line GETPROP: call the stub and push a synced (memory-backed)
// result in place of the object.
2863 prepareStubCall(Uses(1));
2864 if (usePropCache) {
// NOTE(review): no atom is passed on this branch — stubs::GetProp
// presumably recovers it via the property cache / PC; confirm in StubCalls.
2865 stubCall(stubs::GetProp);
2866 } else {
2867 masm.move(ImmPtr(atom), Registers::ArgReg1);
2868 stubCall(stubs::GetPropNoCache);
2870 frame.pop();
2871 frame.pushSynced();
2874 bool
2875 mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
// Out-of-line CALLPROP: the stub produces both the callee and |this|,
// hence the two synced pushes after popping the receiver.
2877 prepareStubCall(Uses(1));
2878 masm.move(ImmPtr(atom), Registers::ArgReg1);
2879 stubCall(stubs::CallProp);
2880 frame.pop();
2881 frame.pushSynced();
2882 frame.pushSynced();
2883 return true;
2886 bool
2887 mjit::Compiler::jsop_length()
// Compile JSOP_LENGTH. Fast paths: a constant string folds to a number at
// compile time; a known-string value reads its length inline from the string
// header. Everything else goes through the getprop PIC (or the Length stub
// on non-POLYIC builds).
2889 FrameEntry *top = frame.peek(-1);
2891 if (top->isTypeKnown() && top->getKnownType() == JSVAL_TYPE_STRING) {
2892 if (top->isConstant()) {
2893 JSString *str = top->getValue().toString();
2894 Value v;
2895 v.setNumber(uint32(str->length()));
2896 frame.pop();
2897 frame.push(v);
2898 } else {
2899 RegisterID str = frame.ownRegForData(top);
// Length is packed in the upper bits of mLengthAndFlags; shift the
// flag bits away to recover it.
2900 masm.loadPtr(Address(str, offsetof(JSString, mLengthAndFlags)), str);
2901 masm.rshiftPtr(Imm32(JSString::FLAGS_LENGTH_SHIFT), str);
2902 frame.pop();
2903 frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
2905 return true;
2908 #if defined JS_POLYIC
2909 return jsop_getprop(cx->runtime->atomState.lengthAtom);
2910 #else
2911 prepareStubCall(Uses(1));
2912 stubCall(stubs::Length);
2913 frame.pop();
2914 frame.pushSynced();
2915 return true;
2916 #endif
2919 #ifdef JS_MONOIC
2920 void
2921 mjit::Compiler::passMICAddress(MICGenInfo &mic)
// Emit a patchable move of the MIC's address into ArgReg1; the real address
// is patched in once the MIC is laid out.
2923 mic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2925 #endif
2927 #if defined JS_POLYIC
2928 void
2929 mjit::Compiler::passICAddress(BaseICInfo *ic)
// Emit a patchable move of the PIC's address into ArgReg1; the real address
// is patched in once the IC is laid out.
2931 ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2934 bool
2935 mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck, bool usePropCache)
// Compile JSOP_GETPROP with a polymorphic inline cache. The inline path
// guards on type and shape and loads the value from a slot; the instruction
// sequence is pinned by the GETPROP_* offset assertions below, so do not
// reorder any emission in this function.
2937 FrameEntry *top = frame.peek(-1);
2939 /* If the incoming type will never PIC, take slow path. */
2940 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
2941 JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
2942 top->getKnownType() != JSVAL_TYPE_STRING);
2943 jsop_getprop_slow(atom, usePropCache);
2944 return true;
2948 * These two must be loaded first. The objReg because the string path
2949 * wants to read it, and the shapeReg because it could cause a spill that
2950 * the string path wouldn't sink back.
2952 RegisterID objReg = Registers::ReturnReg;
2953 RegisterID shapeReg = Registers::ReturnReg;
2954 if (atom == cx->runtime->atomState.lengthAtom) {
2955 objReg = frame.copyDataIntoReg(top);
2956 shapeReg = frame.allocReg();
2959 PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC), usePropCache);
2961 /* Guard that the type is an object. */
2962 Jump typeCheck;
2963 if (doTypeCheck && !top->isTypeKnown()) {
2964 RegisterID reg = frame.tempRegForType(top);
2965 pic.typeReg = reg;
2967 /* Start the hot path where it's easy to patch it. */
2968 pic.fastPathStart = masm.label();
2969 Jump j = masm.testObject(Assembler::NotEqual, reg);
2971 /* GETPROP_INLINE_TYPE_GUARD is used to patch the jmp, not cmp. */
2972 RETURN_IF_OOM(false);
2973 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, masm.label()) == GETPROP_INLINE_TYPE_GUARD);
2975 pic.typeCheck = stubcc.linkExit(j, Uses(1));
2976 pic.hasTypeCheck = true;
2977 } else {
2978 pic.fastPathStart = masm.label();
2979 pic.hasTypeCheck = false;
2980 pic.typeReg = Registers::ReturnReg;
2983 if (atom != cx->runtime->atomState.lengthAtom) {
2984 objReg = frame.copyDataIntoReg(top);
2985 shapeReg = frame.allocReg();
2988 pic.shapeReg = shapeReg;
2989 pic.atom = atom;
2991 /* Guard on shape. */
2992 masm.loadShape(objReg, shapeReg);
2993 pic.shapeGuard = masm.label();
// INVALID_SHAPE always fails the compare; the real shape is patched in
// by the IC once the property lookup has been performed.
2995 DataLabel32 inlineShapeLabel;
2996 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
2997 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
2998 inlineShapeLabel);
2999 DBGLABEL(dbgInlineShapeJump);
3001 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3003 stubcc.leave();
3004 passICAddress(&pic);
3005 pic.slowPathCall = stubcc.call(ic::GetProp);
3007 /* Load dslots. */
3008 #if defined JS_NUNBOX32
3009 DBGLABEL(dbgDslotsLoad);
3010 #elif defined JS_PUNBOX64
3011 Label dslotsLoadLabel = masm.label();
3012 #endif
3013 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
3015 /* Copy the slot value to the expression stack. */
// 1 << 24 is a placeholder slot offset; presumably patched by the PIC
// with the real slot once known (see ic::GetProp).
3016 Address slot(objReg, 1 << 24);
3017 frame.pop();
3019 #if defined JS_NUNBOX32
3020 masm.loadTypeTag(slot, shapeReg);
3021 DBGLABEL(dbgTypeLoad);
3023 masm.loadPayload(slot, objReg);
3024 DBGLABEL(dbgDataLoad);
3025 #elif defined JS_PUNBOX64
3026 Label inlineValueLoadLabel =
3027 masm.loadValueAsComponents(slot, shapeReg, objReg);
3028 #endif
3029 pic.fastPathRejoin = masm.label();
3031 /* Assert correctness of hardcoded offsets. */
3032 RETURN_IF_OOM(false);
3033 #if defined JS_NUNBOX32
3034 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
3035 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
3036 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
3037 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
3038 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3039 #elif defined JS_PUNBOX64
3040 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
3041 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
3043 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
3044 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
3046 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
3047 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
3049 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3050 #endif
3051 /* GETPROP_INLINE_TYPE_GUARD's validity is asserted above. */
3053 pic.objReg = objReg;
3054 frame.pushRegs(shapeReg, objReg);
3056 stubcc.rejoin(Changes(1));
3058 pics.append(pic);
3059 return true;
3062 bool
3063 mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
// Compile JSOP_CALLPROP for a value of unknown type, using a CALL-kind PIC.
// Produces [callee, thisv] on the stack. The emitted instruction sequence is
// pinned by the GETPROP_* offset assertions below; do not reorder emission.
3065 FrameEntry *top = frame.peek(-1);
3068 * These two must be loaded first. The objReg because the string path
3069 * wants to read it, and the shapeReg because it could cause a spill that
3070 * the string path wouldn't sink back.
3072 RegisterID objReg = frame.copyDataIntoReg(top);
3073 RegisterID shapeReg = frame.allocReg();
3075 PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);
3077 pic.pc = PC;
3079 /* Guard that the type is an object. */
3080 pic.typeReg = frame.copyTypeIntoReg(top);
3082 /* Start the hot path where it's easy to patch it. */
3083 pic.fastPathStart = masm.label();
3086 * Guard that the value is an object. This part needs some extra gunk
3087 * because the leave() after the shape guard will emit a jump from this
3088 * path to the final call. We need a label in between that jump, which
3089 * will be the target of patched jumps in the PIC.
3091 Jump typeCheck = masm.testObject(Assembler::NotEqual, pic.typeReg);
3092 DBGLABEL(dbgInlineTypeGuard);
3094 pic.typeCheck = stubcc.linkExit(typeCheck, Uses(1));
3095 pic.hasTypeCheck = true;
3096 pic.objReg = objReg;
3097 pic.shapeReg = shapeReg;
3098 pic.atom = atom;
3101 * Store the type and object back. Don't bother keeping them in registers,
3102 * since a sync will be needed for the upcoming call.
3104 uint32 thisvSlot = frame.frameDepth();
3105 Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
3106 #if defined JS_NUNBOX32
3107 masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
3108 #elif defined JS_PUNBOX64
// On punboxed 64-bit, type and payload are OR'd back into one word.
3109 masm.orPtr(pic.objReg, pic.typeReg);
3110 masm.storePtr(pic.typeReg, thisv);
3111 #endif
3112 frame.freeReg(pic.typeReg);
3114 /* Guard on shape. */
3115 masm.loadShape(objReg, shapeReg);
3116 pic.shapeGuard = masm.label();
// INVALID_SHAPE always fails the compare; the IC patches the real shape.
3118 DataLabel32 inlineShapeLabel;
3119 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3120 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3121 inlineShapeLabel);
3122 DBGLABEL(dbgInlineShapeJump);
3124 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3126 /* Slow path. */
3127 stubcc.leave();
3128 passICAddress(&pic);
3129 pic.slowPathCall = stubcc.call(ic::CallProp);
3131 /* Adjust the frame. None of this will generate code. */
3132 frame.pop();
3133 frame.pushRegs(shapeReg, objReg);
3134 frame.pushSynced();
3136 /* Load dslots. */
3137 #if defined JS_NUNBOX32
3138 DBGLABEL(dbgDslotsLoad);
3139 #elif defined JS_PUNBOX64
3140 Label dslotsLoadLabel = masm.label();
3141 #endif
3142 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
3144 /* Copy the slot value to the expression stack. */
// 1 << 24 is a placeholder slot offset; presumably patched by the PIC.
3145 Address slot(objReg, 1 << 24);
3147 #if defined JS_NUNBOX32
3148 masm.loadTypeTag(slot, shapeReg);
3149 DBGLABEL(dbgTypeLoad);
3151 masm.loadPayload(slot, objReg);
3152 DBGLABEL(dbgDataLoad);
3153 #elif defined JS_PUNBOX64
3154 Label inlineValueLoadLabel =
3155 masm.loadValueAsComponents(slot, shapeReg, objReg);
3156 #endif
3157 pic.fastPathRejoin = masm.label();
3159 /* Assert correctness of hardcoded offsets. */
3160 RETURN_IF_OOM(false);
3161 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgInlineTypeGuard) == GETPROP_INLINE_TYPE_GUARD);
3162 #if defined JS_NUNBOX32
3163 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
3164 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
3165 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
3166 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
3167 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3168 #elif defined JS_PUNBOX64
3169 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
3170 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
3172 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
3173 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
3175 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
3176 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
3178 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3179 #endif
3181 stubcc.rejoin(Changes(2));
3182 pics.append(pic);
3184 return true;
3187 bool
/*
 * Compile JSOP_CALLPROP for a receiver statically known to be a string.
 * Bakes String.prototype directly into the generated code (only valid for
 * compile-and-go scripts, where the global is fixed), fetches the property
 * from it, then rearranges the stack so the call sees |funval thisval|.
 * Returns false only on OOM-style failure from helpers.
 */
3188 mjit::Compiler::jsop_callprop_str(JSAtom *atom)
/* Not compile-and-go: prototype cannot be baked in; use the stub path. */
3190 if (!script->compileAndGo) {
3191 jsop_callprop_slow(atom);
3192 return true;
3195 /* Bake in String.prototype. Is this safe? */
3196 JSObject *obj;
3197 if (!js_GetClassPrototype(cx, NULL, JSProto_String, &obj))
3198 return false;
3200 /* Force into a register because getprop won't expect a constant. */
3201 RegisterID reg = frame.allocReg();
3203 masm.move(ImmPtr(obj), reg);
3204 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3206 /* Get the property. */
3207 if (!jsop_getprop(atom))
3208 return false;
/*
 * Stack is |string proto funval|; dup2 + shifts end with |funval string|,
 * i.e. the callee above the original string receiver.
 */
3210 /* Perform a swap. */
3211 frame.dup2();
3212 frame.shift(-3);
3213 frame.shift(-1);
/* funFe exists only under DEBUG; JS_ASSERT compiles away otherwise. */
3215 /* 4) Test if the function can take a primitive. */
3216 #ifdef DEBUG
3217 FrameEntry *funFe = frame.peek(-2);
3218 #endif
3219 JS_ASSERT(!funFe->isTypeKnown());
3222 * See bug 584579 - need to forget string type, since wrapping could
3223 * create an object. forgetType() alone is not valid because it cannot be
3224 * used on copies or constants.
/* Re-push the string payload in a register we own, then forget its type. */
3226 RegisterID strReg;
3227 FrameEntry *strFe = frame.peek(-1);
3228 if (strFe->isConstant()) {
3229 strReg = frame.allocReg();
3230 masm.move(ImmPtr(strFe->getValue().toString()), strReg);
3231 } else {
3232 strReg = frame.ownRegForData(strFe);
3234 frame.pop();
3235 frame.pushTypedPayload(JSVAL_TYPE_STRING, strReg);
3236 frame.forgetType(frame.peek(-1));
3238 return true;
3241 bool
/*
 * Compile JSOP_CALLPROP for a receiver statically known to be an object.
 * Emits a shape-guarded PIC fast path (no runtime type check), loading the
 * property value from a placeholder slot that the IC patches later, then
 * dups |this| and shifts so the stack ends as |funval thisval|.
 */
3242 mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
3244 FrameEntry *top = frame.peek(-1);
3246 PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);
3248 JS_ASSERT(top->isTypeKnown());
3249 JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);
/* Type is statically known, so the PIC carries no type check. */
3251 pic.pc = PC;
3252 pic.fastPathStart = masm.label();
3253 pic.hasTypeCheck = false;
3254 pic.typeReg = Registers::ReturnReg;
3256 RegisterID objReg = frame.copyDataIntoReg(top);
3257 RegisterID shapeReg = frame.allocReg();
3259 pic.shapeReg = shapeReg;
3260 pic.atom = atom;
/*
 * Guard on an always-failing shape (INVALID_SHAPE); the IC patches the
 * immediate in-place once a real shape is seen.
 */
3262 /* Guard on shape. */
3263 masm.loadShape(objReg, shapeReg);
3264 pic.shapeGuard = masm.label();
3266 DataLabel32 inlineShapeLabel;
3267 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3268 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3269 inlineShapeLabel);
3270 DBGLABEL(dbgInlineShapeJump);
3272 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3274 stubcc.leave();
3275 passICAddress(&pic);
3276 pic.slowPathCall = stubcc.call(ic::CallProp);
3278 /* Load dslots. */
3279 #if defined JS_NUNBOX32
3280 DBGLABEL(dbgDslotsLoad);
3281 #elif defined JS_PUNBOX64
3282 Label dslotsLoadLabel = masm.label();
3283 #endif
3284 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
/* 1 << 24 is a placeholder slot offset, rewritten when the PIC is filled. */
3286 /* Copy the slot value to the expression stack. */
3287 Address slot(objReg, 1 << 24);
3289 #if defined JS_NUNBOX32
3290 masm.loadTypeTag(slot, shapeReg);
3291 DBGLABEL(dbgTypeLoad);
3293 masm.loadPayload(slot, objReg);
3294 DBGLABEL(dbgDataLoad);
3295 #elif defined JS_PUNBOX64
3296 Label inlineValueLoadLabel =
3297 masm.loadValueAsComponents(slot, shapeReg, objReg);
3298 #endif
3300 pic.fastPathRejoin = masm.label();
3301 pic.objReg = objReg;
3304 * 1) Dup the |this| object.
3305 * 2) Push the property value onto the stack.
3306 * 3) Move the value below the dup'd |this|, uncopying it. This could
3307 * generate code, thus the fastPathRejoin label being prior. This is safe
3308 * as a stack transition, because JSOP_CALLPROP has JOF_TMPSLOT. It is
3309 * also safe for correctness, because if we know the LHS is an object, it
3310 * is the resulting vp[1].
3312 frame.dup();
3313 frame.pushRegs(shapeReg, objReg);
3314 frame.shift(-2);
/*
 * The IC patcher relies on fixed instruction offsets (NUNBOX32) or on
 * offsets recorded in pic.labels (PUNBOX64); verify/record them here.
 */
3317 * Assert correctness of hardcoded offsets.
3318 * No type guard: type is asserted.
3320 RETURN_IF_OOM(false);
3321 #if defined JS_NUNBOX32
3322 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
3323 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgTypeLoad) == GETPROP_TYPE_LOAD);
3324 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDataLoad) == GETPROP_DATA_LOAD);
3325 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel) == GETPROP_INLINE_SHAPE_OFFSET);
3326 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3327 #elif defined JS_PUNBOX64
3328 pic.labels.getprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
3329 JS_ASSERT(pic.labels.getprop.dslotsLoadOffset == masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
3331 pic.labels.getprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeLabel);
3332 JS_ASSERT(pic.labels.getprop.inlineShapeOffset == masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
3334 pic.labels.getprop.inlineValueOffset = masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel);
3335 JS_ASSERT(pic.labels.getprop.inlineValueOffset == masm.differenceBetween(pic.fastPathRejoin, inlineValueLoadLabel));
3337 JS_ASSERT(masm.differenceBetween(inlineShapeLabel, dbgInlineShapeJump) == GETPROP_INLINE_SHAPE_JUMP);
3338 #endif
3340 stubcc.rejoin(Changes(2));
3341 pics.append(pic);
3343 return true;
3346 bool
3347 mjit::Compiler::jsop_callprop(JSAtom *atom)
3349 FrameEntry *top = frame.peek(-1);
3351 /* If the incoming type will never PIC, take slow path. */
3352 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
3353 if (top->getKnownType() == JSVAL_TYPE_STRING)
3354 return jsop_callprop_str(atom);
3355 return jsop_callprop_slow(atom);
3358 if (top->isTypeKnown())
3359 return jsop_callprop_obj(atom);
3360 return jsop_callprop_generic(atom);
3363 bool
/*
 * Compile JSOP_SETPROP / JSOP_SETNAME / JSOP_SETMETHOD via a SET PIC.
 * Emits an optional type guard (when the LHS type is unknown), a patchable
 * shape guard, and an inline store of the RHS into a placeholder slot that
 * the IC rewrites. Leaves the RHS on the stack ("pop under" the object).
 */
3364 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
3366 FrameEntry *lhs = frame.peek(-2);
3367 FrameEntry *rhs = frame.peek(-1);
3369 /* If the incoming type will never PIC, take slow path. */
3370 if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
3371 jsop_setprop_slow(atom, usePropCache);
3372 return true;
/* SETMETHOD gets its own PIC kind; everything else is a plain SET. */
3375 JSOp op = JSOp(*PC);
3377 ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
3378 ? ic::PICInfo::SETMETHOD
3379 : ic::PICInfo::SET;
3380 PICGenInfo pic(kind, op, usePropCache);
3381 pic.atom = atom;
/*
 * Unknown LHS type: guard that it is an object, falling back to the
 * SetName/SetPropNoCache stub (strict-mode variant chosen by macro).
 */
3383 /* Guard that the type is an object. */
3384 Jump typeCheck;
3385 if (!lhs->isTypeKnown()) {
3386 RegisterID reg = frame.tempRegForType(lhs);
3387 pic.typeReg = reg;
3389 /* Start the hot path where it's easy to patch it. */
3390 pic.fastPathStart = masm.label();
3391 Jump j = masm.testObject(Assembler::NotEqual, reg);
3393 pic.typeCheck = stubcc.linkExit(j, Uses(2));
3394 stubcc.leave();
3396 stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
3397 if (usePropCache)
3398 stubcc.call(STRICT_VARIANT(stubs::SetName));
3399 else
3400 stubcc.call(STRICT_VARIANT(stubs::SetPropNoCache));
3401 typeCheck = stubcc.masm.jump();
3402 pic.hasTypeCheck = true;
3403 } else {
3404 pic.fastPathStart = masm.label();
3405 pic.hasTypeCheck = false;
3406 pic.typeReg = Registers::ReturnReg;
3409 /* Get the object into a mutable register. */
3410 RegisterID objReg = frame.copyDataIntoReg(lhs);
3411 pic.objReg = objReg;
/* Pin the RHS while allocating shapeReg so its remat info stays valid. */
3413 /* Get info about the RHS and pin it. */
3414 ValueRemat vr;
3415 frame.pinEntry(rhs, vr);
3416 pic.vr = vr;
3418 RegisterID shapeReg = frame.allocReg();
3419 pic.shapeReg = shapeReg;
3421 frame.unpinEntry(vr);
/* Patchable shape guard, initially comparing against INVALID_SHAPE. */
3423 /* Guard on shape. */
3424 masm.loadShape(objReg, shapeReg);
3425 pic.shapeGuard = masm.label();
3426 DataLabel32 inlineShapeOffsetLabel;
3427 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3428 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3429 inlineShapeOffsetLabel);
3430 DBGLABEL(dbgInlineShapeJump);
3432 /* Slow path. */
3434 pic.slowPathStart = stubcc.linkExit(j, Uses(2));
3436 stubcc.leave();
3437 passICAddress(&pic);
3438 pic.slowPathCall = stubcc.call(ic::SetProp);
3441 /* Load dslots. */
3442 #if defined JS_NUNBOX32
3443 DBGLABEL(dbgDslots);
3444 #elif defined JS_PUNBOX64
3445 Label dslotsLoadLabel = masm.label();
3446 #endif
3447 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
/* 1 << 24 is a placeholder slot offset, patched when the PIC is filled. */
3449 /* Store RHS into object slot. */
3450 Address slot(objReg, 1 << 24);
3451 #if defined JS_NUNBOX32
3452 Label dbgInlineStoreType = masm.storeValue(vr, slot);
3453 #elif defined JS_PUNBOX64
3454 masm.storeValue(vr, slot);
3455 #endif
3456 DBGLABEL(dbgAfterValueStore);
3457 pic.fastPathRejoin = masm.label();
3459 frame.freeReg(objReg);
3460 frame.freeReg(shapeReg);
3462 /* "Pop under", taking out object (LHS) and leaving RHS. */
3463 frame.shimmy(1);
3465 /* Finish slow path. */
3467 if (pic.hasTypeCheck)
3468 typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
3469 stubcc.rejoin(Changes(1));
/*
 * Record (PUNBOX64) or assert (NUNBOX32) the instruction offsets the IC
 * patcher depends on. The NUNBOX32 store offsets differ by RHS remat kind
 * (constant / known type / dynamic), with constants offset by 4 bytes
 * inside the store instruction.
 */
3472 RETURN_IF_OOM(false);
3473 #if defined JS_PUNBOX64
3474 pic.labels.setprop.dslotsLoadOffset = masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel);
3475 pic.labels.setprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
3476 JS_ASSERT(masm.differenceBetween(inlineShapeOffsetLabel, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
3477 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_VALUE);
3478 #elif defined JS_NUNBOX32
3479 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel) == SETPROP_INLINE_SHAPE_OFFSET);
3480 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, dbgInlineShapeJump) == SETPROP_INLINE_SHAPE_JUMP);
3481 if (vr.isConstant()) {
3482 /* Constants are offset inside the opcode by 4. */
3483 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_CONST_TYPE);
3484 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore)-4 == SETPROP_INLINE_STORE_CONST_DATA);
3485 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_CONSTANT);
3486 } else if (vr.isTypeKnown()) {
3487 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType)-4 == SETPROP_INLINE_STORE_KTYPE_TYPE);
3488 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_KTYPE_DATA);
3489 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_KTYPE);
3490 } else {
3491 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgInlineStoreType) == SETPROP_INLINE_STORE_DYN_TYPE);
3492 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgAfterValueStore) == SETPROP_INLINE_STORE_DYN_DATA);
3493 JS_ASSERT(masm.differenceBetween(pic.fastPathRejoin, dbgDslots) == SETPROP_DSLOTS_BEFORE_DYNAMIC);
3495 #endif
3497 pics.append(pic);
3498 return true;
3501 void
/*
 * Compile JSOP_NAME via a NAME PIC. The inline path is initially just an
 * unconditional jump to the ic::Name stub; the IC later overwrites it with
 * a scope-chain fast path. The result is pushed from shapeReg/objReg.
 */
3502 mjit::Compiler::jsop_name(JSAtom *atom)
3504 PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC), true);
3506 pic.shapeReg = frame.allocReg();
3507 pic.objReg = frame.allocReg();
3508 pic.typeReg = Registers::ReturnReg;
3509 pic.atom = atom;
3510 pic.hasTypeCheck = false;
3511 pic.fastPathStart = masm.label();
/* Inline path starts as a bare jump; the IC patches real code in later. */
3513 pic.shapeGuard = masm.label();
3514 Jump j = masm.jump();
3515 DBGLABEL(dbgJumpOffset);
3517 pic.slowPathStart = stubcc.linkExit(j, Uses(0));
3518 stubcc.leave();
3519 passICAddress(&pic);
3520 pic.slowPathCall = stubcc.call(ic::Name);
3523 pic.fastPathRejoin = masm.label();
3524 frame.pushRegs(pic.shapeReg, pic.objReg);
/* The IC patcher assumes the jump sits at a fixed offset from the start. */
3526 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);
3528 stubcc.rejoin(Changes(1));
3530 pics.append(pic);
3533 bool
/*
 * Compile JSOP_GETXPROP via an XNAME PIC. Like jsop_name, but the base
 * object is already on the stack; known non-objects fall back to a plain
 * getprop, and unknown types get a runtime object guard.
 */
3534 mjit::Compiler::jsop_xname(JSAtom *atom)
3536 PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC), true);
3538 FrameEntry *fe = frame.peek(-1);
3539 if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
3540 return jsop_getprop(atom);
3543 if (!fe->isTypeKnown()) {
3544 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
3545 stubcc.linkExit(notObject, Uses(1));
3548 pic.shapeReg = frame.allocReg();
3549 pic.objReg = frame.copyDataIntoReg(fe);
3550 pic.typeReg = Registers::ReturnReg;
3551 pic.atom = atom;
3552 pic.hasTypeCheck = false;
3553 pic.fastPathStart = masm.label();
/* Inline path starts as a bare jump; the IC patches real code in later. */
3555 pic.shapeGuard = masm.label();
3556 Jump j = masm.jump();
3557 DBGLABEL(dbgJumpOffset);
3559 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3560 stubcc.leave();
3561 passICAddress(&pic);
3562 pic.slowPathCall = stubcc.call(ic::XName);
/* Replace the base object on the stack with the looked-up value. */
3565 pic.fastPathRejoin = masm.label();
3566 frame.pop();
3567 frame.pushRegs(pic.shapeReg, pic.objReg);
3569 JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);
3571 stubcc.rejoin(Changes(1));
3573 pics.append(pic);
3574 return true;
3577 void
/*
 * Compile JSOP_BINDNAME via a BIND PIC. Fast path: load the scope chain
 * object and take it directly when its parent is null (i.e. it is the
 * global); otherwise exit to ic::BindName. Pushes the bound object.
 */
3578 mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
3580 PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC), usePropCache);
3582 // This code does not check the frame flags to see if scopeChain has been
3583 // set. Rather, it relies on the up-front analysis statically determining
3584 // whether BINDNAME can be used, which reifies the scope chain at the
3585 // prologue.
3586 JS_ASSERT(analysis->usesScopeChain());
3588 pic.shapeReg = frame.allocReg();
3589 pic.objReg = frame.allocReg();
3590 pic.typeReg = Registers::ReturnReg;
3591 pic.atom = script->getAtom(index);
3592 pic.hasTypeCheck = false;
3593 pic.fastPathStart = masm.label();
/* parent address is formed before objReg is loaded; same register. */
3595 Address parent(pic.objReg, offsetof(JSObject, parent));
3596 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
/* Guard: scope object has no parent (null), meaning it is the global. */
3598 pic.shapeGuard = masm.label();
3599 #if defined JS_NUNBOX32
3600 Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(parent), ImmPtr(0));
3601 DBGLABEL(inlineJumpOffset);
3602 #elif defined JS_PUNBOX64
3603 masm.loadPayload(parent, Registers::ValueReg);
3604 Jump j = masm.branchPtr(Assembler::NotEqual, Registers::ValueReg, ImmPtr(0));
3605 Label inlineJumpOffset = masm.label();
3606 #endif
3608 pic.slowPathStart = stubcc.linkExit(j, Uses(0));
3609 stubcc.leave();
3610 passICAddress(&pic);
3611 pic.slowPathCall = stubcc.call(ic::BindName);
3614 pic.fastPathRejoin = masm.label();
3615 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
3616 frame.freeReg(pic.shapeReg);
/* Record/assert the inline jump offset the IC patcher relies on. */
3618 #if defined JS_NUNBOX32
3619 JS_ASSERT(masm.differenceBetween(pic.shapeGuard, inlineJumpOffset) == BINDNAME_INLINE_JUMP_OFFSET);
3620 #elif defined JS_PUNBOX64
3621 pic.labels.bindname.inlineJumpOffset = masm.differenceBetween(pic.shapeGuard, inlineJumpOffset);
3622 JS_ASSERT(pic.labels.bindname.inlineJumpOffset == masm.differenceBetween(pic.shapeGuard, inlineJumpOffset));
3623 #endif
3625 stubcc.rejoin(Changes(1));
3627 pics.append(pic);
3630 #else /* JS_POLYIC */
3632 void
/* Non-PIC build: JSOP_NAME always goes through the stubs::Name call. */
3633 mjit::Compiler::jsop_name(JSAtom *atom)
3635 prepareStubCall(Uses(0));
3636 stubCall(stubs::Name);
3637 frame.pushSynced();
3640 bool
/* Non-PIC build: JSOP_GETXPROP degrades to a plain getprop. */
3641 mjit::Compiler::jsop_xname(JSAtom *atom)
3643 return jsop_getprop(atom);
3646 bool
/* Non-PIC build: property get always takes the slow stub path. */
3647 mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck, bool usePropCache)
3649 jsop_getprop_slow(atom, usePropCache);
3650 return true;
3653 bool
/* Non-PIC build: JSOP_CALLPROP always takes the slow stub path. */
3654 mjit::Compiler::jsop_callprop(JSAtom *atom)
3656 return jsop_callprop_slow(atom);
3659 bool
/* Non-PIC build: property set always takes the slow stub path. */
3660 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
3662 jsop_setprop_slow(atom, usePropCache);
3663 return true;
3666 void
/*
 * Non-PIC build of JSOP_BINDNAME: inline test that the scope-chain object
 * has a null parent (i.e. it is the global); otherwise call the BindName
 * stub (atom passed explicitly when the property cache is disabled).
 */
3667 mjit::Compiler::jsop_bindname(uint32 index, bool usePropCache)
3669 RegisterID reg = frame.allocReg();
3670 Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
3671 masm.loadPtr(scopeChain, reg);
3673 Address address(reg, offsetof(JSObject, parent));
3675 Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));
3677 stubcc.linkExit(j, Uses(0));
3678 stubcc.leave();
3679 if (usePropCache) {
3680 stubcc.call(stubs::BindName);
3681 } else {
3682 stubcc.masm.move(ImmPtr(script->getAtom(index)), Registers::ArgReg1);
3683 stubcc.call(stubs::BindNameNoCache);
3686 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3688 stubcc.rejoin(Changes(1));
3690 #endif
3692 void
/* Push formal argument |slot| by address; no copy is made here. */
3693 mjit::Compiler::jsop_getarg(uint32 slot)
3695 frame.push(Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot)));
3698 void
/*
 * Store the top of stack into formal argument |slot|; |popped| tells
 * storeTo whether the value is about to be popped.
 * NOTE(review): |reg| is allocated and freed without an obvious use —
 * presumably this reserves a scratch register for frame.storeTo(); confirm
 * before removing.
 */
3699 mjit::Compiler::jsop_setarg(uint32 slot, bool popped)
3701 FrameEntry *top = frame.peek(-1);
3702 RegisterID reg = frame.allocReg();
3703 Address address = Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot));
3704 frame.storeTo(top, address, popped);
3705 frame.freeReg(reg);
3708 void
/*
 * Compile JSOP_THIS: push |this| from the frame, and in non-strict
 * function code add an out-of-line call to stubs::This to wrap primitive
 * |this| values into objects.
 */
3709 mjit::Compiler::jsop_this()
3711 Address thisvAddr(JSFrameReg, JSStackFrame::offsetOfThis(fun));
3712 frame.push(thisvAddr);
3714 * In strict mode code, we don't wrap 'this'.
3715 * In direct-call eval code, we wrapped 'this' before entering the eval.
3716 * In global code, 'this' is always an object.
3718 if (fun && !script->strictModeCode) {
3719 Jump notObj = frame.testObject(Assembler::NotEqual, frame.peek(-1));
3720 stubcc.linkExit(notObj, Uses(1));
3721 stubcc.leave();
3722 stubcc.call(stubs::This);
3723 stubcc.rejoin(Changes(1));
3727 void
/*
 * Compile the global-name increment/decrement family (JSOP_GNAMEINC etc.).
 * With MonoICs, decomposes into getgname / arithmetic / bindgname /
 * setgname sequences; the inline "// V ..." comments track the abstract
 * stack after each step. Pre-increment forms (or a consumed-by-POP result)
 * avoid keeping the old value; post-increment forms preserve it. Without
 * MonoICs, falls back to the provided stub. Advances PC past the op (and a
 * following POP when it was folded in).
 */
3728 mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
3730 #if defined JS_MONOIC
3731 jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
3732 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
/* amt is negated because the easy path computes V - amt via JSOP_SUB. */
3733 int amt = (op == JSOP_GNAMEINC || op == JSOP_INCGNAME) ? -1 : 1;
3735 if (pop || (op == JSOP_INCGNAME || op == JSOP_DECGNAME)) {
3736 /* These cases are easy, the original value is not observed. */
3738 jsop_getgname(index);
3739 // V
3741 frame.push(Int32Value(amt));
3742 // V 1
3744 /* Use sub since it calls ValueToNumber instead of string concat. */
3745 jsop_binary(JSOP_SUB, stubs::Sub);
3746 // N+1
3748 jsop_bindgname();
3749 // V+1 OBJ
3751 frame.dup2();
3752 // V+1 OBJ V+1 OBJ
3754 frame.shift(-3);
3755 // OBJ OBJ V+1
3757 frame.shift(-1);
3758 // OBJ V+1
3760 jsop_setgname(index);
3761 // V+1
3763 if (pop)
3764 frame.pop();
3765 } else {
3766 /* The pre-value is observed, making this more tricky. */
3768 jsop_getgname(index);
3769 // V
/* jsop_pos coerces V to a number so the saved pre-value is numeric. */
3771 jsop_pos();
3772 // N
3774 frame.dup();
3775 // N N
3777 frame.push(Int32Value(-amt));
3778 // N N 1
3780 jsop_binary(JSOP_ADD, stubs::Add);
3781 // N N+1
3783 jsop_bindgname();
3784 // N N+1 OBJ
3786 frame.dup2();
3787 // N N+1 OBJ N+1 OBJ
3789 frame.shift(-3);
3790 // N OBJ OBJ N+1
3792 frame.shift(-1);
3793 // N OBJ N+1
3795 jsop_setgname(index);
3796 // N N+1
3798 frame.pop();
3799 // N
/* The folded JSOP_POP was executed above; skip its bytecode too. */
3802 if (pop)
3803 PC += JSOP_POP_LENGTH;
3804 #else
3805 JSAtom *atom = script->getAtom(index);
3806 prepareStubCall(Uses(0));
3807 masm.move(ImmPtr(atom), Registers::ArgReg1);
3808 stubCall(stub);
3809 frame.pushSynced();
3810 #endif
3812 PC += JSOP_GNAMEINC_LENGTH;
3815 bool
/*
 * Compile the scoped-name increment/decrement family (JSOP_NAMEINC etc.).
 * Mirrors jsop_gnameinc but uses jsop_name / jsop_bindname / jsop_setprop
 * (PIC-based), so it can fail on OOM and returns bool. Stack transitions
 * are tracked by the inline "// V ..." comments. Advances PC past the op
 * (and a folded JSOP_POP when present).
 */
3816 mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
3818 JSAtom *atom = script->getAtom(index);
3819 #if defined JS_POLYIC
3820 jsbytecode *next = &PC[JSOP_NAMEINC_LENGTH];
3821 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
/* amt is negated because the easy path computes V - amt via JSOP_SUB. */
3822 int amt = (op == JSOP_NAMEINC || op == JSOP_INCNAME) ? -1 : 1;
3824 if (pop || (op == JSOP_INCNAME || op == JSOP_DECNAME)) {
3825 /* These cases are easy, the original value is not observed. */
3827 jsop_name(atom);
3828 // V
3830 frame.push(Int32Value(amt));
3831 // V 1
3833 /* Use sub since it calls ValueToNumber instead of string concat. */
3834 jsop_binary(JSOP_SUB, stubs::Sub);
3835 // N+1
3837 jsop_bindname(index, false);
3838 // V+1 OBJ
3840 frame.dup2();
3841 // V+1 OBJ V+1 OBJ
3843 frame.shift(-3);
3844 // OBJ OBJ V+1
3846 frame.shift(-1);
3847 // OBJ V+1
3849 if (!jsop_setprop(atom, false))
3850 return false;
3851 // V+1
3853 if (pop)
3854 frame.pop();
3855 } else {
3856 /* The pre-value is observed, making this more tricky. */
3858 jsop_name(atom);
3859 // V
/* jsop_pos coerces V to a number so the saved pre-value is numeric. */
3861 jsop_pos();
3862 // N
3864 frame.dup();
3865 // N N
3867 frame.push(Int32Value(-amt));
3868 // N N 1
3870 jsop_binary(JSOP_ADD, stubs::Add);
3871 // N N+1
3873 jsop_bindname(index, false);
3874 // N N+1 OBJ
3876 frame.dup2();
3877 // N N+1 OBJ N+1 OBJ
3879 frame.shift(-3);
3880 // N OBJ OBJ N+1
3882 frame.shift(-1);
3883 // N OBJ N+1
3885 if (!jsop_setprop(atom, false))
3886 return false;
3887 // N N+1
3889 frame.pop();
3890 // N
/* The folded JSOP_POP was executed above; skip its bytecode too. */
3893 if (pop)
3894 PC += JSOP_POP_LENGTH;
3895 #else
3896 prepareStubCall(Uses(0));
3897 masm.move(ImmPtr(atom), Registers::ArgReg1);
3898 stubCall(stub);
3899 frame.pushSynced();
3900 #endif
3902 PC += JSOP_NAMEINC_LENGTH;
3903 return true;
3906 bool
/*
 * Compile the property increment/decrement family (JSOP_PROPINC etc.).
 * Fast path (POLYIC, receiver possibly an object): getprop / arithmetic /
 * setprop with stack juggling, tracked by the "// OBJ ..." comments.
 * Otherwise (or for known non-objects) a stub call. Advances PC past the
 * op (and a folded JSOP_POP when present).
 */
3907 mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
3909 JSAtom *atom = script->getAtom(index);
3910 #if defined JS_POLYIC
3911 FrameEntry *objFe = frame.peek(-1);
3912 if (!objFe->isTypeKnown() || objFe->getKnownType() == JSVAL_TYPE_OBJECT) {
3913 jsbytecode *next = &PC[JSOP_PROPINC_LENGTH];
3914 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
/* amt is negated because the easy path computes V - amt via JSOP_SUB. */
3915 int amt = (op == JSOP_PROPINC || op == JSOP_INCPROP) ? -1 : 1;
3917 if (pop || (op == JSOP_INCPROP || op == JSOP_DECPROP)) {
3918 /* These cases are easy, the original value is not observed. */
3920 frame.dup();
3921 // OBJ OBJ
3923 if (!jsop_getprop(atom))
3924 return false;
3925 // OBJ V
3927 frame.push(Int32Value(amt));
3928 // OBJ V 1
3930 /* Use sub since it calls ValueToNumber instead of string concat. */
3931 jsop_binary(JSOP_SUB, stubs::Sub);
3932 // OBJ V+1
3934 if (!jsop_setprop(atom, false))
3935 return false;
3936 // V+1
3938 if (pop)
3939 frame.pop();
3940 } else {
3941 /* The pre-value is observed, making this more tricky. */
3943 frame.dup();
3944 // OBJ OBJ
3946 if (!jsop_getprop(atom))
3947 return false;
3948 // OBJ V
/* jsop_pos coerces V to a number so the saved pre-value is numeric. */
3950 jsop_pos();
3951 // OBJ N
3953 frame.dup();
3954 // OBJ N N
3956 frame.push(Int32Value(-amt));
3957 // OBJ N N 1
3959 jsop_binary(JSOP_ADD, stubs::Add);
3960 // OBJ N N+1
3962 frame.dupAt(-3);
3963 // OBJ N N+1 OBJ
3965 frame.dupAt(-2);
3966 // OBJ N N+1 OBJ N+1
3968 if (!jsop_setprop(atom, false))
3969 return false;
3970 // OBJ N N+1 N+1
3972 frame.popn(2);
3973 // OBJ N
/* shimmy drops the receiver, leaving only the pre-value N. */
3975 frame.shimmy(1);
3976 // N
3978 if (pop)
3979 PC += JSOP_POP_LENGTH;
3980 } else
3981 #endif
/* Known non-object receiver (or non-POLYIC build): call the stub. */
3983 prepareStubCall(Uses(1));
3984 masm.move(ImmPtr(atom), Registers::ArgReg1);
3985 stubCall(stub);
3986 frame.pop();
3987 frame.pushSynced();
3990 PC += JSOP_PROPINC_LENGTH;
3991 return true;
3994 void
/*
 * Compile JSOP_ITER. Fast path (simple for-in over a known/likely object):
 * reuse the most recently cached native iterator if its shape chain still
 * matches the object, marking it active and linking it onto cx->enumerators.
 * Any guard failure (null cache, active iterator, shape mismatch, proto
 * chain longer than one) exits to stubs::Iter.
 */
3995 mjit::Compiler::iter(uintN flags)
3997 FrameEntry *fe = frame.peek(-1);
4000 * Stub the call if this is not a simple 'for in' loop or if the iterated
4001 * value is known to not be an object.
4003 if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
4004 prepareStubCall(Uses(1));
4005 masm.move(Imm32(flags), Registers::ArgReg1);
4006 stubCall(stubs::Iter);
4007 frame.pop();
4008 frame.pushSynced();
4009 return;
/* Unknown type: guard that the iterated value is an object. */
4012 if (!fe->isTypeKnown()) {
4013 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
4014 stubcc.linkExit(notObject, Uses(1));
4017 RegisterID reg = frame.tempRegForData(fe);
/* Pin |reg| so the allocations below cannot evict the object pointer. */
4019 frame.pinReg(reg);
4020 RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
4021 RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
4022 RegisterID T1 = frame.allocReg();
4023 RegisterID T2 = frame.allocReg();
4024 frame.unpinReg(reg);
4027 * Fetch the most recent iterator. TODO: bake this pointer in when
4028 * iterator caches become per-compartment.
4030 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
4031 #ifdef JS_THREADSAFE
4032 masm.loadPtr(Address(T1, offsetof(JSContext, thread)), T1);
4033 masm.loadPtr(Address(T1, offsetof(JSThread, data.lastNativeIterator)), ioreg);
4034 #else
4035 masm.loadPtr(Address(T1, offsetof(JSContext, runtime)), T1);
4036 masm.loadPtr(Address(T1, offsetof(JSRuntime, threadData.lastNativeIterator)), ioreg);
4037 #endif
4039 /* Test for NULL. */
4040 Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
4041 stubcc.linkExit(nullIterator, Uses(1));
4043 /* Get NativeIterator from iter obj. :FIXME: X64, also most of this function */
4044 masm.loadPtr(Address(ioreg, offsetof(JSObject, privateData)), nireg);
/* A still-active iterator cannot be reused; bail to the stub. */
4046 /* Test for active iterator. */
4047 Address flagsAddr(nireg, offsetof(NativeIterator, flags));
4048 masm.load32(flagsAddr, T1);
4049 Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1, Imm32(JSITER_ACTIVE));
4050 stubcc.linkExit(activeIterator, Uses(1));
4052 /* Compare shape of object with iterator. */
4053 masm.loadShape(reg, T1);
4054 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
4055 masm.load32(Address(T2, 0), T2);
4056 Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
4057 stubcc.linkExit(mismatchedObject, Uses(1));
4059 /* Compare shape of object's prototype with iterator. */
4060 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
4061 masm.loadShape(T1, T1);
4062 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
4063 masm.load32(Address(T2, sizeof(uint32)), T2);
4064 Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
4065 stubcc.linkExit(mismatchedProto, Uses(1));
4068 * Compare object's prototype's prototype with NULL. The last native
4069 * iterator will always have a prototype chain length of one
4070 * (i.e. it must be a plain object), so we do not need to generate
4071 * a loop here.
4073 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
4074 masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
4075 Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
4076 stubcc.linkExit(overlongChain, Uses(1));
4078 /* Found a match with the most recent iterator. Hooray! */
4080 /* Mark iterator as active. */
4081 masm.load32(flagsAddr, T1);
4082 masm.or32(Imm32(JSITER_ACTIVE), T1);
4083 masm.store32(T1, flagsAddr);
4085 /* Chain onto the active iterator stack. */
4086 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
4087 masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
4088 masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
4089 masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));
/* ioreg survives: it carries the iterator object onto the stack below. */
4091 frame.freeReg(nireg);
4092 frame.freeReg(T1);
4093 frame.freeReg(T2);
4095 stubcc.leave();
4096 stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
4097 stubcc.call(stubs::Iter);
4099 /* Push the iterator object. */
4100 frame.pop();
4101 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);
4103 stubcc.rejoin(Changes(1));
4107 * This big nasty function emits a fast-path for native iterators, producing
4108 * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
4110 void
/*
 * Compile JSOP_ITERNEXT. Inline path requires the iterator object to be a
 * js_IteratorClass instance whose NativeIterator is not for-each and whose
 * current id is a string; otherwise exits to stubs::IterNext. On success
 * advances the cursor and pushes the id's string payload.
 */
4111 mjit::Compiler::iterNext()
4113 FrameEntry *fe = frame.peek(-1);
4114 RegisterID reg = frame.tempRegForData(fe);
4116 /* Is it worth trying to pin this longer? Probably not. */
4117 frame.pinReg(reg);
4118 RegisterID T1 = frame.allocReg();
4119 frame.unpinReg(reg);
4121 /* Test clasp */
4122 Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4123 stubcc.linkExit(notFast, Uses(1));
4125 /* Get private from iter obj. */
4126 masm.loadFunctionPrivate(reg, T1);
4128 RegisterID T3 = frame.allocReg();
4129 RegisterID T4 = frame.allocReg();
/* for-each iteration yields values, not ids; handled by the stub. */
4131 /* Test if for-each. */
4132 masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
4133 notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
4134 stubcc.linkExit(notFast, Uses(1));
4136 RegisterID T2 = frame.allocReg();
4138 /* Get cursor. */
4139 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
/* A string jsid has all JSID_TYPE_MASK bits clear. */
4141 /* Test if the jsid is a string. */
4142 masm.loadPtr(T2, T3);
4143 masm.move(T3, T4);
4144 masm.andPtr(Imm32(JSID_TYPE_MASK), T4);
4145 notFast = masm.branchTestPtr(Assembler::NonZero, T4, T4);
4146 stubcc.linkExit(notFast, Uses(1));
4148 /* It's safe to increase the cursor now. */
4149 masm.addPtr(Imm32(sizeof(jsid)), T2, T4);
4150 masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));
/* T3 survives: it carries the string id pushed below. */
4152 frame.freeReg(T4);
4153 frame.freeReg(T1);
4154 frame.freeReg(T2);
4156 stubcc.leave();
4157 stubcc.call(stubs::IterNext);
4159 frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);
4161 /* Join with the stub call. */
4162 stubcc.rejoin(Changes(1));
4165 bool
/*
 * Compile JSOP_MOREITER fused with the following IFNE/IFNEX. Inline path
 * compares the native iterator's cursor against its end pointer and
 * branches directly to the loop target; non-native iterators exit to
 * stubs::IterMore, whose boolean result feeds the out-of-line branch.
 * Consumes both the MOREITER and the following jump op (PC is advanced
 * past both). Returns via jumpAndTrace for the fused branch.
 */
4166 mjit::Compiler::iterMore()
4168 FrameEntry *fe= frame.peek(-1);
4169 RegisterID reg = frame.tempRegForData(fe);
4171 frame.pinReg(reg);
4172 RegisterID T1 = frame.allocReg();
4173 frame.unpinReg(reg);
4175 /* Test clasp */
4176 Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4177 stubcc.linkExitForBranch(notFast);
4179 /* Get private from iter obj. */
4180 masm.loadFunctionPrivate(reg, T1);
/* Sync everything: we are about to branch out of straight-line code. */
4182 /* Get props_cursor, test */
4183 RegisterID T2 = frame.allocReg();
4184 frame.syncAndForgetEverything();
4185 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
4186 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
4187 Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
/* Decode the fused jump that must follow JSOP_MOREITER. */
4189 jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
4190 JSOp next = JSOp(*target);
4191 JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);
4193 target += (next == JSOP_IFNE)
4194 ? GET_JUMP_OFFSET(target)
4195 : GET_JUMPX_OFFSET(target);
4197 stubcc.leave();
4198 stubcc.call(stubs::IterMore);
4199 Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
4200 Registers::ReturnReg);
4202 PC += JSOP_MOREITER_LENGTH;
4203 PC += js_CodeSpec[next].length;
4205 stubcc.rejoin(Changes(1));
4207 return jumpAndTrace(jFast, target, &j);
4210 void
/*
 * Compile JSOP_ENDITER. Inline path handles exactly the cached enumerate
 * iterator case (flags == ENUMERATE | ACTIVE): clear the active bit, reset
 * the property cursor, and unlink it from cx->enumerators. Anything else
 * exits to stubs::EndIter. Pops the iterator object.
 */
4211 mjit::Compiler::iterEnd()
4213 FrameEntry *fe= frame.peek(-1);
4214 RegisterID reg = frame.tempRegForData(fe);
4216 frame.pinReg(reg);
4217 RegisterID T1 = frame.allocReg();
4218 frame.unpinReg(reg);
4220 /* Test clasp */
4221 Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4222 stubcc.linkExit(notIterator, Uses(1));
4224 /* Get private from iter obj. :FIXME: X64 */
4225 masm.loadPtr(Address(reg, offsetof(JSObject, privateData)), T1);
4227 RegisterID T2 = frame.allocReg();
4229 /* Load flags. */
4230 Address flagAddr(T1, offsetof(NativeIterator, flags));
4231 masm.loadPtr(flagAddr, T2);
/* Any extra flag bits mean the fast teardown below would be wrong. */
4233 /* Test for (flags == ENUMERATE | ACTIVE). */
4234 Jump notEnumerate = masm.branch32(Assembler::NotEqual, T2,
4235 Imm32(JSITER_ENUMERATE | JSITER_ACTIVE));
4236 stubcc.linkExit(notEnumerate, Uses(1));
4238 /* Clear active bit. */
4239 masm.and32(Imm32(~JSITER_ACTIVE), T2);
4240 masm.storePtr(T2, flagAddr);
4242 /* Reset property cursor. */
4243 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
4244 masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
4246 /* Advance enumerators list. */
4247 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
4248 masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
4249 masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
4251 frame.freeReg(T1);
4252 frame.freeReg(T2);
4254 stubcc.leave();
4255 stubcc.call(stubs::EndIter);
4257 frame.pop();
4259 stubcc.rejoin(Changes(1));
4262 void
/*
 * Compile the element increment/decrement family (JSOP_ELEMINC etc.) as a
 * plain stub call: consume obj and id, push the synced result.
 */
4263 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
4265 prepareStubCall(Uses(2));
4266 stubCall(stub);
4267 frame.popn(2);
4268 frame.pushSynced();
4271 void
/* Slow path for JSOP_GETGNAME: always call the GetGlobalName stub. */
4272 mjit::Compiler::jsop_getgname_slow(uint32 index)
4274 prepareStubCall(Uses(0));
4275 stubCall(stubs::GetGlobalName);
4276 frame.pushSynced();
4279 void
/*
 * Compile JSOP_BINDGNAME: for compile-and-go scripts the global object is
 * a compile-time constant and is pushed directly; otherwise call the
 * BindGlobalName stub and push its returned object.
 */
4280 mjit::Compiler::jsop_bindgname()
4282 if (script->compileAndGo && globalObj) {
4283 frame.push(ObjectValue(*globalObj));
4284 return;
4287 /* :TODO: this is slower than it needs to be. */
4288 prepareStubCall(Uses(0));
4289 stubCall(stubs::BindGlobalName);
4290 frame.takeReg(Registers::ReturnReg);
4291 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
/*
 * JSOP_GETGNAME with a monomorphic inline cache (MIC): emit a shape guard
 * against an initially-invalid shape, then an inline slot load whose
 * offset is patched later by ic::GetGlobalName. Without JS_MONOIC, fall
 * back entirely to the slow stub.
 */
void
mjit::Compiler::jsop_getgname(uint32 index)
{
#if defined JS_MONOIC
    /* jsop_bindgname pushes the global; it is always a known object here. */
    jsop_bindgname();

    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);

    MICGenInfo mic(ic::MICInfo::GET);
    RegisterID objReg;
    Jump shapeGuard;

    mic.entry = masm.label();
    if (fe->isConstant()) {
        /* Compile-and-go case: the global object is a compile-time constant. */
        JSObject *obj = &fe->getValue().toObject();
        frame.pop();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        /*
         * Guard on the object's shape. INVALID_SHAPE guarantees the guard
         * fails until the IC patches in a real shape (mic.shape).
         */
        masm.load32FromImm(&obj->objShape, objReg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)), mic.shape);
        masm.move(ImmPtr(obj), objReg);
    } else {
        objReg = frame.ownRegForData(fe);
        frame.pop();
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)), mic.shape);
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(0));

    /* Out-of-line path: call the IC stub, which patches the inline code. */
    stubcc.leave();
    passMICAddress(mic);
    mic.stubEntry = stubcc.masm.label();
    mic.call = stubcc.call(ic::GetGlobalName);

    /* Garbage value. */
    uint32 slot = 1 << 24;

    masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
    Address address(objReg, slot);

    /*
     * On x86_64, the length of the movq instruction used is variable
     * depending on the registers used. For example, 'movq $0x5(%r12), %r12'
     * is one byte larger than 'movq $0x5(%r14), %r14'. This means that
     * the constant '0x5' that we want to write is at a variable position.
     *
     * x86_64 only performs a single load. The constant offset is always
     * at the end of the bytecode. Knowing the start and end of the move
     * bytecode is sufficient for patching.
     */

    /* Allocate any register other than objReg. */
    RegisterID dreg = frame.allocReg();
    /* After dreg is loaded, it's safe to clobber objReg. */
    RegisterID treg = objReg;

    /* mic.load marks the start of the patchable inline load. */
    mic.load = masm.label();
# if defined JS_NUNBOX32
# if defined JS_CPU_ARM
    DataLabel32 offsetAddress = masm.load64WithAddressOffsetPatch(address, treg, dreg);
    JS_ASSERT(masm.differenceBetween(mic.load, offsetAddress) == 0);
# else
    masm.loadPayload(address, dreg);
    masm.loadTypeTag(address, treg);
# endif
# elif defined JS_PUNBOX64
    Label inlineValueLoadLabel =
        masm.loadValueAsComponents(address, treg, dreg);
    /* Truncation check: the offset must fit in mic.patchValueOffset. */
    mic.patchValueOffset = masm.differenceBetween(mic.load, inlineValueLoadLabel);
    JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, inlineValueLoadLabel));
# endif

    frame.pushRegs(treg, dreg);

    stubcc.rejoin(Changes(1));
    mics.append(mic);

#else
    jsop_getgname_slow(index);
#endif
}
/*
 * Slow path for JSOP_SETGNAME: pass the name atom to the (strict-aware)
 * SetGlobalName stub. Pops object and value, pushes the synced result.
 */
void
mjit::Compiler::jsop_setgname_slow(uint32 index)
{
    JSAtom *atom = script->getAtom(index);
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(atom), Registers::ArgReg1);
    /* STRICT_VARIANT selects the strict/non-strict flavor of the stub. */
    stubCall(STRICT_VARIANT(stubs::SetGlobalName));
    frame.popn(2);
    frame.pushSynced();
}
/*
 * JSOP_SETGNAME with a monomorphic inline cache: shape-guard the global
 * object, then emit an inline slot store whose offset ic::SetGlobalName
 * patches in later. The stored value may have constant type and/or data,
 * recorded in the MIC so the IC knows which store form was emitted.
 * Without JS_MONOIC, defer entirely to jsop_setgname_slow.
 */
void
mjit::Compiler::jsop_setgname(uint32 index)
{
#if defined JS_MONOIC
    FrameEntry *objFe = frame.peek(-2);
    JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);

    MICGenInfo mic(ic::MICInfo::SET);
    RegisterID objReg;
    Jump shapeGuard;

    mic.entry = masm.label();
    if (objFe->isConstant()) {
        JSObject *obj = &objFe->getValue().toObject();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        /*
         * Shape guard, initially against INVALID_SHAPE so it always fails
         * until the IC patches a real shape into mic.shape.
         */
        masm.load32FromImm(&obj->objShape, objReg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                            mic.shape);
        masm.move(ImmPtr(obj), objReg);
    } else {
        objReg = frame.copyDataIntoReg(objFe);
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                            mic.shape);
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(2));

    stubcc.leave();
    passMICAddress(mic);
    mic.stubEntry = stubcc.masm.label();
    mic.call = stubcc.call(ic::SetGlobalName);

    /* Garbage value. */
    uint32 slot = 1 << 24;

    /* Get both type and reg into registers. */
    FrameEntry *fe = frame.peek(-1);

    Value v;
    RegisterID typeReg = Registers::ReturnReg;
    RegisterID dataReg = Registers::ReturnReg;
    JSValueType typeTag = JSVAL_TYPE_INT32;

    /* Record which parts of the value are compile-time constants. */
    mic.u.name.typeConst = fe->isTypeKnown();
    mic.u.name.dataConst = fe->isConstant();

    if (!mic.u.name.dataConst) {
        dataReg = frame.ownRegForData(fe);
        if (!mic.u.name.typeConst)
            typeReg = frame.ownRegForType(fe);
        else
            typeTag = fe->getKnownType();
    } else {
        v = fe->getValue();
    }

    masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
    Address address(objReg, slot);

    /* mic.load marks the start of the patchable inline store. */
    mic.load = masm.label();

#if defined JS_CPU_ARM
    DataLabel32 offsetAddress;
    if (mic.u.name.dataConst) {
        /* S0 is a scratch register; compute the patched address into it. */
        offsetAddress = masm.moveWithPatch(Imm32(address.offset), JSC::ARMRegisters::S0);
        masm.add32(address.base, JSC::ARMRegisters::S0);
        masm.storeValue(v, Address(JSC::ARMRegisters::S0, 0));
    } else {
        if (mic.u.name.typeConst) {
            offsetAddress = masm.store64WithAddressOffsetPatch(ImmType(typeTag), dataReg, address);
        } else {
            offsetAddress = masm.store64WithAddressOffsetPatch(typeReg, dataReg, address);
        }
    }
    JS_ASSERT(masm.differenceBetween(mic.load, offsetAddress) == 0);
#else
    if (mic.u.name.dataConst) {
        masm.storeValue(v, address);
    } else if (mic.u.name.typeConst) {
        masm.storeValueFromComponents(ImmType(typeTag), dataReg, address);
    } else {
        masm.storeValueFromComponents(typeReg, dataReg, address);
    }
#endif

#if defined JS_PUNBOX64
    /*
     * Instructions on x86_64 can vary in size based on registers
     * used. Since we only need to patch the last instruction in
     * both paths above, remember the distance between the
     * load label and after the instruction to be patched.
     */
    mic.patchValueOffset = masm.differenceBetween(mic.load, masm.label());
    JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label()));
#endif

    frame.freeReg(objReg);
    frame.popn(2);
    /* SETGNAME leaves the assigned value on the stack. */
    if (mic.u.name.dataConst) {
        frame.push(v);
    } else {
        if (mic.u.name.typeConst)
            frame.pushTypedPayload(typeTag, dataReg);
        else
            frame.pushRegs(typeReg, dataReg);
    }

    stubcc.rejoin(Changes(1));

    mics.append(mic);
#else
    jsop_setgname_slow(index);
#endif
}
/*
 * Slow path for JSOP_SETELEM: obj[id] = value via the (strict-aware)
 * SetElem stub. Pops all three operands, pushes the synced result.
 */
void
mjit::Compiler::jsop_setelem_slow()
{
    prepareStubCall(Uses(3));
    stubCall(STRICT_VARIANT(stubs::SetElem));
    frame.popn(3);
    frame.pushSynced();
}
/*
 * Slow path for JSOP_GETELEM: obj[id] via the GetElem stub. Pops both
 * operands, pushes the synced result.
 */
void
mjit::Compiler::jsop_getelem_slow()
{
    prepareStubCall(Uses(2));
    stubCall(stubs::GetElem);
    frame.popn(2);
    frame.pushSynced();
}
/*
 * JSOP_UNBRAND: call the Unbrand stub on the top-of-stack object.
 * The operand is neither popped nor replaced — the stub mutates the
 * object in place.
 */
void
mjit::Compiler::jsop_unbrand()
{
    prepareStubCall(Uses(1));
    stubCall(stubs::Unbrand);
}
/*
 * JSOP_INSTANCEOF. Fast path: fetch rhs.prototype via jsop_getprop, then
 * walk lhs's prototype chain inline, producing a boolean in a register.
 * Falls back to stubs when either operand is known non-object, when the
 * rhs is not a plain function (e.g. bound functions), or when the fetched
 * prototype is primitive.
 */
bool
mjit::Compiler::jsop_instanceof()
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    // The fast path applies only when both operands are objects.
    if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(2));
        stubCall(stubs::InstanceOf);
        frame.popn(2);
        frame.takeReg(Registers::ReturnReg);
        frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
        return true;
    }

    MaybeJump firstSlow;
    if (!rhs->isTypeKnown()) {
        /* rhs must be an object, and specifically a function. */
        Jump j = frame.testObject(Assembler::NotEqual, rhs);
        stubcc.linkExit(j, Uses(2));
        RegisterID reg = frame.tempRegForData(rhs);
        j = masm.testFunction(Assembler::NotEqual, reg);
        stubcc.linkExit(j, Uses(2));
    }

    /* Test for bound functions. */
    RegisterID obj = frame.tempRegForData(rhs);
    Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
                                     Imm32(JSObject::BOUND_FUNCTION));
    {
        stubcc.linkExit(isBound, Uses(2));
        stubcc.leave();
        stubcc.call(stubs::InstanceOf);
        /* This OOL call returns past the fast path; linked at the bottom. */
        firstSlow = stubcc.masm.jump();
    }

    /* This is sadly necessary because the error case needs the object. */
    frame.dup();

    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false))
        return false;

    /* Primitive prototypes are invalid. */
    rhs = frame.peek(-1);
    Jump j = frame.testPrimitive(Assembler::Equal, rhs);
    stubcc.linkExit(j, Uses(3));

    /* Allocate registers up front, because of branchiness. */
    obj = frame.copyDataIntoReg(lhs);
    RegisterID proto = frame.copyDataIntoReg(rhs);
    RegisterID temp = frame.allocReg();

    MaybeJump isFalse;
    if (!lhs->isTypeKnown())
        isFalse = frame.testPrimitive(Assembler::Equal, lhs);

    Address protoAddr(obj, offsetof(JSObject, proto));
    Label loop = masm.label();

    /* Walk prototype chain, break out on NULL or hit. */
    masm.loadPayload(protoAddr, obj);
    Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
    /* NotEqual loops back; fall-through means obj == proto, i.e. a hit. */
    Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
    isTrue.linkTo(loop, &masm);
    masm.move(Imm32(1), temp);
    isTrue = masm.jump();

    if (isFalse.isSet())
        isFalse.getJump().linkTo(masm.label(), &masm);
    isFalse2.linkTo(masm.label(), &masm);
    masm.move(Imm32(0), temp);
    isTrue.linkTo(masm.label(), &masm);

    frame.freeReg(proto);
    frame.freeReg(obj);

    stubcc.leave();
    stubcc.call(stubs::FastInstanceOf);

    frame.popn(3);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);

    if (firstSlow.isSet())
        firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
    stubcc.rejoin(Changes(1));
    return true;
}
/*
 * JSOP_EVAL: always goes through the Eval stub. 'argc + 2' covers the
 * arguments plus the callee and |this| beneath them.
 */
void
mjit::Compiler::emitEval(uint32 argc)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    stubCall(stubs::Eval);
    frame.popn(argc + 2);
    frame.pushSynced();
}
/*
 * JSOP_ARGUMENTS: no fast path; call the Arguments stub.
 * NOTE(review): nothing is pushed here — presumably the opcode's case in
 * the main compile loop pushes the result; confirm at the call site.
 */
void
mjit::Compiler::jsop_arguments()
{
    prepareStubCall(Uses(0));
    stubCall(stubs::Arguments);
}
4654 * Note: This function emits tracer hooks into the OOL path. This means if
4655 * it is used in the middle of an in-progress slow path, the stream will be
4656 * hopelessly corrupted. Take care to only call this before linkExits() and
4657 * after rejoin()s.
4659 bool
4660 mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slow)
4662 // XXX refactor this little bit
4663 #ifndef JS_TRACER
4664 if (!jumpInScript(j, target))
4665 return false;
4667 if (slow) {
4668 if (!stubcc.jumpInScript(*slow, target))
4669 return false;
4671 #else
4672 if (!addTraceHints || target >= PC || JSOp(*target) != JSOP_TRACE
4673 #ifdef JS_MONOIC
4674 || GET_UINT16(target) == BAD_TRACEIC_INDEX
4675 #endif
4678 if (!jumpInScript(j, target))
4679 return false;
4680 if (slow) {
4681 if (!stubcc.jumpInScript(*slow, target))
4682 stubcc.jumpInScript(*slow, target);
4684 return true;
4687 # if JS_MONOIC
4688 TraceGenInfo ic;
4690 ic.initialized = true;
4691 ic.stubEntry = stubcc.masm.label();
4692 ic.jumpTarget = target;
4693 ic.traceHint = j;
4694 if (slow)
4695 ic.slowTraceHint = *slow;
4697 uint16 index = GET_UINT16(target);
4698 if (traceICs.length() <= index)
4699 if (!traceICs.resize(index+1))
4700 return false;
4701 # endif
4703 Label traceStart = stubcc.masm.label();
4705 stubcc.linkExitDirect(j, traceStart);
4706 if (slow)
4707 slow->linkTo(traceStart, &stubcc.masm);
4708 # if JS_MONOIC
4709 ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4710 traceICs[index] = ic;
4711 # endif
4713 /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
4715 jsbytecode* pc = PC;
4716 PC = target;
4718 stubcc.call(stubs::InvokeTracer);
4720 PC = pc;
4723 Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4724 Registers::ReturnReg);
4725 restoreFrameRegs(stubcc.masm);
4726 stubcc.masm.jump(Registers::ReturnReg);
4727 no.linkTo(stubcc.masm.label(), &stubcc.masm);
4728 if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
4729 return false;
4730 #endif
4731 return true;
/*
 * JSOP_ENTERBLOCK: sync everything, call the EnterBlock stub with the
 * block object, and grow the tracked frame by the block's stack defs.
 * Slots whose value is |true| in the block object mark closed-over vars.
 */
void
mjit::Compiler::enterBlock(JSObject *obj)
{
    // If this is an exception entry point, then jsl_InternalThrow has set
    // VMFrame::fp to the correct fp for the entry point. We need to copy
    // that value here to FpReg so that FpReg also has the correct sp.
    // Otherwise, we would simply be using a stale FpReg value.
    if (analysis->getCode(PC).exceptionEntry)
        restoreFrameRegs(masm);

    uint32 oldFrameDepth = frame.frameDepth();

    /* For now, don't bother doing anything for this opcode. */
    frame.syncAndForgetEverything();
    masm.move(ImmPtr(obj), Registers::ArgReg1);
    uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
    stubCall(stubs::EnterBlock);
    frame.enterBlock(n);

    uintN base = JSSLOT_FREE(&js_BlockClass);
    uintN count = OBJ_BLOCK_COUNT(cx, obj);
    uintN limit = base + count;
    for (uintN slot = base, i = 0; slot < limit; slot++, i++) {
        /* A boolean-true slot flags local i as closed over by the block. */
        const Value &v = obj->getSlotRef(slot);
        if (v.isBoolean() && v.toBoolean())
            frame.setClosedVar(oldFrameDepth + i);
    }
}
/*
 * JSOP_LEAVEBLOCK: call the LeaveBlock stub with the block object and
 * shrink the tracked frame by the opcode's variable stack uses.
 */
void
mjit::Compiler::leaveBlock()
{
    /*
     * Note: After bug 535912, we can pass the block obj directly, inline
     * PutBlockObject, and do away with the muckiness in PutBlockObject.
     */
    uint32 n = js_GetVariableStackUses(JSOP_LEAVEBLOCK, PC);
    /* The block object index is encoded immediately after the opcode. */
    JSObject *obj = script->getObject(fullAtomIndex(PC + UINT16_LEN));
    prepareStubCall(Uses(n));
    masm.move(ImmPtr(obj), Registers::ArgReg1);
    stubCall(stubs::LeaveBlock);
    frame.leaveBlock(n);
}
// Creates the new object expected for constructors, and places it in |thisv|.
// It is broken down into the following operations:
//   CALLEE
//   GETPROP "prototype"
//   IFPRIMTOP:
//       NULL
//   call js_CreateThisFromFunctionWithProto(...)
bool
mjit::Compiler::constructThis()
{
    JS_ASSERT(isConstructing);

    // Load the callee.
    Address callee(JSFrameReg, JSStackFrame::offsetOfCallee(fun));
    RegisterID calleeReg = frame.allocReg();
    masm.loadPayload(callee, calleeReg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, calleeReg);

    // Get callee.prototype.
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false))
        return false;

    // Reach into the proto Value and grab a register for its data.
    FrameEntry *protoFe = frame.peek(-1);
    RegisterID protoReg = frame.ownRegForData(protoFe);

    // Now, get the type. If it's not an object, set protoReg to NULL.
    // The NULL substitution happens out-of-line, then rejoins inline.
    Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
    stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
    stubcc.masm.move(ImmPtr(NULL), protoReg);
    stubcc.crossJump(stubcc.masm.jump(), masm.label());

    // Done with the protoFe.
    frame.pop();

    // Hand the proto (or NULL) to the CreateThis stub in ArgReg1.
    prepareStubCall(Uses(0));
    if (protoReg != Registers::ArgReg1)
        masm.move(protoReg, Registers::ArgReg1);
    stubCall(stubs::CreateThis);
    frame.freeReg(protoReg);
    return true;
}
/*
 * Slow path for JSOP_CALLELEM: pops obj and id, then pushes two synced
 * results — the double pushSynced() is intentional, since CALLELEM leaves
 * two values on the stack (presumably the callee and implicit |this|;
 * confirm against stubs::CallElem).
 */
void
mjit::Compiler::jsop_callelem_slow()
{
    prepareStubCall(Uses(2));
    stubCall(stubs::CallElem);
    frame.popn(2);
    frame.pushSynced();
    frame.pushSynced();
}