IC for JSOP_CALLELEM, re-landed (bug 604031, r=dmandelin).
[mozilla-central.git] / js / src / methodjit / PolyIC.cpp
blob8117dd847d724e8d9a7594c85a368bd43306f01a
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * David Mandelin <dmandelin@mozilla.com>
26 * Alternatively, the contents of this file may be used under the terms of
27 * either of the GNU General Public License Version 2 or later (the "GPL"),
28 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 * in which case the provisions of the GPL or the LGPL are applicable instead
30 * of those above. If you wish to allow use of your version of this file only
31 * under the terms of either the GPL or the LGPL, and not to allow others to
32 * use your version of this file under the terms of the MPL, indicate your
33 * decision by deleting the provisions above and replace them with the notice
34 * and other provisions required by the GPL or the LGPL. If you do not delete
35 * the provisions above, a recipient may use your version of this file under
36 * the terms of any one of the MPL, the GPL or the LGPL.
38 * ***** END LICENSE BLOCK ***** */
39 #include "PolyIC.h"
40 #include "StubCalls.h"
41 #include "CodeGenIncludes.h"
42 #include "StubCalls-inl.h"
43 #include "BaseCompiler.h"
44 #include "assembler/assembler/LinkBuffer.h"
45 #include "assembler/assembler/RepatchBuffer.h"
46 #include "jsscope.h"
47 #include "jsnum.h"
48 #include "jsatominlines.h"
49 #include "jsobjinlines.h"
50 #include "jsscopeinlines.h"
51 #include "jspropertycache.h"
52 #include "jspropertycacheinlines.h"
53 #include "jsinterpinlines.h"
54 #include "jsautooplen.h"
56 #if defined JS_POLYIC
58 using namespace js;
59 using namespace js::mjit;
60 using namespace js::mjit::ic;
62 typedef JSC::RepatchBuffer RepatchBuffer;
63 typedef JSC::FunctionPtr FunctionPtr;
65 /* Rough over-estimate of how much memory we need to unprotect. */
66 static const uint32 INLINE_PATH_LENGTH = 64;
68 // Helper class to simplify LinkBuffer usage in PIC stub generators.
69 // This guarantees correct OOM and refcount handling for buffers while they
70 // are instantiated and rooted.
71 class PICLinker : public LinkerHelper
73 ic::BasePolyIC &ic;
75 public:
76 PICLinker(JSContext *cx, ic::BasePolyIC &ic)
77 : LinkerHelper(cx), ic(ic)
78 { }
80 bool init(Assembler &masm) {
81 JSC::ExecutablePool *pool = LinkerHelper::init(masm);
82 if (!pool)
83 return false;
84 if (!ic.execPools.append(pool)) {
85 pool->release();
86 js_ReportOutOfMemory(cx);
87 return false;
89 return true;
93 class PICStubCompiler : public BaseCompiler
95 protected:
96 const char *type;
97 VMFrame &f;
98 JSScript *script;
99 ic::PICInfo &pic;
100 void *stub;
102 public:
103 PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
104 : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub)
107 bool isCallOp() const {
108 if (pic.kind == ic::PICInfo::CALL)
109 return true;
110 return !!(js_CodeSpec[pic.op].format & JOF_CALLOP);
113 LookupStatus error() {
114 disable("error");
115 return Lookup_Error;
118 LookupStatus error(JSContext *cx) {
119 return error();
122 LookupStatus disable(const char *reason) {
123 return disable(f.cx, reason);
126 LookupStatus disable(JSContext *cx, const char *reason) {
127 return pic.disable(cx, reason, stub);
130 protected:
131 void spew(const char *event, const char *op) {
132 #ifdef JS_METHODJIT_SPEW
133 JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
134 type, event, op, script->filename,
135 js_FramePCToLineNumber(cx, f.fp()));
136 #endif
140 class PICRepatchBuffer : public JSC::RepatchBuffer
142 ic::BaseIC &ic;
143 JSC::CodeLocationLabel label;
145 public:
146 PICRepatchBuffer(ic::BaseIC &ic, JSC::CodeLocationLabel path)
147 : JSC::RepatchBuffer(path.executableAddress(), INLINE_PATH_LENGTH),
148 ic(ic), label(path)
151 void relink(int32 offset, JSC::CodeLocationLabel target) {
152 JSC::RepatchBuffer::relink(label.jumpAtOffset(offset), target);
156 class SetPropCompiler : public PICStubCompiler
158 JSObject *obj;
159 JSAtom *atom;
160 int lastStubSecondShapeGuard;
162 static int32 dslotsLoadOffset(ic::PICInfo &pic) {
163 #if defined JS_NUNBOX32
164 if (pic.u.vr.isConstant())
165 return SETPROP_DSLOTS_BEFORE_CONSTANT;
166 if (pic.u.vr.isTypeKnown())
167 return SETPROP_DSLOTS_BEFORE_KTYPE;
168 return SETPROP_DSLOTS_BEFORE_DYNAMIC;
169 #elif defined JS_PUNBOX64
170 return pic.labels.setprop.dslotsLoadOffset;
171 #endif
174 #if defined JS_NUNBOX32
175 inline int32 inlineTypeOffset() {
176 if (pic.u.vr.isConstant())
177 return SETPROP_INLINE_STORE_CONST_TYPE;
178 if (pic.u.vr.isTypeKnown())
179 return SETPROP_INLINE_STORE_KTYPE_TYPE;
180 return SETPROP_INLINE_STORE_DYN_TYPE;
182 #endif
184 #if defined JS_NUNBOX32
185 inline int32 inlineDataOffset() {
186 if (pic.u.vr.isConstant())
187 return SETPROP_INLINE_STORE_CONST_DATA;
188 if (pic.u.vr.isTypeKnown())
189 return SETPROP_INLINE_STORE_KTYPE_DATA;
190 return SETPROP_INLINE_STORE_DYN_DATA;
192 #endif
194 static int32 inlineShapeOffset(ic::PICInfo &pic) {
195 #if defined JS_NUNBOX32
196 return SETPROP_INLINE_SHAPE_OFFSET;
197 #elif defined JS_PUNBOX64
198 return pic.labels.setprop.inlineShapeOffset;
199 #endif
202 static int32 inlineShapeJump(ic::PICInfo &pic) {
203 #if defined JS_NUNBOX32
204 return SETPROP_INLINE_SHAPE_JUMP;
205 #elif defined JS_PUNBOX64
206 return inlineShapeOffset(pic) + SETPROP_INLINE_SHAPE_JUMP;
207 #endif
210 inline int32 dslotsLoadOffset() {
211 return dslotsLoadOffset(pic);
214 inline int32 inlineShapeOffset() {
215 return inlineShapeOffset(pic);
218 inline int32 inlineShapeJump() {
219 return inlineShapeJump(pic);
222 public:
223 SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, JSAtom *atom,
224 VoidStubPIC stub)
225 : PICStubCompiler("setprop", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
226 obj(obj), atom(atom), lastStubSecondShapeGuard(pic.secondShapeGuard)
229 static void reset(ic::PICInfo &pic)
231 RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
232 repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset(pic)));
233 repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
234 pic.shapeGuard + inlineShapeOffset(pic)),
235 int32(JSObjectMap::INVALID_SHAPE));
236 repatcher.relink(pic.fastPathStart.jumpAtOffset(
237 pic.shapeGuard + inlineShapeJump(pic)),
238 pic.slowPathStart);
240 RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
241 FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
242 repatcher.relink(pic.slowPathCall, target);
245 LookupStatus patchInline(const Shape *shape, bool inlineSlot)
247 JS_ASSERT(!pic.inlinePathPatched);
248 JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
250 PICRepatchBuffer repatcher(pic, pic.fastPathStart);
252 int32 offset;
253 if (inlineSlot) {
254 JSC::CodeLocationInstruction istr;
255 istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset());
256 repatcher.repatchLoadPtrToLEA(istr);
259 // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
260 // To: | lea fslots, [obj + DSLOTS_OFFSET]
262 // Because the offset is wrong, it's necessary to correct it
263 // below.
265 int32 diff = int32(JSObject::getFixedSlotOffset(0)) -
266 int32(offsetof(JSObject, slots));
267 JS_ASSERT(diff != 0);
268 offset = (int32(shape->slot) * sizeof(Value)) + diff;
269 } else {
270 offset = shape->slot * sizeof(Value);
273 uint32 shapeOffs = pic.shapeGuard + inlineShapeOffset();
274 repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(shapeOffs), obj->shape());
275 #if defined JS_NUNBOX32
276 repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(inlineTypeOffset()), offset + 4);
277 repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(inlineDataOffset()), offset);
278 #elif defined JS_PUNBOX64
279 repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(SETPROP_INLINE_STORE_VALUE), offset);
280 #endif
282 pic.inlinePathPatched = true;
284 return Lookup_Cacheable;
287 void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
289 // Patch either the inline fast path or a generated stub. The stub
290 // omits the prefix of the inline fast path that loads the shape, so
291 // the offsets are different.
292 int shapeGuardJumpOffset;
293 if (pic.stubsGenerated)
294 #if defined JS_NUNBOX32
295 shapeGuardJumpOffset = SETPROP_STUB_SHAPE_JUMP;
296 #elif defined JS_PUNBOX64
297 shapeGuardJumpOffset = pic.labels.setprop.stubShapeJump;
298 #endif
299 else
300 shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
301 repatcher.relink(shapeGuardJumpOffset, cs);
302 if (lastStubSecondShapeGuard)
303 repatcher.relink(lastStubSecondShapeGuard, cs);
// NOTE(review): extraction artifact — every line below carries its original
// file line number as a literal prefix and brace-only lines were dropped by
// the scrape. Code bytes are preserved exactly; only comments are added.
//
// generateStub: emit an out-of-line SETPROP stub specialized for
// |initialShape|. The stub guards on the object's shape, then either adds
// the new property |shape| (when |adding|) or stores into an existing slot
// or Call-object variable, and is finally linked into the PIC's chain.
// Returns Lookup_Cacheable on success, Lookup_Error via error() on OOM.
306     LookupStatus generateStub(uint32 initialShape, const Shape *shape, bool adding, bool inlineSlot)
308         /* Exits to the slow path. */
309         Vector<Jump, 8> slowExits(cx);
310         Vector<Jump, 8> otherGuards(cx);
312         Assembler masm;
314         // Shape guard.
315         if (pic.shapeNeedsRemat()) {
316             masm.loadShape(pic.objReg, pic.shapeReg);
317             pic.shapeRegHasBaseShape = true;
320         Label start = masm.label();
321         Jump shapeGuard = masm.branch32_force32(Assembler::NotEqual, pic.shapeReg,
322                                                 Imm32(initialShape));
// Record (or assert) where the shape-guard jump sits so later stubs can be
// chained onto this one by patchPreviousToHere.
324 #if defined JS_NUNBOX32
325         DBGLABEL(dbgStubShapeJump);
326         JS_ASSERT(masm.differenceBetween(start, dbgStubShapeJump) == SETPROP_STUB_SHAPE_JUMP);
327 #elif defined JS_PUNBOX64
328         Label stubShapeJumpLabel = masm.label();
329 #endif
331         JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->getClass() == &js_CallClass);
333         MaybeJump skipOver;
// Case 1: adding a brand-new property. Guard the whole prototype chain's
// shapes, store the value, then bump the object to its new shape in place.
335         if (adding) {
336             JS_ASSERT(shape->hasSlot());
337             pic.shapeRegHasBaseShape = false;
339             /* Emit shape guards for the object's prototype chain. */
340             JSObject *proto = obj->getProto();
341             RegisterID lastReg = pic.objReg;
342             while (proto) {
343                 masm.loadPtr(Address(lastReg, offsetof(JSObject, proto)), pic.shapeReg);
344                 Jump protoGuard = masm.guardShape(pic.shapeReg, proto);
345                 if (!otherGuards.append(protoGuard))
346                     return error();
348                 proto = proto->getProto();
349                 lastReg = pic.shapeReg;
352             if (pic.kind == ic::PICInfo::SETMETHOD) {
354                  * Guard that the value is equal to the shape's method.
355                  * We already know it is a function, so test the payload.
357                 JS_ASSERT(shape->isMethod());
358                 JSObject *funobj = &shape->methodObject();
359                 if (pic.u.vr.isConstant()) {
360                     JS_ASSERT(funobj == &pic.u.vr.value().toObject());
361                 } else {
362                     Jump mismatchedFunction =
363                         masm.branchPtr(Assembler::NotEqual, pic.u.vr.dataReg(), ImmPtr(funobj));
364                     if (!slowExits.append(mismatchedFunction))
365                         return error();
369             if (inlineSlot) {
370                 Address address(pic.objReg,
371                                 JSObject::getFixedSlotOffset(shape->slot));
372                 masm.storeValue(pic.u.vr, address);
373             } else {
374                 /* Check capacity. */
375                 Address capacity(pic.objReg, offsetof(JSObject, capacity));
376                 masm.load32(masm.payloadOf(capacity), pic.shapeReg);
377                 Jump overCapacity = masm.branch32(Assembler::LessThanOrEqual, pic.shapeReg,
378                                                   Imm32(shape->slot));
379                 if (!slowExits.append(overCapacity))
380                     return error();
382                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.shapeReg);
383                 Address address(pic.shapeReg, shape->slot * sizeof(Value));
384                 masm.storeValue(pic.u.vr, address);
387             uint32 newShape = obj->shape();
388             JS_ASSERT(newShape != initialShape);
390             /* Write the object's new shape. */
391             masm.storePtr(ImmPtr(shape), Address(pic.objReg, offsetof(JSObject, lastProp)));
392             masm.store32(Imm32(newShape), Address(pic.objReg, offsetof(JSObject, objShape)));
394             /* If this is a method shape, update the object's flags. */
395             if (shape->isMethod()) {
396                 Address flags(pic.objReg, offsetof(JSObject, flags));
398                 /* Use shapeReg to load, bitwise-or, and store flags. */
399                 masm.load32(flags, pic.shapeReg);
400                 masm.or32(Imm32(JSObject::METHOD_BARRIER), pic.shapeReg);
401                 masm.store32(pic.shapeReg, flags);
// Case 2: overwriting an existing slot with the default setter.
403         } else if (shape->hasDefaultSetter()) {
404             Address address(pic.objReg, JSObject::getFixedSlotOffset(shape->slot));
405             if (!inlineSlot) {
406                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.objReg);
407                 address = Address(pic.objReg, shape->slot * sizeof(Value));
410             // If the scope is branded, or has a method barrier. It's now necessary
411             // to guard that we're not overwriting a function-valued property.
412             if (obj->brandedOrHasMethodBarrier()) {
413                 masm.loadTypeTag(address, pic.shapeReg);
414                 Jump skip = masm.testObject(Assembler::NotEqual, pic.shapeReg);
415                 masm.loadPayload(address, pic.shapeReg);
416                 Jump rebrand = masm.testFunction(Assembler::Equal, pic.shapeReg);
417                 if (!slowExits.append(rebrand))
418                     return error();
419                 skip.linkTo(masm.label(), &masm);
420                 pic.shapeRegHasBaseShape = false;
423             masm.storeValue(pic.u.vr, address);
// Case 3: Call-object arg/var setter — store into the live frame if one
// exists, else into the call object's reserved slots.
424         } else {
425             //   \ /        In general, two function objects with different JSFunctions
426             //    #         can have the same shape, thus we must not rely on the identity
427             // >--+--<      of 'fun' remaining the same. However, since:
428             //   |||         1. the shape includes all arguments and locals and their setters
429             //    \\     V     and getters, and
430             //      \===/    2. arguments and locals have different getters
431             //              then we can rely on fun->nargs remaining invariant.
432             JSFunction *fun = obj->getCallObjCalleeFunction();
433             uint16 slot = uint16(shape->shortid);
435             /* Guard that the call object has a frame. */
436             masm.loadFunctionPrivate(pic.objReg, pic.shapeReg);
437             Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
440                 Address addr(pic.shapeReg, shape->setterOp() == SetCallArg
441                                            ? JSStackFrame::offsetOfFormalArg(fun, slot)
442                                            : JSStackFrame::offsetOfFixed(slot));
443                 masm.storeValue(pic.u.vr, addr);
444                 skipOver = masm.jump();
447             escapedFrame.linkTo(masm.label(), &masm);
449                 if (shape->setterOp() == SetCallVar)
450                     slot += fun->nargs;
451                 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.objReg);
453                 Address dslot(pic.objReg, (slot + JSObject::CALL_RESERVED_SLOTS) * sizeof(Value));
454                 masm.storeValue(pic.u.vr, dslot);
457             pic.shapeRegHasBaseShape = false;
459         Jump done = masm.jump();
461         // Common all secondary guards into one big exit.
462         MaybeJump slowExit;
463         if (otherGuards.length()) {
464             for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
465                 pj->linkTo(masm.label(), &masm);
466             slowExit = masm.jump();
467             pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
468         } else {
469             pic.secondShapeGuard = 0;
// Finalize the stub, link every exit, and chain it into the PIC.
472         PICLinker buffer(cx, pic);
473         if (!buffer.init(masm))
474             return error();
476         buffer.link(shapeGuard, pic.slowPathStart);
477         if (slowExit.isSet())
478             buffer.link(slowExit.get(), pic.slowPathStart);
479         for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
480             buffer.link(*pj, pic.slowPathStart);
481         buffer.link(done, pic.fastPathRejoin);
482         if (skipOver.isSet())
483             buffer.link(skipOver.get(), pic.fastPathRejoin);
484         CodeLocationLabel cs = buffer.finalizeCodeAddendum();
485         JaegerSpew(JSpew_PICs, "generate setprop stub %p %d %d at %p\n",
486                    (void*)&pic,
487                    initialShape,
488                    pic.stubsGenerated,
489                    cs.executableAddress());
491         PICRepatchBuffer repatcher(pic, pic.lastPathStart());
493         // This function can patch either the inline fast path for a generated
494         // stub. The stub omits the prefix of the inline fast path that loads
495         // the shape, so the offsets are different.
496         patchPreviousToHere(repatcher, cs);
498         pic.stubsGenerated++;
499         pic.lastStubStart = buffer.locationOf(start);
501 #if defined JS_PUNBOX64
502         pic.labels.setprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
503         JS_ASSERT(pic.labels.setprop.stubShapeJump == masm.differenceBetween(start, stubShapeJumpLabel));
504 #endif
// Cap the number of stubs per PIC to bound code growth.
506         if (pic.stubsGenerated == MAX_PIC_STUBS)
507             disable("max stubs reached");
509         return Lookup_Cacheable;
// NOTE(review): extraction artifact — lines keep their original number
// prefixes and dropped brace-only lines; bytes preserved, comments added.
//
// update: top-level decision tree for a SETPROP PIC miss. Rejects cases
// the PIC cannot handle (hooks, dense arrays, non-natives), then either
// patches the inline path, generates an overwrite stub, or generates an
// add-property stub when the property does not yet exist on |obj|.
512     LookupStatus update()
514         JS_ASSERT(pic.hit);
516         if (obj->isDenseArray())
517             return disable("dense array");
518         if (!obj->isNative())
519             return disable("non-native");
521         Class *clasp = obj->getClass();
// Any class hook could observe or intercept the set; bail out.
523         if (clasp->setProperty != PropertyStub)
524             return disable("set property hook");
525         if (clasp->ops.lookupProperty)
526             return disable("ops lookup property hook");
527         if (clasp->ops.setProperty)
528             return disable("ops set property hook");
530         jsid id = ATOM_TO_JSID(atom);
532         JSObject *holder;
533         JSProperty *prop = NULL;
534         if (!obj->lookupProperty(cx, id, &holder, &prop))
535             return error();
537         /* If the property exists but is on a prototype, treat as addprop. */
538         if (prop && holder != obj) {
539             const Shape *shape = (const Shape *) prop;
541             if (!holder->isNative())
542                 return disable("non-native holder");
544             if (!shape->writable())
545                 return disable("readonly");
546             if (!shape->hasDefaultSetter() || !shape->hasDefaultGetter())
547                 return disable("getter/setter in prototype");
548             if (shape->hasShortID())
549                 return disable("short ID in prototype");
550             if (!shape->hasSlot())
551                 return disable("missing slot");
553             prop = NULL;
// Add-property path: define the property on obj and emit an adding stub.
556         if (!prop) {
557             /* Adding a property to the object. */
558             if (obj->isDelegate())
559                 return disable("delegate");
560             if (!obj->isExtensible())
561                 return disable("not extensible");
563             if (clasp->addProperty != PropertyStub)
564                 return disable("add property hook");
565             if (clasp->ops.defineProperty)
566                 return disable("ops define property hook");
568             uint32 index;
569             if (js_IdIsIndex(id, &index))
570                 return disable("index");
572             uint32 initialShape = obj->shape();
574             if (!obj->ensureClassReservedSlots(cx))
575                 return error();
577             uint32 slots = obj->numSlots();
578             uintN flags = 0;
579             PropertyOp getter = clasp->getProperty;
581             if (pic.kind == ic::PICInfo::SETMETHOD) {
582                 if (!obj->canHaveMethodBarrier())
583                     return disable("can't have method barrier");
585                 JSObject *funobj = &f.regs.sp[-1].toObject();
586                 if (funobj != GET_FUNCTION_PRIVATE(cx, funobj))
587                     return disable("mismatched function");
589                 flags |= Shape::METHOD;
590                 getter = CastAsPropertyOp(funobj);
593             const Shape *shape =
594                 obj->putProperty(cx, id, getter, clasp->setProperty,
595                                  SHAPE_INVALID_SLOT, JSPROP_ENUMERATE, flags, 0);
597             if (!shape)
598                 return error();
601              * Test after calling putProperty since it can switch obj into
602              * dictionary mode, specifically if the shape tree ancestor line
603              * exceeds PropertyTree::MAX_HEIGHT.
605             if (obj->inDictionaryMode())
606                 return disable("dictionary");
608             if (!shape->hasDefaultSetter())
609                 return disable("adding non-default setter");
610             if (!shape->hasSlot())
611                 return disable("adding invalid slot");
614              * Watch for cases where the object reallocated its slots when
615              * adding the property, and disable the PIC. Otherwise we will
616              * keep generating identical PICs as side exits are taken on the
617              * capacity checks. Alternatively, we could avoid the disable
618              * and just not generate a stub in case there are multiple shapes
619              * that can flow here which don't all require reallocation.
620              * Doing this would cause us to walk down this same update path
621              * every time a reallocation is needed, however, which will
622              * usually be a slowdown even if there *are* other shapes that
623              * don't realloc.
625             if (obj->numSlots() != slots)
626                 return disable("insufficient slot capacity");
628             return generateStub(initialShape, shape, true, !obj->hasSlotsArray());
// Overwrite path: the property already exists directly on obj.
631         const Shape *shape = (const Shape *) prop;
632         if (pic.kind == ic::PICInfo::SETMETHOD && !shape->isMethod())
633             return disable("set method on non-method shape");
634         if (!shape->writable())
635             return disable("readonly");
637         if (shape->hasDefaultSetter()) {
638             if (!shape->hasSlot())
639                 return disable("invalid slot");
640         } else {
// Only Call-object arg/var setters are supported as non-default setters.
641             if (shape->hasSetterValue())
642                 return disable("scripted setter");
643             if (shape->setterOp() != SetCallArg &&
644                 shape->setterOp() != SetCallVar) {
645                 return disable("setter");
649         JS_ASSERT(obj == holder);
// Prefer patching the inline path once; otherwise fall back to a stub.
650         if (!pic.inlinePathPatched &&
651             !obj->brandedOrHasMethodBarrier() &&
652             shape->hasDefaultSetter() &&
653             !obj->isDenseArray()) {
654             return patchInline(shape, !obj->hasSlotsArray());
657         return generateStub(obj->shape(), shape, false, !obj->hasSlotsArray());
661 static bool
662 IsCacheableProtoChain(JSObject *obj, JSObject *holder)
664 while (obj != holder) {
665 JSObject *proto = obj->getProto();
666 if (!proto->isNative())
667 return false;
668 obj = proto;
670 return true;
673 template <typename IC>
674 struct GetPropertyHelper {
675 JSContext *cx;
676 JSObject *obj;
677 JSAtom *atom;
678 IC &ic;
680 JSObject *aobj;
681 JSObject *holder;
682 const Shape *shape;
684 GetPropertyHelper(JSContext *cx, JSObject *obj, JSAtom *atom, IC &ic)
685 : cx(cx), obj(obj), atom(atom), ic(ic), holder(NULL), shape(NULL)
688 public:
689 LookupStatus bind() {
690 JSProperty *prop;
691 if (!js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &holder, &prop))
692 return ic.error(cx);
693 if (!prop)
694 return ic.disable(cx, "lookup failed");
695 shape = (const Shape *)prop;
696 return Lookup_Cacheable;
699 LookupStatus lookup() {
700 JSObject *aobj = js_GetProtoIfDenseArray(obj);
701 if (!aobj->isNative())
702 return ic.disable(cx, "non-native");
703 JSProperty *prop;
704 if (!aobj->lookupProperty(cx, ATOM_TO_JSID(atom), &holder, &prop))
705 return ic.error(cx);
706 if (!prop)
707 return ic.disable(cx, "lookup failed");
708 if (!IsCacheableProtoChain(obj, holder))
709 return ic.disable(cx, "non-native holder");
710 shape = (const Shape *)prop;
711 return Lookup_Cacheable;
714 LookupStatus testForGet() {
715 if (!shape->hasDefaultGetter()) {
716 if (!shape->isMethod())
717 return ic.disable(cx, "getter");
718 if (!ic.isCallOp())
719 return ic.disable(cx, "method valued shape");
720 } else if (!shape->hasSlot()) {
721 return ic.disable(cx, "no slot");
724 return Lookup_Cacheable;
727 LookupStatus lookupAndTest() {
728 LookupStatus status = lookup();
729 if (status != Lookup_Cacheable)
730 return status;
731 return testForGet();
735 class GetPropCompiler : public PICStubCompiler
737 JSObject *obj;
738 JSAtom *atom;
739 int lastStubSecondShapeGuard;
741 static int32 inlineShapeOffset(ic::PICInfo &pic) {
742 #if defined JS_NUNBOX32
743 return GETPROP_INLINE_SHAPE_OFFSET;
744 #elif defined JS_PUNBOX64
745 return pic.labels.getprop.inlineShapeOffset;
746 #endif
749 inline int32 inlineShapeOffset() {
750 return inlineShapeOffset(pic);
753 static int32 inlineShapeJump(ic::PICInfo &pic) {
754 #if defined JS_NUNBOX32
755 return GETPROP_INLINE_SHAPE_JUMP;
756 #elif defined JS_PUNBOX64
757 return inlineShapeOffset(pic) + GETPROP_INLINE_SHAPE_JUMP;
758 #endif
761 inline int32 inlineShapeJump() {
762 return inlineShapeJump(pic);
765 static int32 dslotsLoad(ic::PICInfo &pic) {
766 #if defined JS_NUNBOX32
767 return GETPROP_DSLOTS_LOAD;
768 #elif defined JS_PUNBOX64
769 return pic.labels.getprop.dslotsLoadOffset;
770 #endif
773 inline int32 dslotsLoad() {
774 return dslotsLoad(pic);
777 public:
778 GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, JSAtom *atom,
779 VoidStubPIC stub)
780 : PICStubCompiler(pic.kind == ic::PICInfo::CALL ? "callprop" : "getprop", f, script, pic,
781 JS_FUNC_TO_DATA_PTR(void *, stub)),
782 obj(obj),
783 atom(atom),
784 lastStubSecondShapeGuard(pic.secondShapeGuard)
787 static void reset(ic::PICInfo &pic)
789 RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
790 repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoad(pic)));
791 repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
792 pic.shapeGuard + inlineShapeOffset(pic)),
793 int32(JSObjectMap::INVALID_SHAPE));
794 repatcher.relink(pic.fastPathStart.jumpAtOffset(pic.shapeGuard + inlineShapeJump(pic)),
795 pic.slowPathStart);
797 if (pic.hasTypeCheck()) {
798 repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD),
799 pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
802 RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
804 VoidStubPIC stub;
805 switch (pic.kind) {
806 case ic::PICInfo::GET:
807 stub = ic::GetProp;
808 break;
809 case ic::PICInfo::CALL:
810 stub = ic::CallProp;
811 break;
812 default:
813 JS_NOT_REACHED("invalid pic kind for GetPropCompiler::reset");
814 return;
817 FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
818 repatcher.relink(pic.slowPathCall, target);
821 LookupStatus generateArgsLengthStub()
823 Assembler masm;
825 Jump notArgs = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass());
827 masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.objReg);
828 masm.load32(Address(pic.objReg, JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
829 pic.objReg);
830 masm.move(pic.objReg, pic.shapeReg);
831 Jump overridden = masm.branchTest32(Assembler::NonZero, pic.shapeReg, Imm32(1));
832 masm.rshift32(Imm32(JSObject::ARGS_PACKED_BITS_COUNT), pic.objReg);
834 masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
835 Jump done = masm.jump();
837 PICLinker buffer(cx, pic);
838 if (!buffer.init(masm))
839 return error();
841 buffer.link(notArgs, pic.slowPathStart);
842 buffer.link(overridden, pic.slowPathStart);
843 buffer.link(done, pic.fastPathRejoin);
845 CodeLocationLabel start = buffer.finalizeCodeAddendum();
846 JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
847 start.executableAddress());
849 PICRepatchBuffer repatcher(pic, pic.lastPathStart());
850 patchPreviousToHere(repatcher, start);
852 disable("args length done");
854 return Lookup_Cacheable;
857 LookupStatus generateArrayLengthStub()
859 Assembler masm;
861 masm.loadObjClass(pic.objReg, pic.shapeReg);
862 Jump isDense = masm.testClass(Assembler::Equal, pic.shapeReg, &js_ArrayClass);
863 Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &js_SlowArrayClass);
865 isDense.linkTo(masm.label(), &masm);
866 masm.load32(Address(pic.objReg, offsetof(JSObject, privateData)), pic.objReg);
867 Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
868 masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
869 Jump done = masm.jump();
871 PICLinker buffer(cx, pic);
872 if (!buffer.init(masm))
873 return error();
875 buffer.link(notArray, pic.slowPathStart);
876 buffer.link(oob, pic.slowPathStart);
877 buffer.link(done, pic.fastPathRejoin);
879 CodeLocationLabel start = buffer.finalizeCodeAddendum();
880 JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
881 start.executableAddress());
883 PICRepatchBuffer repatcher(pic, pic.lastPathStart());
884 patchPreviousToHere(repatcher, start);
886 disable("array length done");
888 return Lookup_Cacheable;
891 LookupStatus generateStringCallStub()
893 JS_ASSERT(pic.hasTypeCheck());
894 JS_ASSERT(pic.kind == ic::PICInfo::CALL);
896 if (!f.fp()->script()->compileAndGo)
897 return disable("String.prototype without compile-and-go");
899 GetPropertyHelper<GetPropCompiler> getprop(cx, obj, atom, *this);
900 LookupStatus status = getprop.lookupAndTest();
901 if (status != Lookup_Cacheable)
902 return status;
903 if (getprop.obj != getprop.holder)
904 return disable("proto walk on String.prototype");
906 Assembler masm;
908 /* Only strings are allowed. */
909 Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
910 ImmType(JSVAL_TYPE_STRING));
913 * Sink pic.objReg, since we're about to lose it.
915 * Note: This is really hacky, and relies on f.regs.sp being set
916 * correctly in ic::CallProp. Should we just move the store higher
917 * up in the fast path, or put this offset in PICInfo?
919 uint32 thisvOffset = uint32(f.regs.sp - f.fp()->slots()) - 1;
920 Address thisv(JSFrameReg, sizeof(JSStackFrame) + thisvOffset * sizeof(Value));
921 masm.storeValueFromComponents(ImmType(JSVAL_TYPE_STRING),
922 pic.objReg, thisv);
925 * Clobber objReg with String.prototype and do some PIC stuff. Well,
926 * really this is now a MIC, except it won't ever be patched, so we
927 * just disable the PIC at the end. :FIXME:? String.prototype probably
928 * does not get random shape changes.
930 masm.move(ImmPtr(obj), pic.objReg);
931 masm.loadShape(pic.objReg, pic.shapeReg);
932 Jump shapeMismatch = masm.branch32(Assembler::NotEqual, pic.shapeReg,
933 Imm32(obj->shape()));
934 masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
936 Jump done = masm.jump();
938 PICLinker buffer(cx, pic);
939 if (!buffer.init(masm))
940 return error();
942 buffer.link(notString, pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
943 buffer.link(shapeMismatch, pic.slowPathStart);
944 buffer.link(done, pic.fastPathRejoin);
946 CodeLocationLabel cs = buffer.finalizeCodeAddendum();
947 JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
948 cs.executableAddress());
950 /* Patch the type check to jump here. */
951 if (pic.hasTypeCheck()) {
952 RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
953 repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), cs);
956 /* Disable the PIC so we don't keep generating stubs on the above shape mismatch. */
957 disable("generated string call stub");
959 return Lookup_Cacheable;
962 LookupStatus generateStringLengthStub()
964 JS_ASSERT(pic.hasTypeCheck());
966 Assembler masm;
967 Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
968 ImmType(JSVAL_TYPE_STRING));
969 masm.loadPtr(Address(pic.objReg, offsetof(JSString, mLengthAndFlags)), pic.objReg);
970 // String length is guaranteed to be no more than 2**28, so the 32-bit operation is OK.
971 masm.urshift32(Imm32(JSString::FLAGS_LENGTH_SHIFT), pic.objReg);
972 masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
973 Jump done = masm.jump();
975 PICLinker buffer(cx, pic);
976 if (!buffer.init(masm))
977 return error();
979 buffer.link(notString, pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
980 buffer.link(done, pic.fastPathRejoin);
982 CodeLocationLabel start = buffer.finalizeCodeAddendum();
983 JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
984 start.executableAddress());
986 if (pic.hasTypeCheck()) {
987 RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
988 repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), start);
991 disable("generated string length stub");
993 return Lookup_Cacheable;
    /*
     * Patch the inline fast path in place so it services (obj, shape)
     * directly: bake in the current shape and the slot offset of the
     * property. When the object stores slots inline (no slots array), the
     * dslots load is rewritten into a LEA so the same addressing mode reaches
     * the fixed slots.
     */
    LookupStatus patchInline(JSObject *holder, const Shape *shape)
    {
        spew("patch", "inline");
        PICRepatchBuffer repatcher(pic, pic.fastPathStart);

        int32 offset;
        if (!holder->hasSlotsArray()) {
            JSC::CodeLocationInstruction istr;
            istr = pic.fastPathRejoin.instructionAtOffset(dslotsLoad());
            repatcher.repatchLoadPtrToLEA(istr);

            //
            // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
            // To:           | lea fslots, [obj + DSLOTS_OFFSET]
            //
            // Because the offset is wrong, it's necessary to correct it
            // below.
            //
            int32 diff = int32(JSObject::getFixedSlotOffset(0)) -
                         int32(offsetof(JSObject, slots));
            JS_ASSERT(diff != 0);
            offset = (int32(shape->slot) * sizeof(Value)) + diff;
        } else {
            offset = shape->slot * sizeof(Value);
        }

        // Bake the new shape into the inline shape guard.
        uint32 shapeOffs = pic.shapeGuard + inlineShapeOffset();
        repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(shapeOffs), obj->shape());
#if defined JS_NUNBOX32
        // On NUNBOX32 the type tag and payload are loaded separately; the
        // type word lives 4 bytes above the payload.
        repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETPROP_TYPE_LOAD), offset + 4);
        repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(GETPROP_DATA_LOAD), offset);
#elif defined JS_PUNBOX64
        repatcher.repatch(pic.fastPathRejoin.dataLabel32AtOffset(pic.labels.getprop.inlineValueOffset), offset);
#endif

        pic.inlinePathPatched = true;

        return Lookup_Cacheable;
    }
    /*
     * Generate an out-of-line stub guarding on obj's shape (or class, for
     * dense arrays) and, if the property lives on a prototype, on the
     * holder's shape too; on success it loads the property value and rejoins
     * the inline path. The previous stub (or the inline path) is patched to
     * try this stub first.
     */
    LookupStatus generateStub(JSObject *holder, const Shape *shape)
    {
        Vector<Jump, 8> shapeMismatches(cx);

        Assembler masm;

        Label start;
        Jump shapeGuard;
        // NOTE(review): argsLenGuard appears unused in this method — likely
        // leftover; confirm against surrounding revisions before removing.
        Jump argsLenGuard;
        if (obj->isDenseArray()) {
            start = masm.label();
            shapeGuard = masm.testObjClass(Assembler::NotEqual, pic.objReg, obj->getClass());

            /*
             * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
             * the IC is disabled after a dense array hit, so no patching can occur.
             */
        } else {
            if (pic.shapeNeedsRemat()) {
                masm.loadShape(pic.objReg, pic.shapeReg);
                pic.shapeRegHasBaseShape = true;
            }

            start = masm.label();
            shapeGuard = masm.branch32_force32(Assembler::NotEqual, pic.shapeReg,
                                               Imm32(obj->shape()));
#if defined JS_NUNBOX32
            JS_ASSERT(masm.differenceBetween(start, shapeGuard) == GETPROP_STUB_SHAPE_JUMP);
#endif
        }

#if defined JS_PUNBOX64
        // On PUNBOX64 the shape-jump offset is not a compile-time constant;
        // record a label so it can be measured after assembly.
        Label stubShapeJumpLabel = masm.label();
#endif

        if (!shapeMismatches.append(shapeGuard))
            return error();

        RegisterID holderReg = pic.objReg;
        if (obj != holder) {
            // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
            holderReg = pic.shapeReg;
            masm.move(ImmPtr(holder), holderReg);
            pic.shapeRegHasBaseShape = false;

            // Guard on the holder's shape.
            Jump j = masm.guardShape(holderReg, holder);
            if (!shapeMismatches.append(j))
                return error();

            pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
        } else {
            pic.secondShapeGuard = 0;
        }

        /* Load the value out of the object. */
        masm.loadObjProp(holder, holderReg, shape, pic.shapeReg, pic.objReg);
        Jump done = masm.jump();

        PICLinker buffer(cx, pic);
        if (!buffer.init(masm))
            return error();

        // The guard exit jumps to the original slow case.
        for (Jump *pj = shapeMismatches.begin(); pj != shapeMismatches.end(); ++pj)
            buffer.link(*pj, pic.slowPathStart);

        // The final exit jumps to the store-back in the inline stub.
        buffer.link(done, pic.fastPathRejoin);
        CodeLocationLabel cs = buffer.finalizeCodeAddendum();
        JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());

        // Chain this stub in front of the last path generated.
        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
        patchPreviousToHere(repatcher, cs);

        pic.stubsGenerated++;
        pic.lastStubStart = buffer.locationOf(start);

#if defined JS_PUNBOX64
        // Store-then-assert pattern: the assert catches truncation when the
        // measured offset does not round-trip through the label field.
        pic.labels.getprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
        JS_ASSERT(pic.labels.getprop.stubShapeJump == masm.differenceBetween(start, stubShapeJumpLabel));
#endif

        if (pic.stubsGenerated == MAX_PIC_STUBS)
            disable("max stubs reached");
        if (obj->isDenseArray())
            disable("dense array");

        return Lookup_Cacheable;
    }
    /*
     * Redirect the most recently generated path (inline fast path if no stubs
     * exist yet, otherwise the last stub) so its shape-guard miss jumps to
     * the new stub at |cs|.
     */
    void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
    {
        // Patch either the inline fast path or a generated stub. The stub
        // omits the prefix of the inline fast path that loads the shape, so
        // the offsets are different.
        int shapeGuardJumpOffset;
        if (pic.stubsGenerated)
#if defined JS_NUNBOX32
            shapeGuardJumpOffset = GETPROP_STUB_SHAPE_JUMP;
#elif defined JS_PUNBOX64
            shapeGuardJumpOffset = pic.labels.getprop.stubShapeJump;
#endif
        else
            shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
        repatcher.relink(shapeGuardJumpOffset, cs);
        // If the previous stub also guarded on a holder shape, redirect that
        // guard's miss path here as well.
        if (lastStubSecondShapeGuard)
            repatcher.relink(lastStubSecondShapeGuard, cs);
    }
    /*
     * Look up the property and extend the IC: patch the inline path in place
     * for own-property hits (once), otherwise attach a new out-of-line stub.
     */
    LookupStatus update()
    {
        JS_ASSERT(pic.hit);

        GetPropertyHelper<GetPropCompiler> getprop(cx, obj, atom, *this);
        LookupStatus status = getprop.lookupAndTest();
        if (status != Lookup_Cacheable)
            return status;

        // Own property and the inline path is still generic: patch inline.
        if (obj == getprop.holder && !pic.inlinePathPatched)
            return patchInline(getprop.holder, getprop.shape);

        return generateStub(getprop.holder, getprop.shape);
    }
/*
 * Compiles stubs for JSOP_NAME / JSOP_GETXPROP lookups along the scope
 * chain. |getprop.obj| starts NULL; it is filled in by updateForName() /
 * updateForXName() before any stub is generated.
 */
class ScopeNameCompiler : public PICStubCompiler
{
    JSObject *scopeChain;   // Scope chain head at the site being compiled.
    JSAtom *atom;           // Name being looked up.

    GetPropertyHelper<ScopeNameCompiler> getprop;

  public:
    ScopeNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
                      JSAtom *atom, VoidStubPIC stub)
      : PICStubCompiler("name", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
        scopeChain(scopeChain), atom(atom),
        getprop(f.cx, NULL, atom, *this)
    { }
1177 static void reset(ic::PICInfo &pic)
1179 RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
1180 repatcher.relink(pic.fastPathStart.jumpAtOffset(SCOPENAME_JUMP_OFFSET),
1181 pic.slowPathStart);
1183 RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
1184 VoidStubPIC stub = (pic.kind == ic::PICInfo::NAME) ? ic::Name : ic::XName;
1185 FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
1186 repatcher.relink(pic.slowPathCall, target);
    typedef Vector<Jump, 8, ContextAllocPolicy> JumpList;

    /*
     * Emit code walking from |scopeChain| to |getprop.holder|, guarding on
     * each intervening object's shape and NULL-testing each parent link.
     * All guard-miss jumps are collected in |fails| for the caller to link.
     */
    LookupStatus walkScopeChain(Assembler &masm, JumpList &fails)
    {
        /* Walk the scope chain. */
        JSObject *tobj = scopeChain;

        /* For GETXPROP, we'll never enter this loop. */
        JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, tobj && tobj == getprop.holder);
        JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, getprop.obj == tobj);

        while (tobj && tobj != getprop.holder) {
            if (!js_IsCacheableNonGlobalScope(tobj))
                return disable("non-cacheable scope chain object");
            JS_ASSERT(tobj->isNative());

            if (tobj != scopeChain) {
                /* scopeChain will never be NULL, but parents can be NULL. */
                Jump j = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
                if (!fails.append(j))
                    return error();
            }

            /* Guard on intervening shapes. */
            masm.loadShape(pic.objReg, pic.shapeReg);
            Jump j = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(tobj->shape()));
            if (!fails.append(j))
                return error();

            /* Load the next link in the scope chain. */
            Address parent(pic.objReg, offsetof(JSObject, parent));
            masm.loadPtr(parent, pic.objReg);

            tobj = tobj->getParent();
        }

        if (tobj != getprop.holder)
            return disable("scope chain walk terminated early");

        return Lookup_Cacheable;
    }
    /*
     * Generate a stub for a name that resolves on the global object: walk the
     * scope chain (for NAME), NULL-test and shape-guard the global, then load
     * the property. All failures funnel through a single patchable jump.
     */
    LookupStatus generateGlobalStub(JSObject *obj)
    {
        Assembler masm;
        JumpList fails(cx);

        /* For GETXPROP, the object is already in objReg. */
        if (pic.kind == ic::PICInfo::NAME)
            masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);

        JS_ASSERT(obj == getprop.holder);
        JS_ASSERT(getprop.holder == scopeChain->getGlobal());

        LookupStatus status = walkScopeChain(masm, fails);
        if (status != Lookup_Cacheable)
            return status;

        /* If a scope chain walk was required, the final object needs a NULL test. */
        MaybeJump finalNull;
        if (pic.kind == ic::PICInfo::NAME)
            finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
        masm.loadShape(pic.objReg, pic.shapeReg);
        Jump finalShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(getprop.holder->shape()));

        masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
        Jump done = masm.jump();

        // All failures flow to here, so there is a common point to patch.
        for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
            pj->linkTo(masm.label(), &masm);
        if (finalNull.isSet())
            finalNull.get().linkTo(masm.label(), &masm);
        finalShape.linkTo(masm.label(), &masm);
        Label failLabel = masm.label();
        Jump failJump = masm.jump();
        DBGLABEL(dbgJumpOffset);

        // The fail jump must sit at the fixed offset later stubs relink.
        JS_ASSERT(masm.differenceBetween(failLabel, dbgJumpOffset) == SCOPENAME_JUMP_OFFSET);

        PICLinker buffer(cx, pic);
        if (!buffer.init(masm))
            return error();

        buffer.link(failJump, pic.slowPathStart);
        buffer.link(done, pic.fastPathRejoin);
        CodeLocationLabel cs = buffer.finalizeCodeAddendum();
        JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
        spew("NAME stub", "global");

        // Chain this stub in front of the last generated path.
        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
        repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);

        pic.stubsGenerated++;
        pic.lastStubStart = buffer.locationOf(failLabel);

        if (pic.stubsGenerated == MAX_PIC_STUBS)
            disable("max stubs reached");

        return Lookup_Cacheable;
    }
    // Which kind of Call-object property is being read.
    enum CallObjPropKind {
        ARG,
        VAR
    };

    /*
     * Generate a stub for a name resolving on a Call object: walk the scope
     * chain, guard on the Call object's shape, then read the arg/var either
     * from the still-live stack frame or, if the frame has escaped, from the
     * Call object's reserved slots.
     */
    LookupStatus generateCallStub(JSObject *obj)
    {
        Assembler masm;
        Vector<Jump, 8, ContextAllocPolicy> fails(cx);

        /* For GETXPROP, the object is already in objReg. */
        if (pic.kind == ic::PICInfo::NAME)
            masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);

        JS_ASSERT(obj == getprop.holder);
        JS_ASSERT(getprop.holder != scopeChain->getGlobal());

        CallObjPropKind kind;
        const Shape *shape = getprop.shape;
        if (shape->getterOp() == js_GetCallArg) {
            kind = ARG;
        } else if (shape->getterOp() == js_GetCallVar) {
            kind = VAR;
        } else {
            return disable("unhandled callobj sprop getter");
        }

        LookupStatus status = walkScopeChain(masm, fails);
        if (status != Lookup_Cacheable)
            return status;

        /* If a scope chain walk was required, the final object needs a NULL test. */
        MaybeJump finalNull;
        if (pic.kind == ic::PICInfo::NAME)
            finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
        masm.loadShape(pic.objReg, pic.shapeReg);
        Jump finalShape = masm.branch32(Assembler::NotEqual, pic.shapeReg, Imm32(getprop.holder->shape()));

        /* Get callobj's stack frame. */
        masm.loadFunctionPrivate(pic.objReg, pic.shapeReg);

        JSFunction *fun = getprop.holder->getCallObjCalleeFunction();
        uint16 slot = uint16(shape->shortid);

        Jump skipOver;
        // NULL private means the frame has escaped to the heap.
        Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);

        /* Not-escaped case. */
        {
            Address addr(pic.shapeReg, kind == ARG ? JSStackFrame::offsetOfFormalArg(fun, slot)
                                                   : JSStackFrame::offsetOfFixed(slot));
            masm.loadPayload(addr, pic.objReg);
            masm.loadTypeTag(addr, pic.shapeReg);
            skipOver = masm.jump();
        }

        /* Escaped-frame case: read out of the Call object's slots. */
        escapedFrame.linkTo(masm.label(), &masm);
        {
            masm.loadPtr(Address(pic.objReg, offsetof(JSObject, slots)), pic.objReg);

            // Vars are stored after the formals in the reserved slots.
            if (kind == VAR)
                slot += fun->nargs;
            Address dslot(pic.objReg, (slot + JSObject::CALL_RESERVED_SLOTS) * sizeof(Value));

            /* Safe because type is loaded first. */
            masm.loadValueAsComponents(dslot, pic.shapeReg, pic.objReg);
        }

        skipOver.linkTo(masm.label(), &masm);
        Jump done = masm.jump();

        // All failures flow to here, so there is a common point to patch.
        for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
            pj->linkTo(masm.label(), &masm);
        if (finalNull.isSet())
            finalNull.get().linkTo(masm.label(), &masm);
        finalShape.linkTo(masm.label(), &masm);
        Label failLabel = masm.label();
        Jump failJump = masm.jump();

        PICLinker buffer(cx, pic);
        if (!buffer.init(masm))
            return error();

        buffer.link(failJump, pic.slowPathStart);
        buffer.link(done, pic.fastPathRejoin);
        CodeLocationLabel cs = buffer.finalizeCodeAddendum();
        JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());

        PICRepatchBuffer repatcher(pic, pic.lastPathStart());
        repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);

        pic.stubsGenerated++;
        pic.lastStubStart = buffer.locationOf(failLabel);

        if (pic.stubsGenerated == MAX_PIC_STUBS)
            disable("max stubs reached");

        return Lookup_Cacheable;
    }
    /*
     * Update entry point for JSOP_NAME: resolve the name along the scope
     * chain (bind), then dispatch to the common update().
     */
    LookupStatus updateForName()
    {
        // |getprop.obj| is filled by bind()
        LookupStatus status = getprop.bind();
        if (status != Lookup_Cacheable)
            return status;

        return update(getprop.obj);
    }

    /*
     * Update entry point for JSOP_GETXPROP: the object is fixed (the given
     * scope-chain head), so only a property lookup is needed.
     */
    LookupStatus updateForXName()
    {
        // |obj| and |getprop.obj| are NULL, but should be the given scopeChain.
        getprop.obj = scopeChain;
        LookupStatus status = getprop.lookup();
        if (status != Lookup_Cacheable)
            return status;

        return update(getprop.obj);
    }
    /*
     * Dispatch stub generation based on where the name resolved: a Call
     * object gets a call stub, the global object gets a global stub; other
     * scope objects (and proto hits) are not cached.
     */
    LookupStatus update(JSObject *obj)
    {
        if (obj != getprop.holder)
            return disable("property is on proto of a scope object");

        if (obj->getClass() == &js_CallClass)
            return generateCallStub(obj);

        LookupStatus status = getprop.testForGet();
        if (status != Lookup_Cacheable)
            return status;

        // No parent means this is the global object.
        if (!obj->getParent())
            return generateGlobalStub(obj);

        return disable("scope object not handled yet");
    }
    /*
     * Fetch the looked-up value into |*vp| after update. Handles the
     * not-found case (undefined for typeof tests, else ReportAtomNotDefined)
     * and non-native objects via the generic getProperty path.
     */
    bool retrieve(Value *vp)
    {
        JSObject *obj = getprop.obj;
        JSObject *holder = getprop.holder;
        const Shape *shape = getprop.shape;

        if (shape && (!obj->isNative() || !holder->isNative())) {
            if (!obj->getProperty(cx, ATOM_TO_JSID(atom), vp))
                return false;
        } else {
            if (!shape) {
                /* Kludge to allow (typeof foo == "undefined") tests. */
                disable("property not found");
                if (pic.kind == ic::PICInfo::NAME) {
                    JSOp op2 = js_GetOpcode(cx, script, cx->regs->pc + JSOP_NAME_LENGTH);
                    if (op2 == JSOP_TYPEOF) {
                        vp->setUndefined();
                        return true;
                    }
                }
                ReportAtomNotDefined(cx, atom);
                return false;
            }
            JSObject *normalized = obj;
            // With objects with a scripted getter must be unwrapped so the
            // getter sees the correct |this|.
            if (obj->getClass() == &js_WithClass && !shape->hasDefaultGetter())
                normalized = js_UnwrapWithObject(cx, obj);
            NATIVE_GET(cx, normalized, holder, shape, JSGET_METHOD_BARRIER, vp, return false);
        }

        return true;
    }
/*
 * Compiles stubs for JSOP_BINDNAME: finds the scope-chain object on which a
 * name would be defined and leaves it on the stack.
 */
class BindNameCompiler : public PICStubCompiler
{
    JSObject *scopeChain;   // Scope chain head at the site being compiled.
    JSAtom *atom;           // Name being bound.

    // Offset of the inline path's guard-miss jump; constant on NUNBOX32,
    // measured per-site on PUNBOX64.
    static int32 inlineJumpOffset(ic::PICInfo &pic) {
#if defined JS_NUNBOX32
        return BINDNAME_INLINE_JUMP_OFFSET;
#elif defined JS_PUNBOX64
        return pic.labels.bindname.inlineJumpOffset;
#endif
    }

    inline int32 inlineJumpOffset() {
        return inlineJumpOffset(pic);
    }

  public:
    BindNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
                     JSAtom *atom, VoidStubPIC stub)
      : PICStubCompiler("bind", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
        scopeChain(scopeChain), atom(atom)
    { }
    /*
     * Restore this IC to its pristine state: point the inline guard-miss jump
     * back at the slow path and re-install ic::BindName as the slow call.
     */
    static void reset(ic::PICInfo &pic)
    {
        PICRepatchBuffer repatcher(pic, pic.fastPathStart);
        repatcher.relink(pic.shapeGuard + inlineJumpOffset(pic), pic.slowPathStart);

        // Note: the slow-path call is relinked through a buffer built over
        // slowPathStart (repatcher2), not the fast-path buffer.
        RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
        FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
        repatcher2.relink(pic.slowPathCall, target);
    }
1499 LookupStatus generateStub(JSObject *obj)
1501 Assembler masm;
1502 js::Vector<Jump, 8, ContextAllocPolicy> fails(cx);
1504 /* Guard on the shape of the scope chain. */
1505 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
1506 masm.loadShape(pic.objReg, pic.shapeReg);
1507 Jump firstShape = masm.branch32(Assembler::NotEqual, pic.shapeReg,
1508 Imm32(scopeChain->shape()));
1510 /* Walk up the scope chain. */
1511 JSObject *tobj = scopeChain;
1512 Address parent(pic.objReg, offsetof(JSObject, parent));
1513 while (tobj && tobj != obj) {
1514 if (!js_IsCacheableNonGlobalScope(tobj))
1515 return disable("non-cacheable obj in scope chain");
1516 masm.loadPtr(parent, pic.objReg);
1517 Jump nullTest = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1518 if (!fails.append(nullTest))
1519 return error();
1520 masm.loadShape(pic.objReg, pic.shapeReg);
1521 Jump shapeTest = masm.branch32(Assembler::NotEqual, pic.shapeReg,
1522 Imm32(tobj->shape()));
1523 tobj = tobj->getParent();
1525 if (tobj != obj)
1526 return disable("indirect hit");
1528 Jump done = masm.jump();
1530 // All failures flow to here, so there is a common point to patch.
1531 for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1532 pj->linkTo(masm.label(), &masm);
1533 firstShape.linkTo(masm.label(), &masm);
1534 Label failLabel = masm.label();
1535 Jump failJump = masm.jump();
1536 DBGLABEL(dbgStubJumpOffset);
1538 JS_ASSERT(masm.differenceBetween(failLabel, dbgStubJumpOffset) == BINDNAME_STUB_JUMP_OFFSET);
1540 PICLinker buffer(cx, pic);
1541 if (!buffer.init(masm))
1542 return error();
1544 buffer.link(failJump, pic.slowPathStart);
1545 buffer.link(done, pic.fastPathRejoin);
1546 CodeLocationLabel cs = buffer.finalizeCodeAddendum();
1547 JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1549 PICRepatchBuffer repatcher(pic, pic.lastPathStart());
1550 if (!pic.stubsGenerated)
1551 repatcher.relink(pic.shapeGuard + inlineJumpOffset(), cs);
1552 else
1553 repatcher.relink(BINDNAME_STUB_JUMP_OFFSET, cs);
1555 pic.stubsGenerated++;
1556 pic.lastStubStart = buffer.locationOf(failLabel);
1558 if (pic.stubsGenerated == MAX_PIC_STUBS)
1559 disable("max stubs reached");
1561 return Lookup_Cacheable;
    /*
     * Resolve where |atom| binds and, after the first hit, attach a stub for
     * the resolved scope object. Returns the binding object, or NULL on
     * error (resolution failure or stub-generation error).
     */
    JSObject *update()
    {
        JS_ASSERT(scopeChain->getParent());

        JSObject *obj = js_FindIdentifierBase(cx, scopeChain, ATOM_TO_JSID(atom));
        if (!obj)
            return obj;

        // First hit is free: don't pay for stub generation on cold sites.
        if (!pic.hit) {
            spew("first hit", "nop");
            pic.hit = true;
            return obj;
        }

        LookupStatus status = generateStub(obj);
        if (status == Lookup_Error)
            return NULL;

        return obj;
    }
// Fallback stubs installed when the corresponding PIC is disabled: they
// forward straight to the generic interpreter-path stubs.
static void JS_FASTCALL
DisabledLengthIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::Length(f);
}

static void JS_FASTCALL
DisabledGetPropIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::GetProp(f);
}

static void JS_FASTCALL
DisabledGetPropICNoCache(VMFrame &f, ic::PICInfo *pic)
{
    stubs::GetPropNoCache(f, pic->atom);
}
/*
 * Slow-path entry for JSOP_GETPROP/JSOP_LENGTH PICs. Special-cases |length|
 * on strings, dense arrays and unmodified arguments objects (attaching
 * dedicated length stubs), otherwise updates the property PIC and performs
 * the generic property get.
 */
void JS_FASTCALL
ic::GetProp(VMFrame &f, ic::PICInfo *pic)
{
    JSScript *script = f.fp()->script();

    JSAtom *atom = pic->atom;
    if (atom == f.cx->runtime->atomState.lengthAtom) {
        if (f.regs.sp[-1].isString()) {
            GetPropCompiler cc(f, script, NULL, *pic, NULL, DisabledLengthIC);
            LookupStatus status = cc.generateStringLengthStub();
            if (status == Lookup_Error)
                THROW();
            JSString *str = f.regs.sp[-1].toString();
            f.regs.sp[-1].setInt32(str->length());
            return;
        } else if (!f.regs.sp[-1].isPrimitive()) {
            JSObject *obj = &f.regs.sp[-1].toObject();
            // Arguments objects only qualify while their length is pristine.
            if (obj->isArray() || (obj->isArguments() && !obj->isArgsLengthOverridden())) {
                GetPropCompiler cc(f, script, obj, *pic, NULL, DisabledLengthIC);
                if (obj->isArray()) {
                    LookupStatus status = cc.generateArrayLengthStub();
                    if (status == Lookup_Error)
                        THROW();
                    f.regs.sp[-1].setNumber(obj->getArrayLength());
                } else if (obj->isArguments()) {
                    LookupStatus status = cc.generateArgsLengthStub();
                    if (status == Lookup_Error)
                        THROW();
                    f.regs.sp[-1].setInt32(int32_t(obj->getArgsInitialLength()));
                }
                return;
            }
        }
        atom = f.cx->runtime->atomState.lengthAtom;
    }

    JSObject *obj = ValueToObject(f.cx, &f.regs.sp[-1]);
    if (!obj)
        THROW();

    if (pic->shouldUpdate(f.cx)) {
        VoidStubPIC stub = pic->usePropCache
                           ? DisabledGetPropIC
                           : DisabledGetPropICNoCache;
        GetPropCompiler cc(f, script, obj, *pic, atom, stub);
        // Lookup_Error is the zero status; any other status is non-fatal.
        if (!cc.update()) {
            cc.disable("error");
            THROW();
        }
    }

    Value v;
    if (!obj->getProperty(f.cx, ATOM_TO_JSID(atom), &v))
        THROW();
    f.regs.sp[-1] = v;
}
// Disabled-PIC fallbacks for SetProp, templated on strict mode so the
// correct strict/non-strict SetName/SetProp stub is invoked.
template <JSBool strict>
static void JS_FASTCALL
DisabledSetPropIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::SetName<strict>(f, pic->atom);
}

template <JSBool strict>
static void JS_FASTCALL
DisabledSetPropICNoCache(VMFrame &f, ic::PICInfo *pic)
{
    stubs::SetPropNoCache<strict>(f, pic->atom);
}
/*
 * Slow-path entry for set-property PICs: update the PIC (only when the
 * property already exists), then perform the actual set via the disabled
 * stub, which runs the generic path.
 */
void JS_FASTCALL
ic::SetProp(VMFrame &f, ic::PICInfo *pic)
{
    JSObject *obj = ValueToObject(f.cx, &f.regs.sp[-2]);
    if (!obj)
        THROW();

    JSScript *script = f.fp()->script();
    JS_ASSERT(pic->isSet());

    VoidStubPIC stub = pic->usePropCache
                       ? STRICT_VARIANT(DisabledSetPropIC)
                       : STRICT_VARIANT(DisabledSetPropICNoCache);

    /*
     * Important: We update the PIC before looking up the property so that the
     * PIC is updated only if the property already exists. The PIC doesn't try
     * to optimize adding new properties; that is for the slow case.
     *
     * Also note, we can't use SetName for PROPINC PICs because the property
     * cache can't handle a GET and SET from the same scripted PC.
     */
    if (pic->shouldUpdate(f.cx)) {
        SetPropCompiler cc(f, script, obj, *pic, pic->atom, stub);
        LookupStatus status = cc.update();
        if (status == Lookup_Error)
            THROW();
    }

    // NOTE(review): |rval| is not referenced after this point in the visible
    // code; presumably kept live across the stub call — confirm before removal.
    Value rval = f.regs.sp[-1];
    stub(f, pic);
}
// Disabled-PIC fallback for CallProp: forwards to the generic stub.
static void JS_FASTCALL
DisabledCallPropIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::CallProp(f, pic->atom);
}
/*
 * Slow-path entry for JSOP_CALLPROP PICs. Computes |obj.name| plus the
 * |this| value for the upcoming call (boxing primitives through their
 * prototype), consulting the property cache first, then updates the PIC and
 * applies the no-such-method hook if enabled.
 */
void JS_FASTCALL
ic::CallProp(VMFrame &f, ic::PICInfo *pic)
{
    JSContext *cx = f.cx;
    JSFrameRegs &regs = f.regs;

    JSScript *script = f.fp()->script();

    Value lval;
    lval = regs.sp[-1];

    // Normalize the callee base: primitives look the property up on their
    // class prototype; null/undefined is an error.
    Value objv;
    if (lval.isObject()) {
        objv = lval;
    } else {
        JSProtoKey protoKey;
        if (lval.isString()) {
            protoKey = JSProto_String;
        } else if (lval.isNumber()) {
            protoKey = JSProto_Number;
        } else if (lval.isBoolean()) {
            protoKey = JSProto_Boolean;
        } else {
            JS_ASSERT(lval.isNull() || lval.isUndefined());
            js_ReportIsNullOrUndefined(cx, -1, lval, NULL);
            THROW();
        }
        JSObject *pobj;
        if (!js_GetClassPrototype(cx, NULL, protoKey, &pobj))
            THROW();
        objv.setObject(*pobj);
    }

    JSObject *aobj = js_GetProtoIfDenseArray(&objv.toObject());
    Value rval;

    // Fast path: property-cache hit yields the value without a full lookup.
    PropertyCacheEntry *entry;
    JSObject *obj2;
    JSAtom *atom;
    JS_PROPERTY_CACHE(cx).test(cx, regs.pc, aobj, obj2, entry, atom);
    if (!atom) {
        if (entry->vword.isFunObj()) {
            rval.setObject(entry->vword.toFunObj());
        } else if (entry->vword.isSlot()) {
            uint32 slot = entry->vword.toSlot();
            rval = obj2->nativeGetSlot(slot);
        } else {
            JS_ASSERT(entry->vword.isShape());
            const Shape *shape = entry->vword.toShape();
            NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval,
                       THROW());
        }
        // Push: sp[-2] = result, sp[-1] = this.
        regs.sp++;
        regs.sp[-2] = rval;
        regs.sp[-1] = lval;
    } else {
        /*
         * Cache miss: use the immediate atom that was loaded for us under
         * PropertyCache::test.
         */
        jsid id;
        id = ATOM_TO_JSID(pic->atom);

        regs.sp++;
        regs.sp[-1].setNull();
        if (lval.isObject()) {
            if (!js_GetMethod(cx, &objv.toObject(), id,
                              JS_LIKELY(!objv.toObject().getOps()->getProperty)
                              ? JSGET_CACHE_RESULT | JSGET_NO_METHOD_BARRIER
                              : JSGET_NO_METHOD_BARRIER,
                              &rval)) {
                THROW();
            }
            regs.sp[-1] = objv;
            regs.sp[-2] = rval;
        } else {
            JS_ASSERT(!objv.toObject().getOps()->getProperty);
            if (!js_GetPropertyHelper(cx, &objv.toObject(), id,
                                      JSGET_CACHE_RESULT | JSGET_NO_METHOD_BARRIER,
                                      &rval)) {
                THROW();
            }
            // Primitive base: |this| stays the primitive, not the prototype.
            regs.sp[-1] = lval;
            regs.sp[-2] = rval;
        }
    }

    // Extend the IC for the next hit at this site.
    GetPropCompiler cc(f, script, &objv.toObject(), *pic, pic->atom, DisabledCallPropIC);
    if (lval.isObject()) {
        if (pic->shouldUpdate(cx)) {
            LookupStatus status = cc.update();
            if (status == Lookup_Error)
                THROW();
        }
    } else if (lval.isString()) {
        LookupStatus status = cc.generateStringCallStub();
        if (status == Lookup_Error)
            THROW();
    } else {
        cc.disable("non-string primitive");
    }

#if JS_HAS_NO_SUCH_METHOD
    if (JS_UNLIKELY(rval.isUndefined()) && regs.sp[-1].isObject()) {
        regs.sp[-2].setString(ATOM_TO_STRING(pic->atom));
        if (!js_OnUnknownMethod(cx, regs.sp - 2))
            THROW();
    }
#endif
}
// Disabled-PIC fallbacks for NAME and XNAME: forward to the generic stubs.
static void JS_FASTCALL
DisabledNameIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::Name(f);
}

static void JS_FASTCALL
DisabledXNameIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::GetProp(f);
}
/*
 * Slow-path entry for JSOP_GETXPROP PICs: the base object is already on the
 * stack. Updates the IC and overwrites the stack top with the result.
 */
void JS_FASTCALL
ic::XName(VMFrame &f, ic::PICInfo *pic)
{
    JSScript *script = f.fp()->script();

    /* GETXPROP is guaranteed to have an object. */
    JSObject *obj = &f.regs.sp[-1].toObject();

    ScopeNameCompiler cc(f, script, obj, *pic, pic->atom, DisabledXNameIC);

    LookupStatus status = cc.updateForXName();
    if (status == Lookup_Error)
        THROW();

    Value rval;
    if (!cc.retrieve(&rval))
        THROW();
    f.regs.sp[-1] = rval;
}
/*
 * Slow-path entry for JSOP_NAME PICs: resolve the name along the frame's
 * scope chain, update the IC, and push the result (NAME pushes a new value,
 * hence sp[0] rather than sp[-1]).
 */
void JS_FASTCALL
ic::Name(VMFrame &f, ic::PICInfo *pic)
{
    JSScript *script = f.fp()->script();

    ScopeNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->atom, DisabledNameIC);

    LookupStatus status = cc.updateForName();
    if (status == Lookup_Error)
        THROW();

    Value rval;
    if (!cc.retrieve(&rval))
        THROW();
    f.regs.sp[0] = rval;
}
// Disabled-PIC fallbacks for BindName, with and without the property cache.
static void JS_FASTCALL
DisabledBindNameIC(VMFrame &f, ic::PICInfo *pic)
{
    stubs::BindName(f);
}

static void JS_FASTCALL
DisabledBindNameICNoCache(VMFrame &f, ic::PICInfo *pic)
{
    stubs::BindNameNoCache(f, pic->atom);
}
/*
 * Slow-path entry for JSOP_BINDNAME PICs: find the object the name binds on,
 * update the IC, and push that object.
 */
void JS_FASTCALL
ic::BindName(VMFrame &f, ic::PICInfo *pic)
{
    JSScript *script = f.fp()->script();

    VoidStubPIC stub = pic->usePropCache
                       ? DisabledBindNameIC
                       : DisabledBindNameICNoCache;
    BindNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->atom, stub);

    JSObject *obj = cc.update();
    if (!obj) {
        cc.disable("error");
        THROW();
    }

    f.regs.sp[0].setObject(*obj);
}
1905 bool
1906 BaseIC::isCallOp()
1908 return !!(js_CodeSpec[op].format & JOF_CALLOP);
/*
 * Emit a PIC diagnostic line (opcode, event, message, script location).
 * Compiled out entirely unless JS_METHODJIT_SPEW is defined.
 */
void
BaseIC::spew(JSContext *cx, const char *event, const char *message)
{
#ifdef JS_METHODJIT_SPEW
    JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
               js_CodeName[op], event, message, cx->fp()->script()->filename,
               js_FramePCToLineNumber(cx, cx->fp()));
#endif
}
/*
 * Permanently disable this IC: relink the slow-path call to |stub| (a
 * non-updating fallback) so future executions skip IC machinery.
 */
LookupStatus
BaseIC::disable(JSContext *cx, const char *reason, void *stub)
{
    spew(cx, "disabled", reason);
    RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
    repatcher.relink(slowPathCall, FunctionPtr(stub));
    return Lookup_Uncacheable;
}
1930 bool
1931 BaseIC::shouldUpdate(JSContext *cx)
1933 if (!hit) {
1934 hit = true;
1935 spew(cx, "ignored", "first hit");
1936 return false;
1938 JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
1939 return true;
// Disabled-IC fallbacks for GETELEM / CALLELEM: forward to generic stubs.
static void JS_FASTCALL
DisabledGetElem(VMFrame &f, ic::GetElementIC *ic)
{
    stubs::GetElem(f);
}

static void JS_FASTCALL
DisabledCallElem(VMFrame &f, ic::GetElementIC *ic)
{
    stubs::CallElem(f);
}
1954 bool
1955 GetElementIC::shouldUpdate(JSContext *cx)
1957 if (!hit) {
1958 hit = true;
1959 spew(cx, "ignored", "first hit");
1960 return false;
1962 JS_ASSERT(stubsGenerated < MAX_GETELEM_IC_STUBS);
1963 return true;
1966 LookupStatus
1967 GetElementIC::disable(JSContext *cx, const char *reason)
1969 slowCallPatched = true;
1970 void *stub = (op == JSOP_GETELEM)
1971 ? JS_FUNC_TO_DATA_PTR(void *, DisabledGetElem)
1972 : JS_FUNC_TO_DATA_PTR(void *, DisabledCallElem);
1973 BaseIC::disable(cx, reason, stub);
1974 return Lookup_Uncacheable;
/* Disable the IC and report an unrecoverable (OOM/compile) error. */
LookupStatus
GetElementIC::error(JSContext *cx)
{
    disable(cx, "error");
    return Lookup_Error;
}
/*
 * Undo all patching performed on this IC (e.g. before code is discarded):
 * restore the inline type/clasp guard jumps to the slow path, restore the
 * slow-path call to the appropriate updating IC entry, then reset state.
 */
void
GetElementIC::purge()
{
    if (inlineTypeGuardPatched || inlineClaspGuardPatched) {
        RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);

        // Repatch the inline jumps.
        if (inlineTypeGuardPatched)
            repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
        if (inlineClaspGuardPatched)
            repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
    }

    if (slowCallPatched) {
        RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
        if (op == JSOP_GETELEM)
            repatcher.relink(slowPathCall, FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
        else if (op == JSOP_CALLELEM)
            repatcher.relink(slowPathCall, FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::CallElement)));
    }

    reset();
}
2008 LookupStatus
2009 GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid id, Value *vp)
2011 JS_ASSERT(v.isString());
2013 GetPropertyHelper<GetElementIC> getprop(cx, obj, JSID_TO_ATOM(id), *this);
2014 LookupStatus status = getprop.lookupAndTest();
2015 if (status != Lookup_Cacheable)
2016 return status;
2018 Assembler masm;
2020 // Guard on the string's type and identity.
2021 MaybeJump atomTypeGuard;
2022 if (hasInlineTypeGuard() && !inlineTypeGuardPatched) {
2023 // We link all string-key dependent stubs together, and store the
2024 // first set of guards in the IC, separately, from int-key dependent
2025 // stubs. As long as we guarantee that the first string-key dependent
2026 // stub guards on the key type, then all other string-key stubs can
2027 // omit the guard.
2028 JS_ASSERT(!idRemat.isTypeKnown());
2029 atomTypeGuard = masm.testString(Assembler::NotEqual, typeReg);
2030 } else {
2031 // If there was no inline type guard, then a string type is guaranteed.
2032 // Otherwise, we are guaranteed the type has already been checked, via
2033 // the comment above.
2034 JS_ASSERT_IF(!hasInlineTypeGuard(), idRemat.knownType() == JSVAL_TYPE_STRING);
2037 // Reify the shape before guards that could flow into shape guarding stubs.
2038 if (!obj->isDenseArray() && !typeRegHasBaseShape) {
2039 masm.loadShape(objReg, typeReg);
2040 typeRegHasBaseShape = true;
2043 MaybeJump atomIdGuard;
2044 if (!idRemat.isConstant())
2045 atomIdGuard = masm.branchPtr(Assembler::NotEqual, idRemat.dataReg(), ImmPtr(v.toString()));
2047 // Guard on the base shape (or in the dense array case, the clasp).
2048 Jump shapeGuard;
2049 if (obj->isDenseArray()) {
2050 shapeGuard = masm.testObjClass(Assembler::NotEqual, objReg, obj->getClass());
2051 } else {
2052 shapeGuard = masm.branch32(Assembler::NotEqual, typeReg, Imm32(obj->shape()));
2055 // Guard on the prototype, if applicable.
2056 MaybeJump protoGuard;
2057 JSObject *holder = getprop.holder;
2058 RegisterID holderReg = objReg;
2059 if (obj != holder) {
2060 // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
2061 holderReg = typeReg;
2062 masm.move(ImmPtr(holder), holderReg);
2063 typeRegHasBaseShape = false;
2065 // Guard on the holder's shape.
2066 protoGuard = masm.guardShape(holderReg, holder);
2069 if (op == JSOP_CALLELEM) {
2070 // Emit a write of |obj| to the top of the stack, before we lose it.
2071 Value *thisVp = &cx->regs->sp[-1];
2072 Address thisSlot(JSFrameReg, JSStackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
2073 masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot);
2076 // Load the value.
2077 const Shape *shape = getprop.shape;
2078 masm.loadObjProp(holder, holderReg, shape, typeReg, objReg);
2080 Jump done = masm.jump();
2082 PICLinker buffer(cx, *this);
2083 if (!buffer.init(masm))
2084 return error(cx);
2086 // Patch all guards.
2087 buffer.maybeLink(atomIdGuard, slowPathStart);
2088 buffer.maybeLink(atomTypeGuard, slowPathStart);
2089 buffer.link(shapeGuard, slowPathStart);
2090 buffer.maybeLink(protoGuard, slowPathStart);
2091 buffer.link(done, fastPathRejoin);
2093 CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2094 #if DEBUG
2095 char *chars = js_DeflateString(cx, v.toString()->chars(), v.toString()->length());
2096 JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom 0x%x (\"%s\") shape 0x%x (%s: %d)\n",
2097 js_CodeName[op], cs.executableAddress(), id, chars, holder->shape(),
2098 cx->fp()->script()->filename, js_FramePCToLineNumber(cx, cx->fp()));
2099 cx->free(chars);
2100 #endif
2102 // Update the inline guards, if needed.
2103 if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalClaspGuard()) {
2104 PICRepatchBuffer repatcher(*this, fastPathStart);
2106 if (shouldPatchInlineTypeGuard()) {
2107 // A type guard is present in the inline path, and this is the
2108 // first string stub, so patch it now.
2109 JS_ASSERT(!inlineTypeGuardPatched);
2110 JS_ASSERT(atomTypeGuard.isSet());
2112 repatcher.relink(inlineTypeGuard, cs);
2113 inlineTypeGuardPatched = true;
2116 if (shouldPatchUnconditionalClaspGuard()) {
2117 // The clasp guard is unconditional, meaning there is no type
2118 // check. This is the first stub, so it has to be patched. Note
2119 // that it is wrong to patch the inline clasp guard otherwise,
2120 // because it follows an integer-id guard.
2121 JS_ASSERT(!hasInlineTypeGuard());
2123 repatcher.relink(inlineClaspGuard, cs);
2124 inlineClaspGuardPatched = true;
2128 // If there were previous stub guards, patch them now.
2129 if (hasLastStringStub) {
2130 PICRepatchBuffer repatcher(*this, lastStringStub);
2131 if (atomGuard)
2132 repatcher.relink(atomGuard, cs);
2133 repatcher.relink(firstShapeGuard, cs);
2134 if (secondShapeGuard)
2135 repatcher.relink(secondShapeGuard, cs);
2138 // Update state.
2139 hasLastStringStub = true;
2140 lastStringStub = cs;
2141 if (atomIdGuard.isSet()) {
2142 atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
2143 JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
2144 JS_ASSERT(atomGuard);
2145 } else {
2146 atomGuard = 0;
2148 if (protoGuard.isSet()) {
2149 secondShapeGuard = buffer.locationOf(protoGuard.get()) - cs;
2150 JS_ASSERT(secondShapeGuard == buffer.locationOf(protoGuard.get()) - cs);
2151 JS_ASSERT(secondShapeGuard);
2152 } else {
2153 secondShapeGuard = 0;
2155 firstShapeGuard = buffer.locationOf(shapeGuard) - cs;
2156 JS_ASSERT(firstShapeGuard == buffer.locationOf(shapeGuard) - cs);
2157 JS_ASSERT(firstShapeGuard);
2159 stubsGenerated++;
2161 if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2162 disable(cx, "max stubs reached");
2164 // Finally, fetch the value to avoid redoing the property lookup.
2165 if (shape->isMethod())
2166 *vp = ObjectValue(shape->methodObject());
2167 else
2168 *vp = holder->getSlot(shape->slot);
2170 return Lookup_Cacheable;
2173 LookupStatus
2174 GetElementIC::update(JSContext *cx, JSObject *obj, const Value &v, jsid id, Value *vp)
2176 if (v.isString())
2177 return attachGetProp(cx, obj, v, id, vp);
2178 return disable(cx, "unhandled object and key type");
// Out-of-line entry for JSOP_CALLELEM. On success, the element value is
// left in sp[-2] and the |this| value for the upcoming call in sp[-1].
// Tries to update the GetElementIC so future executions stay on the
// fast path.
2181 void JS_FASTCALL
2182 ic::CallElement(VMFrame &f, ic::GetElementIC *ic)
2184     JSContext *cx = f.cx;
2186     // Right now, we don't optimize for strings.
     // Primitive receiver: permanently disable the IC and fall back to
     // the generic stub.
2187     if (!f.regs.sp[-2].isObject()) {
2188         ic->disable(cx, "non-object");
2189         stubs::CallElem(f);
2190         return;
2193     Value thisv = f.regs.sp[-2];
     // Object used for the property lookup; null means an error was raised.
2194     JSObject *thisObj = ValuePropertyBearer(cx, thisv, -2);
2195     if (!thisObj)
2196         THROW();
     // Normalize the key: small int32 keys become int jsids directly,
     // everything else goes through id interning (which can fail).
2198     jsid id;
2199     Value idval = f.regs.sp[-1];
2200     if (idval.isInt32() && INT_FITS_IN_JSID(idval.toInt32()))
2201         id = INT_TO_JSID(idval.toInt32());
2202     else if (!js_InternNonIntElementId(cx, thisObj, idval, &id))
2203         THROW();
2205     if (ic->shouldUpdate(cx)) {
2206 #ifdef DEBUG
         // Poison the result slot so the assert below can verify that a
         // cacheable lookup really wrote the value.
2207         f.regs.sp[-2] = MagicValue(JS_GENERIC_MAGIC);
2208 #endif
2209         LookupStatus status = ic->update(cx, thisObj, idval, id, &f.regs.sp[-2]);
2210         if (status != Lookup_Uncacheable) {
2211             if (status == Lookup_Error)
2212                 THROW();
2214             // If the result can be cached, the value was already retrieved.
2215             JS_ASSERT(!f.regs.sp[-2].isMagic());
2216             f.regs.sp[-1].setObject(*thisObj);
2217             return;
2221     /* Get or set the element. */
2222     if (!js_GetMethod(cx, thisObj, id, JSGET_NO_METHOD_BARRIER, &f.regs.sp[-2]))
2223         THROW();
2225 #if JS_HAS_NO_SUCH_METHOD
     // Callee came up undefined: rearrange the stack and give
     // __noSuchMethod__ handling a chance.
2226     if (JS_UNLIKELY(f.regs.sp[-2].isUndefined()) && thisv.isObject()) {
2227         f.regs.sp[-2] = f.regs.sp[-1];
2228         f.regs.sp[-1].setObject(*thisObj);
2229         if (!js_OnUnknownMethod(cx, f.regs.sp - 2))
2230             THROW();
2231     } else
2232 #endif
     // Normal case: |this| for the call is the original lval.
2234         f.regs.sp[-1] = thisv;
// Out-of-line entry for JSOP_GETELEM: computes obj[id] into sp[-2],
// attaching an IC stub via GetElementIC::update when possible.
2238 void JS_FASTCALL
2239 ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
2241     JSContext *cx = f.cx;
2243     // Right now, we don't optimize for strings.
     // Primitive receiver: permanently disable the IC and take the
     // generic stub instead.
2244     if (!f.regs.sp[-2].isObject()) {
2245         ic->disable(cx, "non-object");
2246         stubs::GetElem(f);
2247         return;
2250     JSObject *obj = ValueToObject(cx, &f.regs.sp[-2]);
2251     if (!obj)
2252         THROW();
2254     Value idval = f.regs.sp[-1];
     // Normalize the key: small int32 keys become int jsids directly,
     // everything else goes through id interning (which can fail).
2256     jsid id;
2257     if (idval.isInt32() && INT_FITS_IN_JSID(idval.toInt32())) {
2258         id = INT_TO_JSID(idval.toInt32());
2259     } else {
2260         if (!js_InternNonIntElementId(cx, obj, idval, &id))
2261             THROW();
2264     if (ic->shouldUpdate(cx)) {
2265 #ifdef DEBUG
         // Poison the result slot so the assert below can verify that a
         // cacheable lookup really wrote the value.
2266         f.regs.sp[-2] = MagicValue(JS_GENERIC_MAGIC);
2267 #endif
2268         LookupStatus status = ic->update(cx, obj, idval, id, &f.regs.sp[-2]);
2269         if (status != Lookup_Uncacheable) {
2270             if (status == Lookup_Error)
2271                 THROW();
2273             // If the result can be cached, the value was already retrieved.
2274             JS_ASSERT(!f.regs.sp[-2].isMagic());
2275             return;
     // Uncacheable: perform the lookup the slow way.
2279     if (!obj->getProperty(cx, id, &f.regs.sp[-2]))
2280         THROW();
2283 #define APPLY_STRICTNESS(f, s) \
2284 (FunctionTemplateConditional(s, f<true>, f<false>))
2286 LookupStatus
2287 SetElementIC::disable(JSContext *cx, const char *reason)
2289 slowCallPatched = true;
2290 VoidStub stub = APPLY_STRICTNESS(stubs::SetElem, strictMode);
2291 BaseIC::disable(cx, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
2292 return Lookup_Uncacheable;
// Report a hard failure while generating a stub: disable the IC and
// return Lookup_Error so the caller propagates the pending exception.
2295 LookupStatus
2296 SetElementIC::error(JSContext *cx)
2298     disable(cx, "error");
2299     return Lookup_Error;
// Return this IC to its pristine state: re-link any inline guard jumps
// back to the slow path, restore the slow call to the IC-updating entry
// point, and clear all bookkeeping via reset().
2302 void
2303 SetElementIC::purge()
2305     if (inlineClaspGuardPatched || inlineHoleGuardPatched) {
2306         RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
2308         // Repatch the inline jumps.
2309         if (inlineClaspGuardPatched)
2310             repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
2311         if (inlineHoleGuardPatched)
2312             repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
2315     if (slowCallPatched) {
         // Point the slow call back at ic::SetElement, honoring the
         // script's strictness.
2316         RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
2317         void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
2318         repatcher.relink(slowPathCall, FunctionPtr(stub));
2321     reset();
// Generate a stub for storing into a hole (or past the initialized
// length) of a dense array: guards that no prototype has indexed
// properties, bumps the array length if needed, and stores the value.
2324 LookupStatus
2325 SetElementIC::attachHoleStub(JSContext *cx, JSObject *obj, int32 keyval)
2327     if (keyval < 0)
2328         return disable(cx, "negative key index");
2330     // We may have failed a capacity check instead of a dense array check.
2331     // However we should still build the IC in this case, since it could
2332     // be in a loop that is filling in the array. We can assert, however,
2333     // that either we're in capacity or there's a hole - guaranteed by
2334     // the fast path.
2335     JS_ASSERT((jsuint)keyval >= obj->getDenseArrayCapacity() ||
2336               obj->getDenseArrayElement(keyval).isMagic(JS_ARRAY_HOLE));
2338     if (js_PrototypeHasIndexedProperties(cx, obj))
2339         return disable(cx, "prototype has indexed properties");
2341     Assembler masm;
2343     // Test for indexed properties in Array.prototype. It is safe to bake in
2344     // this pointer because changing __proto__ will slowify.
2345     JSObject *arrayProto = obj->getProto();
2346     masm.move(ImmPtr(arrayProto), objReg);
2347     Jump extendedArray = masm.branchTest32(Assembler::NonZero,
2348                                            Address(objReg, offsetof(JSObject, flags)),
2349                                            Imm32(JSObject::INDEXED));
2351     // Test for indexed properties in Object.prototype. Guard that
2352     // Array.prototype doesn't change, too.
2353     JSObject *objProto = arrayProto->getProto();
2354     Jump sameProto = masm.branchPtr(Assembler::NotEqual,
2355                                     Address(objReg, offsetof(JSObject, proto)),
2356                                     ImmPtr(objProto));
2357     masm.move(ImmPtr(objProto), objReg);
2358     Jump extendedObject = masm.branchTest32(Assembler::NonZero,
2359                                             Address(objReg, offsetof(JSObject, flags)),
2360                                             Imm32(JSObject::INDEXED));
2362     // Restore |obj|.
2363     masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2365     // Guard against negative indices.
2366     MaybeJump keyGuard;
2367     if (!hasConstantKey)
2368         keyGuard = masm.branch32(Assembler::LessThan, keyReg, Imm32(0));
2370     // Update the array length if necessary.
2371     Jump skipUpdate;
2372     Address arrayLength(objReg, offsetof(JSObject, privateData));
2373     if (hasConstantKey) {
2374         skipUpdate = masm.branch32(Assembler::Above, arrayLength, Imm32(keyValue));
2375         masm.store32(Imm32(keyValue + 1), arrayLength);
2376     } else {
         // Temporarily turn keyReg into key+1 for the length store, then
         // restore it: it is still needed for the indexed store below.
2377         skipUpdate = masm.branch32(Assembler::Above, arrayLength, keyReg);
2378         masm.add32(Imm32(1), keyReg);
2379         masm.store32(keyReg, arrayLength);
2380         masm.sub32(Imm32(1), keyReg);
2382     skipUpdate.linkTo(masm.label(), &masm);
2384     // Store the value back.
2385     masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
2386     if (hasConstantKey) {
2387         Address slot(objReg, keyValue * sizeof(Value));
2388         masm.storeValue(vr, slot);
2389     } else {
2390         BaseIndex slot(objReg, keyReg, Assembler::JSVAL_SCALE);
2391         masm.storeValue(vr, slot);
2394     Jump done = masm.jump();
2396     JS_ASSERT(!execPool);
2397     JS_ASSERT(!inlineHoleGuardPatched);
2399     LinkerHelper buffer(cx);
2400     execPool = buffer.init(masm);
2401     if (!execPool)
2402         return error(cx);
2404     // Patch all guards.
2405     buffer.link(extendedArray, slowPathStart);
2406     buffer.link(sameProto, slowPathStart);
2407     buffer.link(extendedObject, slowPathStart);
2408     buffer.link(done, fastPathRejoin);
2410     CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2411     JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
2413     PICRepatchBuffer repatcher(*this, fastPathStart);
2414     repatcher.relink(inlineHoleGuard, cs);
2415     inlineHoleGuardPatched = true;
     // Only one hole stub is ever attached; disable further update
     // attempts now that it is in place.
2417     disable(cx, "generated dense array hole stub");
2419     return Lookup_Cacheable;
2422 LookupStatus
2423 SetElementIC::update(JSContext *cx, const Value &objval, const Value &idval)
2425 if (!objval.isObject())
2426 return disable(cx, "primitive lval");
2427 if (!idval.isInt32())
2428 return disable(cx, "non-int32 key");
2430 JSObject *obj = &objval.toObject();
2431 int32 key = idval.toInt32();
2433 if (obj->isDenseArray())
2434 return attachHoleStub(cx, obj, key);
2436 return disable(cx, "unsupported object type");
// Out-of-line entry for JSOP_SETELEM: optionally attaches a stub via
// SetElementIC::update, then always performs the store through the
// generic stub (the stub only speeds up *future* executions).
2439 template<JSBool strict>
2440 void JS_FASTCALL
2441 ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
2443     JSContext *cx = f.cx;
2445     if (ic->shouldUpdate(cx)) {
         // sp[-3] is the object, sp[-2] the key; only a hard error
         // (not mere uncacheability) throws.
2446         LookupStatus status = ic->update(cx, f.regs.sp[-3], f.regs.sp[-2]);
2447         if (status == Lookup_Error)
2448             THROW();
2451     stubs::SetElem<strict>(f);
2454 template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
2455 template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
// Drop all generated PIC and element-IC stubs for this JIT script,
// dispatching each PIC to the compiler class that owns its code, then
// clearing its state.
2457 void
2458 JITScript::purgePICs()
2460     for (uint32 i = 0; i < nPICs; i++) {
2461         ic::PICInfo &pic = pics[i];
2462         switch (pic.kind) {
2463           case ic::PICInfo::SET:
2464           case ic::PICInfo::SETMETHOD:
2465             SetPropCompiler::reset(pic);
2466             break;
2467           case ic::PICInfo::NAME:
2468           case ic::PICInfo::XNAME:
2469             ScopeNameCompiler::reset(pic);
2470             break;
2471           case ic::PICInfo::BIND:
2472             BindNameCompiler::reset(pic);
2473             break;
2474           case ic::PICInfo::CALL: /* fall-through */
2475           case ic::PICInfo::GET:
2476             GetPropCompiler::reset(pic);
2477             break;
2478           default:
2479             JS_NOT_REACHED("Unhandled PIC kind");
2480             break;
2482         pic.reset();
     // Element ICs manage their own repatching via purge().
2485     for (uint32 i = 0; i < nGetElems; i++)
2486         getElems[i].purge();
2487     for (uint32 i = 0; i < nSetElems; i++)
2488         setElems[i].purge();
2491 void
2492 ic::PurgePICs(JSContext *cx, JSScript *script)
2494 if (script->jitNormal)
2495 script->jitNormal->purgePICs();
2496 if (script->jitCtor)
2497 script->jitCtor->purgePICs();
2500 #endif /* JS_POLYIC */