Refactor and improve GETELEM IC (bug 602641, r=dmandelin).
[mozilla-central.git] / js / src / methodjit / FrameState.cpp
blob4ade98dd62fb58004c2c0f284f1a210c2d5634d5
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * David Anderson <danderson@mozilla.com>
26 * Alternatively, the contents of this file may be used under the terms of
27 * either of the GNU General Public License Version 2 or later (the "GPL"),
28 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 * in which case the provisions of the GPL or the LGPL are applicable instead
30 * of those above. If you wish to allow use of your version of this file only
31 * under the terms of either the GPL or the LGPL, and not to allow others to
32 * use your version of this file under the terms of the MPL, indicate your
33 * decision by deleting the provisions above and replace them with the notice
34 * and other provisions required by the GPL or the LGPL. If you do not delete
35 * the provisions above, a recipient may use your version of this file under
36 * the terms of any one of the MPL, the GPL or the LGPL.
38 * ***** END LICENSE BLOCK ***** */
39 #include "jscntxt.h"
40 #include "FrameState.h"
41 #include "FrameState-inl.h"
43 using namespace js;
44 using namespace js::mjit;
46 /* Because of Value alignment */
47 JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
49 FrameState::FrameState(JSContext *cx, JSScript *script, Assembler &masm)
50 : cx(cx), script(script), masm(masm), entries(NULL),
51 #if defined JS_NUNBOX32
52 reifier(cx, *this),
53 #endif
54 inTryBlock(false)
58 FrameState::~FrameState()
60 cx->free(entries);
63 bool
64 FrameState::init(uint32 nargs)
66 this->nargs = nargs;
68 uint32 nslots = script->nslots + nargs;
69 if (!nslots) {
70 sp = spBase = locals = args = NULL;
71 return true;
74 eval = script->usesEval || cx->compartment->debugMode;
76 size_t totalBytes = sizeof(FrameEntry) * nslots + // entries[]
77 sizeof(FrameEntry *) * nslots + // tracker.entries
78 (eval ? 0 : sizeof(JSPackedBool) * nslots); // closedVars[]
80 uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
81 if (!cursor)
82 return false;
84 #if defined JS_NUNBOX32
85 if (!reifier.init(nslots))
86 return false;
87 #endif
89 entries = (FrameEntry *)cursor;
90 cursor += sizeof(FrameEntry) * nslots;
92 args = entries;
93 locals = args + nargs;
94 spBase = locals + script->nfixed;
95 sp = spBase;
97 tracker.entries = (FrameEntry **)cursor;
98 cursor += sizeof(FrameEntry *) * nslots;
100 if (!eval && nslots) {
101 closedVars = (JSPackedBool *)cursor;
102 cursor += sizeof(JSPackedBool) * nslots;
105 JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
107 return true;
110 void
111 FrameState::takeReg(RegisterID reg)
113 if (freeRegs.hasReg(reg)) {
114 freeRegs.takeReg(reg);
115 JS_ASSERT(!regstate[reg].usedBy());
116 } else {
117 JS_ASSERT(regstate[reg].fe());
118 evictReg(reg);
119 regstate[reg].forget();
123 void
124 FrameState::evictReg(RegisterID reg)
126 FrameEntry *fe = regstate[reg].fe();
128 if (regstate[reg].type() == RematInfo::TYPE) {
129 ensureTypeSynced(fe, masm);
130 fe->type.setMemory();
131 } else {
132 ensureDataSynced(fe, masm);
133 fe->data.setMemory();
137 JSC::MacroAssembler::RegisterID
138 FrameState::evictSomeReg(uint32 mask)
140 #ifdef DEBUG
141 bool fallbackSet = false;
142 #endif
143 RegisterID fallback = Registers::ReturnReg;
145 for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
146 RegisterID reg = RegisterID(i);
148 /* Register is not allocatable, don't bother. */
149 if (!(Registers::maskReg(reg) & mask))
150 continue;
152 /* Register is not owned by the FrameState. */
153 FrameEntry *fe = regstate[i].fe();
154 if (!fe)
155 continue;
157 /* Try to find a candidate... that doesn't need spilling. */
158 #ifdef DEBUG
159 fallbackSet = true;
160 #endif
161 fallback = reg;
163 if (regstate[i].type() == RematInfo::TYPE && fe->type.synced()) {
164 fe->type.setMemory();
165 return fallback;
167 if (regstate[i].type() == RematInfo::DATA && fe->data.synced()) {
168 fe->data.setMemory();
169 return fallback;
173 JS_ASSERT(fallbackSet);
175 evictReg(fallback);
176 return fallback;
180 void
181 FrameState::syncAndForgetEverything()
183 syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
184 forgetEverything();
187 void
188 FrameState::resetInternalState()
190 for (uint32 i = 0; i < tracker.nentries; i++)
191 tracker[i]->untrack();
193 tracker.reset();
194 freeRegs.reset();
197 void
198 FrameState::discardFrame()
200 resetInternalState();
202 memset(regstate, 0, sizeof(regstate));
205 void
206 FrameState::forgetEverything()
208 resetInternalState();
210 #ifdef DEBUG
211 for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
212 JS_ASSERT(!regstate[i].usedBy());
214 #endif
217 void
218 FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
220 if (fe->isConstant()) {
221 masm.storeValue(fe->getValue(), address);
222 return;
225 if (fe->isCopy())
226 fe = fe->copyOf();
228 /* Cannot clobber the address's register. */
229 JS_ASSERT(!freeRegs.hasReg(address.base));
231 /* If loading from memory, ensure destination differs. */
232 JS_ASSERT_IF((fe->type.inMemory() || fe->data.inMemory()),
233 addressOf(fe).base != address.base ||
234 addressOf(fe).offset != address.offset);
236 #if defined JS_PUNBOX64
237 if (fe->type.inMemory() && fe->data.inMemory()) {
238 /* Future optimization: track that the Value is in a register. */
239 RegisterID vreg = Registers::ValueReg;
240 masm.loadPtr(addressOf(fe), vreg);
241 masm.storePtr(vreg, address);
242 return;
246 * If dreg is obtained via allocReg(), then calling
247 * pinReg() trips an assertion. But in all other cases,
248 * calling pinReg() is necessary in the fe->type.inMemory() path.
249 * Remember whether pinReg() can be safely called.
251 bool canPinDreg = true;
252 bool wasInRegister = fe->data.inRegister();
254 /* Get a register for the payload. */
255 MaybeRegisterID dreg;
256 if (fe->data.inRegister()) {
257 dreg = fe->data.reg();
258 } else {
259 JS_ASSERT(fe->data.inMemory());
260 if (popped) {
261 dreg = allocReg();
262 canPinDreg = false;
263 } else {
264 dreg = allocReg(fe, RematInfo::DATA);
265 fe->data.setRegister(dreg.reg());
267 masm.loadPayload(addressOf(fe), dreg.reg());
270 /* Store the Value. */
271 if (fe->type.inRegister()) {
272 masm.storeValueFromComponents(fe->type.reg(), dreg.reg(), address);
273 } else if (fe->isTypeKnown()) {
274 masm.storeValueFromComponents(ImmType(fe->getKnownType()), dreg.reg(), address);
275 } else {
276 JS_ASSERT(fe->type.inMemory());
277 if (canPinDreg)
278 pinReg(dreg.reg());
280 RegisterID treg = popped ? allocReg() : allocReg(fe, RematInfo::TYPE);
281 masm.loadTypeTag(addressOf(fe), treg);
282 masm.storeValueFromComponents(treg, dreg.reg(), address);
284 if (popped)
285 freeReg(treg);
286 else
287 fe->type.setRegister(treg);
289 if (canPinDreg)
290 unpinReg(dreg.reg());
293 /* If register is untracked, free it. */
294 if (!wasInRegister && popped)
295 freeReg(dreg.reg());
297 #elif defined JS_NUNBOX32
299 if (fe->data.inRegister()) {
300 masm.storePayload(fe->data.reg(), address);
301 } else {
302 JS_ASSERT(fe->data.inMemory());
303 RegisterID reg = popped ? allocReg() : allocReg(fe, RematInfo::DATA);
304 masm.loadPayload(addressOf(fe), reg);
305 masm.storePayload(reg, address);
306 if (popped)
307 freeReg(reg);
308 else
309 fe->data.setRegister(reg);
312 if (fe->isTypeKnown()) {
313 masm.storeTypeTag(ImmType(fe->getKnownType()), address);
314 } else if (fe->type.inRegister()) {
315 masm.storeTypeTag(fe->type.reg(), address);
316 } else {
317 JS_ASSERT(fe->type.inMemory());
318 RegisterID reg = popped ? allocReg() : allocReg(fe, RematInfo::TYPE);
319 masm.loadTypeTag(addressOf(fe), reg);
320 masm.storeTypeTag(reg, address);
321 if (popped)
322 freeReg(reg);
323 else
324 fe->type.setRegister(reg);
326 #endif
329 void FrameState::loadForReturn(FrameEntry *fe, RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
331 JS_ASSERT(dataReg != typeReg && dataReg != tempReg && typeReg != tempReg);
333 if (fe->isConstant()) {
334 masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
335 return;
338 if (fe->isCopy())
339 fe = fe->copyOf();
341 MaybeRegisterID maybeType = maybePinType(fe);
342 MaybeRegisterID maybeData = maybePinData(fe);
344 if (fe->isTypeKnown()) {
345 // If the data is in memory, or in the wrong reg, load/move it.
346 if (!maybeData.isSet())
347 masm.loadPayload(addressOf(fe), dataReg);
348 else if (maybeData.reg() != dataReg)
349 masm.move(maybeData.reg(), dataReg);
350 masm.move(ImmType(fe->getKnownType()), typeReg);
351 return;
354 // If both halves of the value are in memory, make this easier and load
355 // both pieces into their respective registers.
356 if (fe->type.inMemory() && fe->data.inMemory()) {
357 masm.loadValueAsComponents(addressOf(fe), typeReg, dataReg);
358 return;
361 // Now, we should be guaranteed that at least one part is in a register.
362 JS_ASSERT(maybeType.isSet() || maybeData.isSet());
364 // Make sure we have two registers while making sure not clobber either half.
365 // Here we are allowed to mess up the FrameState invariants, because this
366 // is specialized code for a path that is about to discard the entire frame.
367 if (!maybeType.isSet()) {
368 JS_ASSERT(maybeData.isSet());
369 if (maybeData.reg() != typeReg)
370 maybeType = typeReg;
371 else
372 maybeType = tempReg;
373 masm.loadTypeTag(addressOf(fe), maybeType.reg());
374 } else if (!maybeData.isSet()) {
375 JS_ASSERT(maybeType.isSet());
376 if (maybeType.reg() != dataReg)
377 maybeData = dataReg;
378 else
379 maybeData = tempReg;
380 masm.loadPayload(addressOf(fe), maybeData.reg());
383 RegisterID type = maybeType.reg();
384 RegisterID data = maybeData.reg();
386 if (data == typeReg && type == dataReg) {
387 masm.move(type, tempReg);
388 masm.move(data, dataReg);
389 masm.move(tempReg, typeReg);
390 } else if (data != dataReg) {
391 if (type == typeReg) {
392 masm.move(data, dataReg);
393 } else if (type != dataReg) {
394 masm.move(data, dataReg);
395 if (type != typeReg)
396 masm.move(type, typeReg);
397 } else {
398 JS_ASSERT(data != typeReg);
399 masm.move(type, typeReg);
400 masm.move(data, dataReg);
402 } else if (type != typeReg) {
403 masm.move(type, typeReg);
#ifdef DEBUG
/*
 * Debug-only consistency check: every tracked, live entry's register claims
 * must agree with regstate[], copies must obey the tracker/frame ordering
 * invariants, and the recomputed free set must equal freeRegs.
 */
void
FrameState::assertValidRegisterState() const
{
    Registers checkedFreeRegs;

    for (uint32 i = 0; i < tracker.nentries; i++) {
        FrameEntry *fe = tracker[i];
        if (fe >= sp)
            continue;

        JS_ASSERT(i == fe->trackerIndex());
        JS_ASSERT_IF(fe->isCopy(),
                     fe->trackerIndex() > fe->copyOf()->trackerIndex());
        JS_ASSERT_IF(fe->isCopy(), fe > fe->copyOf());
        JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister());
        JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < sp);
        JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied());

        if (fe->isCopy())
            continue;
        if (fe->type.inRegister()) {
            checkedFreeRegs.takeReg(fe->type.reg());
            JS_ASSERT(regstate[fe->type.reg()].fe() == fe);
        }
        if (fe->data.inRegister()) {
            checkedFreeRegs.takeReg(fe->data.reg());
            JS_ASSERT(regstate[fe->data.reg()].fe() == fe);
        }
    }

    JS_ASSERT(checkedFreeRegs == freeRegs);

    for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
        JS_ASSERT(!regstate[i].isPinned());
        JS_ASSERT_IF(regstate[i].fe(), !freeRegs.hasReg(RegisterID(i)));
        JS_ASSERT_IF(regstate[i].fe(), regstate[i].fe()->isTracked());
    }
}
#endif
#if defined JS_NUNBOX32
/*
 * Slow-path sync for NUNBOX32: delegate each tracked entry between
 * |resumeAt| and |bottom| (inclusive, walking down) to the reifier, which
 * knows how to sync entries whose backing halves still live in memory.
 */
void
FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
                      FrameEntry *bottom) const
{
    reifier.reset(&masm, avail, resumeAt, bottom);

    for (FrameEntry *fe = resumeAt; fe >= bottom; fe--) {
        if (!fe->isTracked())
            continue;

        reifier.sync(fe);
    }
}
#endif
464 void
465 FrameState::sync(Assembler &masm, Uses uses) const
467 if (!entries)
468 return;
470 /* Sync all registers up-front. */
471 Registers allRegs(Registers::AvailRegs);
472 while (!allRegs.empty()) {
473 RegisterID reg = allRegs.takeAnyReg();
474 FrameEntry *fe = regstate[reg].usedBy();
475 if (!fe)
476 continue;
478 JS_ASSERT(fe->isTracked());
480 #if defined JS_PUNBOX64
481 /* Sync entire FE to prevent loads. */
482 ensureFeSynced(fe, masm);
484 /* Take the other register in the pair, if one exists. */
485 if (regstate[reg].type() == RematInfo::DATA && fe->type.inRegister())
486 allRegs.takeReg(fe->type.reg());
487 else if (regstate[reg].type() == RematInfo::TYPE && fe->data.inRegister())
488 allRegs.takeReg(fe->data.reg());
489 #elif defined JS_NUNBOX32
490 /* Sync register if unsynced. */
491 if (regstate[reg].type() == RematInfo::DATA) {
492 JS_ASSERT(fe->data.reg() == reg);
493 ensureDataSynced(fe, masm);
494 } else {
495 JS_ASSERT(fe->type.reg() == reg);
496 ensureTypeSynced(fe, masm);
498 #endif
502 * Keep track of free registers using a bitmask. If we have to drop into
503 * syncFancy(), then this mask will help avoid eviction.
505 Registers avail(freeRegs);
506 Registers temp(Registers::TempRegs);
508 FrameEntry *bottom = sp - uses.nuses;
510 for (FrameEntry *fe = sp - 1; fe >= bottom; fe--) {
511 if (!fe->isTracked())
512 continue;
514 FrameEntry *backing = fe;
516 if (!fe->isCopy()) {
517 if (fe->data.inRegister())
518 avail.putReg(fe->data.reg());
519 if (fe->type.inRegister())
520 avail.putReg(fe->type.reg());
521 } else {
522 backing = fe->copyOf();
523 JS_ASSERT(!backing->isConstant() && !fe->isConstant());
525 #if defined JS_PUNBOX64
526 if ((!fe->type.synced() && backing->type.inMemory()) ||
527 (!fe->data.synced() && backing->data.inMemory())) {
529 RegisterID syncReg = Registers::ValueReg;
531 /* Load the entire Value into syncReg. */
532 if (backing->type.synced() && backing->data.synced()) {
533 masm.loadValue(addressOf(backing), syncReg);
534 } else if (backing->type.inMemory()) {
535 masm.loadTypeTag(addressOf(backing), syncReg);
536 masm.orPtr(backing->data.reg(), syncReg);
537 } else {
538 JS_ASSERT(backing->data.inMemory());
539 masm.loadPayload(addressOf(backing), syncReg);
540 if (backing->isTypeKnown())
541 masm.orPtr(ImmType(backing->getKnownType()), syncReg);
542 else
543 masm.orPtr(backing->type.reg(), syncReg);
546 masm.storeValue(syncReg, addressOf(fe));
547 continue;
549 #elif defined JS_NUNBOX32
550 /* Fall back to a slower sync algorithm if load required. */
551 if ((!fe->type.synced() && backing->type.inMemory()) ||
552 (!fe->data.synced() && backing->data.inMemory())) {
553 syncFancy(masm, avail, fe, bottom);
554 return;
556 #endif
559 /* If a part still needs syncing, it is either a copy or constant. */
560 #if defined JS_PUNBOX64
561 /* All register-backed FEs have been entirely synced up-front. */
562 if (!fe->type.inRegister() && !fe->data.inRegister())
563 ensureFeSynced(fe, masm);
564 #elif defined JS_NUNBOX32
565 /* All components held in registers have been already synced. */
566 if (!fe->data.inRegister())
567 ensureDataSynced(fe, masm);
568 if (!fe->type.inRegister())
569 ensureTypeSynced(fe, masm);
570 #endif
574 void
575 FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
577 FrameEntry *spStop = sp - ignore.nuses;
579 /* Sync all kill-registers up-front. */
580 Registers search(kill.freeMask & ~freeRegs.freeMask);
581 while (!search.empty()) {
582 RegisterID reg = search.takeAnyReg();
583 FrameEntry *fe = regstate[reg].usedBy();
584 if (!fe || fe >= spStop)
585 continue;
587 JS_ASSERT(fe->isTracked());
589 #if defined JS_PUNBOX64
590 /* Don't use syncFe(), since that may clobber more registers. */
591 ensureFeSynced(fe, masm);
593 if (!fe->type.synced())
594 fe->type.sync();
595 if (!fe->data.synced())
596 fe->data.sync();
598 /* Take the other register in the pair, if one exists. */
599 if (regstate[reg].type() == RematInfo::DATA) {
600 JS_ASSERT(fe->data.reg() == reg);
601 if (fe->type.inRegister() && search.hasReg(fe->type.reg()))
602 search.takeReg(fe->type.reg());
603 } else {
604 JS_ASSERT(fe->type.reg() == reg);
605 if (fe->data.inRegister() && search.hasReg(fe->data.reg()))
606 search.takeReg(fe->data.reg());
608 #elif defined JS_NUNBOX32
609 /* Sync this register. */
610 if (regstate[reg].type() == RematInfo::DATA) {
611 JS_ASSERT(fe->data.reg() == reg);
612 syncData(fe);
613 } else {
614 JS_ASSERT(fe->type.reg() == reg);
615 syncType(fe);
617 #endif
620 uint32 maxvisits = tracker.nentries;
621 FrameEntry *bottom = sp - uses.nuses;
623 for (FrameEntry *fe = sp - 1; fe >= bottom && maxvisits; fe--) {
624 if (!fe->isTracked())
625 continue;
627 maxvisits--;
629 if (fe >= spStop)
630 continue;
632 syncFe(fe);
634 /* Forget registers. */
635 if (fe->data.inRegister() && kill.hasReg(fe->data.reg()) &&
636 !regstate[fe->data.reg()].isPinned()) {
637 forgetReg(fe->data.reg());
638 fe->data.setMemory();
640 if (fe->type.inRegister() && kill.hasReg(fe->type.reg()) &&
641 !regstate[fe->type.reg()].isPinned()) {
642 forgetReg(fe->type.reg());
643 fe->type.setMemory();
648 * Anything still alive at this point is guaranteed to be synced. However,
649 * it is necessary to evict temporary registers.
651 search = Registers(kill.freeMask & ~freeRegs.freeMask);
652 while (!search.empty()) {
653 RegisterID reg = search.takeAnyReg();
654 FrameEntry *fe = regstate[reg].usedBy();
655 if (!fe || fe >= spStop)
656 continue;
658 JS_ASSERT(fe->isTracked());
660 if (regstate[reg].type() == RematInfo::DATA) {
661 JS_ASSERT(fe->data.reg() == reg);
662 JS_ASSERT(fe->data.synced());
663 fe->data.setMemory();
664 } else {
665 JS_ASSERT(fe->type.reg() == reg);
666 JS_ASSERT(fe->type.synced());
667 fe->type.setMemory();
670 forgetReg(reg);
674 void
675 FrameState::merge(Assembler &masm, Changes changes) const
677 Registers search(Registers::AvailRegs & ~freeRegs.freeMask);
679 while (!search.empty()) {
680 RegisterID reg = search.peekReg();
681 FrameEntry *fe = regstate[reg].usedBy();
683 if (!fe) {
684 search.takeReg(reg);
685 continue;
688 if (fe->data.inRegister() && fe->type.inRegister()) {
689 search.takeReg(fe->data.reg());
690 search.takeReg(fe->type.reg());
691 masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg());
692 } else {
693 if (fe->data.inRegister()) {
694 search.takeReg(fe->data.reg());
695 masm.loadPayload(addressOf(fe), fe->data.reg());
697 if (fe->type.inRegister()) {
698 search.takeReg(fe->type.reg());
699 masm.loadTypeTag(addressOf(fe), fe->type.reg());
705 JSC::MacroAssembler::RegisterID
706 FrameState::copyDataIntoReg(FrameEntry *fe)
708 return copyDataIntoReg(this->masm, fe);
711 void
712 FrameState::copyDataIntoReg(FrameEntry *fe, RegisterID hint)
714 JS_ASSERT(!fe->data.isConstant());
716 if (fe->isCopy())
717 fe = fe->copyOf();
719 if (!fe->data.inRegister())
720 tempRegForData(fe);
722 RegisterID reg = fe->data.reg();
723 if (reg == hint) {
724 if (freeRegs.empty()) {
725 ensureDataSynced(fe, masm);
726 fe->data.setMemory();
727 } else {
728 reg = allocReg();
729 masm.move(hint, reg);
730 fe->data.setRegister(reg);
731 regstate[reg].associate(regstate[hint].fe(), RematInfo::DATA);
733 regstate[hint].forget();
734 } else {
735 pinReg(reg);
736 takeReg(hint);
737 unpinReg(reg);
738 masm.move(reg, hint);
742 JSC::MacroAssembler::RegisterID
743 FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
745 JS_ASSERT(!fe->data.isConstant());
747 if (fe->isCopy())
748 fe = fe->copyOf();
750 if (fe->data.inRegister()) {
751 RegisterID reg = fe->data.reg();
752 if (freeRegs.empty()) {
753 ensureDataSynced(fe, masm);
754 fe->data.setMemory();
755 regstate[reg].forget();
756 } else {
757 RegisterID newReg = allocReg();
758 masm.move(reg, newReg);
759 reg = newReg;
761 return reg;
764 RegisterID reg = allocReg();
766 if (!freeRegs.empty())
767 masm.move(tempRegForData(fe), reg);
768 else
769 masm.loadPayload(addressOf(fe),reg);
771 return reg;
774 JSC::MacroAssembler::RegisterID
775 FrameState::copyTypeIntoReg(FrameEntry *fe)
777 JS_ASSERT(!fe->type.isConstant());
779 if (fe->isCopy())
780 fe = fe->copyOf();
782 if (fe->type.inRegister()) {
783 RegisterID reg = fe->type.reg();
784 if (freeRegs.empty()) {
785 ensureTypeSynced(fe, masm);
786 fe->type.setMemory();
787 regstate[reg].forget();
788 } else {
789 RegisterID newReg = allocReg();
790 masm.move(reg, newReg);
791 reg = newReg;
793 return reg;
796 RegisterID reg = allocReg();
798 if (!freeRegs.empty())
799 masm.move(tempRegForType(fe), reg);
800 else
801 masm.loadTypeTag(addressOf(fe), reg);
803 return reg;
806 JSC::MacroAssembler::RegisterID
807 FrameState::copyInt32ConstantIntoReg(FrameEntry *fe)
809 return copyInt32ConstantIntoReg(masm, fe);
812 JSC::MacroAssembler::RegisterID
813 FrameState::copyInt32ConstantIntoReg(Assembler &masm, FrameEntry *fe)
815 JS_ASSERT(fe->data.isConstant());
817 if (fe->isCopy())
818 fe = fe->copyOf();
820 RegisterID reg = allocReg();
821 masm.move(Imm32(fe->getValue().toInt32()), reg);
822 return reg;
825 JSC::MacroAssembler::FPRegisterID
826 FrameState::copyEntryIntoFPReg(FrameEntry *fe, FPRegisterID fpreg)
828 return copyEntryIntoFPReg(this->masm, fe, fpreg);
831 JSC::MacroAssembler::FPRegisterID
832 FrameState::copyEntryIntoFPReg(Assembler &masm, FrameEntry *fe, FPRegisterID fpreg)
834 if (fe->isCopy())
835 fe = fe->copyOf();
837 ensureFeSynced(fe, masm);
838 masm.loadDouble(addressOf(fe), fpreg);
840 return fpreg;
843 JSC::MacroAssembler::RegisterID
844 FrameState::ownRegForType(FrameEntry *fe)
846 JS_ASSERT(!fe->type.isConstant());
848 RegisterID reg;
849 if (fe->isCopy()) {
850 /* For now, just do an extra move. The reg must be mutable. */
851 FrameEntry *backing = fe->copyOf();
852 if (!backing->type.inRegister()) {
853 JS_ASSERT(backing->type.inMemory());
854 tempRegForType(backing);
857 if (freeRegs.empty()) {
858 /* For now... just steal the register that already exists. */
859 ensureTypeSynced(backing, masm);
860 reg = backing->type.reg();
861 backing->type.setMemory();
862 regstate[reg].forget();
863 } else {
864 reg = allocReg();
865 masm.move(backing->type.reg(), reg);
867 return reg;
870 if (fe->type.inRegister()) {
871 reg = fe->type.reg();
873 /* Remove ownership of this register. */
874 JS_ASSERT(regstate[reg].fe() == fe);
875 JS_ASSERT(regstate[reg].type() == RematInfo::TYPE);
876 regstate[reg].forget();
877 fe->type.invalidate();
878 } else {
879 JS_ASSERT(fe->type.inMemory());
880 reg = allocReg();
881 masm.loadTypeTag(addressOf(fe), reg);
883 return reg;
886 JSC::MacroAssembler::RegisterID
887 FrameState::ownRegForData(FrameEntry *fe)
889 JS_ASSERT(!fe->data.isConstant());
891 RegisterID reg;
892 if (fe->isCopy()) {
893 /* For now, just do an extra move. The reg must be mutable. */
894 FrameEntry *backing = fe->copyOf();
895 if (!backing->data.inRegister()) {
896 JS_ASSERT(backing->data.inMemory());
897 tempRegForData(backing);
900 if (freeRegs.empty()) {
901 /* For now... just steal the register that already exists. */
902 ensureDataSynced(backing, masm);
903 reg = backing->data.reg();
904 backing->data.setMemory();
905 regstate[reg].forget();
906 } else {
907 reg = allocReg();
908 masm.move(backing->data.reg(), reg);
910 return reg;
913 if (fe->isCopied()) {
914 FrameEntry *copy = uncopy(fe);
915 if (fe->isCopied()) {
916 fe->type.invalidate();
917 fe->data.invalidate();
918 return copyDataIntoReg(copy);
922 if (fe->data.inRegister()) {
923 reg = fe->data.reg();
924 /* Remove ownership of this register. */
925 JS_ASSERT(regstate[reg].fe() == fe);
926 JS_ASSERT(regstate[reg].type() == RematInfo::DATA);
927 regstate[reg].forget();
928 fe->data.invalidate();
929 } else {
930 JS_ASSERT(fe->data.inMemory());
931 reg = allocReg();
932 masm.loadPayload(addressOf(fe), reg);
934 return reg;
937 void
938 FrameState::discardFe(FrameEntry *fe)
940 forgetEntry(fe);
941 fe->type.setMemory();
942 fe->data.setMemory();
945 void
946 FrameState::pushCopyOf(uint32 index)
948 FrameEntry *backing = entryFor(index);
949 FrameEntry *fe = rawPush();
950 fe->resetUnsynced();
951 if (backing->isConstant()) {
952 fe->setConstant(Jsvalify(backing->getValue()));
953 } else {
954 if (backing->isTypeKnown())
955 fe->setType(backing->getKnownType());
956 else
957 fe->type.invalidate();
958 fe->isNumber = backing->isNumber;
959 fe->data.invalidate();
960 if (backing->isCopy()) {
961 backing = backing->copyOf();
962 fe->setCopyOf(backing);
963 } else {
964 fe->setCopyOf(backing);
965 backing->setCopied();
968 /* Maintain tracker ordering guarantees for copies. */
969 JS_ASSERT(backing->isCopied());
970 if (fe->trackerIndex() < backing->trackerIndex())
971 swapInTracker(fe, backing);
975 FrameEntry *
976 FrameState::walkTrackerForUncopy(FrameEntry *original)
978 uint32 firstCopy = InvalidIndex;
979 FrameEntry *bestFe = NULL;
980 uint32 ncopies = 0;
981 for (uint32 i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
982 FrameEntry *fe = tracker[i];
983 if (fe >= sp)
984 continue;
985 if (fe->isCopy() && fe->copyOf() == original) {
986 if (firstCopy == InvalidIndex) {
987 firstCopy = i;
988 bestFe = fe;
989 } else if (fe < bestFe) {
990 bestFe = fe;
992 ncopies++;
996 if (!ncopies) {
997 JS_ASSERT(firstCopy == InvalidIndex);
998 JS_ASSERT(!bestFe);
999 return NULL;
1002 JS_ASSERT(firstCopy != InvalidIndex);
1003 JS_ASSERT(bestFe);
1004 JS_ASSERT(bestFe > original);
1006 /* Mark all extra copies as copies of the new backing index. */
1007 bestFe->setCopyOf(NULL);
1008 if (ncopies > 1) {
1009 bestFe->setCopied();
1010 for (uint32 i = firstCopy; i < tracker.nentries; i++) {
1011 FrameEntry *other = tracker[i];
1012 if (other >= sp || other == bestFe)
1013 continue;
1015 /* The original must be tracked before copies. */
1016 JS_ASSERT(other != original);
1018 if (!other->isCopy() || other->copyOf() != original)
1019 continue;
1021 other->setCopyOf(bestFe);
1024 * This is safe even though we're mutating during iteration. There
1025 * are two cases. The first is that both indexes are <= i, and :.
1026 * will never be observed. The other case is we're placing the
1027 * other FE such that it will be observed later. Luckily, copyOf()
1028 * will return != original, so nothing will happen.
1030 if (other->trackerIndex() < bestFe->trackerIndex())
1031 swapInTracker(bestFe, other);
1033 } else {
1034 bestFe->setNotCopied();
1037 return bestFe;
1040 FrameEntry *
1041 FrameState::walkFrameForUncopy(FrameEntry *original)
1043 FrameEntry *bestFe = NULL;
1044 uint32 ncopies = 0;
1046 /* It's only necessary to visit as many FEs are being tracked. */
1047 uint32 maxvisits = tracker.nentries;
1049 for (FrameEntry *fe = original + 1; fe < sp && maxvisits; fe++) {
1050 if (!fe->isTracked())
1051 continue;
1053 maxvisits--;
1055 if (fe->isCopy() && fe->copyOf() == original) {
1056 if (!bestFe) {
1057 bestFe = fe;
1058 bestFe->setCopyOf(NULL);
1059 } else {
1060 fe->setCopyOf(bestFe);
1061 if (fe->trackerIndex() < bestFe->trackerIndex())
1062 swapInTracker(bestFe, fe);
1064 ncopies++;
1068 if (ncopies)
1069 bestFe->setCopied();
1071 return bestFe;
1074 FrameEntry *
1075 FrameState::uncopy(FrameEntry *original)
1077 JS_ASSERT(original->isCopied());
1080 * Copies have three critical invariants:
1081 * 1) The backing store precedes all copies in the tracker.
1082 * 2) The backing store precedes all copies in the FrameState.
1083 * 3) The backing store of a copy cannot be popped from the stack
1084 * while the copy is still live.
1086 * Maintaining this invariant iteratively is kind of hard, so we choose
1087 * the "lowest" copy in the frame up-front.
1089 * For example, if the stack is:
1090 * [A, B, C, D]
1091 * And the tracker has:
1092 * [A, D, C, B]
1094 * If B, C, and D are copies of A - we will walk the tracker to the end
1095 * and select B, not D (see bug 583684).
1097 * Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
1098 * sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
1099 * With large scripts this may be a problem worth investigating. Note that
1100 * the tracker is walked twice, so we multiply by 2 for pessimism.
1102 FrameEntry *fe;
1103 if ((tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
1104 fe = walkFrameForUncopy(original);
1105 else
1106 fe = walkTrackerForUncopy(original);
1107 if (!fe) {
1108 original->setNotCopied();
1109 return NULL;
1113 * Switch the new backing store to the old backing store. During
1114 * this process we also necessarily make sure the copy can be
1115 * synced.
1117 if (!original->isTypeKnown()) {
1119 * If the copy is unsynced, and the original is in memory,
1120 * give the original a register. We do this below too; it's
1121 * okay if it's spilled.
1123 if (original->type.inMemory() && !fe->type.synced())
1124 tempRegForType(original);
1125 fe->type.inherit(original->type);
1126 if (fe->type.inRegister())
1127 regstate[fe->type.reg()].reassociate(fe);
1128 } else {
1129 JS_ASSERT(fe->isTypeKnown());
1130 JS_ASSERT(fe->getKnownType() == original->getKnownType());
1132 if (original->data.inMemory() && !fe->data.synced())
1133 tempRegForData(original);
1134 fe->data.inherit(original->data);
1135 if (fe->data.inRegister())
1136 regstate[fe->data.reg()].reassociate(fe);
1138 return fe;
1141 void
1142 FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
1144 FrameEntry *local = getLocal(n);
1146 storeTop(local, popGuaranteed, typeChange);
1148 bool closed = eval || isClosedVar(n);
1149 if (!closed && !inTryBlock)
1150 return;
1152 /* Ensure that the local variable remains synced. */
1153 syncFe(local);
1155 if (closed) {
1156 /* If the FE can have registers, free them before resetting. */
1157 if (!local->isCopy())
1158 forgetEntry(local);
1159 local->resetSynced();
1163 void
1164 FrameState::forgetEntry(FrameEntry *fe)
1166 if (fe->isCopied()) {
1167 uncopy(fe);
1168 if (!fe->isCopied())
1169 forgetAllRegs(fe);
1170 } else {
1171 forgetAllRegs(fe);
/*
 * Store the top of the stack into |target|, as for SETLOCAL/SETARG-style ops.
 *
 *   target        - frame entry receiving the value (its interface slot).
 *   popGuaranteed - true if the top entry is guaranteed to be popped
 *                   immediately after this store, letting us skip marking
 *                   |target| as copied in the common case.
 *   typeChange    - false if the caller knows the stored value's type tag is
 *                   already correct in |target|'s slot, so only the payload
 *                   needs to move.
 */
void
FrameState::storeTop(FrameEntry *target, bool popGuaranteed, bool typeChange)
{
    /* Capture sync state before forgetEntry()/resetUnsynced() clobber it. */
    bool wasSynced = target->type.synced();

    /* Detect something like (x = x) which is a no-op. */
    FrameEntry *top = peek(-1);
    if (top->isCopy() && top->copyOf() == target) {
        JS_ASSERT(target->isCopied());
        return;
    }

    /* Completely invalidate the local variable. */
    forgetEntry(target);
    target->resetUnsynced();

    /* Constants are easy to propagate. */
    if (top->isConstant()) {
        target->setCopyOf(NULL);
        target->setNotCopied();
        target->setConstant(Jsvalify(top->getValue()));
        return;
    }

    /*
     * When dealing with copies, there are three important invariants:
     *
     * 1) The backing store precedes all copies in the tracker.
     * 2) The backing store precedes all copies in the FrameState.
     * 3) The backing store of a local is never a stack slot, UNLESS the local
     *    variable itself is a stack slot (blocks) that precedes the stack
     *    slot.
     *
     * If the top is a copy, and the second condition holds true, the local
     * can be rewritten as a copy of the original backing slot. If the first
     * condition does not hold, force it to hold by swapping in-place.
     */
    FrameEntry *backing = top;
    bool copied = false;
    if (top->isCopy()) {
        backing = top->copyOf();
        JS_ASSERT(backing->trackerIndex() < top->trackerIndex());

        if (backing < target) {
            /* local.idx < backing.idx means local cannot be a copy yet */
            if (target->trackerIndex() < backing->trackerIndex())
                swapInTracker(backing, target);
            /* Rewrite |target| as another copy of the same backing entry. */
            target->setNotCopied();
            target->setCopyOf(backing);
            if (backing->isTypeKnown())
                target->setType(backing->getKnownType());
            else
                target->type.invalidate();
            target->data.invalidate();
            target->isNumber = backing->isNumber;
            return;
        }

        /*
         * If control flow lands here, then there was a bytecode sequence like
         *
         *  ENTERBLOCK 2
         *  GETLOCAL 1
         *  SETLOCAL 0
         *
         * The problem is slot N can't be backed by M if M could be popped
         * before N. We want a guarantee that when we pop M, even if it was
         * copied, it has no outstanding copies.
         *
         * Because of |let| expressions, it's kind of hard to really know
         * whether a region on the stack will be popped all at once. Bleh!
         *
         * This should be rare except in browser code (and maybe even then),
         * but even so there's a quick workaround. We take all copies of the
         * backing fe, and redirect them to be copies of the destination.
         */
        for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
            FrameEntry *fe = tracker[i];
            if (fe >= sp)
                continue;
            if (fe->isCopy() && fe->copyOf() == backing) {
                fe->setCopyOf(target);
                copied = true;
            }
        }
    }
    backing->setNotCopied();

    /*
     * This is valid from the top->isCopy() path because we're guaranteed a
     * consistent ordering - all copies of |backing| are tracked after
     * |backing|. Transitively, only one swap is needed.
     */
    if (backing->trackerIndex() < target->trackerIndex())
        swapInTracker(backing, target);

    /*
     * Move the backing store down - we spill registers here, but we could be
     * smarter and re-use the type reg.
     */
    RegisterID reg = tempRegForData(backing);
    target->data.setRegister(reg);
    regstate[reg].reassociate(target);

    if (typeChange) {
        if (backing->isTypeKnown()) {
            target->setType(backing->getKnownType());
        } else {
            RegisterID reg = tempRegForType(backing);
            target->type.setRegister(reg);
            regstate[reg].reassociate(target);
        }
    } else {
        /*
         * Type is unchanged; if the old slot's tag was never synced to
         * memory, write the (known) tag now so the slot stays coherent.
         */
        if (!wasSynced)
            masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(target));
        target->type.setMemory();
    }

    /* |backing| becomes a copy of |target| — the ownership is reversed. */
    if (!backing->isTypeKnown())
        backing->type.invalidate();
    backing->data.invalidate();
    backing->setCopyOf(target);
    backing->isNumber = target->isNumber;

    JS_ASSERT(top->copyOf() == target);

    /*
     * Right now, |backing| is a copy of |target| (note the reversal), but
     * |target| is not marked as copied. This is an optimization so uncopy()
     * may avoid frame traversal.
     *
     * There are two cases where we must set the copy bit, however:
     *  - The fixup phase redirected more copies to |target|.
     *  - An immediate pop is not guaranteed.
     */
    if (copied || !popGuaranteed)
        target->setCopied();
}
1313 void
1314 FrameState::shimmy(uint32 n)
1316 JS_ASSERT(sp - n >= spBase);
1317 int32 depth = 0 - int32(n);
1318 storeTop(peek(depth - 1), true);
1319 popn(n);
1322 void
1323 FrameState::shift(int32 n)
1325 JS_ASSERT(n < 0);
1326 JS_ASSERT(sp + n - 1 >= spBase);
1327 storeTop(peek(n - 1), true);
1328 pop();
/*
 * Load |fe| into registers (or note its constant), fill in |vr| describing
 * where the value lives, and pin those registers so later allocations in the
 * caller cannot spill them. Balanced by unpinEntry().
 */
void
FrameState::pinEntry(FrameEntry *fe, ValueRemat &vr)
{
    if (fe->isConstant()) {
        vr = ValueRemat::FromConstant(fe->getValue());
    } else {
        // Pin the type register so it can't spill.
        MaybeRegisterID maybePinnedType = maybePinType(fe);

        // Get and pin the data register.
        RegisterID dataReg = tempRegForData(fe);
        pinReg(dataReg);

        if (fe->isTypeKnown()) {
            vr = ValueRemat::FromKnownType(fe->getKnownType(), dataReg);
        } else {
            // The type might not be loaded yet, so unpin for simplicity.
            maybeUnpinReg(maybePinnedType);

            // tempRegForType() may allocate; re-pin the resulting register.
            vr = ValueRemat::FromRegisters(tempRegForType(fe), dataReg);
            pinReg(vr.typeReg());
        }
    }

    // Set these bits last, since allocation could have caused a sync.
    vr.isDataSynced = fe->data.synced();
    vr.isTypeSynced = fe->type.synced();
}
1360 void
1361 FrameState::unpinEntry(const ValueRemat &vr)
1363 if (!vr.isConstant()) {
1364 if (!vr.isTypeKnown())
1365 unpinReg(vr.typeReg());
1366 unpinReg(vr.dataReg());
/*
 * Emit stores (into |masm|) so that |fe|'s in-memory slot holds the value
 * described by |vr|, skipping any piece |vr| reports as already synced.
 * On punbox platforms the whole value is one store; on nunbox platforms the
 * payload and type tag are synced independently.
 */
void
FrameState::ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat &vr)
{
#if defined JS_PUNBOX64
    /* 64-bit boxing: a single store covers both tag and payload. */
    if (!vr.isDataSynced || !vr.isTypeSynced)
        masm.storeValue(vr, addressOf(fe));
#elif defined JS_NUNBOX32
    if (vr.isConstant()) {
        if (!vr.isDataSynced || !vr.isTypeSynced)
            masm.storeValue(vr.value(), addressOf(fe));
    } else {
        if (!vr.isDataSynced)
            masm.storePayload(vr.dataReg(), addressOf(fe));
        if (!vr.isTypeSynced) {
            /* Known types store an immediate tag; otherwise use the reg. */
            if (vr.isTypeKnown())
                masm.storeTypeTag(ImmType(vr.knownType()), addressOf(fe));
            else
                masm.storeTypeTag(vr.typeReg(), addressOf(fe));
        }
    }
#endif
}
1393 static inline bool
1394 AllocHelper(RematInfo &info, MaybeRegisterID &maybe)
1396 if (info.inRegister()) {
1397 maybe = info.reg();
1398 return true;
1400 return false;
1403 void
1404 FrameState::allocForSameBinary(FrameEntry *fe, JSOp op, BinaryAlloc &alloc)
1406 if (!fe->isTypeKnown()) {
1407 alloc.lhsType = tempRegForType(fe);
1408 pinReg(alloc.lhsType.reg());
1411 alloc.lhsData = tempRegForData(fe);
1413 if (!freeRegs.empty()) {
1414 alloc.result = allocReg();
1415 masm.move(alloc.lhsData.reg(), alloc.result);
1416 alloc.lhsNeedsRemat = false;
1417 } else {
1418 alloc.result = alloc.lhsData.reg();
1419 takeReg(alloc.result);
1420 alloc.lhsNeedsRemat = true;
1423 if (alloc.lhsType.isSet())
1424 unpinReg(alloc.lhsType.reg());
/*
 * Ensure both halves of |fe| are available in registers, writing them into
 * |*type| and |*data|. A half that is a constant is left unset (callers
 * consult the entry itself for constants). Pins are strictly transient:
 * whatever is pinned while loading the other half is unpinned before return.
 */
void
FrameState::ensureFullRegs(FrameEntry *fe, MaybeRegisterID *type, MaybeRegisterID *data)
{
    /* Always operate on the backing store, not a copy. */
    fe = fe->isCopy() ? fe->copyOf() : fe;

    JS_ASSERT(!data->isSet() && !type->isSet());

    if (!fe->type.inMemory()) {
        /* Type is in a register or constant; data may need a load. */
        if (fe->type.inRegister())
            *type = fe->type.reg();
        if (fe->data.isConstant())
            return;
        if (fe->data.inRegister()) {
            *data = fe->data.reg();
            return;
        }
        /* Pin the type reg so loading the data half can't evict it. */
        if (fe->type.inRegister())
            pinReg(fe->type.reg());
        *data = tempRegForData(fe);
        if (fe->type.inRegister())
            unpinReg(fe->type.reg());
    } else if (!fe->data.inMemory()) {
        /* Mirror image: data is available; type may need a load. */
        if (fe->data.inRegister())
            *data = fe->data.reg();
        if (fe->type.isConstant())
            return;
        if (fe->type.inRegister()) {
            *type = fe->type.reg();
            return;
        }
        if (fe->data.inRegister())
            pinReg(fe->data.reg());
        *type = tempRegForType(fe);
        if (fe->data.inRegister())
            unpinReg(fe->data.reg());
    } else {
        /* Both halves are in memory: load data first, pinned, then type. */
        *data = tempRegForData(fe);
        pinReg(data->reg());
        *type = tempRegForType(fe);
        unpinReg(data->reg());
    }
}
/*
 * Register allocation for a two-operand binary op. Fills |alloc| with:
 *  - type/data registers for both operands (where not constant/known),
 *  - if |needsResult|, a mutable result register preloaded with the LHS
 *    (or the RHS, when the op is commutative and that is cheaper; see
 *    alloc.resultHasRhs),
 *  - remat flags for any operand register that was stolen for the result.
 * All registers are pinned only for the duration of this call.
 */
void
FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
                           bool needsResult)
{
    FrameEntry *backingLeft = lhs;
    FrameEntry *backingRight = rhs;

    if (backingLeft->isCopy())
        backingLeft = backingLeft->copyOf();
    if (backingRight->isCopy())
        backingRight = backingRight->copyOf();

    /*
     * For each remat piece of both FEs, if a register is assigned, get it now
     * and pin it. This is safe - constants and known types will be avoided.
     */
    if (AllocHelper(backingLeft->type, alloc.lhsType))
        pinReg(alloc.lhsType.reg());
    if (AllocHelper(backingLeft->data, alloc.lhsData))
        pinReg(alloc.lhsData.reg());
    if (AllocHelper(backingRight->type, alloc.rhsType))
        pinReg(alloc.rhsType.reg());
    if (AllocHelper(backingRight->data, alloc.rhsData))
        pinReg(alloc.rhsData.reg());

    /* For each type without a register, give it a register if needed. */
    if (!alloc.lhsType.isSet() && backingLeft->type.inMemory()) {
        alloc.lhsType = tempRegForType(lhs);
        pinReg(alloc.lhsType.reg());
    }
    if (!alloc.rhsType.isSet() && backingRight->type.inMemory()) {
        alloc.rhsType = tempRegForType(rhs);
        pinReg(alloc.rhsType.reg());
    }

    /* Classify the op: commutative ops may put the result in either side. */
    bool commu;
    switch (op) {
      case JSOP_EQ:
      case JSOP_GT:
      case JSOP_GE:
      case JSOP_LT:
      case JSOP_LE:
        /* fall through */
      case JSOP_ADD:
      case JSOP_MUL:
      case JSOP_SUB:
        commu = true;
        break;

      case JSOP_DIV:
        commu = false;
        break;

      default:
        JS_NOT_REACHED("unknown op");
        return;
    }

    /*
     * Data is a little more complicated. If the op is MUL, not all CPUs
     * have multiplication on immediates, so a register is needed. Also,
     * if the op is not commutative, the LHS _must_ be in a register.
     */
    JS_ASSERT_IF(lhs->isConstant(), !rhs->isConstant());
    JS_ASSERT_IF(rhs->isConstant(), !lhs->isConstant());

    if (!alloc.lhsData.isSet()) {
        if (backingLeft->data.inMemory()) {
            alloc.lhsData = tempRegForData(lhs);
            pinReg(alloc.lhsData.reg());
        } else if (op == JSOP_MUL || !commu) {
            /* Constant LHS must be materialized; freed via extraFree. */
            JS_ASSERT(lhs->isConstant());
            alloc.lhsData = allocReg();
            alloc.extraFree = alloc.lhsData;
            masm.move(Imm32(lhs->getValue().toInt32()), alloc.lhsData.reg());
        }
    }
    if (!alloc.rhsData.isSet()) {
        if (backingRight->data.inMemory()) {
            alloc.rhsData = tempRegForData(rhs);
            pinReg(alloc.rhsData.reg());
        } else if (op == JSOP_MUL) {
            JS_ASSERT(rhs->isConstant());
            alloc.rhsData = allocReg();
            alloc.extraFree = alloc.rhsData;
            masm.move(Imm32(rhs->getValue().toInt32()), alloc.rhsData.reg());
        }
    }

    alloc.lhsNeedsRemat = false;
    alloc.rhsNeedsRemat = false;

    if (!needsResult)
        goto skip;

    /*
     * Now a result register is needed. It must contain a mutable copy of the
     * LHS. For commutative operations, we can opt to use the RHS instead. At
     * this point, if for some reason either must be in a register, that has
     * already been guaranteed at this point.
     */
    if (!freeRegs.empty()) {
        /* Free reg - just grab it. */
        alloc.result = allocReg();
        if (!alloc.lhsData.isSet()) {
            JS_ASSERT(alloc.rhsData.isSet());
            JS_ASSERT(commu);
            masm.move(alloc.rhsData.reg(), alloc.result);
            alloc.resultHasRhs = true;
        } else {
            masm.move(alloc.lhsData.reg(), alloc.result);
            alloc.resultHasRhs = false;
        }
    } else {
        /*
         * No free regs. Find a good candidate to re-use. Best candidates don't
         * require syncs on the inline path.
         */
        bool leftInReg = backingLeft->data.inRegister();
        bool rightInReg = backingRight->data.inRegister();
        bool leftSynced = backingLeft->data.synced();
        bool rightSynced = backingRight->data.synced();
        if (!commu || (leftInReg && (leftSynced || (!rightInReg || !rightSynced)))) {
            /* Steal the LHS register (or spill-copy a materialized LHS). */
            JS_ASSERT(backingLeft->data.inRegister() || !commu);
            JS_ASSERT_IF(backingLeft->data.inRegister(),
                         backingLeft->data.reg() == alloc.lhsData.reg());
            if (backingLeft->data.inRegister()) {
                alloc.result = backingLeft->data.reg();
                unpinReg(alloc.result);
                takeReg(alloc.result);
                alloc.lhsNeedsRemat = true;
            } else {
                /* For now, just spill... */
                alloc.result = allocReg();
                masm.move(alloc.lhsData.reg(), alloc.result);
            }
            alloc.resultHasRhs = false;
        } else {
            /* Commutative: stealing the RHS register is cheaper here. */
            JS_ASSERT(commu);
            JS_ASSERT(!leftInReg || (rightInReg && rightSynced));
            alloc.result = backingRight->data.reg();
            unpinReg(alloc.result);
            takeReg(alloc.result);
            alloc.resultHasRhs = true;
            alloc.rhsNeedsRemat = true;
        }
    }

  skip:
    /* Unpin everything that was pinned. */
    if (backingLeft->type.inRegister())
        unpinReg(backingLeft->type.reg());
    if (backingRight->type.inRegister())
        unpinReg(backingRight->type.reg());
    if (backingLeft->data.inRegister())
        unpinReg(backingLeft->data.reg());
    if (backingRight->data.inRegister())
        unpinReg(backingRight->data.reg());
}
1629 MaybeRegisterID
1630 FrameState::maybePinData(FrameEntry *fe)
1632 fe = fe->isCopy() ? fe->copyOf() : fe;
1633 if (fe->data.inRegister()) {
1634 pinReg(fe->data.reg());
1635 return fe->data.reg();
1637 return MaybeRegisterID();
1640 MaybeRegisterID
1641 FrameState::maybePinType(FrameEntry *fe)
1643 fe = fe->isCopy() ? fe->copyOf() : fe;
1644 if (fe->type.inRegister()) {
1645 pinReg(fe->type.reg());
1646 return fe->type.reg();
1648 return MaybeRegisterID();
1651 void
1652 FrameState::maybeUnpinReg(MaybeRegisterID reg)
1654 if (reg.isSet())
1655 unpinReg(reg.reg());