1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * David Anderson <danderson@mozilla.com>
26 * Alternatively, the contents of this file may be used under the terms of
27 * either of the GNU General Public License Version 2 or later (the "GPL"),
28 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 * in which case the provisions of the GPL or the LGPL are applicable instead
30 * of those above. If you wish to allow use of your version of this file only
31 * under the terms of either the GPL or the LGPL, and not to allow others to
32 * use your version of this file under the terms of the MPL, indicate your
33 * decision by deleting the provisions above and replace them with the notice
34 * and other provisions required by the GPL or the LGPL. If you do not delete
35 * the provisions above, a recipient may use your version of this file under
36 * the terms of any one of the MPL, the GPL or the LGPL.
38 * ***** END LICENSE BLOCK ***** */
40 #include "FrameState.h"
41 #include "FrameState-inl.h"
44 using namespace js::mjit;
46 /* Because of Value alignment */
/* Compile-time check: FrameEntry must be a multiple of 8 bytes so the
 * js::Value it embeds stays naturally aligned inside the entries[] array. */
47 JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
/* Constructor: caches the compilation context, script, function, and argument
 * count; 'entries' starts NULL and is allocated later (see init() below).
 * NOTE(review): the embedded line numbers jump (54 -> 58), so part of the
 * initializer list is missing from this copy — recover from upstream. */
49 FrameState::FrameState(JSContext *cx, JSScript *script, JSFunction *fun, Assembler &masm)
50 : cx(cx), script(script), fun(fun),
51 nargs(fun ? fun->nargs : 0),
52 masm(masm), entries(NULL),
53 #if defined JS_NUNBOX32
/* NUNBOX32 builds carry an ImmutableSync reifier for the slow sync path. */
54 reifier(cx, *thisFromCtor()),
58 usesArguments(script->usesArguments),
/* Destructor — body not visible in this copy (line numbers jump 63 -> 71). */
63 FrameState::~FrameState()
/* init(): carves one calloc'd arena into entries[], tracker.entries,
 * closedVars[] and closedArgs[], then checks the cursor consumed exactly
 * totalBytes.  NOTE(review): several lines are elided here (72 -> 74,
 * 81 -> 84, 85 -> 87, ...), including the OOM checks and conditional sizing
 * branches — confirm against the upstream file. */
71 // nslots + nargs + 2 (callee, this)
72 uint32 nentries = feLimit();
74 sp = spBase = locals = args = NULL;
/* Eval or a debug-mode compartment forces conservative closed-over handling. */
78 eval = script->usesEval || cx->compartment->debugMode;
80 size_t totalBytes = sizeof(FrameEntry) * nentries + // entries[], w/ callee+this
81 sizeof(FrameEntry *) * nentries + // tracker.entries
84 : sizeof(JSPackedBool) * script->nslots) + // closedVars[]
85 (eval || usesArguments
87 : sizeof(JSPackedBool) * nargs); // closedArgs[]
/* Single allocation; calloc zeroes it so all flags start cleared. */
89 uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
93 #if defined JS_NUNBOX32
94 if (!reifier.init(nentries))
98 entries = (FrameEntry *)cursor;
99 cursor += sizeof(FrameEntry) * nentries;
/* Frame layout: [callee, this] args... locals... stack. */
104 locals = args + nargs;
105 spBase = locals + script->nfixed;
108 tracker.entries = (FrameEntry **)cursor;
109 cursor += sizeof(FrameEntry *) * nentries;
112 if (script->nslots) {
113 closedVars = (JSPackedBool *)cursor;
114 cursor += sizeof(JSPackedBool) * script->nslots;
116 if (!usesArguments && nargs) {
117 closedArgs = (JSPackedBool *)cursor;
118 cursor += sizeof(JSPackedBool) * nargs;
/* Sanity: the cursor must land exactly at the end of the arena. */
122 JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
/* Forcibly claim a specific register: if free, just remove it from the free
 * set (asserting nothing owns it); otherwise evict/forget its current owner.
 * NOTE(review): lines elided (132 -> 134) — the else branch's spill logic is
 * not visible here. */
128 FrameState::takeReg(RegisterID reg)
130 if (freeRegs.hasReg(reg)) {
131 freeRegs.takeReg(reg);
132 JS_ASSERT(!regstate[reg].usedBy());
134 JS_ASSERT(regstate[reg].fe());
136 regstate[reg].forget();
/* Spill the entry occupying |reg| back to memory: sync whichever half (type
 * or data) the register holds, then mark that half as memory-resident. */
141 FrameState::evictReg(RegisterID reg)
143 FrameEntry *fe = regstate[reg].fe();
145 if (regstate[reg].type() == RematInfo::TYPE) {
146 ensureTypeSynced(fe, masm);
147 fe->type.setMemory();
149 ensureDataSynced(fe, masm);
150 fe->data.setMemory();
/* Pick a register from |mask| to evict, preferring one whose contents are
 * already synced (so eviction is free).  Falls back to an arbitrary
 * candidate otherwise.  NOTE(review): the fallback-selection and return
 * statements are elided in this copy (lines 166 -> 169, 185 -> 190). */
154 JSC::MacroAssembler::RegisterID
155 FrameState::evictSomeReg(uint32 mask)
158 bool fallbackSet = false;
160 RegisterID fallback = Registers::ReturnReg;
162 for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
163 RegisterID reg = RegisterID(i);
165 /* Register is not allocatable, don't bother. */
166 if (!(Registers::maskReg(reg) & mask))
169 /* Register is not owned by the FrameState. */
170 FrameEntry *fe = regstate[i].fe();
174 /* Try to find a candidate... that doesn't need spilling. */
/* A synced half can be dropped without emitting a store. */
180 if (regstate[i].type() == RematInfo::TYPE && fe->type.synced()) {
181 fe->type.setMemory();
184 if (regstate[i].type() == RematInfo::DATA && fe->data.synced()) {
185 fe->data.setMemory();
190 JS_ASSERT(fallbackSet);
/* Sync every live slot to memory and release all registers in one shot. */
198 FrameState::syncAndForgetEverything()
200 syncAndKill(Registers(Registers::AvailRegs), Uses(frameSlots()));
/* Untrack every entry the tracker knows about, resetting bookkeeping.
 * NOTE(review): trailing statements (e.g. clearing tracker.nentries or
 * freeRegs) may be elided here — verify upstream. */
205 FrameState::resetInternalState()
207 for (uint32 i = 0; i < tracker.nentries; i++)
208 tracker[i]->untrack();
/* Throw away all frame state: untrack entries and zero the register map. */
215 FrameState::discardFrame()
217 resetInternalState();
219 memset(regstate, 0, sizeof(regstate));
/* Like discardFrame(), but asserts (debug-only loop) that no register is
 * still owned — i.e. everything was already synced/evicted by the caller. */
223 FrameState::forgetEverything()
225 resetInternalState();
228 for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
229 JS_ASSERT(!regstate[i].usedBy());
/* Emit code to store |fe|'s Value into |address|.  Constants store directly;
 * otherwise the payload/type are materialized into registers as needed.
 * |popped| means the entry is about to be popped, so scratch registers need
 * not stay associated with it.  Platform-split: PUNBOX64 can move the whole
 * Value through one register; NUNBOX32 stores payload and tag separately.
 * NOTE(review): many lines are elided throughout (returns, else arms,
 * unpin/free calls) — do not assume control flow from this copy alone. */
235 FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
237 if (fe->isConstant()) {
238 masm.storeValue(fe->getValue(), address);
245 /* Cannot clobber the address's register. */
246 JS_ASSERT(!freeRegs.hasReg(address.base));
248 /* If loading from memory, ensure destination differs. */
249 JS_ASSERT_IF((fe->type.inMemory() || fe->data.inMemory()),
250 addressOf(fe).base != address.base ||
251 addressOf(fe).offset != address.offset);
253 #if defined JS_PUNBOX64
254 if (fe->type.inMemory() && fe->data.inMemory()) {
255 /* Future optimization: track that the Value is in a register. */
256 RegisterID vreg = Registers::ValueReg;
257 masm.loadPtr(addressOf(fe), vreg);
258 masm.storePtr(vreg, address);
263 * If dreg is obtained via allocReg(), then calling
264 * pinReg() trips an assertion. But in all other cases,
265 * calling pinReg() is necessary in the fe->type.inMemory() path.
266 * Remember whether pinReg() can be safely called.
268 bool canPinDreg = true;
269 bool wasInRegister = fe->data.inRegister();
271 /* Get a register for the payload. */
272 MaybeRegisterID dreg;
273 if (fe->data.inRegister()) {
274 dreg = fe->data.reg();
276 JS_ASSERT(fe->data.inMemory());
/* popped-path branch elided here; this arm keeps the reg associated. */
281 dreg = allocReg(fe, RematInfo::DATA);
282 fe->data.setRegister(dreg.reg());
284 masm.loadPayload(addressOf(fe), dreg.reg());
287 /* Store the Value. */
288 if (fe->type.inRegister()) {
289 masm.storeValueFromComponents(fe->type.reg(), dreg.reg(), address);
290 } else if (fe->isTypeKnown()) {
291 masm.storeValueFromComponents(ImmType(fe->getKnownType()), dreg.reg(), address);
293 JS_ASSERT(fe->type.inMemory());
/* Type tag must be loaded from memory into a (possibly temporary) reg. */
297 RegisterID treg = popped ? allocReg() : allocReg(fe, RematInfo::TYPE);
298 masm.loadTypeTag(addressOf(fe), treg);
299 masm.storeValueFromComponents(treg, dreg.reg(), address);
304 fe->type.setRegister(treg);
307 unpinReg(dreg.reg());
310 /* If register is untracked, free it. */
311 if (!wasInRegister && popped)
314 #elif defined JS_NUNBOX32
/* 32-bit path: payload first, then the type tag. */
316 if (fe->data.inRegister()) {
317 masm.storePayload(fe->data.reg(), address);
319 JS_ASSERT(fe->data.inMemory());
320 RegisterID reg = popped ? allocReg() : allocReg(fe, RematInfo::DATA);
321 masm.loadPayload(addressOf(fe), reg);
322 masm.storePayload(reg, address);
326 fe->data.setRegister(reg);
329 if (fe->isTypeKnown()) {
330 masm.storeTypeTag(ImmType(fe->getKnownType()), address);
331 } else if (fe->type.inRegister()) {
332 masm.storeTypeTag(fe->type.reg(), address);
334 JS_ASSERT(fe->type.inMemory());
335 RegisterID reg = popped ? allocReg() : allocReg(fe, RematInfo::TYPE);
336 masm.loadTypeTag(addressOf(fe), reg);
337 masm.storeTypeTag(reg, address);
341 fe->type.setRegister(reg);
/* Convenience wrapper: load the |this| entry into the return registers. */
347 FrameState::loadThisForReturn(RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
349 return loadForReturn(getThis(), typeReg, dataReg, tempReg);
/* Materialize |fe|'s type and payload into exactly (typeReg, dataReg),
 * using tempReg to break a swap cycle.  May violate FrameState invariants:
 * this runs just before the whole frame is discarded (function return).
 * NOTE(review): several elided lines (372 -> 373 'return', move statements
 * at 392/399) — the full shuffle logic is longer upstream. */
352 void FrameState::loadForReturn(FrameEntry *fe, RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
354 JS_ASSERT(dataReg != typeReg && dataReg != tempReg && typeReg != tempReg);
356 if (fe->isConstant()) {
357 masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
/* Pin whatever halves already live in registers so they can't be evicted
 * while we shuffle. */
364 MaybeRegisterID maybeType = maybePinType(fe);
365 MaybeRegisterID maybeData = maybePinData(fe);
367 if (fe->isTypeKnown()) {
368 // If the data is in memory, or in the wrong reg, load/move it.
369 if (!maybeData.isSet())
370 masm.loadPayload(addressOf(fe), dataReg);
371 else if (maybeData.reg() != dataReg)
372 masm.move(maybeData.reg(), dataReg);
373 masm.move(ImmType(fe->getKnownType()), typeReg);
377 // If both halves of the value are in memory, make this easier and load
378 // both pieces into their respective registers.
379 if (fe->type.inMemory() && fe->data.inMemory()) {
380 masm.loadValueAsComponents(addressOf(fe), typeReg, dataReg);
384 // Now, we should be guaranteed that at least one part is in a register.
385 JS_ASSERT(maybeType.isSet() || maybeData.isSet());
387 // Make sure we have two registers while making sure not clobber either half.
388 // Here we are allowed to mess up the FrameState invariants, because this
389 // is specialized code for a path that is about to discard the entire frame.
390 if (!maybeType.isSet()) {
391 JS_ASSERT(maybeData.isSet());
392 if (maybeData.reg() != typeReg)
396 masm.loadTypeTag(addressOf(fe), maybeType.reg());
397 } else if (!maybeData.isSet()) {
398 JS_ASSERT(maybeType.isSet());
399 if (maybeType.reg() != dataReg)
403 masm.loadPayload(addressOf(fe), maybeData.reg());
406 RegisterID type = maybeType.reg();
407 RegisterID data = maybeData.reg();
/* Final placement: handle the full swap via tempReg, then partial moves. */
409 if (data == typeReg && type == dataReg) {
410 masm.move(type, tempReg);
411 masm.move(data, dataReg);
412 masm.move(tempReg, typeReg);
413 } else if (data != dataReg) {
414 if (type == typeReg) {
415 masm.move(data, dataReg);
416 } else if (type != dataReg) {
417 masm.move(data, dataReg);
419 masm.move(type, typeReg);
421 JS_ASSERT(data != typeReg);
422 masm.move(type, typeReg);
423 masm.move(data, dataReg);
425 } else if (type != typeReg) {
426 masm.move(type, typeReg);
/* Debug-only consistency check: every tracked entry's register claims must
 * match regstate[], copies must obey tracker/frame ordering invariants, and
 * the reconstructed free-register set must equal freeRegs. */
432 FrameState::assertValidRegisterState() const
434 Registers checkedFreeRegs;
436 for (uint32 i = 0; i < tracker.nentries; i++) {
437 FrameEntry *fe = tracker[i];
441 JS_ASSERT(i == fe->trackerIndex());
/* Copy invariants: backing store precedes its copies in both the tracker
 * and the frame, and copies never hold registers themselves. */
442 JS_ASSERT_IF(fe->isCopy(),
443 fe->trackerIndex() > fe->copyOf()->trackerIndex());
444 JS_ASSERT_IF(fe->isCopy(), fe > fe->copyOf());
445 JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister());
446 JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < sp);
447 JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied());
451 if (fe->type.inRegister()) {
452 checkedFreeRegs.takeReg(fe->type.reg());
453 JS_ASSERT(regstate[fe->type.reg()].fe() == fe);
455 if (fe->data.inRegister()) {
456 checkedFreeRegs.takeReg(fe->data.reg());
457 JS_ASSERT(regstate[fe->data.reg()].fe() == fe);
461 JS_ASSERT(checkedFreeRegs == freeRegs);
463 for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
464 JS_ASSERT(!regstate[i].isPinned());
465 JS_ASSERT_IF(regstate[i].fe(), !freeRegs.hasReg(RegisterID(i)));
466 JS_ASSERT_IF(regstate[i].fe(), regstate[i].fe()->isTracked());
471 #if defined JS_NUNBOX32
/* NUNBOX32 slow-path sync: delegates to the ImmutableSync reifier, walking
 * entries from |resumeAt| down to |bottom|.  NOTE(review): the loop body is
 * almost entirely elided in this copy (line numbers jump 479 -> 488). */
473 FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
474 FrameEntry *bottom) const
476 reifier.reset(&masm, avail, resumeAt, bottom);
478 for (FrameEntry *fe = resumeAt; fe >= bottom; fe--) {
479 if (!fe->isTracked())
/* Emit stores (into |masm|, possibly an OOL path) so that every entry down
 * to sp - uses.nuses is synced to memory, without changing register state
 * (const method).  Registers are synced up-front; copies/constants are
 * handled in the downward walk.  NOTE(review): elided lines throughout
 * (497 -> 501 null check, 519 -> 525, 543 -> 545 copy branch). */
488 FrameState::sync(Assembler &masm, Uses uses) const
493 /* Sync all registers up-front. */
494 Registers allRegs(Registers::AvailRegs);
495 while (!allRegs.empty()) {
496 RegisterID reg = allRegs.takeAnyReg();
497 FrameEntry *fe = regstate[reg].usedBy();
501 JS_ASSERT(fe->isTracked());
503 #if defined JS_PUNBOX64
504 /* Sync entire FE to prevent loads. */
505 ensureFeSynced(fe, masm);
507 /* Take the other register in the pair, if one exists. */
508 if (regstate[reg].type() == RematInfo::DATA && fe->type.inRegister())
509 allRegs.takeReg(fe->type.reg());
510 else if (regstate[reg].type() == RematInfo::TYPE && fe->data.inRegister())
511 allRegs.takeReg(fe->data.reg());
512 #elif defined JS_NUNBOX32
513 /* Sync register if unsynced. */
514 if (regstate[reg].type() == RematInfo::DATA) {
515 JS_ASSERT(fe->data.reg() == reg);
516 ensureDataSynced(fe, masm);
518 JS_ASSERT(fe->type.reg() == reg);
519 ensureTypeSynced(fe, masm);
525 * Keep track of free registers using a bitmask. If we have to drop into
526 * syncFancy(), then this mask will help avoid eviction.
528 Registers avail(freeRegs);
529 Registers temp(Registers::TempRegs);
531 FrameEntry *bottom = sp - uses.nuses;
533 for (FrameEntry *fe = sp - 1; fe >= bottom; fe--) {
534 if (!fe->isTracked())
537 FrameEntry *backing = fe;
/* Registers held by this entry are available for later sync work. */
540 if (fe->data.inRegister())
541 avail.putReg(fe->data.reg());
542 if (fe->type.inRegister())
543 avail.putReg(fe->type.reg());
545 backing = fe->copyOf();
546 JS_ASSERT(!backing->isConstant() && !fe->isConstant());
548 #if defined JS_PUNBOX64
/* On 64-bit, an unsynced copy whose backing half is in memory can be
 * synced by assembling the boxed Value in ValueReg via or-ing halves. */
549 if ((!fe->type.synced() && backing->type.inMemory()) ||
550 (!fe->data.synced() && backing->data.inMemory())) {
552 RegisterID syncReg = Registers::ValueReg;
554 /* Load the entire Value into syncReg. */
555 if (backing->type.synced() && backing->data.synced()) {
556 masm.loadValue(addressOf(backing), syncReg);
557 } else if (backing->type.inMemory()) {
558 masm.loadTypeTag(addressOf(backing), syncReg);
559 masm.orPtr(backing->data.reg(), syncReg);
561 JS_ASSERT(backing->data.inMemory());
562 masm.loadPayload(addressOf(backing), syncReg);
563 if (backing->isTypeKnown())
564 masm.orPtr(ImmType(backing->getKnownType()), syncReg);
566 masm.orPtr(backing->type.reg(), syncReg);
569 masm.storeValue(syncReg, addressOf(fe));
572 #elif defined JS_NUNBOX32
573 /* Fall back to a slower sync algorithm if load required. */
574 if ((!fe->type.synced() && backing->type.inMemory()) ||
575 (!fe->data.synced() && backing->data.inMemory())) {
576 syncFancy(masm, avail, fe, bottom);
582 /* If a part still needs syncing, it is either a copy or constant. */
583 #if defined JS_PUNBOX64
584 /* All register-backed FEs have been entirely synced up-front. */
585 if (!fe->type.inRegister() && !fe->data.inRegister())
586 ensureFeSynced(fe, masm);
587 #elif defined JS_NUNBOX32
588 /* All components held in registers have been already synced. */
589 if (!fe->data.inRegister())
590 ensureDataSynced(fe, masm);
591 if (!fe->type.inRegister())
592 ensureTypeSynced(fe, masm);
/* Sync entries and then release ("kill") every register in |kill|, except
 * those backing the top |ignore.nuses| slots.  Three phases: sync registers
 * in the kill set, walk the stack forgetting killed registers, then evict
 * leftover temporaries (already synced by then).  NOTE(review): elided
 * lines include the NUNBOX32 sync calls (634 -> 637) and parts of the
 * stack walk (647 -> 657). */
598 FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
600 FrameEntry *spStop = sp - ignore.nuses;
602 /* Sync all kill-registers up-front. */
603 Registers search(kill.freeMask & ~freeRegs.freeMask);
604 while (!search.empty()) {
605 RegisterID reg = search.takeAnyReg();
606 FrameEntry *fe = regstate[reg].usedBy();
/* Entries above spStop are being ignored (e.g. call arguments). */
607 if (!fe || fe >= spStop)
610 JS_ASSERT(fe->isTracked());
612 #if defined JS_PUNBOX64
613 /* Don't use syncFe(), since that may clobber more registers. */
614 ensureFeSynced(fe, masm);
616 if (!fe->type.synced())
618 if (!fe->data.synced())
621 /* Take the other register in the pair, if one exists. */
622 if (regstate[reg].type() == RematInfo::DATA) {
623 JS_ASSERT(fe->data.reg() == reg);
624 if (fe->type.inRegister() && search.hasReg(fe->type.reg()))
625 search.takeReg(fe->type.reg());
627 JS_ASSERT(fe->type.reg() == reg);
628 if (fe->data.inRegister() && search.hasReg(fe->data.reg()))
629 search.takeReg(fe->data.reg());
631 #elif defined JS_NUNBOX32
632 /* Sync this register. */
633 if (regstate[reg].type() == RematInfo::DATA) {
634 JS_ASSERT(fe->data.reg() == reg);
637 JS_ASSERT(fe->type.reg() == reg);
/* maxvisits caps the walk: no point visiting more FEs than are tracked. */
643 uint32 maxvisits = tracker.nentries;
644 FrameEntry *bottom = sp - uses.nuses;
646 for (FrameEntry *fe = sp - 1; fe >= bottom && maxvisits; fe--) {
647 if (!fe->isTracked())
657 /* Forget registers. */
658 if (fe->data.inRegister() && kill.hasReg(fe->data.reg()) &&
659 !regstate[fe->data.reg()].isPinned()) {
660 forgetReg(fe->data.reg());
661 fe->data.setMemory();
663 if (fe->type.inRegister() && kill.hasReg(fe->type.reg()) &&
664 !regstate[fe->type.reg()].isPinned()) {
665 forgetReg(fe->type.reg());
666 fe->type.setMemory();
671 * Anything still alive at this point is guaranteed to be synced. However,
672 * it is necessary to evict temporary registers.
674 search = Registers(kill.freeMask & ~freeRegs.freeMask);
675 while (!search.empty()) {
676 RegisterID reg = search.takeAnyReg();
677 FrameEntry *fe = regstate[reg].usedBy();
678 if (!fe || fe >= spStop)
681 JS_ASSERT(fe->isTracked());
683 if (regstate[reg].type() == RematInfo::DATA) {
684 JS_ASSERT(fe->data.reg() == reg);
685 JS_ASSERT(fe->data.synced());
686 fe->data.setMemory();
688 JS_ASSERT(fe->type.reg() == reg);
689 JS_ASSERT(fe->type.synced());
690 fe->type.setMemory();
/* Emit loads (into |masm|) that restore every in-register entry from memory —
 * used when merging into a path where registers were clobbered (e.g. after a
 * stub call).  Const method: only code is emitted, no state changes.
 * NOTE(review): the null-fe skip between lines 704 and 711 is elided. */
698 FrameState::merge(Assembler &masm, Changes changes) const
700 Registers search(Registers::AvailRegs & ~freeRegs.freeMask);
702 while (!search.empty()) {
703 RegisterID reg = search.peekReg();
704 FrameEntry *fe = regstate[reg].usedBy();
/* Reload both halves with one Value load when both are register-bound. */
711 if (fe->data.inRegister() && fe->type.inRegister()) {
712 search.takeReg(fe->data.reg());
713 search.takeReg(fe->type.reg());
714 masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg());
716 if (fe->data.inRegister()) {
717 search.takeReg(fe->data.reg());
718 masm.loadPayload(addressOf(fe), fe->data.reg());
720 if (fe->type.inRegister()) {
721 search.takeReg(fe->type.reg());
722 masm.loadTypeTag(addressOf(fe), fe->type.reg());
/* Overload forwarding to the (Assembler&, FrameEntry*) variant using the
 * FrameState's own masm. */
728 JSC::MacroAssembler::RegisterID
729 FrameState::copyDataIntoReg(FrameEntry *fe)
731 return copyDataIntoReg(this->masm, fe);
/* Copy |fe|'s payload into the specific register |hint|, shuffling the
 * current occupant out of the way.  NOTE(review): heavily elided (737 -> 742,
 * 756 -> 761) — the uncopy/allocation preamble and the non-empty-freeRegs
 * path are missing from this copy; verify against upstream before relying
 * on the flow shown here. */
735 FrameState::copyDataIntoReg(FrameEntry *fe, RegisterID hint)
737 JS_ASSERT(!fe->data.isConstant());
742 if (!fe->data.inRegister())
745 RegisterID reg = fe->data.reg();
747 if (freeRegs.empty()) {
/* No spare register: sync the payload so |reg| can be repurposed. */
748 ensureDataSynced(fe, masm);
749 fe->data.setMemory();
752 masm.move(hint, reg);
753 fe->data.setRegister(reg);
754 regstate[reg].associate(regstate[hint].fe(), RematInfo::DATA);
756 regstate[hint].forget();
761 masm.move(reg, hint);
/* Return a register holding a *copy* of |fe|'s payload that the caller owns.
 * If a free register exists, move into it; otherwise steal fe's own register
 * after syncing.  NOTE(review): return statements and some branch tails are
 * elided (768 -> 773, 781 -> 787). */
765 JSC::MacroAssembler::RegisterID
766 FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
768 JS_ASSERT(!fe->data.isConstant());
773 if (fe->data.inRegister()) {
774 RegisterID reg = fe->data.reg();
775 if (freeRegs.empty()) {
/* Steal path: sync first so the entry can fall back to memory. */
776 ensureDataSynced(fe, masm);
777 fe->data.setMemory();
778 regstate[reg].forget();
780 RegisterID newReg = allocReg();
781 masm.move(reg, newReg);
787 RegisterID reg = allocReg();
789 if (!freeRegs.empty())
790 masm.move(tempRegForData(fe), reg);
792 masm.loadPayload(addressOf(fe),reg);
/* Type-tag counterpart of copyDataIntoReg(masm, fe): return a caller-owned
 * register containing a copy of |fe|'s type tag.  Same steal-vs-copy
 * strategy; same elisions as above (801 -> 805, 813 -> 819). */
797 JSC::MacroAssembler::RegisterID
798 FrameState::copyTypeIntoReg(FrameEntry *fe)
800 JS_ASSERT(!fe->type.isConstant());
805 if (fe->type.inRegister()) {
806 RegisterID reg = fe->type.reg();
807 if (freeRegs.empty()) {
808 ensureTypeSynced(fe, masm);
809 fe->type.setMemory();
810 regstate[reg].forget();
812 RegisterID newReg = allocReg();
813 masm.move(reg, newReg);
819 RegisterID reg = allocReg();
821 if (!freeRegs.empty())
822 masm.move(tempRegForType(fe), reg);
824 masm.loadTypeTag(addressOf(fe), reg);
/* Overload forwarding to the (Assembler&, FrameEntry*) variant. */
829 JSC::MacroAssembler::RegisterID
830 FrameState::copyInt32ConstantIntoReg(FrameEntry *fe)
832 return copyInt32ConstantIntoReg(masm, fe);
/* Move a constant entry's int32 payload into a freshly allocated register.
 * (Trailing return elided in this copy.) */
835 JSC::MacroAssembler::RegisterID
836 FrameState::copyInt32ConstantIntoReg(Assembler &masm, FrameEntry *fe)
838 JS_ASSERT(fe->data.isConstant());
843 RegisterID reg = allocReg();
844 masm.move(Imm32(fe->getValue().toInt32()), reg);
/* Overload forwarding to the (Assembler&, ...) variant using this->masm. */
848 JSC::MacroAssembler::FPRegisterID
849 FrameState::copyEntryIntoFPReg(FrameEntry *fe, FPRegisterID fpreg)
851 return copyEntryIntoFPReg(this->masm, fe, fpreg);
/* Load |fe| into an FP register by first forcing it to memory (doubles are
 * loaded from the stack slot, not from GPRs).  Return elided in this copy. */
854 JSC::MacroAssembler::FPRegisterID
855 FrameState::copyEntryIntoFPReg(Assembler &masm, FrameEntry *fe, FPRegisterID fpreg)
860 ensureFeSynced(fe, masm);
861 masm.loadDouble(addressOf(fe), fpreg);
/* Return a register holding |fe|'s type tag that the caller exclusively
 * owns (disassociated from the FrameState).  Copies get an extra move off
 * the backing entry; non-copies surrender their existing register.
 * NOTE(review): elided lines (869 -> 873, 885 -> 888, 904 -> 909) include
 * returns and the isCopy() dispatch. */
866 JSC::MacroAssembler::RegisterID
867 FrameState::ownRegForType(FrameEntry *fe)
869 JS_ASSERT(!fe->type.isConstant());
873 /* For now, just do an extra move. The reg must be mutable. */
874 FrameEntry *backing = fe->copyOf();
875 if (!backing->type.inRegister()) {
876 JS_ASSERT(backing->type.inMemory());
/* Force the backing type into a register first. */
877 tempRegForType(backing);
880 if (freeRegs.empty()) {
881 /* For now... just steal the register that already exists. */
882 ensureTypeSynced(backing, masm);
883 reg = backing->type.reg();
884 backing->type.setMemory();
885 regstate[reg].forget();
888 masm.move(backing->type.reg(), reg);
893 if (fe->type.inRegister()) {
894 reg = fe->type.reg();
896 /* Remove ownership of this register. */
897 JS_ASSERT(regstate[reg].fe() == fe);
898 JS_ASSERT(regstate[reg].type() == RematInfo::TYPE);
899 regstate[reg].forget();
900 fe->type.invalidate();
902 JS_ASSERT(fe->type.inMemory());
904 masm.loadTypeTag(addressOf(fe), reg);
/* Payload counterpart of ownRegForType(): return a caller-owned register
 * with |fe|'s data.  Additionally handles the copied-entry case by
 * uncopying first.  NOTE(review): elided lines mirror ownRegForType's
 * (912 -> 916, 931 -> 936, 941 -> 945, 955 onward). */
909 JSC::MacroAssembler::RegisterID
910 FrameState::ownRegForData(FrameEntry *fe)
912 JS_ASSERT(!fe->data.isConstant());
916 /* For now, just do an extra move. The reg must be mutable. */
917 FrameEntry *backing = fe->copyOf();
918 if (!backing->data.inRegister()) {
919 JS_ASSERT(backing->data.inMemory());
920 tempRegForData(backing);
923 if (freeRegs.empty()) {
924 /* For now... just steal the register that already exists. */
925 ensureDataSynced(backing, masm);
926 reg = backing->data.reg();
927 backing->data.setMemory();
928 regstate[reg].forget();
931 masm.move(backing->data.reg(), reg);
936 if (fe->isCopied()) {
/* If this entry still has live copies after uncopy(), hand back a copy of
 * the new backing entry instead. */
937 FrameEntry *copy = uncopy(fe);
938 if (fe->isCopied()) {
939 fe->type.invalidate();
940 fe->data.invalidate();
941 return copyDataIntoReg(copy);
945 if (fe->data.inRegister()) {
946 reg = fe->data.reg();
947 /* Remove ownership of this register. */
948 JS_ASSERT(regstate[reg].fe() == fe);
949 JS_ASSERT(regstate[reg].type() == RematInfo::DATA);
950 regstate[reg].forget();
951 fe->data.invalidate();
953 JS_ASSERT(fe->data.inMemory());
955 masm.loadPayload(addressOf(fe), reg);
/* Drop any register/constant state for |fe|, leaving both halves marked as
 * memory-resident.  (Register release lines, if any, elided: 961 -> 964.) */
961 FrameState::discardFe(FrameEntry *fe)
964 fe->type.setMemory();
965 fe->data.setMemory();
/* Push a new stack entry that is a copy of slot |index|.  Constants are
 * duplicated outright; otherwise the new entry points at the *root* backing
 * entry (copies of copies are flattened) and tracker ordering is fixed up
 * so the backing always precedes its copies. */
969 FrameState::pushCopyOf(uint32 index)
971 FrameEntry *backing = entryFor(index);
972 FrameEntry *fe = rawPush();
974 if (backing->isConstant()) {
975 fe->setConstant(Jsvalify(backing->getValue()));
977 if (backing->isTypeKnown())
978 fe->setType(backing->getKnownType());
980 fe->type.invalidate();
981 fe->isNumber = backing->isNumber;
982 fe->data.invalidate();
983 if (backing->isCopy()) {
/* Flatten: copy the root, never a copy-of-a-copy. */
984 backing = backing->copyOf();
985 fe->setCopyOf(backing);
987 fe->setCopyOf(backing);
988 backing->setCopied();
991 /* Maintain tracker ordering guarantees for copies. */
992 JS_ASSERT(backing->isCopied());
993 if (fe->trackerIndex() < backing->trackerIndex())
994 swapInTracker(fe, backing);
/* Uncopy helper (tracker walk): find the lowest-in-frame copy of |original|
 * to become the new backing store, then repoint all other copies at it and
 * restore tracker ordering.  NOTE(review): elided lines (1009 -> 1012,
 * 1012 -> 1020, 1054 -> 1057) include the first-copy bookkeeping and early
 * returns; the inline comment at 1048 is itself garbled ("and :."). */
999 FrameState::walkTrackerForUncopy(FrameEntry *original)
1001 uint32 firstCopy = InvalidIndex;
1002 FrameEntry *bestFe = NULL;
1004 for (uint32 i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
1005 FrameEntry *fe = tracker[i];
1008 if (fe->isCopy() && fe->copyOf() == original) {
1009 if (firstCopy == InvalidIndex) {
/* Prefer the copy lowest in the frame, not first in the tracker. */
1012 } else if (fe < bestFe) {
1020 JS_ASSERT(firstCopy == InvalidIndex);
1025 JS_ASSERT(firstCopy != InvalidIndex);
1027 JS_ASSERT(bestFe > original);
1029 /* Mark all extra copies as copies of the new backing index. */
1030 bestFe->setCopyOf(NULL);
1032 bestFe->setCopied();
1033 for (uint32 i = firstCopy; i < tracker.nentries; i++) {
1034 FrameEntry *other = tracker[i];
1035 if (other >= sp || other == bestFe)
1038 /* The original must be tracked before copies. */
1039 JS_ASSERT(other != original);
1041 if (!other->isCopy() || other->copyOf() != original)
1044 other->setCopyOf(bestFe);
1047 * This is safe even though we're mutating during iteration. There
1048 * are two cases. The first is that both indexes are <= i, and :.
1049 * will never be observed. The other case is we're placing the
1050 * other FE such that it will be observed later. Luckily, copyOf()
1051 * will return != original, so nothing will happen.
1053 if (other->trackerIndex() < bestFe->trackerIndex())
1054 swapInTracker(bestFe, other);
1057 bestFe->setNotCopied();
/* Uncopy helper (frame walk): linear scan of the frame above |original| —
 * the first copy found becomes the new backing (it is necessarily lowest),
 * and subsequent copies are repointed at it.  NOTE(review): the bestFe-NULL
 * first-iteration branch is partially elided (1078 -> 1081). */
1064 FrameState::walkFrameForUncopy(FrameEntry *original)
1066 FrameEntry *bestFe = NULL;
1069 /* It's only necessary to visit as many FEs are being tracked. */
1070 uint32 maxvisits = tracker.nentries;
1072 for (FrameEntry *fe = original + 1; fe < sp && maxvisits; fe++) {
1073 if (!fe->isTracked())
1078 if (fe->isCopy() && fe->copyOf() == original) {
1081 bestFe->setCopyOf(NULL);
1083 fe->setCopyOf(bestFe);
1084 if (fe->trackerIndex() < bestFe->trackerIndex())
1085 swapInTracker(bestFe, fe);
1092 bestFe->setCopied();
/* Promote one copy of |original| to be the new backing store (used before
 * |original| is popped or overwritten).  Chooses between the tracker walk
 * and the frame walk by estimated cost, then transfers register state from
 * |original| to the chosen entry.  See bug 583684 for the selection rule. */
1098 FrameState::uncopy(FrameEntry *original)
1100 JS_ASSERT(original->isCopied());
1103 * Copies have three critical invariants:
1104 * 1) The backing store precedes all copies in the tracker.
1105 * 2) The backing store precedes all copies in the FrameState.
1106 * 3) The backing store of a copy cannot be popped from the stack
1107 * while the copy is still live.
1109 * Maintaining this invariant iteratively is kind of hard, so we choose
1110 * the "lowest" copy in the frame up-front.
1112 * For example, if the stack is:
1114 * And the tracker has:
1117 * If B, C, and D are copies of A - we will walk the tracker to the end
1118 * and select B, not D (see bug 583684).
1120 * Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
1121 * sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
1122 * With large scripts this may be a problem worth investigating. Note that
1123 * the tracker is walked twice, so we multiply by 2 for pessimism.
1126 if ((tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
1127 fe = walkFrameForUncopy(original);
1129 fe = walkTrackerForUncopy(original);
1131 original->setNotCopied();
1136 * Switch the new backing store to the old backing store. During
1137 * this process we also necessarily make sure the copy can be
1140 if (!original->isTypeKnown()) {
1142 * If the copy is unsynced, and the original is in memory,
1143 * give the original a register. We do this below too; it's
1144 * okay if it's spilled.
1146 if (original->type.inMemory() && !fe->type.synced())
1147 tempRegForType(original);
1148 fe->type.inherit(original->type);
1149 if (fe->type.inRegister())
/* The register now belongs to the promoted entry. */
1150 regstate[fe->type.reg()].reassociate(fe);
1152 JS_ASSERT(fe->isTypeKnown());
1153 JS_ASSERT(fe->getKnownType() == original->getKnownType());
1155 if (original->data.inMemory() && !fe->data.synced())
1156 tempRegForData(original);
1157 fe->data.inherit(original->data);
1158 if (fe->data.inRegister())
1159 regstate[fe->data.reg()].reassociate(fe);
/* Post-store fixup for locals/args.  NOTE(review): the entire body is
 * elided in this copy (line numbers jump 1169 -> 1179) — only the comment
 * describing the intent survives. */
1165 FrameState::finishStore(FrameEntry *fe, bool closed)
1167 // Make sure the backing store entry is synced to memory, then if it's
1168 // closed, forget it entirely (removing all copies) and reset it to a
1169 // synced, in-memory state.
/* Store the top of stack into local slot |n|; closed-over locals (and any
 * store inside a try block) go through finishStore() to force a sync.
 * NOTE(review): the early-return between lines 1185 and 1188 is elided. */
1179 FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
1181 FrameEntry *local = getLocal(n);
1182 storeTop(local, popGuaranteed, typeChange);
1184 bool closed = isClosedVar(n);
1185 if (!closed && !inTryBlock)
1188 finishStore(local, closed);
/* Store the top of stack into argument slot |n|; always synced immediately
 * because args may be read through f.arguments. */
1192 FrameState::storeArg(uint32 n, bool popGuaranteed)
1194 // Note that args are always immediately synced, because they can be
1195 // aliased (but not written to) via f.arguments.
1196 FrameEntry *arg = getArg(n);
1197 storeTop(arg, popGuaranteed, true);
1198 finishStore(arg, isClosedArg(n));
/* Release everything associated with |fe| before it is overwritten.
 * NOTE(review): body mostly elided (1204 -> 1206, and everything after
 * 1206) — presumably uncopy() is called in the gap; confirm upstream. */
1202 FrameState::forgetEntry(FrameEntry *fe)
1204 if (fe->isCopied()) {
1206 if (!fe->isCopied())
/* Store the top-of-stack value into |target| (a local/arg slot), making the
 * old top a copy of |target| where possible so the value isn't duplicated.
 * Handles: self-assignment no-op, constant propagation, copy-of-lower-slot
 * rewriting, and redirecting existing copies of the top's backing entry.
 * NOTE(review): multiple elided lines (1220 -> 1224 early return,
 * 1232 -> 1237, 1266 -> 1271, 1293 -> 1298 'copied = true', 1314 -> 1317,
 * 1322 -> 1326) — control flow here is incomplete in this copy. */
1214 FrameState::storeTop(FrameEntry *target, bool popGuaranteed, bool typeChange)
1216 bool wasSynced = target->type.synced();
1217 /* Detect something like (x = x) which is a no-op. */
1218 FrameEntry *top = peek(-1);
1219 if (top->isCopy() && top->copyOf() == target) {
1220 JS_ASSERT(target->isCopied());
1224 /* Completely invalidate the local variable. */
1225 forgetEntry(target);
1226 target->resetUnsynced();
1228 /* Constants are easy to propagate. */
1229 if (top->isConstant()) {
1230 target->setCopyOf(NULL);
1231 target->setNotCopied();
1232 target->setConstant(Jsvalify(top->getValue()));
1237 * When dealing with copies, there are three important invariants:
1239 * 1) The backing store precedes all copies in the tracker.
1240 * 2) The backing store precedes all copies in the FrameState.
1241 * 2) The backing store of a local is never a stack slot, UNLESS the local
1242 * variable itself is a stack slot (blocks) that precedes the stack
1245 * If the top is a copy, and the second condition holds true, the local
1246 * can be rewritten as a copy of the original backing slot. If the first
1247 * condition does not hold, force it to hold by swapping in-place.
1249 FrameEntry *backing = top;
1250 bool copied = false;
1251 if (top->isCopy()) {
1252 backing = top->copyOf();
1253 JS_ASSERT(backing->trackerIndex() < top->trackerIndex());
1255 if (backing < target) {
1256 /* local.idx < backing.idx means local cannot be a copy yet */
1257 if (target->trackerIndex() < backing->trackerIndex())
1258 swapInTracker(backing, target);
1259 target->setNotCopied();
1260 target->setCopyOf(backing);
1261 if (backing->isTypeKnown())
1262 target->setType(backing->getKnownType());
1264 target->type.invalidate();
1265 target->data.invalidate();
1266 target->isNumber = backing->isNumber;
1271 * If control flow lands here, then there was a bytecode sequence like
1277 * The problem is slot N can't be backed by M if M could be popped
1278 * before N. We want a guarantee that when we pop M, even if it was
1279 * copied, it has no outstanding copies.
1281 * Because of |let| expressions, it's kind of hard to really know
1282 * whether a region on the stack will be popped all at once. Bleh!
1284 * This should be rare except in browser code (and maybe even then),
1285 * but even so there's a quick workaround. We take all copies of the
1286 * backing fe, and redirect them to be copies of the destination.
1288 for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
1289 FrameEntry *fe = tracker[i];
1292 if (fe->isCopy() && fe->copyOf() == backing) {
1293 fe->setCopyOf(target);
1298 backing->setNotCopied();
1301 * This is valid from the top->isCopy() path because we're guaranteed a
1302 * consistent ordering - all copies of |backing| are tracked after
1303 * |backing|. Transitively, only one swap is needed.
1305 if (backing->trackerIndex() < target->trackerIndex())
1306 swapInTracker(backing, target);
1309 * Move the backing store down - we spill registers here, but we could be
1310 * smarter and re-use the type reg.
1312 RegisterID reg = tempRegForData(backing);
1313 target->data.setRegister(reg);
1314 regstate[reg].reassociate(target);
1317 if (backing->isTypeKnown()) {
1318 target->setType(backing->getKnownType());
1320 RegisterID reg = tempRegForType(backing);
1321 target->type.setRegister(reg);
1322 regstate[reg].reassociate(target);
1326 masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(target));
1327 target->type.setMemory();
1330 if (!backing->isTypeKnown())
1331 backing->type.invalidate();
1332 backing->data.invalidate();
/* Reverse the relationship: the old top now copies the target slot. */
1333 backing->setCopyOf(target);
1334 backing->isNumber = target->isNumber;
1336 JS_ASSERT(top->copyOf() == target);
1339 * Right now, |backing| is a copy of |target| (note the reversal), but
1340 * |target| is not marked as copied. This is an optimization so uncopy()
1341 * may avoid frame traversal.
1343 * There are two cases where we must set the copy bit, however:
1344 * - The fixup phase redirected more copies to |target|.
1345 * - An immediate pop is not guaranteed.
1347 if (copied || !popGuaranteed)
1348 target->setCopied();
/* Store the top value into the slot |n| below it, then (in elided code)
 * pop the intervening entries. */
1352 FrameState::shimmy(uint32 n)
1354 JS_ASSERT(sp - n >= spBase);
1355 int32 depth = 0 - int32(n);
1356 storeTop(peek(depth - 1), true);
/* Store the top value |n| slots down (n is negative), then (in elided code)
 * pop the top. */
1361 FrameState::shift(int32 n)
1364 JS_ASSERT(sp + n - 1 >= spBase);
1365 storeTop(peek(n - 1), true);
/* Fill |vr| with a pinned rematerialization of |fe|: constant, known-type +
 * data reg, or full type/data reg pair.  Registers are pinned so later
 * allocation can't spill them; sync flags are captured last because the
 * allocations above may themselves trigger a sync.
 * NOTE(review): the pinReg(dataReg) call between 1379 and 1382 is elided. */
1370 FrameState::pinEntry(FrameEntry *fe, ValueRemat &vr)
1372 if (fe->isConstant()) {
1373 vr = ValueRemat::FromConstant(fe->getValue());
1375 // Pin the type register so it can't spill.
1376 MaybeRegisterID maybePinnedType = maybePinType(fe);
1378 // Get and pin the data register.
1379 RegisterID dataReg = tempRegForData(fe);
1382 if (fe->isTypeKnown()) {
1383 vr = ValueRemat::FromKnownType(fe->getKnownType(), dataReg);
1385 // The type might not be loaded yet, so unpin for simplicity.
1386 maybeUnpinReg(maybePinnedType);
1388 vr = ValueRemat::FromRegisters(tempRegForType(fe), dataReg);
1389 pinReg(vr.typeReg());
1393 // Set these bits last, since allocation could have caused a sync.
1394 vr.isDataSynced = fe->data.synced();
1395 vr.isTypeSynced = fe->type.synced();
/*
 * Undo pinEntry(): release the register pins recorded in |vr|.
 * Constants pinned nothing; a known-type entry pinned only its data
 * register; otherwise both type and data registers were pinned.
 */
1399 FrameState::unpinEntry(const ValueRemat &vr)
1401 if (!vr.isConstant()) {
/* Only an unknown-type entry had its type register pinned. */
1402 if (!vr.isTypeKnown())
1403 unpinReg(vr.typeReg());
1404 unpinReg(vr.dataReg());
/*
 * Emit stores (into |masm|) so that |fe|'s in-memory slot at addressOf(fe)
 * reflects the value described by |vr|, skipping pieces already synced.
 * On punboxed 64-bit, type+payload are one 8-byte store; on nunboxed
 * 32-bit, payload and type tag are stored separately.
 * NOTE(review): else-branches and the closing #endif/brace are dropped in
 * this extract — confirm nesting against the upstream file.
 */
1409 FrameState::ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat &vr)
1411 #if defined JS_PUNBOX64
/* One combined store covers both halves on 64-bit. */
1412 if (!vr.isDataSynced || !vr.isTypeSynced)
1413 masm.storeValue(vr, addressOf(fe));
1414 #elif defined JS_NUNBOX32
/* Constants: store the whole Value if either half is stale. */
1415 if (vr.isConstant()) {
1416 if (!vr.isDataSynced || !vr.isTypeSynced)
1417 masm.storeValue(vr.value(), addressOf(fe));
/* Non-constant: sync payload and type tag independently. */
1419 if (!vr.isDataSynced)
1420 masm.storePayload(vr.dataReg(), addressOf(fe));
1421 if (!vr.isTypeSynced) {
/* A known type is stored as an immediate tag; otherwise from its reg. */
1422 if (vr.isTypeKnown())
1423 masm.storeTypeTag(ImmType(vr.knownType()), addressOf(fe));
1425 masm.storeTypeTag(vr.typeReg(), addressOf(fe));
/*
 * Helper used by allocForBinary(): if |info| already lives in a register,
 * record it in |maybe| (and, per the callers, report that so the register
 * can be pinned).
 * NOTE(review): the return type above and the body after the inRegister()
 * test are dropped in this extract — presumably it returns bool; confirm
 * against the upstream file.
 */
1432 AllocHelper(RematInfo &info, MaybeRegisterID &maybe)
1434 if (info.inRegister()) {
/*
 * Register allocation for a binary op whose two operands are the SAME
 * frame entry (e.g. x * x). Loads type/data for |fe|, then picks a
 * mutable result register: a fresh one if available (copying the data in),
 * otherwise the data register itself is taken over and |fe| is flagged for
 * rematerialization. The type register is pinned only while other
 * allocations below could evict it.
 * NOTE(review): else-branches/braces are dropped in this extract —
 * confirm nesting against the upstream file.
 */
1442 FrameState::allocForSameBinary(FrameEntry *fe, JSOp op, BinaryAlloc &alloc)
/* Unknown type: bring the type into a register and pin it while we
 * allocate the data and result registers below. */
1444 if (!fe->isTypeKnown()) {
1445 alloc.lhsType = tempRegForType(fe);
1446 pinReg(alloc.lhsType.reg());
1449 alloc.lhsData = tempRegForData(fe);
/* Prefer a free register for the (mutable) result, preserving lhsData. */
1451 if (!freeRegs.empty()) {
1452 alloc.result = allocReg();
1453 masm.move(alloc.lhsData.reg(), alloc.result);
1454 alloc.lhsNeedsRemat = false;
/* No free registers: clobber the data register and remember that |fe|
 * must be rematerialized afterwards. */
1456 alloc.result = alloc.lhsData.reg();
1457 takeReg(alloc.result);
1458 alloc.lhsNeedsRemat = true;
/* Balance the pin taken above. */
1461 if (alloc.lhsType.isSet())
1462 unpinReg(alloc.lhsType.reg());
/*
 * Ensure both halves (type and data) of |fe| are available in registers,
 * returning them through |type| and |data|. Pieces that are constants are
 * left unset. While loading one half, the other half's register is pinned
 * so the load cannot evict it — that ordering is the point of this code.
 * NOTE(review): else-branches/early returns are dropped in this extract
 * (embedded numbering is non-contiguous) — confirm control flow against
 * the upstream file.
 */
1466 FrameState::ensureFullRegs(FrameEntry *fe, MaybeRegisterID *type, MaybeRegisterID *data)
/* Copies delegate to their backing entry. */
1468 fe = fe->isCopy() ? fe->copyOf() : fe;
1470 JS_ASSERT(!data->isSet() && !type->isSet());
/* Case 1: type is already register/constant resident. */
1471 if (!fe->type.inMemory()) {
1472 if (fe->type.inRegister())
1473 *type = fe->type.reg();
1474 if (fe->data.isConstant())
1476 if (fe->data.inRegister()) {
1477 *data = fe->data.reg();
/* Pin the type reg while loading data, so the load can't spill it. */
1480 if (fe->type.inRegister())
1481 pinReg(fe->type.reg());
1482 *data = tempRegForData(fe);
1483 if (fe->type.inRegister())
1484 unpinReg(fe->type.reg());
/* Case 2: data is already register/constant resident. */
1485 } else if (!fe->data.inMemory()) {
1486 if (fe->data.inRegister())
1487 *data = fe->data.reg();
1488 if (fe->type.isConstant())
1490 if (fe->type.inRegister()) {
1491 *type = fe->type.reg();
/* Pin the data reg while loading the type, mirroring case 1. */
1494 if (fe->data.inRegister())
1495 pinReg(fe->data.reg());
1496 *type = tempRegForType(fe);
1497 if (fe->data.inRegister())
1498 unpinReg(fe->data.reg());
/* Case 3: both halves are in memory — load data first, pin it, then type. */
1500 *data = tempRegForData(fe);
1501 pinReg(data->reg());
1502 *type = tempRegForType(fe);
1503 unpinReg(data->reg());
/*
 * Register allocation for a two-operand binary op. Strategy:
 *   1. Pin any registers the operands already occupy.
 *   2. Load missing type/data pieces from memory, pinning as we go.
 *   3. Materialize constants into registers where the ISA demands it
 *      (multiplication immediates; non-commutative LHS).
 *   4. Choose a mutable result register holding a copy of the LHS (or the
 *      RHS, for commutative ops), preferring candidates that avoid syncs.
 *   5. Unpin everything.
 * NOTE(review): this extract is missing a large span (embedded numbering
 * jumps 1539 -> 1561) — presumably the switch on |op| that classifies
 * commutativity and feeds |commu|; the signature's trailing parameters are
 * also cut off. Confirm against the upstream file.
 */
1508 FrameState::allocForBinary(FrameEntry *lhs, FrameEntry *rhs, JSOp op, BinaryAlloc &alloc,
/* Operate on the backing stores of copies. */
1511 FrameEntry *backingLeft = lhs;
1512 FrameEntry *backingRight = rhs;
1514 if (backingLeft->isCopy())
1515 backingLeft = backingLeft->copyOf();
1516 if (backingRight->isCopy())
1517 backingRight = backingRight->copyOf();
1520 * For each remat piece of both FEs, if a register is assigned, get it now
1521 * and pin it. This is safe - constants and known types will be avoided.
1523 if (AllocHelper(backingLeft->type, alloc.lhsType))
1524 pinReg(alloc.lhsType.reg());
1525 if (AllocHelper(backingLeft->data, alloc.lhsData))
1526 pinReg(alloc.lhsData.reg());
1527 if (AllocHelper(backingRight->type, alloc.rhsType))
1528 pinReg(alloc.rhsType.reg());
1529 if (AllocHelper(backingRight->data, alloc.rhsData))
1530 pinReg(alloc.rhsData.reg());
1532 /* For each type without a register, give it a register if needed. */
1533 if (!alloc.lhsType.isSet() && backingLeft->type.inMemory()) {
1534 alloc.lhsType = tempRegForType(lhs);
1535 pinReg(alloc.lhsType.reg());
1537 if (!alloc.rhsType.isSet() && backingRight->type.inMemory()) {
1538 alloc.rhsType = tempRegForType(rhs);
1539 pinReg(alloc.rhsType.reg());
/* NOTE(review): default arm of the (missing) switch over |op|. */
1561 JS_NOT_REACHED("unknown op");
1566 * Data is a little more complicated. If the op is MUL, not all CPUs
1567 * have multiplication on immediates, so a register is needed. Also,
1568 * if the op is not commutative, the LHS _must_ be in a register.
/* At most one operand may be a constant here. */
1570 JS_ASSERT_IF(lhs->isConstant(), !rhs->isConstant());
1571 JS_ASSERT_IF(rhs->isConstant(), !lhs->isConstant());
1573 if (!alloc.lhsData.isSet()) {
1574 if (backingLeft->data.inMemory()) {
1575 alloc.lhsData = tempRegForData(lhs);
1576 pinReg(alloc.lhsData.reg());
/* Constant LHS still needs a register for MUL or non-commutative ops;
 * that scratch register is recorded in extraFree for later release. */
1577 } else if (op == JSOP_MUL || !commu) {
1578 JS_ASSERT(lhs->isConstant());
1579 alloc.lhsData = allocReg();
1580 alloc.extraFree = alloc.lhsData;
1581 masm.move(Imm32(lhs->getValue().toInt32()), alloc.lhsData.reg());
1584 if (!alloc.rhsData.isSet()) {
1585 if (backingRight->data.inMemory()) {
1586 alloc.rhsData = tempRegForData(rhs);
1587 pinReg(alloc.rhsData.reg());
/* Constant RHS needs a register only for MUL (no imm multiply). */
1588 } else if (op == JSOP_MUL) {
1589 JS_ASSERT(rhs->isConstant());
1590 alloc.rhsData = allocReg();
1591 alloc.extraFree = alloc.rhsData;
1592 masm.move(Imm32(rhs->getValue().toInt32()), alloc.rhsData.reg());
1596 alloc.lhsNeedsRemat = false;
1597 alloc.rhsNeedsRemat = false;
1603 * Now a result register is needed. It must contain a mutable copy of the
1604 * LHS. For commutative operations, we can opt to use the RHS instead. At
1605 * this point, if for some reason either must be in a register, that has
1606 * already been guaranteed at this point.
1608 if (!freeRegs.empty()) {
1609 /* Free reg - just grab it. */
1610 alloc.result = allocReg();
1611 if (!alloc.lhsData.isSet()) {
1612 JS_ASSERT(alloc.rhsData.isSet());
1614 masm.move(alloc.rhsData.reg(), alloc.result);
1615 alloc.resultHasRhs = true;
1617 masm.move(alloc.lhsData.reg(), alloc.result);
1618 alloc.resultHasRhs = false;
1622 * No free regs. Find a good candidate to re-use. Best candidates don't
1623 * require syncs on the inline path.
1625 bool leftInReg = backingLeft->data.inRegister();
1626 bool rightInReg = backingRight->data.inRegister();
1627 bool leftSynced = backingLeft->data.synced();
1628 bool rightSynced = backingRight->data.synced();
/* Prefer clobbering the LHS when forced (non-commutative) or when it is
 * the cheaper candidate (already synced, or the RHS is no better). */
1629 if (!commu || (leftInReg && (leftSynced || (!rightInReg || !rightSynced)))) {
1630 JS_ASSERT(backingLeft->data.inRegister() || !commu);
1631 JS_ASSERT_IF(backingLeft->data.inRegister(),
1632 backingLeft->data.reg() == alloc.lhsData.reg());
1633 if (backingLeft->data.inRegister()) {
/* Take over the LHS data register; it must be rematerialized later. */
1634 alloc.result = backingLeft->data.reg();
1635 unpinReg(alloc.result);
1636 takeReg(alloc.result);
1637 alloc.lhsNeedsRemat = true;
1639 /* For now, just spill... */
1640 alloc.result = allocReg();
1641 masm.move(alloc.lhsData.reg(), alloc.result);
1643 alloc.resultHasRhs = false;
/* Commutative op: clobber the RHS data register instead. */
1646 JS_ASSERT(!leftInReg || (rightInReg && rightSynced));
1647 alloc.result = backingRight->data.reg();
1648 unpinReg(alloc.result);
1649 takeReg(alloc.result);
1650 alloc.resultHasRhs = true;
1651 alloc.rhsNeedsRemat = true;
1656 /* Unpin everything that was pinned. */
1657 if (backingLeft->type.inRegister())
1658 unpinReg(backingLeft->type.reg());
1659 if (backingRight->type.inRegister())
1660 unpinReg(backingRight->type.reg());
1661 if (backingLeft->data.inRegister())
1662 unpinReg(backingLeft->data.reg());
1663 if (backingRight->data.inRegister())
1664 unpinReg(backingRight->data.reg());
/*
 * If |fe|'s data (payload) currently lives in a register, pin that
 * register and return it; otherwise return an unset MaybeRegisterID.
 * Copies are resolved to their backing entry first. Balance with
 * maybeUnpinReg().
 */
1668 FrameState::maybePinData(FrameEntry *fe)
1670 fe = fe->isCopy() ? fe->copyOf() : fe;
1671 if (fe->data.inRegister()) {
1672 pinReg(fe->data.reg());
1673 return fe->data.reg();
1675 return MaybeRegisterID();
/*
 * Type-half counterpart of maybePinData(): if |fe|'s type tag currently
 * lives in a register, pin that register and return it; otherwise return
 * an unset MaybeRegisterID. Copies are resolved to their backing entry
 * first. Balance with maybeUnpinReg().
 */
1679 FrameState::maybePinType(FrameEntry *fe)
1681 fe = fe->isCopy() ? fe->copyOf() : fe;
1682 if (fe->type.inRegister()) {
1683 pinReg(fe->type.reg());
1684 return fe->type.reg();
1686 return MaybeRegisterID();
1690 FrameState::maybeUnpinReg(MaybeRegisterID reg)
1693 unpinReg(reg.reg());