1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * David Anderson <danderson@mozilla.com>
25 * David Mandelin <dmandelin@mozilla.com>
26 * Jan de Mooij <jandemooij@gmail.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "MethodJIT.h"
48 #include "StubCalls.h"
51 #include "ICChecker.h"
53 #include "assembler/jit/ExecutableAllocator.h"
54 #include "assembler/assembler/LinkBuffer.h"
55 #include "FrameState-inl.h"
56 #include "jsobjinlines.h"
57 #include "jsscriptinlines.h"
58 #include "InlineFrameAssembler.h"
59 #include "jscompartment.h"
60 #include "jsobjinlines.h"
61 #include "jsopcodeinlines.h"
62 #include "jshotloop.h"
64 #include "jsautooplen.h"
67 using namespace js::mjit;
68 #if defined(JS_POLYIC) || defined(JS_MONOIC)
69 using namespace js::mjit::ic;
72 #define RETURN_IF_OOM(retval) \
74 if (oomInVector || masm.oom() || stubcc.masm.oom()) \
78 #if defined(JS_METHODJIT_SPEW)
79 static const char *OpcodeNames[] = {
80 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
81 # include "jsopcode.tbl"
// Compiler constructor: snapshots the script/function/constructing state from
// the stack frame and initializes every per-compilation side vector with a
// CompilerAllocPolicy bound to this compiler (so allocation failures are
// recorded and later caught by RETURN_IF_OOM / CHECK_STATUS).
// NOTE(review): this listing is elided — several initializer lines (including
// the tail of the 'fun' initializer and the constructor body) are not visible.
86 mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp)
// Scope chain and its global object are captured from the frame eagerly.
90 scopeChain(&fp->scopeChain()),
91 globalObj(scopeChain->getGlobal()),
// 'fun' is only taken from non-eval function frames; the alternative branch
// of this conditional initializer is elided — presumably NULL. TODO confirm.
92 fun(fp->isFunctionFrame() && !fp->isEvalFrame()
95 isConstructing(fp->isConstructing()),
// Lazily-created state starts out NULL; see performCompilation / loadOldTraps.
96 analysis(NULL), jumpMap(NULL), savedTraps(NULL),
97 frame(cx, script, fun, masm),
98 branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
// Monomorphic IC records (globals, calls, equality, trace hints).
100 getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
101 setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
102 callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
103 equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
104 traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
// Polymorphic IC records, only in JS_POLYIC builds.
106 #if defined JS_POLYIC
107 pics(CompilerAllocPolicy(cx, *thisFromCtor())),
108 getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
109 setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
111 callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
112 callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
113 doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
114 jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
115 jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
// Out-of-line ("slow path") code generator; shares this compiler's frame state.
116 stubcc(cx, *thisFromCtor(), frame, script),
117 debugMode_(cx->compartment->debugMode),
118 #if defined JS_TRACER
119 addTraceHints(cx->traceJitEnabled),
122 applyTricks(NoApplyTricks)
// Top-level driver: compiles this script into either its constructing
// (jitCtor) or normal-call (jitNormal) JITScript slot, and publishes the
// arity-check entry pointer the VM uses to decide jittability.
127 mjit::Compiler::compile()
// Each slot may be filled at most once per script.
129 JS_ASSERT_IF(isConstructing, !script->jitCtor);
130 JS_ASSERT_IF(!isConstructing, !script->jitNormal);
132 JITScript **jit = isConstructing ? &script->jitCtor : &script->jitNormal;
133 void **checkAddr = isConstructing
134 ? &script->jitArityCheckCtor
135 : &script->jitArityCheckNormal;
137 CompileStatus status = performCompilation(jit);
138 if (status == Compile_Okay) {
139 // Global scripts don't have an arity check entry. That's okay, we
140 // just need a pointer so the VM can quickly decide whether this
141 // method can be JIT'd or not. Global scripts cannot be IC'd, since
142 // they have no functions, so there is no danger.
143 *checkAddr = (*jit)->arityCheckEntry
144 ? (*jit)->arityCheckEntry
145 : (*jit)->invokeEntry;
// Failure path: poison the pointer so the VM never retries this script.
// NOTE(review): the guard between the branches (line 146) is elided here.
147 *checkAddr = JS_UNJITTABLE_SCRIPT;
153 #define CHECK_STATUS(expr) \
155 CompileStatus status_ = (expr); \
156 if (status_ != Compile_Okay) { \
157 if (oomInVector || masm.oom() || stubcc.masm.oom()) \
158 js_ReportOutOfMemory(cx); \
// Runs the full pipeline for one script: bytecode analysis, jumpMap
// allocation, closed-var bookkeeping, then prologue/body/epilogue generation
// and final linking into *jitp. Returns Compile_Okay, Compile_Error (OOM),
// or Compile_Abort (unsupported bytecode; interpreter keeps running it).
164 mjit::Compiler::performCompilation(JITScript **jitp)
166 JaegerSpew(JSpew_Scripts, "compiling script (file \"%s\") (line \"%d\") (length \"%d\")\n",
167 script->filename, script->lineno, script->length);
// The analysis object lives on this stack frame; 'this->analysis' points at
// it only for the duration of this compilation.
169 analyze::Script analysis;
172 analysis.analyze(cx, script);
174 if (analysis.OOM()) {
175 js_ReportOutOfMemory(cx);
176 return Compile_Error;
// Non-OOM analysis failure means the bytecode is unsupported — abort, don't error.
178 if (analysis.failed()) {
179 JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
180 return Compile_Abort;
183 this->analysis = &analysis;
// NOTE(review): the allocation this OOM report guards is elided from this listing.
186 js_ReportOutOfMemory(cx);
187 return Compile_Error;
// One Label per bytecode offset; entries are filled in as code is emitted.
190 jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
192 js_ReportOutOfMemory(cx);
193 return Compile_Error;
196 for (uint32 i = 0; i < script->length; i++)
197 jumpMap[i] = Label();
200 #ifdef JS_METHODJIT_SPEW
205 /* Initialize PC early so stub calls in the prologue can be fallible. */
209 script->debugMode = debugMode();
// Tell the frame state which vars/args are closed over, so it won't keep
// them purely in registers.
212 for (uint32 i = 0; i < script->nClosedVars; i++)
213 frame.setClosedVar(script->getClosedVar(i));
214 for (uint32 i = 0; i < script->nClosedArgs; i++)
215 frame.setClosedArg(script->getClosedArg(i));
// The pipeline proper; CHECK_STATUS propagates any non-Okay status.
217 CHECK_STATUS(generatePrologue());
218 CHECK_STATUS(generateMethod());
219 CHECK_STATUS(generateEpilogue());
220 CHECK_STATUS(finishThisUp(jitp));
222 #ifdef JS_METHODJIT_SPEW
224 JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
227 JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%ld\")\n",
228 (*jitp)->code.m_code.executableAddress(), (*jitp)->code.m_size);
// Destructor: releases compiler-owned heap buffers allocated during
// compilation. NOTE(review): listing elided — the matching free for jumpMap
// presumably sits on the missing line before this one; confirm.
235 mjit::Compiler::~Compiler()
238 cx->free(savedTraps);
// VM entry point: attempt to method-JIT the script running in 'fp'.
// Never inlined so the (large) compiler machinery stays off the caller's
// hot path.
241 CompileStatus JS_NEVER_INLINE
242 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
244 JS_ASSERT(cx->fp() == fp);
// Scripts using sharp variables (#n=/#n#) are not supported by the JIT.
246 #if JS_HAS_SHARP_VARS
247 if (fp->script()->hasSharps)
248 return Compile_Abort;
251 // Ensure that constructors have at least one slot.
252 if (fp->isConstructing() && !fp->script()->nslots)
253 fp->script()->nslots++;
// Record, per bytecode offset, which sites of a previous compilation were
// trap call sites, so recompilation can emit compatible return paths for
// frames still sitting in those traps (see the savedTraps branch in
// generateMethod).
261 mjit::Compiler::loadOldTraps(const Vector<CallSite> &sites)
263 savedTraps = (bool *)cx->calloc(sizeof(bool) * script->length);
// NOTE(review): elided listing — the calloc null-check and any filtering of
// which CallSites count as traps are not visible here.
267 for (size_t i = 0; i < sites.length(); i++) {
268 const CallSite &site = sites[i];
270 savedTraps[site.pcOffset] = true;
// Emits the method prologue: the three entry points (no-function invoke,
// fast-path call, arity-check), the stack-space guard, undefined-init of
// locals, call-object creation for heavyweight functions, scope-chain
// loading, and the debug-mode EnterScript hook.
277 mjit::Compiler::generatePrologue()
// Entry point #1: direct invoke on an already-built frame.
279 invokeLabel = masm.label();
282 * If there is no function, then this can only be called via JaegerShot(),
283 * which expects an existing frame to be initialized like the interpreter.
// 'j' skips the function-frame setup below; linked after GetCallObject.
286 Jump j = masm.jump();
289 * Entry point #2: The caller has partially constructed a frame, and
290 * either argc >= nargs or the arity check has corrected the frame.
292 invokeLabel = masm.label();
294 Label fastPath = masm.label();
296 /* Store this early on so slow paths can access it. */
297 masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
301 * Entry point #3: The caller has partially constructed a frame,
302 * but argc might be != nargs, so an arity check might be called.
304 * This loops back to entry point #2.
// The arity check lives in out-of-line (stubcc) code.
306 arityLabel = stubcc.masm.label();
307 Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
// argc == nargs: jump straight back to the fast path.
309 stubcc.crossJump(argMatch, fastPath);
311 if (JSParamReg_Argc != Registers::ArgReg1)
312 stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
314 /* Slow path - call the arity check function. Returns new fp. */
315 stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
316 stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
317 OOL_STUBCALL(stubs::FixupArity);
// FixupArity returns the (possibly relocated) frame pointer.
318 stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
319 stubcc.crossJump(stubcc.masm.jump(), fastPath);
323 * Guard that there is enough stack space. Note we include the size of
324 * a second frame, to ensure we can create a frame from call sites.
326 masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
328 Registers::ReturnReg);
329 Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
330 FrameAddress(offsetof(VMFrame, stackLimit)));
332 /* If the stack check fails... */
// ...fall out of line to HitStackQuota, then rejoin the inline path.
334 stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
335 OOL_STUBCALL(stubs::HitStackQuota);
336 stubcc.crossJump(stubcc.masm.jump(), masm.label());
340 * Set locals to undefined, as in initCallFrameLatePrologue.
341 * Skip locals which aren't closed and are known to be defined before used,
342 * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
344 for (uint32 i = 0; i < script->nfixed; i++) {
345 if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
// Locals live directly after the fixed JSStackFrame header.
346 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
347 masm.storeValue(UndefinedValue(), local);
351 /* Create the call object. */
352 if (fun->isHeavyweight()) {
353 prepareStubCall(Uses(0));
354 INLINE_STUBCALL(stubs::GetCallObject);
// Entry point #1's skip-jump lands here, past the function-frame setup.
357 j.linkTo(masm.label(), &masm);
359 if (analysis->usesScopeChain() && !fun->isHeavyweight()) {
361 * Load the scope chain into the frame if necessary. The scope chain
362 * is always set for global and eval frames, and will have been set by
363 * GetCallObject for heavyweight function frames.
365 RegisterID t0 = Registers::ReturnReg;
366 Jump hasScope = masm.branchTest32(Assembler::NonZero,
367 FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
// No scope chain set: derive it from the callee's parent object.
368 masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
369 masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
370 masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
371 hasScope.linkTo(masm.label(), &masm);
// Notify the debugger/probes that script execution is starting.
378 if (debugMode() || Probes::callTrackingActive(cx))
379 INLINE_STUBCALL(stubs::EnterScript);
385 mjit::Compiler::generateEpilogue()
// Final linking pass: copies generated fast-path (masm) and slow-path
// (stubcc.masm) code into an executable pool, then lays out and fills the
// JITScript's trailing variable-length data sections (nmap, MONOIC records,
// POLYIC records, call sites), patches all cross-references, and makes the
// code executable.
391 mjit::Compiler::finishThisUp(JITScript **jitp)
// Bail out now if any assembler or vector hit OOM during generation.
393 RETURN_IF_OOM(Compile_Error);
// Resolve forward-branch fixups recorded during generateMethod.
395 for (size_t i = 0; i < branchPatches.length(); i++) {
396 Label label = labelOf(branchPatches[i].pc);
397 branchPatches[i].jump.linkTo(label, &masm);
401 masm.forceFlushConstantPool();
402 stubcc.masm.forceFlushConstantPool();
404 JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %u, Slow code (stubcc) size = %u.\n", masm.size(), stubcc.size());
// Executable region layout: fast code, slow code, double constants, jump
// tables. NOTE(review): the stubcc size term of this sum is elided here.
406 size_t totalSize = masm.size() +
408 doubleList.length() * sizeof(double) +
409 jumpTableOffsets.length() * sizeof(void *);
411 JSC::ExecutablePool *execPool = getExecPool(script, totalSize);
413 js_ReportOutOfMemory(cx);
414 return Compile_Error;
417 uint8 *result = (uint8 *)execPool->alloc(totalSize);
420 js_ReportOutOfMemory(cx);
421 return Compile_Error;
// Copy both code streams into the pool; stubcc code follows masm code.
423 JSC::ExecutableAllocator::makeWritable(result, totalSize);
424 masm.executableCopy(result);
425 stubcc.masm.executableCopy(result + masm.size());
// LinkBuffers translate assembler labels into final executable addresses.
427 JSC::LinkBuffer fullCode(result, totalSize);
428 JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
// Count safe points so the native map can be sized exactly.
430 size_t nNmapLive = 0;
431 for (size_t i = 0; i < script->length; i++) {
432 analyze::Bytecode *opinfo = analysis->maybeCode(i);
433 if (opinfo && opinfo->safePoint)
437 /* Please keep in sync with JITScript::scriptDataSize! */
438 size_t totalBytes = sizeof(JITScript) +
439 sizeof(NativeMapEntry) * nNmapLive +
440 #if defined JS_MONOIC
441 sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
442 sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
443 sizeof(ic::CallICInfo) * callICs.length() +
444 sizeof(ic::EqualityICInfo) * equalityICs.length() +
445 sizeof(ic::TraceICInfo) * traceICs.length() +
447 #if defined JS_POLYIC
448 sizeof(ic::PICInfo) * pics.length() +
449 sizeof(ic::GetElementIC) * getElemICs.length() +
450 sizeof(ic::SetElementIC) * setElemICs.length() +
452 sizeof(CallSite) * callSites.length();
// One calloc holds the JITScript header plus all trailing sections;
// 'cursor' walks through it as each section is carved out.
454 uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
457 js_ReportOutOfMemory(cx);
458 return Compile_Error;
461 JITScript *jit = new(cursor) JITScript;
462 cursor += sizeof(JITScript);
464 jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
465 jit->invokeEntry = result;
466 jit->singleStepMode = script->singleStepMode;
// NOTE(review): elided guard — these entries are presumably only set for
// function frames (arityLabel/invokeLabel are set in generatePrologue).
468 jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
469 jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
473 * WARNING: mics(), callICs() et al depend on the ordering of these
474 * variable-length sections. See JITScript's declaration for details.
477 /* Build the pc -> ncode mapping. */
478 NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
479 jit->nNmapPairs = nNmapLive;
480 cursor += sizeof(NativeMapEntry) * jit->nNmapPairs;
482 if (jit->nNmapPairs > 0) {
483 for (size_t i = 0; i < script->length; i++) {
484 analyze::Bytecode *opinfo = analysis->maybeCode(i);
485 if (opinfo && opinfo->safePoint) {
486 Label L = jumpMap[i];
487 JS_ASSERT(L.isValid());
// NOTE(review): 'ix' is the running nmap index; its declaration/increment
// lines are elided from this listing.
488 jitNmap[ix].bcOff = i;
489 jitNmap[ix].ncode = (uint8 *)(result + masm.distanceOf(L));
494 JS_ASSERT(ix == jit->nNmapPairs);
496 #if defined JS_MONOIC
// --- GetGlobalName ICs: copy records into the JITScript and resolve the
// fast-path load/store offset each stub will later patch. ---
497 ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
498 jit->nGetGlobalNames = getGlobalNames.length();
499 cursor += sizeof(ic::GetGlobalNameIC) * jit->nGetGlobalNames;
500 for (size_t i = 0; i < jit->nGetGlobalNames; i++) {
501 ic::GetGlobalNameIC &to = getGlobalNames_[i];
502 GetGlobalNameICInfo &from = getGlobalNames[i];
503 from.copyTo(to, fullCode, stubCode);
// The assert after each narrowing store checks the offset fit its field.
505 int offset = fullCode.locationOf(from.load) - to.fastPathStart;
506 to.loadStoreOffset = offset;
507 JS_ASSERT(to.loadStoreOffset == offset);
// Bake this IC record's address into the slow-path stub call.
509 stubCode.patch(from.addrLabel, &to);
// --- SetGlobalName ICs. ---
512 ic::SetGlobalNameIC *setGlobalNames_ = (ic::SetGlobalNameIC *)cursor;
513 jit->nSetGlobalNames = setGlobalNames.length();
514 cursor += sizeof(ic::SetGlobalNameIC) * jit->nSetGlobalNames;
515 for (size_t i = 0; i < jit->nSetGlobalNames; i++) {
516 ic::SetGlobalNameIC &to = setGlobalNames_[i];
517 SetGlobalNameICInfo &from = setGlobalNames[i];
518 from.copyTo(to, fullCode, stubCode);
519 to.slowPathStart = stubCode.locationOf(from.slowPathStart);
521 int offset = fullCode.locationOf(from.store).labelAtOffset(0) -
523 to.loadStoreOffset = offset;
524 JS_ASSERT(to.loadStoreOffset == offset);
527 to.objConst = from.objConst;
528 to.shapeReg = from.shapeReg;
529 to.objReg = from.objReg;
532 offset = fullCode.locationOf(from.shapeGuardJump) -
534 to.inlineShapeJump = offset;
535 JS_ASSERT(to.inlineShapeJump == offset);
537 offset = fullCode.locationOf(from.fastPathRejoin) -
539 to.fastRejoinOffset = offset;
540 JS_ASSERT(to.fastRejoinOffset == offset);
542 stubCode.patch(from.addrLabel, &to);
// --- Call ICs: every field is an offset relative to funGuard (fast path)
// or slowPathStart (OOL path) so the IC can be repatched later. ---
545 ic::CallICInfo *jitCallICs = (ic::CallICInfo *)cursor;
546 jit->nCallICs = callICs.length();
547 cursor += sizeof(ic::CallICInfo) * jit->nCallICs;
548 for (size_t i = 0; i < jit->nCallICs; i++) {
549 jitCallICs[i].reset();
550 jitCallICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
551 jitCallICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
552 jitCallICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
554 /* Compute the hot call offset. */
555 uint32 offset = fullCode.locationOf(callICs[i].hotJump) -
556 fullCode.locationOf(callICs[i].funGuard);
557 jitCallICs[i].hotJumpOffset = offset;
558 JS_ASSERT(jitCallICs[i].hotJumpOffset == offset);
560 /* Compute the join point offset. */
561 offset = fullCode.locationOf(callICs[i].joinPoint) -
562 fullCode.locationOf(callICs[i].funGuard);
563 jitCallICs[i].joinPointOffset = offset;
564 JS_ASSERT(jitCallICs[i].joinPointOffset == offset);
566 /* Compute the OOL call offset. */
567 offset = stubCode.locationOf(callICs[i].oolCall) -
568 stubCode.locationOf(callICs[i].slowPathStart);
569 jitCallICs[i].oolCallOffset = offset;
570 JS_ASSERT(jitCallICs[i].oolCallOffset == offset);
572 /* Compute the OOL jump offset. */
573 offset = stubCode.locationOf(callICs[i].oolJump) -
574 stubCode.locationOf(callICs[i].slowPathStart);
575 jitCallICs[i].oolJumpOffset = offset;
576 JS_ASSERT(jitCallICs[i].oolJumpOffset == offset);
578 /* Compute the start of the OOL IC call. */
579 offset = stubCode.locationOf(callICs[i].icCall) -
580 stubCode.locationOf(callICs[i].slowPathStart);
581 jitCallICs[i].icCallOffset = offset;
582 JS_ASSERT(jitCallICs[i].icCallOffset == offset);
584 /* Compute the slow join point offset. */
585 offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
586 stubCode.locationOf(callICs[i].slowPathStart);
587 jitCallICs[i].slowJoinOffset = offset;
588 JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);
590 /* Compute the join point offset for continuing on the hot path. */
591 offset = stubCode.locationOf(callICs[i].hotPathLabel) -
592 stubCode.locationOf(callICs[i].funGuard);
593 jitCallICs[i].hotPathOffset = offset;
594 JS_ASSERT(jitCallICs[i].hotPathOffset == offset);
596 jitCallICs[i].pc = callICs[i].pc;
597 jitCallICs[i].frameSize = callICs[i].frameSize;
598 jitCallICs[i].funObjReg = callICs[i].funObjReg;
599 jitCallICs[i].funPtrReg = callICs[i].funPtrReg;
600 stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
601 stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
// --- Equality ICs. ---
604 ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
605 jit->nEqualityICs = equalityICs.length();
606 cursor += sizeof(ic::EqualityICInfo) * jit->nEqualityICs;
607 for (size_t i = 0; i < jit->nEqualityICs; i++) {
608 uint32 offs = uint32(equalityICs[i].jumpTarget - script->code);
609 JS_ASSERT(jumpMap[offs].isValid());
610 jitEqualityICs[i].target = fullCode.locationOf(jumpMap[offs]);
611 jitEqualityICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
612 jitEqualityICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
613 jitEqualityICs[i].stub = equalityICs[i].stub;
614 jitEqualityICs[i].lvr = equalityICs[i].lvr;
615 jitEqualityICs[i].rvr = equalityICs[i].rvr;
616 jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
617 jitEqualityICs[i].cond = equalityICs[i].cond;
618 if (equalityICs[i].jumpToStub.isSet())
619 jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
620 jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
622 stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
// --- Trace ICs: loop back-edge hints for handoff to the tracing JIT. ---
625 ic::TraceICInfo *jitTraceICs = (ic::TraceICInfo *)cursor;
626 jit->nTraceICs = traceICs.length();
627 cursor += sizeof(ic::TraceICInfo) * jit->nTraceICs;
628 for (size_t i = 0; i < jit->nTraceICs; i++) {
629 jitTraceICs[i].initialized = traceICs[i].initialized;
630 if (!traceICs[i].initialized)
633 uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
634 JS_ASSERT(jumpMap[offs].isValid());
635 jitTraceICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
636 jitTraceICs[i].jumpTarget = fullCode.locationOf(jumpMap[offs]);
637 jitTraceICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
638 jitTraceICs[i].traceData = NULL;
640 jitTraceICs[i].jumpTargetPC = traceICs[i].jumpTarget;
642 jitTraceICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
643 if (traceICs[i].slowTraceHint.isSet())
644 jitTraceICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
// Credit back-edges already observed against the hotloop threshold.
646 jitTraceICs[i].loopCounterStart = GetHotloop(cx);
648 jitTraceICs[i].loopCounter = jitTraceICs[i].loopCounterStart
649 - cx->compartment->backEdgeCount(traceICs[i].jumpTarget);
651 stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
653 #endif /* JS_MONOIC */
// Point every recorded call's return-address slot at its join point.
655 for (size_t i = 0; i < callPatches.length(); i++) {
656 CallPatchInfo &patch = callPatches[i];
658 if (patch.hasFastNcode)
659 fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
660 if (patch.hasSlowNcode)
661 stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
// --- GetElement polymorphic ICs. ---
665 ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
666 jit->nGetElems = getElemICs.length();
667 cursor += sizeof(ic::GetElementIC) * jit->nGetElems;
668 for (size_t i = 0; i < jit->nGetElems; i++) {
669 ic::GetElementIC &to = jitGetElems[i];
670 GetElementICInfo &from = getElemICs[i];
672 new (&to) ic::GetElementIC();
673 from.copyTo(to, fullCode, stubCode);
675 to.typeReg = from.typeReg;
676 to.objReg = from.objReg;
677 to.idRemat = from.id;
679 if (from.typeGuard.isSet()) {
680 int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
681 fullCode.locationOf(from.fastPathStart);
682 to.inlineTypeGuard = inlineTypeGuard;
683 JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
685 int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
686 fullCode.locationOf(from.fastPathStart);
687 to.inlineClaspGuard = inlineClaspGuard;
688 JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);
690 stubCode.patch(from.paramAddr, &to);
// --- SetElement polymorphic ICs. ---
693 ic::SetElementIC *jitSetElems = (ic::SetElementIC *)cursor;
694 jit->nSetElems = setElemICs.length();
695 cursor += sizeof(ic::SetElementIC) * jit->nSetElems;
696 for (size_t i = 0; i < jit->nSetElems; i++) {
697 ic::SetElementIC &to = jitSetElems[i];
698 SetElementICInfo &from = setElemICs[i];
700 new (&to) ic::SetElementIC();
701 from.copyTo(to, fullCode, stubCode);
703 to.strictMode = script->strictModeCode;
705 to.objReg = from.objReg;
706 to.objRemat = from.objRemat.toInt32();
707 JS_ASSERT(to.objRemat == from.objRemat.toInt32());
// Key is either a compile-time constant index or a register.
709 to.hasConstantKey = from.key.isConstant();
710 if (from.key.isConstant())
711 to.keyValue = from.key.index();
713 to.keyReg = from.key.reg();
715 int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
716 fullCode.locationOf(from.fastPathStart);
717 to.inlineClaspGuard = inlineClaspGuard;
718 JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);
720 int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
721 fullCode.locationOf(from.fastPathStart);
722 to.inlineHoleGuard = inlineHoleGuard;
723 JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);
725 CheckIsStubCall(to.slowPathCall.labelAtOffset(0));
727 to.volatileMask = from.volatileMask;
728 JS_ASSERT(to.volatileMask == from.volatileMask);
730 stubCode.patch(from.paramAddr, &to);
// --- Property ICs (PICs). ---
733 ic::PICInfo *jitPics = (ic::PICInfo *)cursor;
734 jit->nPICs = pics.length();
735 cursor += sizeof(ic::PICInfo) * jit->nPICs;
736 for (size_t i = 0; i < jit->nPICs; i++) {
737 new (&jitPics[i]) ic::PICInfo();
738 pics[i].copyTo(jitPics[i], fullCode, stubCode);
739 pics[i].copySimpleMembersTo(jitPics[i]);
741 jitPics[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
742 masm.distanceOf(pics[i].fastPathStart);
743 JS_ASSERT(jitPics[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
744 masm.distanceOf(pics[i].fastPathStart));
745 jitPics[i].shapeRegHasBaseShape = true;
746 jitPics[i].pc = pics[i].pc;
// SET-kind PICs record a value remat; GET-kind (non-NAME) PICs record the
// OOL type-check offset (negative: typeCheck precedes slowPathStart).
748 if (pics[i].kind == ic::PICInfo::SET ||
749 pics[i].kind == ic::PICInfo::SETMETHOD) {
750 jitPics[i].u.vr = pics[i].vr;
751 } else if (pics[i].kind != ic::PICInfo::NAME) {
752 if (pics[i].hasTypeCheck) {
753 int32 distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
754 stubcc.masm.distanceOf(pics[i].slowPathStart);
755 JS_ASSERT(distance <= 0);
756 jitPics[i].u.get.typeCheckOffset = distance;
759 stubCode.patch(pics[i].paramAddr, &jitPics[i]);
763 /* Link fast and slow paths together. */
764 stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
766 /* Patch all double references. */
// Double constants live immediately after the slow-path code.
767 size_t doubleOffset = masm.size() + stubcc.size();
768 double *doubleVec = (double *)(result + doubleOffset);
769 for (size_t i = 0; i < doubleList.length(); i++) {
770 DoublePatch &patch = doubleList[i];
771 doubleVec[i] = patch.d;
773 stubCode.patch(patch.label, &doubleVec[i]);
775 fullCode.patch(patch.label, &doubleVec[i]);
778 /* Generate jump tables. */
779 void **jumpVec = (void **)(doubleVec + doubleList.length());
781 for (size_t i = 0; i < jumpTableOffsets.length(); i++) {
782 uint32 offset = jumpTableOffsets[i];
783 JS_ASSERT(jumpMap[offset].isValid());
784 jumpVec[i] = (void *)(result + masm.distanceOf(jumpMap[offset]));
787 /* Patch jump table references. */
788 for (size_t i = 0; i < jumpTables.length(); i++) {
789 JumpTable &jumpTable = jumpTables[i];
790 fullCode.patch(jumpTable.label, &jumpVec[jumpTable.offsetIndex]);
793 /* Patch all outgoing calls. */
794 masm.finalize(fullCode);
795 stubcc.masm.finalize(stubCode);
// Flip the region to executable and flush the instruction cache.
797 JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
798 JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
800 /* Build the table of call sites. */
801 CallSite *jitCallSites = (CallSite *)cursor;
802 jit->nCallSites = callSites.length();
803 cursor += sizeof(CallSite) * jit->nCallSites;
804 for (size_t i = 0; i < jit->nCallSites; i++) {
805 CallSite &to = jitCallSites[i];
806 InternalCallSite &from = callSites[i];
// OOL sites are offset past the fast-path code block.
807 uint32 codeOffset = from.ool
808 ? masm.size() + from.returnOffset
810 to.initialize(codeOffset, from.pc - script->code, from.id);
// Verify the cursor consumed exactly the bytes we sized above.
813 JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);
817 /* We tolerate a race in the stats. */
818 cx->runtime->mjitMemoryUsed += totalSize + totalBytes;
// Walks a script's source notes in lockstep with the PC so generateMethod
// can tell whether a bytecode is the first op on a new source line (used to
// decide where single-step traps go). NOTE(review): the class body is cut
// off mid-method in this listing.
823 class SrcNoteLineScanner {
828 SrcNoteLineScanner(jssrcnote *sn) : offset(SN_DELTA(sn)), sn(sn) {}
// Advance past notes strictly before relpc, then inspect notes exactly at
// relpc for a line-start marker (SRC_SETLINE / SRC_NEWLINE).
830 bool firstOpInLine(ptrdiff_t relpc) {
831 while ((offset < relpc) && !SN_IS_TERMINATOR(sn)) {
833 offset += SN_DELTA(sn);
836 while ((offset == relpc) && !SN_IS_TERMINATOR(sn)) {
837 JSSrcNoteType type = (JSSrcNoteType) SN_TYPE(sn);
838 if (type == SRC_SETLINE || type == SRC_NEWLINE)
842 offset += SN_DELTA(sn);
850 #define SPEW_OPCODE() \
852 if (IsJaegerSpewChannelActive(JSpew_JSOps)) { \
853 JaegerSpew(JSpew_JSOps, " %2d ", frame.stackDepth()); \
854 js_Disassemble1(cx, script, PC, PC - script->code, \
859 #define SPEW_OPCODE()
862 #define BEGIN_CASE(name) case name:
863 #define END_CASE(name) \
865 PC += name##_LENGTH; \
870 mjit::Compiler::generateMethod()
872 mjit::AutoScriptRetrapper trapper(cx, script);
873 SrcNoteLineScanner scanner(script->notes());
877 int trap = stubs::JSTRAP_NONE;
878 if (op == JSOP_TRAP) {
879 if (!trapper.untrap(PC))
880 return Compile_Error;
882 trap |= stubs::JSTRAP_TRAP;
884 if (script->singleStepMode && scanner.firstOpInLine(PC - script->code))
885 trap |= stubs::JSTRAP_SINGLESTEP;
887 analyze::Bytecode *opinfo = analysis->maybeCode(PC);
892 if (js_CodeSpec[op].length != -1)
893 PC += js_CodeSpec[op].length;
895 PC += js_GetVariableBytecodeLength(PC);
899 frame.setInTryBlock(opinfo->inTryBlock);
900 if (opinfo->jumpTarget || trap) {
901 frame.syncAndForgetEverything(opinfo->stackDepth);
902 opinfo->safePoint = true;
904 jumpMap[uint32(PC - script->code)] = masm.label();
907 JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
910 prepareStubCall(Uses(0));
911 masm.move(Imm32(trap), Registers::ArgReg1);
912 Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap));
913 InternalCallSite site(masm.callReturnOffset(cl), PC,
914 CallSite::MAGIC_TRAP_ID, true, false);
916 } else if (savedTraps && savedTraps[PC - script->code]) {
917 // Normally when we patch return addresses, we have generated the
918 // same exact code at that site. For example, patching a stub call's
919 // return address will resume at the same stub call.
921 // In the case we're handling here, we could potentially be
922 // recompiling to remove a trap, and therefore we won't generate
923 // a call to the trap. However, we could be re-entering from that
924 // trap. The callsite will be missing, and fixing the stack will
925 // fail! Worse, we can't just put a label here, because on some
926 // platforms the stack needs to be adjusted when returning from
927 // the old trap call.
929 // To deal with this, we add a small bit of code in the OOL path
930 // that will adjust the stack and jump back into the script.
931 // Note that this uses MAGIC_TRAP_ID, which is necessary for
932 // repatching to detect the callsite as identical to the return
935 // Unfortunately, this means that if a bytecode is ever trapped,
936 // we will always generate a CallSite (either Trapped or not) for
937 // every debug recompilation of the script thereafter. The reason
938 // is that MAGIC_TRAP_ID callsites always propagate to the next
939 // recompilation. That's okay, and not worth fixing - it's a small
941 uint32 offset = stubcc.masm.distanceOf(stubcc.masm.label());
942 if (Assembler::ReturnStackAdjustment) {
943 stubcc.masm.addPtr(Imm32(Assembler::ReturnStackAdjustment),
944 Assembler::stackPointerRegister);
946 stubcc.crossJump(stubcc.masm.jump(), masm.label());
948 InternalCallSite site(offset, PC, CallSite::MAGIC_TRAP_ID, false, true);
952 /**********************
953 * BEGIN COMPILER OPS *
954 **********************/
960 BEGIN_CASE(JSOP_PUSH)
961 frame.push(UndefinedValue());
964 BEGIN_CASE(JSOP_POPV)
965 BEGIN_CASE(JSOP_SETRVAL)
967 RegisterID reg = frame.allocReg();
968 masm.load32(FrameFlagsAddress(), reg);
969 masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
970 masm.store32(reg, FrameFlagsAddress());
973 FrameEntry *fe = frame.peek(-1);
974 frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
979 BEGIN_CASE(JSOP_RETURN)
980 emitReturn(frame.peek(-1));
981 END_CASE(JSOP_RETURN)
983 BEGIN_CASE(JSOP_GOTO)
985 /* :XXX: this isn't really necessary if we follow the branch. */
986 frame.syncAndForgetEverything();
987 Jump j = masm.jump();
988 if (!jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC)))
989 return Compile_Error;
993 BEGIN_CASE(JSOP_IFEQ)
994 BEGIN_CASE(JSOP_IFNE)
995 if (!jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC)))
996 return Compile_Error;
999 BEGIN_CASE(JSOP_ARGUMENTS)
1001 * For calls of the form 'f.apply(x, arguments)' we can avoid
1002 * creating an args object by having ic::SplatApplyArgs pull
1003 * directly from the stack. To do this, we speculate here that
1004 * 'apply' actually refers to js_fun_apply. If this is not true,
1005 * the slow path in JSOP_FUNAPPLY will create the args object.
1007 if (canUseApplyTricks())
1008 applyTricks = LazyArgsObj;
1012 END_CASE(JSOP_ARGUMENTS)
1014 BEGIN_CASE(JSOP_FORARG)
1016 frame.storeArg(GET_SLOTNO(PC), true);
1018 END_CASE(JSOP_FORARG)
1020 BEGIN_CASE(JSOP_FORLOCAL)
1022 frame.storeLocal(GET_SLOTNO(PC), true);
1024 END_CASE(JSOP_FORLOCAL)
1026 BEGIN_CASE(JSOP_DUP)
1030 BEGIN_CASE(JSOP_DUP2)
1034 BEGIN_CASE(JSOP_BITOR)
1035 BEGIN_CASE(JSOP_BITXOR)
1036 BEGIN_CASE(JSOP_BITAND)
1038 END_CASE(JSOP_BITAND)
1047 /* Detect fusions. */
1048 jsbytecode *next = &PC[JSOP_GE_LENGTH];
1049 JSOp fused = JSOp(*next);
1050 if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
1053 /* Get jump target, if any. */
1054 jsbytecode *target = NULL;
1055 if (fused != JSOP_NOP)
1056 target = next + GET_JUMP_OFFSET(next);
1058 BoolStub stub = NULL;
1061 stub = stubs::LessThan;
1064 stub = stubs::LessEqual;
1067 stub = stubs::GreaterThan;
1070 stub = stubs::GreaterEqual;
1073 stub = stubs::Equal;
1076 stub = stubs::NotEqual;
1079 JS_NOT_REACHED("WAT");
1083 FrameEntry *rhs = frame.peek(-1);
1084 FrameEntry *lhs = frame.peek(-2);
1086 /* Check for easy cases that the parser does not constant fold. */
1087 if (lhs->isConstant() && rhs->isConstant()) {
1088 /* Primitives can be trivially constant folded. */
1089 const Value &lv = lhs->getValue();
1090 const Value &rv = rhs->getValue();
1092 if (lv.isPrimitive() && rv.isPrimitive()) {
1093 bool result = compareTwoValues(cx, op, lv, rv);
1099 frame.push(Value(BooleanValue(result)));
1101 if (fused == JSOP_IFEQ)
1104 /* Branch is never taken, don't bother doing anything. */
1106 frame.syncAndForgetEverything();
1107 Jump j = masm.jump();
1108 if (!jumpAndTrace(j, target))
1109 return Compile_Error;
1113 if (!emitStubCmpOp(stub, target, fused))
1114 return Compile_Error;
1117 /* Anything else should go through the fast path generator. */
1118 if (!jsop_relational(op, stub, target, fused))
1119 return Compile_Error;
1122 /* Advance PC manually. */
1123 JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
1124 JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
1125 JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
1126 JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
1127 JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
1129 PC += JSOP_GE_LENGTH;
1130 if (fused != JSOP_NOP) {
1132 PC += JSOP_IFNE_LENGTH;
1138 BEGIN_CASE(JSOP_LSH)
1142 BEGIN_CASE(JSOP_RSH)
1146 BEGIN_CASE(JSOP_URSH)
1150 BEGIN_CASE(JSOP_ADD)
1151 jsop_binary(op, stubs::Add);
1154 BEGIN_CASE(JSOP_SUB)
1155 jsop_binary(op, stubs::Sub);
1158 BEGIN_CASE(JSOP_MUL)
1159 jsop_binary(op, stubs::Mul);
1162 BEGIN_CASE(JSOP_DIV)
1163 jsop_binary(op, stubs::Div);
1166 BEGIN_CASE(JSOP_MOD)
1170 BEGIN_CASE(JSOP_NOT)
1174 BEGIN_CASE(JSOP_BITNOT)
1176 FrameEntry *top = frame.peek(-1);
1177 if (top->isConstant() && top->getValue().isPrimitive()) {
1179 ValueToECMAInt32(cx, top->getValue(), &i);
1182 frame.push(Int32Value(i));
1187 END_CASE(JSOP_BITNOT)
1189 BEGIN_CASE(JSOP_NEG)
1191 FrameEntry *top = frame.peek(-1);
1192 if (top->isConstant() && top->getValue().isPrimitive()) {
1194 ValueToNumber(cx, top->getValue(), &d);
1197 frame.push(NumberValue(d));
1204 BEGIN_CASE(JSOP_POS)
1208 BEGIN_CASE(JSOP_DELNAME)
1210 uint32 index = fullAtomIndex(PC);
1211 JSAtom *atom = script->getAtom(index);
1213 prepareStubCall(Uses(0));
1214 masm.move(ImmPtr(atom), Registers::ArgReg1);
1215 INLINE_STUBCALL(stubs::DelName);
1218 END_CASE(JSOP_DELNAME)
1220 BEGIN_CASE(JSOP_DELPROP)
1222 uint32 index = fullAtomIndex(PC);
1223 JSAtom *atom = script->getAtom(index);
1225 prepareStubCall(Uses(1));
1226 masm.move(ImmPtr(atom), Registers::ArgReg1);
1227 INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp));
1231 END_CASE(JSOP_DELPROP)
1233 BEGIN_CASE(JSOP_DELELEM)
1234 prepareStubCall(Uses(2));
1235 INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem));
1238 END_CASE(JSOP_DELELEM)
1240 BEGIN_CASE(JSOP_TYPEOF)
1241 BEGIN_CASE(JSOP_TYPEOFEXPR)
1243 END_CASE(JSOP_TYPEOF)
1245 BEGIN_CASE(JSOP_VOID)
1247 frame.push(UndefinedValue());
1250 BEGIN_CASE(JSOP_INCNAME)
1251 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC)))
1252 return Compile_Error;
1254 END_CASE(JSOP_INCNAME)
1256 BEGIN_CASE(JSOP_INCGNAME)
1257 jsop_gnameinc(op, STRICT_VARIANT(stubs::IncGlobalName), fullAtomIndex(PC));
1259 END_CASE(JSOP_INCGNAME)
1261 BEGIN_CASE(JSOP_INCPROP)
1262 if (!jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC)))
1263 return Compile_Error;
1265 END_CASE(JSOP_INCPROP)
1267 BEGIN_CASE(JSOP_INCELEM)
1268 jsop_eleminc(op, STRICT_VARIANT(stubs::IncElem));
1269 END_CASE(JSOP_INCELEM)
1271 BEGIN_CASE(JSOP_DECNAME)
1272 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC)))
1273 return Compile_Error;
1275 END_CASE(JSOP_DECNAME)
1277 BEGIN_CASE(JSOP_DECGNAME)
1278 jsop_gnameinc(op, STRICT_VARIANT(stubs::DecGlobalName), fullAtomIndex(PC));
1280 END_CASE(JSOP_DECGNAME)
1282 BEGIN_CASE(JSOP_DECPROP)
1283 if (!jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC)))
1284 return Compile_Error;
1286 END_CASE(JSOP_DECPROP)
1288 BEGIN_CASE(JSOP_DECELEM)
1289 jsop_eleminc(op, STRICT_VARIANT(stubs::DecElem));
1290 END_CASE(JSOP_DECELEM)
1292 BEGIN_CASE(JSOP_NAMEINC)
1293 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC)))
1294 return Compile_Error;
1296 END_CASE(JSOP_NAMEINC)
1298 BEGIN_CASE(JSOP_GNAMEINC)
1299 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameInc), fullAtomIndex(PC));
1301 END_CASE(JSOP_GNAMEINC)
1303 BEGIN_CASE(JSOP_PROPINC)
1304 if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC)))
1305 return Compile_Error;
1307 END_CASE(JSOP_PROPINC)
1309 BEGIN_CASE(JSOP_ELEMINC)
1310 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemInc));
1311 END_CASE(JSOP_ELEMINC)
1313 BEGIN_CASE(JSOP_NAMEDEC)
1314 if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC)))
1315 return Compile_Error;
1317 END_CASE(JSOP_NAMEDEC)
1319 BEGIN_CASE(JSOP_GNAMEDEC)
1320 jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameDec), fullAtomIndex(PC));
1322 END_CASE(JSOP_GNAMEDEC)
1324 BEGIN_CASE(JSOP_PROPDEC)
1325 if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC)))
1326 return Compile_Error;
1328 END_CASE(JSOP_PROPDEC)
1330 BEGIN_CASE(JSOP_ELEMDEC)
1331 jsop_eleminc(op, STRICT_VARIANT(stubs::ElemDec));
1332 END_CASE(JSOP_ELEMDEC)
1334 BEGIN_CASE(JSOP_GETTHISPROP)
1335 /* Push thisv onto stack. */
1337 if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1338 return Compile_Error;
1339 END_CASE(JSOP_GETTHISPROP);
1341 BEGIN_CASE(JSOP_GETARGPROP)
1342 /* Push arg onto stack. */
1343 frame.pushArg(GET_SLOTNO(PC));
1344 if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN]))))
1345 return Compile_Error;
1346 END_CASE(JSOP_GETARGPROP)
1348 BEGIN_CASE(JSOP_GETLOCALPROP)
1349 frame.pushLocal(GET_SLOTNO(PC));
1350 if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN]))))
1351 return Compile_Error;
1352 END_CASE(JSOP_GETLOCALPROP)
1354 BEGIN_CASE(JSOP_GETPROP)
1355 if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1356 return Compile_Error;
1357 END_CASE(JSOP_GETPROP)
1359 BEGIN_CASE(JSOP_LENGTH)
1361 return Compile_Error;
1362 END_CASE(JSOP_LENGTH)
1364 BEGIN_CASE(JSOP_GETELEM)
1365 if (!jsop_getelem(false))
1366 return Compile_Error;
1367 END_CASE(JSOP_GETELEM)
1369 BEGIN_CASE(JSOP_SETELEM)
1371 jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
1372 bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
1373 if (!jsop_setelem(pop))
1374 return Compile_Error;
1376 END_CASE(JSOP_SETELEM);
1378 BEGIN_CASE(JSOP_CALLNAME)
1379 prepareStubCall(Uses(0));
1380 masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
1381 INLINE_STUBCALL(stubs::CallName);
1384 END_CASE(JSOP_CALLNAME)
1386 BEGIN_CASE(JSOP_EVAL)
1388 JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
1389 emitEval(GET_ARGC(PC));
1390 JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
1394 BEGIN_CASE(JSOP_CALL)
1395 BEGIN_CASE(JSOP_FUNAPPLY)
1396 BEGIN_CASE(JSOP_FUNCALL)
1398 JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
1399 inlineCallHelper(GET_ARGC(PC), false);
1400 JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
1404 BEGIN_CASE(JSOP_NAME)
1405 jsop_name(script->getAtom(fullAtomIndex(PC)));
1408 BEGIN_CASE(JSOP_DOUBLE)
1410 uint32 index = fullAtomIndex(PC);
1411 double d = script->getConst(index).toDouble();
1412 frame.push(Value(DoubleValue(d)));
1414 END_CASE(JSOP_DOUBLE)
1416 BEGIN_CASE(JSOP_STRING)
1418 JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1419 JSString *str = ATOM_TO_STRING(atom);
1420 frame.push(Value(StringValue(str)));
1422 END_CASE(JSOP_STRING)
1424 BEGIN_CASE(JSOP_ZERO)
1425 frame.push(Valueify(JSVAL_ZERO));
1428 BEGIN_CASE(JSOP_ONE)
1429 frame.push(Valueify(JSVAL_ONE));
1432 BEGIN_CASE(JSOP_NULL)
1433 frame.push(NullValue());
1436 BEGIN_CASE(JSOP_THIS)
1440 BEGIN_CASE(JSOP_FALSE)
1441 frame.push(Value(BooleanValue(false)));
1442 END_CASE(JSOP_FALSE)
1444 BEGIN_CASE(JSOP_TRUE)
1445 frame.push(Value(BooleanValue(true)));
1449 BEGIN_CASE(JSOP_AND)
1450 if (!jsop_andor(op, PC + GET_JUMP_OFFSET(PC)))
1451 return Compile_Error;
1454 BEGIN_CASE(JSOP_TABLESWITCH)
1455 #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
1456 frame.syncAndForgetEverything();
1457 masm.move(ImmPtr(PC), Registers::ArgReg1);
1459 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1460 INLINE_STUBCALL(stubs::TableSwitch);
1463 masm.jump(Registers::ReturnReg);
1465 if (!jsop_tableswitch(PC))
1466 return Compile_Error;
1468 PC += js_GetVariableBytecodeLength(PC);
1470 END_CASE(JSOP_TABLESWITCH)
1472 BEGIN_CASE(JSOP_LOOKUPSWITCH)
1473 frame.syncAndForgetEverything();
1474 masm.move(ImmPtr(PC), Registers::ArgReg1);
1476 /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1477 INLINE_STUBCALL(stubs::LookupSwitch);
1480 masm.jump(Registers::ReturnReg);
1481 PC += js_GetVariableBytecodeLength(PC);
1483 END_CASE(JSOP_LOOKUPSWITCH)
1485 BEGIN_CASE(JSOP_STRICTEQ)
1487 END_CASE(JSOP_STRICTEQ)
1489 BEGIN_CASE(JSOP_STRICTNE)
1491 END_CASE(JSOP_STRICTNE)
1493 BEGIN_CASE(JSOP_ITER)
1495 return Compile_Error;
1498 BEGIN_CASE(JSOP_MOREITER)
1499 /* At the byte level, this is always fused with IFNE or IFNEX. */
1501 return Compile_Error;
1503 END_CASE(JSOP_MOREITER)
1505 BEGIN_CASE(JSOP_ENDITER)
1507 END_CASE(JSOP_ENDITER)
1509 BEGIN_CASE(JSOP_POP)
1513 BEGIN_CASE(JSOP_NEW)
1515 JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
1516 inlineCallHelper(GET_ARGC(PC), true);
1517 JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
1521 BEGIN_CASE(JSOP_GETARG)
1522 BEGIN_CASE(JSOP_CALLARG)
1524 frame.pushArg(GET_SLOTNO(PC));
1525 if (op == JSOP_CALLARG)
1526 frame.push(UndefinedValue());
1528 END_CASE(JSOP_GETARG)
1530 BEGIN_CASE(JSOP_BINDGNAME)
1532 END_CASE(JSOP_BINDGNAME)
1534 BEGIN_CASE(JSOP_SETARG)
1536 jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1537 bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
1538 frame.storeArg(GET_SLOTNO(PC), pop);
1541 PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
1545 END_CASE(JSOP_SETARG)
1547 BEGIN_CASE(JSOP_GETLOCAL)
1549 uint32 slot = GET_SLOTNO(PC);
1550 frame.pushLocal(slot);
1552 END_CASE(JSOP_GETLOCAL)
1554 BEGIN_CASE(JSOP_SETLOCAL)
1556 jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1557 bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
1558 frame.storeLocal(GET_SLOTNO(PC), pop);
1561 PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
1565 END_CASE(JSOP_SETLOCAL)
1567 BEGIN_CASE(JSOP_SETLOCALPOP)
1568 frame.storeLocal(GET_SLOTNO(PC), true);
1570 END_CASE(JSOP_SETLOCALPOP)
1572 BEGIN_CASE(JSOP_UINT16)
1573 frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
1574 END_CASE(JSOP_UINT16)
1576 BEGIN_CASE(JSOP_NEWINIT)
1578 END_CASE(JSOP_NEWINIT)
1580 BEGIN_CASE(JSOP_NEWARRAY)
1582 END_CASE(JSOP_NEWARRAY)
1584 BEGIN_CASE(JSOP_NEWOBJECT)
1586 END_CASE(JSOP_NEWOBJECT)
1588 BEGIN_CASE(JSOP_ENDINIT)
1589 END_CASE(JSOP_ENDINIT)
1591 BEGIN_CASE(JSOP_INITMETHOD)
1594 END_CASE(JSOP_INITMETHOD)
1596 BEGIN_CASE(JSOP_INITPROP)
1599 END_CASE(JSOP_INITPROP)
1601 BEGIN_CASE(JSOP_INITELEM)
1604 END_CASE(JSOP_INITELEM)
1606 BEGIN_CASE(JSOP_INCARG)
1607 BEGIN_CASE(JSOP_DECARG)
1608 BEGIN_CASE(JSOP_ARGINC)
1609 BEGIN_CASE(JSOP_ARGDEC)
1611 jsbytecode *next = &PC[JSOP_ARGINC_LENGTH];
1612 bool popped = false;
1613 if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1615 jsop_arginc(op, GET_SLOTNO(PC), popped);
1616 PC += JSOP_ARGINC_LENGTH;
1618 PC += JSOP_POP_LENGTH;
1621 END_CASE(JSOP_ARGDEC)
1623 BEGIN_CASE(JSOP_INCLOCAL)
1624 BEGIN_CASE(JSOP_DECLOCAL)
1625 BEGIN_CASE(JSOP_LOCALINC)
1626 BEGIN_CASE(JSOP_LOCALDEC)
1628 jsbytecode *next = &PC[JSOP_LOCALINC_LENGTH];
1629 bool popped = false;
1630 if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1632 /* These manually advance the PC. */
1633 jsop_localinc(op, GET_SLOTNO(PC), popped);
1634 PC += JSOP_LOCALINC_LENGTH;
1636 PC += JSOP_POP_LENGTH;
1639 END_CASE(JSOP_LOCALDEC)
1641 BEGIN_CASE(JSOP_FORNAME)
1642 jsop_forname(script->getAtom(fullAtomIndex(PC)));
1643 END_CASE(JSOP_FORNAME)
1645 BEGIN_CASE(JSOP_FORGNAME)
1646 jsop_forgname(script->getAtom(fullAtomIndex(PC)));
1647 END_CASE(JSOP_FORGNAME)
1649 BEGIN_CASE(JSOP_FORPROP)
1650 jsop_forprop(script->getAtom(fullAtomIndex(PC)));
1651 END_CASE(JSOP_FORPROP)
1653 BEGIN_CASE(JSOP_FORELEM)
1654 // This opcode is for the decompiler; it is succeeded by an
1655 // ENUMELEM, which performs the actual array store.
1657 END_CASE(JSOP_FORELEM)
1659 BEGIN_CASE(JSOP_BINDNAME)
1660 jsop_bindname(script->getAtom(fullAtomIndex(PC)), true);
1661 END_CASE(JSOP_BINDNAME)
1663 BEGIN_CASE(JSOP_SETPROP)
1664 if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1665 return Compile_Error;
1666 END_CASE(JSOP_SETPROP)
1668 BEGIN_CASE(JSOP_SETNAME)
1669 BEGIN_CASE(JSOP_SETMETHOD)
1670 if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1671 return Compile_Error;
1672 END_CASE(JSOP_SETNAME)
1674 BEGIN_CASE(JSOP_THROW)
1675 prepareStubCall(Uses(1));
1676 INLINE_STUBCALL(stubs::Throw);
1678 END_CASE(JSOP_THROW)
1681 prepareStubCall(Uses(2));
1682 INLINE_STUBCALL(stubs::In);
1684 frame.takeReg(Registers::ReturnReg);
1685 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
1688 BEGIN_CASE(JSOP_INSTANCEOF)
1689 if (!jsop_instanceof())
1690 return Compile_Error;
1691 END_CASE(JSOP_INSTANCEOF)
1693 BEGIN_CASE(JSOP_EXCEPTION)
1694 prepareStubCall(Uses(0));
1695 INLINE_STUBCALL(stubs::Exception);
1697 END_CASE(JSOP_EXCEPTION)
1699 BEGIN_CASE(JSOP_LINENO)
1700 END_CASE(JSOP_LINENO)
1702 BEGIN_CASE(JSOP_ENUMELEM)
1703 // Normally, SETELEM transforms the stack
1704 // from: OBJ ID VALUE
1707 // Here, the stack transition is
1708 // from: VALUE OBJ ID
1710 // So we make the stack look like a SETELEM, and re-use it.
1712 // Before: VALUE OBJ ID
1713 // After: VALUE OBJ ID VALUE
1716 // Before: VALUE OBJ ID VALUE
1717 // After: VALUE VALUE
1718 if (!jsop_setelem(true))
1719 return Compile_Error;
1721 // Before: VALUE VALUE
1724 END_CASE(JSOP_ENUMELEM)
1726 BEGIN_CASE(JSOP_BLOCKCHAIN)
1727 END_CASE(JSOP_BLOCKCHAIN)
1729 BEGIN_CASE(JSOP_NULLBLOCKCHAIN)
1730 END_CASE(JSOP_NULLBLOCKCHAIN)
1732 BEGIN_CASE(JSOP_CONDSWITCH)
1733 /* No-op for the decompiler. */
1734 END_CASE(JSOP_CONDSWITCH)
1736 BEGIN_CASE(JSOP_DEFFUN)
1738 uint32 index = fullAtomIndex(PC);
1739 JSFunction *innerFun = script->getFunction(index);
1741 if (fun && script->bindings.hasBinding(cx, innerFun->atom))
1742 frame.syncAndForgetEverything();
1744 prepareStubCall(Uses(0));
1745 masm.move(ImmPtr(innerFun), Registers::ArgReg1);
1746 INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun));
1748 END_CASE(JSOP_DEFFUN)
1750 BEGIN_CASE(JSOP_DEFVAR)
1751 BEGIN_CASE(JSOP_DEFCONST)
1753 uint32 index = fullAtomIndex(PC);
1754 JSAtom *atom = script->getAtom(index);
1756 prepareStubCall(Uses(0));
1757 masm.move(ImmPtr(atom), Registers::ArgReg1);
1758 INLINE_STUBCALL(stubs::DefVarOrConst);
1760 END_CASE(JSOP_DEFVAR)
1762 BEGIN_CASE(JSOP_SETCONST)
1764 uint32 index = fullAtomIndex(PC);
1765 JSAtom *atom = script->getAtom(index);
1767 if (fun && script->bindings.hasBinding(cx, atom))
1768 frame.syncAndForgetEverything();
1770 prepareStubCall(Uses(1));
1771 masm.move(ImmPtr(atom), Registers::ArgReg1);
1772 INLINE_STUBCALL(stubs::SetConst);
1774 END_CASE(JSOP_SETCONST)
1776 BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
1778 uint32 slot = GET_SLOTNO(PC);
1779 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1780 prepareStubCall(Uses(frame.frameSlots()));
1781 masm.move(ImmPtr(fun), Registers::ArgReg1);
1782 INLINE_STUBCALL(stubs::DefLocalFun_FC);
1783 frame.takeReg(Registers::ReturnReg);
1784 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1785 frame.storeLocal(slot, true);
1788 END_CASE(JSOP_DEFLOCALFUN_FC)
1790 BEGIN_CASE(JSOP_LAMBDA)
1792 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1794 JSObjStubFun stub = stubs::Lambda;
1797 jsbytecode *pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
1798 JSOp next = JSOp(*pc2);
1800 if (next == JSOP_INITMETHOD) {
1801 stub = stubs::LambdaForInit;
1802 } else if (next == JSOP_SETMETHOD) {
1803 stub = stubs::LambdaForSet;
1805 } else if (fun->joinable()) {
1806 if (next == JSOP_CALL) {
1807 stub = stubs::LambdaJoinableForCall;
1808 uses = frame.frameSlots();
1809 } else if (next == JSOP_NULL) {
1810 stub = stubs::LambdaJoinableForNull;
1814 prepareStubCall(Uses(uses));
1815 masm.move(ImmPtr(fun), Registers::ArgReg1);
1817 if (stub == stubs::Lambda) {
1818 INLINE_STUBCALL(stub);
1820 jsbytecode *savedPC = PC;
1822 INLINE_STUBCALL(stub);
1826 frame.takeReg(Registers::ReturnReg);
1827 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1829 END_CASE(JSOP_LAMBDA)
1831 BEGIN_CASE(JSOP_TRY)
1832 frame.syncAndForgetEverything();
1835 BEGIN_CASE(JSOP_GETFCSLOT)
1836 BEGIN_CASE(JSOP_CALLFCSLOT)
1838 uintN index = GET_UINT16(PC);
1840 // Load the callee's payload into a register.
1842 RegisterID reg = frame.copyDataIntoReg(frame.peek(-1));
1845 // obj->getFlatClosureUpvars()
1846 masm.loadPtr(Address(reg, offsetof(JSObject, slots)), reg);
1847 Address upvarAddress(reg, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS * sizeof(Value));
1848 masm.loadPrivate(upvarAddress, reg);
1849 // push ((Value *) reg)[index]
1851 frame.push(Address(reg, index * sizeof(Value)));
1852 if (op == JSOP_CALLFCSLOT)
1853 frame.push(UndefinedValue());
1855 END_CASE(JSOP_CALLFCSLOT)
1857 BEGIN_CASE(JSOP_ARGSUB)
1858 prepareStubCall(Uses(0));
1859 masm.move(Imm32(GET_ARGNO(PC)), Registers::ArgReg1);
1860 INLINE_STUBCALL(stubs::ArgSub);
1862 END_CASE(JSOP_ARGSUB)
1864 BEGIN_CASE(JSOP_ARGCNT)
1865 prepareStubCall(Uses(0));
1866 INLINE_STUBCALL(stubs::ArgCnt);
1868 END_CASE(JSOP_ARGCNT)
1870 BEGIN_CASE(JSOP_DEFLOCALFUN)
1872 uint32 slot = GET_SLOTNO(PC);
1873 JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1874 prepareStubCall(Uses(0));
1875 masm.move(ImmPtr(fun), Registers::ArgReg1);
1876 INLINE_STUBCALL(stubs::DefLocalFun);
1877 frame.takeReg(Registers::ReturnReg);
1878 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1879 frame.storeLocal(slot, true);
1882 END_CASE(JSOP_DEFLOCALFUN)
1884 BEGIN_CASE(JSOP_RETRVAL)
1886 END_CASE(JSOP_RETRVAL)
1888 BEGIN_CASE(JSOP_GETGNAME)
1889 BEGIN_CASE(JSOP_CALLGNAME)
1890 jsop_getgname(fullAtomIndex(PC));
1891 if (op == JSOP_CALLGNAME)
1892 jsop_callgname_epilogue();
1893 END_CASE(JSOP_GETGNAME)
1895 BEGIN_CASE(JSOP_SETGNAME)
1896 jsop_setgname(script->getAtom(fullAtomIndex(PC)), true);
1897 END_CASE(JSOP_SETGNAME)
1899 BEGIN_CASE(JSOP_REGEXP)
1901 JSObject *regex = script->getRegExp(fullAtomIndex(PC));
1902 prepareStubCall(Uses(0));
1903 masm.move(ImmPtr(regex), Registers::ArgReg1);
1904 INLINE_STUBCALL(stubs::RegExp);
1905 frame.takeReg(Registers::ReturnReg);
1906 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1908 END_CASE(JSOP_REGEXP)
1910 BEGIN_CASE(JSOP_OBJECT)
1912 JSObject *object = script->getObject(fullAtomIndex(PC));
1913 RegisterID reg = frame.allocReg();
1914 masm.move(ImmPtr(object), reg);
1915 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
1917 END_CASE(JSOP_OBJECT)
1919 BEGIN_CASE(JSOP_CALLPROP)
1920 if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
1921 return Compile_Error;
1922 END_CASE(JSOP_CALLPROP)
1924 BEGIN_CASE(JSOP_UINT24)
1925 frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
1926 END_CASE(JSOP_UINT24)
1928 BEGIN_CASE(JSOP_CALLELEM)
1930 END_CASE(JSOP_CALLELEM)
1932 BEGIN_CASE(JSOP_STOP)
1938 BEGIN_CASE(JSOP_GETXPROP)
1939 if (!jsop_xname(script->getAtom(fullAtomIndex(PC))))
1940 return Compile_Error;
1941 END_CASE(JSOP_GETXPROP)
1943 BEGIN_CASE(JSOP_ENTERBLOCK)
1944 enterBlock(script->getObject(fullAtomIndex(PC)));
1945 END_CASE(JSOP_ENTERBLOCK);
1947 BEGIN_CASE(JSOP_LEAVEBLOCK)
1949 END_CASE(JSOP_LEAVEBLOCK)
1951 BEGIN_CASE(JSOP_CALLLOCAL)
1952 frame.pushLocal(GET_SLOTNO(PC));
1953 frame.push(UndefinedValue());
1954 END_CASE(JSOP_CALLLOCAL)
1956 BEGIN_CASE(JSOP_INT8)
1957 frame.push(Value(Int32Value(GET_INT8(PC))));
1960 BEGIN_CASE(JSOP_INT32)
1961 frame.push(Value(Int32Value(GET_INT32(PC))));
1962 END_CASE(JSOP_INT32)
1964 BEGIN_CASE(JSOP_HOLE)
1965 frame.push(MagicValue(JS_ARRAY_HOLE));
1968 BEGIN_CASE(JSOP_LAMBDA_FC)
1970 JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1971 prepareStubCall(Uses(frame.frameSlots()));
1972 masm.move(ImmPtr(fun), Registers::ArgReg1);
1973 INLINE_STUBCALL(stubs::FlatLambda);
1974 frame.takeReg(Registers::ReturnReg);
1975 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1977 END_CASE(JSOP_LAMBDA_FC)
1979 BEGIN_CASE(JSOP_TRACE)
1980 BEGIN_CASE(JSOP_NOTRACE)
1982 if (analysis->jumpTarget(PC))
1983 interruptCheckHelper();
1985 END_CASE(JSOP_TRACE)
1987 BEGIN_CASE(JSOP_DEBUGGER)
1988 prepareStubCall(Uses(0));
1989 masm.move(ImmPtr(PC), Registers::ArgReg1);
1990 INLINE_STUBCALL(stubs::Debugger);
1991 END_CASE(JSOP_DEBUGGER)
1993 BEGIN_CASE(JSOP_UNBRAND)
1995 END_CASE(JSOP_UNBRAND)
1997 BEGIN_CASE(JSOP_UNBRANDTHIS)
2001 END_CASE(JSOP_UNBRANDTHIS)
2003 BEGIN_CASE(JSOP_GETGLOBAL)
2004 BEGIN_CASE(JSOP_CALLGLOBAL)
2005 jsop_getglobal(GET_SLOTNO(PC));
2006 if (op == JSOP_CALLGLOBAL)
2007 frame.push(UndefinedValue());
2008 END_CASE(JSOP_GETGLOBAL)
2011 /* Sorry, this opcode isn't implemented yet. */
2012 #ifdef JS_METHODJIT_SPEW
2013 JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
2014 script->filename, js_PCToLineNumber(cx, script, PC));
2016 return Compile_Abort;
2019 /**********************
2020 * END COMPILER OPS *
2021 **********************/
2024 frame.assertValidRegisterState();
2029 return Compile_Okay;
/*
 * Return the assembler Label previously recorded for the bytecode |pc|.
 * jumpMap is indexed by bytecode offset from script->code; the assert
 * requires the entry to have been populated (isValid) before lookup.
 */
2035 JSC::MacroAssembler::Label
2036 mjit::Compiler::labelOf(jsbytecode *pc)
2038     uint32 offs = uint32(pc - script->code);
2039     JS_ASSERT(jumpMap[offs].isValid());
2040     return jumpMap[offs];
/*
 * Compute the full atom/constant index for the bytecode at |pc|.
 * INDEXBASE adjustment is currently disabled, so this is simply the
 * immediate slot number; the second return is intentionally dead code
 * kept for when INDEXBASE support is re-enabled.
 */
2044 mjit::Compiler::fullAtomIndex(jsbytecode *pc)
2046     return GET_SLOTNO(pc);
2048     /* If we ever enable INDEXBASE garbage, use this below. */
2050     return GET_SLOTNO(pc) + (atoms - script->atomMap.vector);
/*
 * Predicate on a bytecode |pc|. NOTE(review): the body of this function is
 * elided in this extraction, so its exact contract (presumably: whether the
 * jump target at |pc| is already known/compiled) must be confirmed against
 * the full source.
 */
2055 mjit::Compiler::knownJump(jsbytecode *pc)
/*
 * Map an external CallSite (script offset + id) back to the native return
 * address inside the JIT code buffer. The inline path (ilPath) starts at the
 * beginning of the executable code; the out-of-line path (oolPath) follows
 * immediately after the inline assembler's output (masm.size()).
 * NOTE(review): the condition choosing oolPath vs. ilPath (between original
 * lines 2071 and 2073 — presumably an "is OOL" flag on the internal call
 * site) is elided from this extraction; confirm in the full source.
 * Falls through to JS_NOT_REACHED if no recorded call site matches.
 */
2061 mjit::Compiler::findCallSite(const CallSite &callSite)
2063     JS_ASSERT(callSite.pcOffset < script->length);
2065     JITScript *jit = script->getJIT(fp->isConstructing());
2066     uint8* ilPath = (uint8 *)jit->code.m_code.executableAddress();
2067     uint8* oolPath = ilPath + masm.size();
2069     for (uint32 i = 0; i < callSites.length(); i++) {
2070         InternalCallSite &cs = callSites[i];
2071         if (cs.pc == script->code + callSite.pcOffset && cs.id == callSite.id) {
2073                 return oolPath + cs.returnOffset;
2074             return ilPath + cs.returnOffset;
2078     /* We have no idea where to patch up to. */
2079     JS_NOT_REACHED("Call site vanished.");
/*
 * Link jump |j| to the label for bytecode |pc| if that label already exists,
 * otherwise queue it as a BranchPatch to be resolved later.
 * NOTE(review): the guard selecting between the two paths (original lines
 * 2087-2088, elided here — likely a backward-jump / knownJump check) is not
 * visible in this extraction; confirm against the full source.
 */
2084 mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
2086     JS_ASSERT(pc >= script->code && uint32(pc - script->code) < script->length);
2089         j.linkTo(jumpMap[uint32(pc - script->code)], &masm);
2092     return branchPatches.append(BranchPatch(j, pc));
/*
 * Compile JSOP_GETGLOBAL: push the value stored in the global object's
 * slot for global |index| onto the simulated frame. Requires a known
 * globalObj (asserted); allocates a scratch register to form the slot
 * address via masm.objSlotRef.
 */
2096 mjit::Compiler::jsop_getglobal(uint32 index)
2098     JS_ASSERT(globalObj);
2099     uint32 slot = script->getGlobalSlot(index);
2101     RegisterID reg = frame.allocReg();
2102     Address address = masm.objSlotRef(globalObj, reg, slot);
2104     frame.push(address);
/*
 * Emit the epilogue jump: load the caller's native return address (ncode)
 * out of the frame and jump to it. Used by both the inline and OOL return
 * paths (note |masm| here shadows the member assembler on purpose).
 */
2108 mjit::Compiler::emitFinalReturn(Assembler &masm)
2110     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
2111     masm.jump(Registers::ReturnReg);
2114 // Emits code to load a return value of the frame into the scripted-ABI
2115 // type & data register pair. If the return value is in fp->rval, then |fe|
2116 // is NULL. Otherwise, |fe| contains the return value.
2118 // If reading from fp->rval, |undefined| is loaded optimistically, before
2119 // checking if fp->rval is set in the frame flags and loading that instead.
2121 // Otherwise, if |masm| is the inline path, it is loaded as efficiently as
2122 // the FrameState can manage. If |masm| is the OOL path, the value is simply
2123 // loaded from its slot in the frame, since the caller has guaranteed it's
// (comment continues in the full source; the tail line is elided here)
2127 mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
2129     RegisterID typeReg = JSReturnReg_Type;
2130     RegisterID dataReg = JSReturnReg_Data;
// NOTE(review): several structural lines (braces / else branches) between
// the cases below are elided in this extraction; the pairing of the
// OOL-path, inline-path, and no-|fe| paths must be read from the full file.
2133         // If using the OOL assembler, the caller signifies that the |fe| is
2134         // synced, but not to rely on its register state.
2135         if (masm != &this->masm) {
2136             if (fe->isConstant()) {
2137                 stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
2139                 Address rval(frame.addressOf(fe));
2140                 if (fe->isTypeKnown()) {
// Known type: load payload only, materialize the type tag as an immediate.
2141                     stubcc.masm.loadPayload(rval, dataReg);
2142                     stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
2144                     stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
// Inline path: let the FrameState pick the cheapest load for |fe|.
2148             frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
2151         // Load a return value from POPV or SETRVAL into the return registers,
2152         // otherwise return undefined.
2153         masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
2154         if (analysis->usesReturnValue()) {
// Skip the rval load when the frame flags say no rval was ever stored.
2155             Jump rvalClear = masm->branchTest32(Assembler::Zero,
2156                                                 FrameFlagsAddress(),
2157                                                 Imm32(JSFRAME_HAS_RVAL));
2158             Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
2159             masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
2160             rvalClear.linkTo(masm->label(), masm);
2165 // This ensures that constructor return values are an object. If a non-object
2166 // is returned, either explicitly or implicitly, the newly created object is
2167 // loaded out of the frame. Otherwise, the explicitly returned object is kept.
2170 mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
// Only meaningful for constructor frames (asserted below).
2172     JS_ASSERT(isConstructing);
2174     bool ool = (masm != &this->masm);
2175     Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));
2177     // We can just load |thisv| if either of the following is true:
2178     // (1) There is no explicit return value, AND fp->rval is not used.
2179     // (2) There is an explicit return value, and it's known to be primitive.
2180     if ((!fe && !analysis->usesReturnValue()) ||
2181         (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
// OOL path loads |thisv| from memory; inline path lets the FrameState do it.
2184             masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
2186             frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
2190     // If the type is known to be an object, just load the return value as normal.
2191     if (fe && fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
2192         loadReturnValue(masm, fe);
2196     // There's a return value, and its type is unknown. Test the type and load
2197     // |thisv| if necessary.
2198     loadReturnValue(masm, fe);
2199     Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
// Overwrite a primitive return with the constructed |this| object.
2200     masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
2201     j.linkTo(masm->label(), masm);
2204 // Loads the return value into the scripted ABI register pair, such that JS
2205 // semantics in constructors are preserved.
2208 mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
// Constructors must return an object, so route through fixPrimitiveReturn;
// normal calls load the value directly. (The branch keyword between the two
// calls is elided in this extraction.)
2211         fixPrimitiveReturn(masm, fe);
2213         loadReturnValue(masm, fe);
/*
 * Emit a full return sequence for the current opcode. |fe| is the frame
 * entry holding the return value, or NULL when returning from fp->rval
 * (e.g. JSOP_STOP, asserted below). Handles debug-mode hooks, heavyweight
 * function / strict eval activation-object teardown, and finally the value
 * load plus epilogue jump.
 */
2217 mjit::Compiler::emitReturn(FrameEntry *fe)
2219     JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);
2221     /* Only the top of the stack can be returned. */
2222     JS_ASSERT_IF(fe, fe == frame.peek(-1));
// Notify the debugger/profiler that the script is exiting.
2224     if (debugMode() || Probes::callTrackingActive(cx)) {
2225         prepareStubCall(Uses(0));
2226         INLINE_STUBCALL(stubs::LeaveScript);
2230      * If there's a function object, deal with the fact that it can escape.
2231      * Note that after we've placed the call object, all tracked state can
2232      * be thrown away. This will happen anyway because the next live opcode
2233      * (if any) must have an incoming edge.
2235      * However, it's an optimization to throw it away early - the tracker
2236      * won't be spilled on further exits or join points.
// NOTE(review): the enclosing "if (fun)" guard for the block below is elided
// in this extraction; confirm the exact nesting in the full source.
2239         if (fun->isHeavyweight()) {
2240             /* There will always be a call object. */
2241             prepareStubCall(Uses(fe ? 1 : 0));
2242             INLINE_STUBCALL(stubs::PutActivationObjects);
2244             /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
2245             Jump putObjs = masm.branchTest32(Assembler::NonZero,
2246                                              Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
2247                                              Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
2248             stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
2251             OOL_STUBCALL(stubs::PutActivationObjects);
// The OOL path completes the return itself: load the value and jump out.
2253             emitReturnValue(&stubcc.masm, fe);
2254             emitFinalReturn(stubcc.masm);
2257         if (fp->isEvalFrame() && script->strictModeCode) {
2258             /* There will always be a call object. */
2259             prepareStubCall(Uses(fe ? 1 : 0));
2260             INLINE_STUBCALL(stubs::PutStrictEvalCallObject);
// Inline fast path: load the return value, jump to ncode, drop all
// tracked frame state.
2264     emitReturnValue(&masm, fe);
2265     emitFinalReturn(masm);
2266     frame.discardFrame();
/*
 * Prepare for a slow-path stub call: sync the simulated frame to memory and
 * kill all temporary registers, keeping |uses| entries live as stub inputs.
 * Must precede every INLINE_STUBCALL.
 */
2270 mjit::Compiler::prepareStubCall(Uses uses)
2272     JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
2273     frame.syncAndKill(Registers(Registers::TempRegs), uses);
2274     JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
/*
 * Emit a fallible VM call to stub function |ptr|, passing the current PC
 * and the frame depth (stack depth + fixed slots) so the VM can recover
 * the interpreter state. Returns the Call for later linking.
 */
2277 JSC::MacroAssembler::Call
2278 mjit::Compiler::emitStubCall(void *ptr)
2280     JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
2281     Call cl = masm.fallibleVMCall(ptr, PC, frame.stackDepth() + script->nfixed);
2282     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
/*
 * Emit an inline poll of the runtime interrupt flag/counter. On a pending
 * interrupt, control exits to an OOL path that (in JS_THREADSAFE builds)
 * re-checks the per-thread flag before syncing the frame and calling
 * stubs::Interrupt, then rejoins the inline path.
 */
2287 mjit::Compiler::interruptCheckHelper()
2289     RegisterID reg = frame.allocReg();
2292      * Bake in and test the address of the interrupt counter for the runtime.
2293      * This is faster than doing two additional loads for the context's
2294      * thread data, but will cause this thread to run slower if there are
2295      * pending interrupts on some other thread. For non-JS_THREADSAFE builds
2296      * we can skip this, as there is only one flag to poll.
2298 #ifdef JS_THREADSAFE
2299     void *interrupt = (void*) &cx->runtime->interruptCounter;
2301     void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
2304 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
2305     Jump jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
2307     /* Handle processors that can't load from absolute addresses. */
2308     masm.move(ImmPtr(interrupt), reg);
2309     Jump jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
2312     stubcc.linkExitDirect(jump, stubcc.masm.label());
2314 #ifdef JS_THREADSAFE
2316      * Do a slightly slower check for an interrupt on this thread.
2317      * We don't want this thread to slow down excessively if the pending
2318      * interrupt is on another thread.
2320     stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
2321     stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
2322     Address flag(reg, offsetof(JSThread, data.interruptFlags));
2323     Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
// Sync the frame OOL (the inline path must stay fast) and call the stub.
2326     frame.sync(stubcc.masm, Uses(0));
2327     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
2328     OOL_STUBCALL(stubs::Interrupt);
2329     stubcc.rejoin(Changes(0));
2331 #ifdef JS_THREADSAFE
// The per-thread re-check found no interrupt: skip the stub and rejoin.
2332     stubcc.linkRejoin(noInterrupt);
/*
 * Record an internal call-site return point: |joinPoint|'s offset within the
 * inline assembler, the current PC, and a caller-chosen |id| (callers pass
 * __LINE__). NOTE(review): the append of |site| to the callSites vector is
 * elided from this extraction; confirm in the full source.
 */
2339 mjit::Compiler::addReturnSite(Label joinPoint, uint32 id)
2341     InternalCallSite site(masm.distanceOf(joinPoint), PC, id, false, false);
/*
 * Emit a call without the monomorphic inline cache: sync the frame, invoke
 * stubs::UncachedCall / stubs::UncachedNew (selected by |callingNew|), and
 * if the callee is compiled (non-NULL return in ReturnReg) patch in the
 * return ncode and jump into it. Non-compiled callees exit to an OOL rejoin.
 * Pops argc+2 (callee, this, args) and pushes the return-value registers.
 */
2346 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
2348     CallPatchInfo callPatch;
2350     RegisterID r0 = Registers::ReturnReg;
2351     VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
2353     frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
2354     prepareStubCall(Uses(argc + 2));
2355     masm.move(Imm32(argc), Registers::ArgReg1);
2356     INLINE_STUBCALL(stub);
// Stub returns the callee's native code pointer, or NULL if not compiled.
2358     Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
2360     masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
// Store a placeholder return address into the new frame's ncode slot; the
// real join-point address is patched in later via callPatch.
2361     callPatch.hasFastNcode = true;
2362     callPatch.fastNcodePatch =
2363         masm.storePtrWithPatch(ImmPtr(NULL),
2364                                Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2367     callPatch.joinPoint = masm.label();
2368     addReturnSite(callPatch.joinPoint, __LINE__);
// On return, restore the caller's frame pointer from the callee's prev link.
2369     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
2371     frame.popn(argc + 2);
2372     frame.takeReg(JSReturnReg_Type);
2373     frame.takeReg(JSReturnReg_Data);
2374     frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2376     stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
2377     stubcc.rejoin(Changes(0));
2378     callPatches.append(callPatch);
/*
 * Whether the call op at |pc| is a Function.prototype.call/apply invocation
 * the compiler can lower: FUNCALL with at least one argument, or FUNAPPLY
 * with exactly two (the callee and an args array/arguments object).
 */
2382 IsLowerableFunCallOrApply(jsbytecode *pc)
2385     return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
2386            (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
// Guard the f.call()/f.apply() lowering speculation. Emits in-line checks
// that the callee is an object, is a function, and its native is exactly
// js_fun_call / js_fun_apply. If any check fails we cannot use the call IC
// (it was compiled assuming the speculation holds), so an uncached call is
// emitted out of line; |*uncachedCallSlowRejoin| and |*uncachedCallPatch|
// give the caller the pieces it needs to link/patch that fallback.
2393 mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedArgc,
2394 FrameEntry *origCallee, FrameEntry *origThis,
2395 MaybeRegisterID origCalleeType, RegisterID origCalleeData,
2396 MaybeRegisterID origThisType, RegisterID origThisData,
2397 Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
2399 JS_ASSERT(IsLowerableFunCallOrApply(PC));
2402 * if (origCallee.isObject() &&
2403 * origCallee.toObject().isFunction &&
2404 * origCallee.toObject().getFunctionPrivate() == js_fun_{call,apply})
// Type check only needed when the callee's type is not already known.
2407 if (origCalleeType.isSet())
2408 isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
2409 Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData);
2410 masm.loadObjPrivate(origCalleeData, origCalleeData);
2411 Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
2412 Jump isNative = masm.branchPtr(Assembler::NotEqual,
2413 Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
2414 ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
2417 * If speculation fails, we can't use the ic, since it is compiled on the
2418 * assumption that speculation succeeds. Instead, just do an uncached call.
// All three guards exit to the same out-of-line uncached-call path.
2422 stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
2423 stubcc.linkExitDirect(isFun, stubcc.masm.label());
2424 stubcc.linkExitDirect(isNative, stubcc.masm.label());
// With lazy-arguments apply tricks, materialize the arguments object first,
// which deepens the frame by one slot.
2426 int32 frameDepthAdjust;
2427 if (applyTricks == LazyArgsObj) {
2428 OOL_STUBCALL(stubs::Arguments);
2429 frameDepthAdjust = +1;
2431 frameDepthAdjust = 0;
2434 stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
2435 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
2436 OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
2437 frame.localSlots() + frameDepthAdjust);
2438 JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
// As in emitUncachedCall: NULL return means the callee was not compiled.
2440 RegisterID r0 = Registers::ReturnReg;
2441 Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
2443 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2444 Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
2445 uncachedCallPatch->hasSlowNcode = true;
2446 uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);
2448 stubcc.masm.jump(r0);
// NOTE(review): the return site is taken from the in-line masm's label even
// though the jump above was emitted into stubcc.masm — verify intentional.
2449 addReturnSite(masm.label(), __LINE__);
2451 notCompiled.linkTo(stubcc.masm.label(), &stubcc.masm);
2454 * inlineCallHelper will link uncachedCallSlowRejoin to the join point
2455 * at the end of the ic. At that join point, the return value of the
2456 * call is assumed to be in registers, so load them before jumping.
2458 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
2459 Address rval = frame.addressOf(origCallee); /* vp[0] == rval */
2460 stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
2461 *uncachedCallSlowRejoin = stubcc.masm.jump();
2462 JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
2466 * For simplicity, we don't statically specialize calls to
2467 * ic::SplatApplyArgs based on applyTricks. Rather, this state is
2468 * communicated dynamically through the VMFrame.
2470 if (*PC == JSOP_FUNAPPLY) {
2471 masm.store32(Imm32(applyTricks == LazyArgsObj),
2472 FrameAddress(offsetof(VMFrame, u.call.lazyArgsObj)));
2476 /* This predicate must be called before the current op mutates the FrameState. */
// Returns whether the JSOP_ARGUMENTS at PC is immediately consumed by a
// lowerable f.apply() at the next op (and that op is not a jump target), so
// the arguments object can be left lazy and splatted directly into the call.
2478 mjit::Compiler::canUseApplyTricks()
2480 JS_ASSERT(*PC == JSOP_ARGUMENTS);
2481 jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
2482 return *nextpc == JSOP_FUNAPPLY &&
2483 IsLowerableFunCallOrApply(nextpc) &&
2484 !analysis->jumpTarget(nextpc) &&
2488 /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
// Emit a call site backed by a call IC. Fast path: guard on callee identity
// (funGuard) and jump straight into compiled code. Out-of-line paths handle
// unknown callees (compile-and-patch via ic::New/Call), natives
// (ic::NativeNew/NativeCall), and lowered f.call/f.apply speculation via
// checkCallApplySpeculation/emitUncachedCall above.
2490 mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
2492 /* Check for interrupts on function call */
2493 interruptCheckHelper();
2495 int32 speculatedArgc;
2496 if (applyTricks == LazyArgsObj) {
2500 speculatedArgc = callImmArgc;
// Stack layout at a call: ... callee, this, arg0..argN-1 (argc + 2 slots).
2503 FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
2504 FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));
2506 /* 'this' does not need to be synced for constructing. */
2508 frame.discardFe(origThis);
2511 * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
2512 * going to call js_fun_{call,apply}. Normally, this call would go through
2513 * js::Invoke to ultimately call 'this'. We can do much better by having
2514 * the callIC cache and call 'this' directly. However, if it turns out that
2515 * we are not actually calling js_fun_call, the callIC must act as normal.
2517 bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
2520 * Currently, constant values are not functions, so don't even try to
2521 * optimize. This lets us assume that callee/this have regs below.
2525 origCallee->isConstant() || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
2526 (lowerFunCallOrApply &&
2527 (origThis->isConstant() || origThis->isNotType(JSVAL_TYPE_OBJECT)))) {
2529 if (applyTricks == LazyArgsObj) {
2530 /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
// Bail out of the IC entirely; emit a plain uncached call.
2534 emitUncachedCall(callImmArgc, callingNew);
2539 /* Initialized by both branches below. */
2540 CallGenInfo callIC(PC);
2541 CallPatchInfo callPatch;
2542 MaybeRegisterID icCalleeType; /* type to test for function-ness */
2543 RegisterID icCalleeData; /* data to call */
2544 Address icRvalAddr; /* return slot on slow-path rejoin */
2547 * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
2548 * following labels (as used in finishThisUp):
2549 * - funGuard -> hotJump
2550 * - funGuard -> joinPoint
2551 * - funGuard -> hotPathLabel
2552 * - slowPathStart -> oolCall
2553 * - slowPathStart -> oolJump
2554 * - slowPathStart -> icCall
2555 * - slowPathStart -> slowJoinPoint
2556 * Because the call ICs are fairly long (compared to PICs), we don't reserve the space in each
2557 * path until the first usage of funGuard (for the in-line path) or slowPathStart (for the
2558 * out-of-line path).
2561 /* Initialized only on lowerFunCallOrApply branch. */
2562 Jump uncachedCallSlowRejoin;
2563 CallPatchInfo uncachedCallPatch;
2566 MaybeRegisterID origCalleeType, maybeOrigCalleeData;
2567 RegisterID origCalleeData;
2569 /* Get the callee in registers. */
2570 frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
2571 origCalleeData = maybeOrigCalleeData.reg();
// Pin callee regs so syncAndKill below cannot evict them.
2572 PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);
2574 if (lowerFunCallOrApply) {
2575 MaybeRegisterID origThisType, maybeOrigThisData;
2576 RegisterID origThisData;
2578 /* Get thisv in registers. */
2579 frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
2580 origThisData = maybeOrigThisData.reg();
2581 PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);
2583 /* Leaves pinned regs untouched. */
2584 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
2587 checkCallApplySpeculation(callImmArgc, speculatedArgc,
2588 origCallee, origThis,
2589 origCalleeType, origCalleeData,
2590 origThisType, origThisData,
2591 &uncachedCallSlowRejoin, &uncachedCallPatch);
// When lowering f.call/f.apply, the IC's real callee is |this|, and the
// return value lands in |this|'s slot (vp[1]).
2593 icCalleeType = origThisType;
2594 icCalleeData = origThisData;
2595 icRvalAddr = frame.addressOf(origThis);
2598 * For f.call(), since we compile the ic under the (checked)
2599 * assumption that call == js_fun_call, we still have a static
2600 * frame size. For f.apply(), the frame size depends on the dynamic
2601 * length of the array passed to apply.
2603 if (*PC == JSOP_FUNCALL)
2604 callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc - 1);
2606 callIC.frameSize.initDynamic();
2608 /* Leaves pinned regs untouched. */
2609 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
2611 icCalleeType = origCalleeType;
2612 icCalleeData = origCalleeData;
2613 icRvalAddr = frame.addressOf(origCallee);
2614 callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc);
2618 /* Test the type if necessary. Failing this always takes a really slow path. */
2619 MaybeJump notObjectJump;
2620 if (icCalleeType.isSet())
2621 notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
2624 * For an optimized apply, keep icCalleeData and funPtrReg in
2625 * callee-saved registers for the subsequent ic::SplatApplyArgs call.
2628 if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
2629 RegisterID x = tempRegs.takeRegInMask(Registers::SavedRegs);
2630 masm.move(icCalleeData, x);
2633 tempRegs.takeReg(icCalleeData);
2635 RegisterID funPtrReg = tempRegs.takeRegInMask(Registers::SavedRegs);
2637 /* Reserve space just before initialization of funGuard. */
2638 RESERVE_IC_SPACE(masm);
2641 * Guard on the callee identity. This misses on the first run. If the
2642 * callee is scripted, compiled/compilable, and argc == nargs, then this
2643 * guard is patched, and the compiled code address is baked in.
2645 Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
2648 /* Reserve space just before initialization of slowPathStart. */
2649 RESERVE_OOL_SPACE(stubcc.masm);
2651 Jump rejoin1, rejoin2;
2653 RESERVE_OOL_SPACE(stubcc.masm);
2654 stubcc.linkExitDirect(j, stubcc.masm.label());
2655 callIC.slowPathStart = stubcc.masm.label();
2658 * Test if the callee is even a function. If this doesn't match, we
2659 * take a _really_ slow path later.
2661 Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData);
2663 /* Test if the function is scripted. */
// Interpreted (scripted) functions have kind >= JSFUN_INTERPRETED in the
// masked flags; anything below is a native.
2664 RegisterID tmp = tempRegs.takeAnyReg();
2665 stubcc.masm.loadObjPrivate(icCalleeData, funPtrReg);
2666 stubcc.masm.load16(Address(funPtrReg, offsetof(JSFunction, flags)), tmp);
2667 stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
2668 Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
2669 tempRegs.putReg(tmp);
2672 * N.B. After this call, the frame will have a dynamic frame size.
2673 * Check after the function is known not to be a native so that the
2674 * catch-all/native path has a static depth.
2676 if (callIC.frameSize.isDynamic())
2677 OOL_STUBCALL(ic::SplatApplyArgs);
2680 * No-op jump that gets patched by ic::New/Call to the stub generated
2681 * by generateFullCallStub.
2683 Jump toPatch = stubcc.masm.jump();
2684 toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
2685 callIC.oolJump = toPatch;
2686 callIC.icCall = stubcc.masm.label();
2689 * At this point the function is definitely scripted, so we try to
2690 * compile it and patch either funGuard/funJump or oolJump. This code
2691 * is only executed once.
2693 callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2694 void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
// Dynamic frame sizes pass -1 for the slot count; the IC reads the real
// depth from the VMFrame at runtime.
2695 if (callIC.frameSize.isStatic())
2696 callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.localSlots());
2698 callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);
2700 callIC.funObjReg = icCalleeData;
2701 callIC.funPtrReg = funPtrReg;
2704 * The IC call either returns NULL, meaning call completed, or a
2705 * function pointer to jump to. Caveat: Must restore JSFrameReg
2706 * because a new frame has been pushed.
2708 rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
2709 Registers::ReturnReg);
2710 if (callIC.frameSize.isStatic())
2711 stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
2713 stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
2714 stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
2715 callPatch.hasSlowNcode = true;
2716 callPatch.slowNcodePatch =
2717 stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
2718 Address(JSFrameReg, JSStackFrame::offsetOfncode()));
2719 stubcc.masm.jump(Registers::ReturnReg);
2722 * This ool path is the catch-all for everything but scripted function
2723 * callees. For native functions, ic::NativeNew/NativeCall will repatch
2724 * funGuard/funJump with a fast call stub. All other cases
2725 * (non-function callable objects and invalid callees) take the slow
2726 * path through js::Invoke.
2728 if (notObjectJump.isSet())
2729 stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
2730 notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
2731 isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
2733 callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2734 OOL_STUBCALL(callingNew ? ic::NativeNew : ic::NativeCall);
2736 rejoin2 = stubcc.masm.jump();
2740 * If the call site goes to a closure over the same function, it will
2741 * generate an out-of-line stub that joins back here.
2743 callIC.hotPathLabel = masm.label();
2747 flags |= JSFRAME_CONSTRUCTING;
// In-line hot path: build the callee frame directly and jump to the code
// address that gets baked into hotJump when the guard is patched.
2749 InlineFrameAssembler inlFrame(masm, callIC, flags);
2750 callPatch.hasFastNcode = true;
2751 callPatch.fastNcodePatch = inlFrame.assemble(NULL);
2753 callIC.hotJump = masm.jump();
2754 callIC.joinPoint = callPatch.joinPoint = masm.label();
2755 addReturnSite(callPatch.joinPoint, __LINE__);
2756 if (lowerFunCallOrApply)
2757 uncachedCallPatch.joinPoint = callIC.joinPoint;
2758 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
2761 * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
2762 * in the in-line path so we can check the IC space now.
// Pop the call inputs and push the return value from the return registers.
2766 frame.popn(speculatedArgc + 2);
2767 frame.takeReg(JSReturnReg_Type);
2768 frame.takeReg(JSReturnReg_Data);
2769 frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
2772 * Now that the frame state is set, generate the rejoin path. Note that, if
2773 * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
2774 * value has been placed at vp[1] which is not the stack address associated
2775 * with frame.peek(-1).
2777 callIC.slowJoinPoint = stubcc.masm.label();
2778 rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2779 rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
2780 JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
2781 stubcc.masm.loadValueAsComponents(icRvalAddr, JSReturnReg_Type, JSReturnReg_Data);
2782 stubcc.crossJump(stubcc.masm.jump(), masm.label());
2783 JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
// Link the speculation-failure fallback (if any) to the same join point.
2787 if (lowerFunCallOrApply)
2788 stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
2790 callICs.append(callIC);
2791 callPatches.append(callPatch);
2792 if (lowerFunCallOrApply)
2793 callPatches.append(uncachedCallPatch);
// Apply tricks are one-shot: consumed by this call site.
2795 applyTricks = NoApplyTricks;
2800 * This function must be called immediately after any instruction which could
2801 * cause a new JSStackFrame to be pushed and could lead to a new debug trap
2802 * being set. This includes any API callbacks and any scripted or native call.
2805 mjit::Compiler::addCallSite(const InternalCallSite &site)
// Record the site; the compiler-owned vector keeps a copy.
2807 callSites.append(site);
// Reload JSFrameReg from VMFrame::regs.fp — required after any call that may
// have pushed or popped a stack frame.
2811 mjit::Compiler::restoreFrameRegs(Assembler &masm)
2813 masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
// Constant-fold a comparison of two known-primitive values at compile time.
// Strings compare via CompareStrings; everything else is coerced to number
// via ValueToNumber, with null/undefined given their special EQ/NE semantics.
// JS_NOT_REACHED marks operator/type combinations this folder does not
// support ("NYI").
2817 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
2819 JS_ASSERT(lhs.isPrimitive());
2820 JS_ASSERT(rhs.isPrimitive());
2822 if (lhs.isString() && rhs.isString()) {
2824 CompareStrings(cx, lhs.toString(), rhs.toString(), &cmp);
2839 JS_NOT_REACHED("NYI");
2844 /* These should be infallible w/ primitives. */
2845 ValueToNumber(cx, lhs, &ld);
2846 ValueToNumber(cx, rhs, &rd);
2856 case JSOP_EQ: /* fall through */
2858 /* Special case null/undefined/void comparisons. */
2859 if (lhs.isNullOrUndefined()) {
2860 if (rhs.isNullOrUndefined())
2861 return op == JSOP_EQ;
2862 return op == JSOP_NE;
// lhs is not null/undefined here; a null/undefined rhs can only be !=.
2864 if (rhs.isNullOrUndefined())
2865 return op == JSOP_NE;
2867 /* Normal return. */
2868 return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
2870 JS_NOT_REACHED("NYI");
2874 JS_NOT_REACHED("NYI");
// Emit a fully-stubbed comparison: call the boolean-returning stub, then
// either push the result as a JS boolean, or — when the compare is fused
// with a following JSOP_IFEQ/IFNE — branch directly on the stub's result.
2879 mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
2881 prepareStubCall(Uses(2));
2882 INLINE_STUBCALL(stub);
// Unfused case: materialize the stub's boolean result on the stack.
2887 frame.takeReg(Registers::ReturnReg);
2888 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
// Fused case: branch on the return register instead of pushing.
2892 JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
2893 frame.syncAndForgetEverything();
2894 Assembler::Condition cond = (fused == JSOP_IFEQ)
2896 : Assembler::NonZero;
2897 Jump j = masm.branchTest32(cond, Registers::ReturnReg,
2898 Registers::ReturnReg);
2899 return jumpAndTrace(j, target);
// Slow-path property set: call the SetName/SetPropNoCache stub directly,
// selecting the cached variant based on |usePropCache|.
2903 mjit::Compiler::jsop_setprop_slow(JSAtom *atom, bool usePropCache)
2905 prepareStubCall(Uses(2));
2906 masm.move(ImmPtr(atom), Registers::ArgReg1);
2908 INLINE_STUBCALL(STRICT_VARIANT(stubs::SetName));
2910 INLINE_STUBCALL(STRICT_VARIANT(stubs::SetPropNoCache));
// SETNAME and SETPROP must advance PC identically for the shared path.
2911 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
// Slow-path property get: call stubs::GetProp, or GetPropNoCache with the
// atom when the property cache must be bypassed.
2916 mjit::Compiler::jsop_getprop_slow(JSAtom *atom, bool usePropCache)
2918 prepareStubCall(Uses(1));
2920 INLINE_STUBCALL(stubs::GetProp);
2922 masm.move(ImmPtr(atom), Registers::ArgReg1);
2923 INLINE_STUBCALL(stubs::GetPropNoCache);
// Slow-path JSOP_CALLPROP: call stubs::CallProp with the property atom.
2930 mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
2932 prepareStubCall(Uses(1));
2933 masm.move(ImmPtr(atom), Registers::ArgReg1);
2934 INLINE_STUBCALL(stubs::CallProp);
// Compile 'x.length'. Known strings get fast paths: a compile-time constant
// length for constant strings, or a load+shift of the string's packed
// length/flags word. Otherwise fall back to the getprop PIC (JS_POLYIC) or
// the stubs::Length call.
2942 mjit::Compiler::jsop_length()
2944 FrameEntry *top = frame.peek(-1);
2946 if (top->isTypeKnown() && top->getKnownType() == JSVAL_TYPE_STRING) {
2947 if (top->isConstant()) {
// Constant string: fold the length into an immediate value.
2948 JSString *str = top->getValue().toString();
2950 v.setNumber(uint32(str->length()));
// Non-constant string: length is the lengthAndFlags word shifted down.
2954 RegisterID str = frame.ownRegForData(top);
2955 masm.loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), str);
2956 masm.urshift32(Imm32(JSString::LENGTH_SHIFT), str);
2958 frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
2963 #if defined JS_POLYIC
2964 return jsop_getprop(cx->runtime->atomState.lengthAtom);
2966 prepareStubCall(Uses(1));
2967 INLINE_STUBCALL(stubs::Length);
// Load a patchable placeholder into ArgReg1; the label is recorded so the
// global-name IC's address can be patched in later.
2976 mjit::Compiler::passMICAddress(GlobalNameICInfo &ic)
2978 ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
2982 #if defined JS_POLYIC
// PIC analogue of passMICAddress: stash a patchable IC pointer in ArgReg1.
2984 mjit::Compiler::passICAddress(BaseICInfo *ic)
2986 ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
// Compile a property get through a polymorphic inline cache (PIC). The fast
// path guards on the object's shape and loads the value from a patched slot
// address; misses exit to ic::GetProp out of line. Non-object known types
// take the slow stub path immediately.
2990 mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck, bool usePropCache)
2992 FrameEntry *top = frame.peek(-1);
2994 /* If the incoming type will never PIC, take slow path. */
2995 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
// Known strings asking for 'length' should have been handled by
// jsop_length's fast path, never here.
2996 JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
2997 top->getKnownType() != JSVAL_TYPE_STRING);
2998 jsop_getprop_slow(atom, usePropCache);
3003 * These two must be loaded first. The objReg because the string path
3004 * wants to read it, and the shapeReg because it could cause a spill that
3005 * the string path wouldn't sink back.
3007 RegisterID objReg = Registers::ReturnReg;
3008 RegisterID shapeReg = Registers::ReturnReg;
3009 if (atom == cx->runtime->atomState.lengthAtom) {
3010 objReg = frame.copyDataIntoReg(top);
3011 shapeReg = frame.allocReg();
3014 RESERVE_IC_SPACE(masm);
3016 PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC), usePropCache);
3018 /* Guard that the type is an object. */
3020 if (doTypeCheck && !top->isTypeKnown()) {
3021 RegisterID reg = frame.tempRegForType(top);
3024 /* Start the hot path where it's easy to patch it. */
3025 pic.fastPathStart = masm.label();
3026 Jump j = masm.testObject(Assembler::NotEqual, reg);
3027 typeCheck = masm.label();
3028 RETURN_IF_OOM(false);
3030 pic.typeCheck = stubcc.linkExit(j, Uses(1));
3031 pic.hasTypeCheck = true;
// Type is statically known to be object: no inline type guard needed.
3033 pic.fastPathStart = masm.label();
3034 pic.hasTypeCheck = false;
3035 pic.typeReg = Registers::ReturnReg;
3038 if (atom != cx->runtime->atomState.lengthAtom) {
3039 objReg = frame.copyDataIntoReg(top);
3040 shapeReg = frame.allocReg();
3043 pic.shapeReg = shapeReg;
3046 /* Guard on shape. */
3047 masm.loadShape(objReg, shapeReg);
3048 pic.shapeGuard = masm.label();
// INVALID_SHAPE guarantees a first-run miss; the IC patches in the real
// shape once the property lookup succeeds.
3050 DataLabel32 inlineShapeLabel;
3051 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3052 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3054 Label inlineShapeJump = masm.label();
3056 RESERVE_OOL_SPACE(stubcc.masm);
3057 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3060 passICAddress(&pic);
3061 pic.slowPathCall = OOL_STUBCALL(ic::GetProp);
3064 /* Load the base slot address. */
3065 Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
3068 /* Copy the slot value to the expression stack. */
// 1 << 24 is a dummy slot offset; the real offset is patched through the
// value-load label recorded below.
3069 Address slot(objReg, 1 << 24);
3072 Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
3073 pic.fastPathRejoin = masm.label();
3075 RETURN_IF_OOM(false);
3077 /* Initialize op labels. */
3078 GetPropLabels &labels = pic.getPropLabels();
3079 labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
3080 labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeLabel);
3082 labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
3083 if (pic.hasTypeCheck)
3084 labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
3086 labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
3088 labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
3091 pic.objReg = objReg;
3092 frame.pushRegs(shapeReg, objReg);
3094 stubcc.rejoin(Changes(1));
// Compile JSOP_CALLPROP through a CALL-kind PIC when the receiver's type is
// unknown: guard that the value is an object, stash the original value as
// the upcoming call's |this|, then do a shape-guarded property load as in
// jsop_getprop.
3101 mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
3103 FrameEntry *top = frame.peek(-1);
3106 * These two must be loaded first. The objReg because the string path
3107 * wants to read it, and the shapeReg because it could cause a spill that
3108 * the string path wouldn't sink back.
3110 RegisterID objReg = frame.copyDataIntoReg(top);
3111 RegisterID shapeReg = frame.allocReg();
3113 PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);
3117 /* Guard that the type is an object. */
3118 pic.typeReg = frame.copyTypeIntoReg(top);
3120 RESERVE_IC_SPACE(masm);
3122 /* Start the hot path where it's easy to patch it. */
3123 pic.fastPathStart = masm.label();
3126 * Guard that the value is an object. This part needs some extra gunk
3127 * because the leave() after the shape guard will emit a jump from this
3128 * path to the final call. We need a label in between that jump, which
3129 * will be the target of patched jumps in the PIC.
3131 Jump typeCheckJump = masm.testObject(Assembler::NotEqual, pic.typeReg);
3132 Label typeCheck = masm.label();
3133 RETURN_IF_OOM(false);
3135 pic.typeCheck = stubcc.linkExit(typeCheckJump, Uses(1));
3136 pic.hasTypeCheck = true;
3137 pic.objReg = objReg;
3138 pic.shapeReg = shapeReg;
3142 * Store the type and object back. Don't bother keeping them in registers,
3143 * since a sync will be needed for the upcoming call.
3145 uint32 thisvSlot = frame.localSlots();
3146 Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
// Value stores differ by value representation: two component stores on
// NUNBOX32, a single boxed-pointer store on PUNBOX64.
3148 #if defined JS_NUNBOX32
3149 masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
3150 #elif defined JS_PUNBOX64
3151 masm.orPtr(pic.objReg, pic.typeReg);
3152 masm.storePtr(pic.typeReg, thisv);
3155 frame.freeReg(pic.typeReg);
3157 /* Guard on shape. */
3158 masm.loadShape(objReg, shapeReg);
3159 pic.shapeGuard = masm.label();
// INVALID_SHAPE forces a first-run miss; the IC patches the real shape in.
3161 DataLabel32 inlineShapeLabel;
3162 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3163 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3165 Label inlineShapeJump = masm.label();
3168 RESERVE_OOL_SPACE(stubcc.masm);
3169 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3171 passICAddress(&pic);
3172 pic.slowPathCall = OOL_STUBCALL(ic::CallProp);
3175 /* Adjust the frame. None of this will generate code. */
3177 frame.pushRegs(shapeReg, objReg);
3180 /* Load the base slot address. */
3181 Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
3184 /* Copy the slot value to the expression stack. */
// Dummy offset (1 << 24); patched via the value-load label below.
3185 Address slot(objReg, 1 << 24);
3187 Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
3188 pic.fastPathRejoin = masm.label();
3190 RETURN_IF_OOM(false);
3193 * Initialize op labels. We use GetPropLabels here because we have the same patching
3194 * requirements for CallProp.
3196 GetPropLabels &labels = pic.getPropLabels();
3197 labels.setDslotsLoadOffset(masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
3198 labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
3199 labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
3200 labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
3202 labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
3204 labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
// Two stack slots changed: the pushed callee value and the stored thisv.
3207 stubcc.rejoin(Changes(2));
// JSOP_CALLPROP on a known string receiver: look the property up on a baked-
// in String.prototype (safe only for compileAndGo scripts), then swap so the
// string ends up as |this| beneath the callee. Non-compileAndGo scripts fall
// back to the slow stub.
3214 mjit::Compiler::jsop_callprop_str(JSAtom *atom)
3216 if (!script->compileAndGo) {
3217 jsop_callprop_slow(atom);
3222 * Bake in String.prototype. This is safe because of compileAndGo.
3223 * We must pass an explicit scope chain only because JSD calls into
3224 * here via the recompiler with a dummy context, and we need to use
3225 * the global object for the script we are now compiling.
3228 if (!js_GetClassPrototype(cx, &fp->scopeChain(), JSProto_String, &obj))
3231 /* Force into a register because getprop won't expect a constant. */
3232 RegisterID reg = frame.allocReg();
3234 masm.move(ImmPtr(obj), reg);
3235 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3237 /* Get the property. */
3238 if (!jsop_getprop(atom))
3241 /* Perform a swap. */
3246 /* 4) Test if the function can take a primitive. */
3248 FrameEntry *funFe = frame.peek(-2);
3250 JS_ASSERT(!funFe->isTypeKnown());
3253 * See bug 584579 - need to forget string type, since wrapping could
3254 * create an object. forgetType() alone is not valid because it cannot be
3255 * used on copies or constants.
// Re-push the string payload in a fresh register so its type can be
// forgotten safely (constants/copies cannot be forgotten in place).
3258 FrameEntry *strFe = frame.peek(-1);
3259 if (strFe->isConstant()) {
3260 strReg = frame.allocReg();
3261 masm.move(ImmPtr(strFe->getValue().toString()), strReg);
3263 strReg = frame.ownRegForData(strFe);
3266 frame.pushTypedPayload(JSVAL_TYPE_STRING, strReg);
3267 frame.forgetType(frame.peek(-1));
// JSOP_CALLPROP when the receiver is statically known to be an object: same
// shape-guarded CALL PIC as the generic path, but with no inline type check.
3273 mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
3275 FrameEntry *top = frame.peek(-1);
3277 PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);
3279 JS_ASSERT(top->isTypeKnown());
3280 JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);
3282 RESERVE_IC_SPACE(masm);
3285 pic.fastPathStart = masm.label();
3286 pic.hasTypeCheck = false;
3287 pic.typeReg = Registers::ReturnReg;
3289 RegisterID objReg = frame.copyDataIntoReg(top);
3290 RegisterID shapeReg = frame.allocReg();
3292 pic.shapeReg = shapeReg;
3295 /* Guard on shape. */
3296 masm.loadShape(objReg, shapeReg);
3297 pic.shapeGuard = masm.label();
// INVALID_SHAPE forces a first-run miss; the IC patches in the real shape.
3299 DataLabel32 inlineShapeLabel;
3300 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3301 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3303 Label inlineShapeJump = masm.label();
3306 RESERVE_OOL_SPACE(stubcc.masm);
3307 pic.slowPathStart = stubcc.linkExit(j, Uses(1));
3309 passICAddress(&pic);
3310 pic.slowPathCall = OOL_STUBCALL(ic::CallProp);
3313 /* Load the base slot address. */
3314 Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
3317 /* Copy the slot value to the expression stack. */
// Dummy offset (1 << 24); real offset patched through the labels below.
3318 Address slot(objReg, 1 << 24);
3320 Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
3322 pic.fastPathRejoin = masm.label();
3323 pic.objReg = objReg;
3326 * 1) Dup the |this| object.
3327 * 2) Push the property value onto the stack.
3328 * 3) Move the value below the dup'd |this|, uncopying it. This could
3329 * generate code, thus the fastPathRejoin label being prior. This is safe
3330 * as a stack transition, because JSOP_CALLPROP has JOF_TMPSLOT. It is
3331 * also safe for correctness, because if we know the LHS is an object, it
3332 * is the resulting vp[1].
3335 frame.pushRegs(shapeReg, objReg);
3339 * Assert correctness of hardcoded offsets.
3340 * No type guard: type is asserted.
3342 RETURN_IF_OOM(false);
3344 GetPropLabels &labels = pic.getPropLabels();
3345 labels.setDslotsLoadOffset(masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
3346 labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
3347 labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
3349 labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
3351 labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
// Two slots changed: the callee value and the dup'd |this|.
3354 stubcc.rejoin(Changes(2));
// Dispatch JSOP_CALLPROP by receiver type: known string -> String.prototype
// fast path; other known non-objects -> slow stub; known object -> PIC with
// no type guard; unknown -> generic PIC with an inline type guard.
3361 mjit::Compiler::jsop_callprop(JSAtom *atom)
3363 FrameEntry *top = frame.peek(-1);
3365 /* If the incoming type will never PIC, take slow path. */
3366 if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
3367 if (top->getKnownType() == JSVAL_TYPE_STRING)
3368 return jsop_callprop_str(atom);
3369 return jsop_callprop_slow(atom);
3372 if (top->isTypeKnown())
3373 return jsop_callprop_obj(atom);
3374 return jsop_callprop_generic(atom);
// Compile a property set through a SET (or SETMETHOD) PIC: optional inline
// object-type guard, shape guard, then a patchable store of the RHS into the
// object's slot; misses exit to ic::SetProp out of line.
3378 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
3380 FrameEntry *lhs = frame.peek(-2);
3381 FrameEntry *rhs = frame.peek(-1);
3383 /* If the incoming type will never PIC, take slow path. */
3384 if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
3385 jsop_setprop_slow(atom, usePropCache);
3389 JSOp op = JSOp(*PC);
// JSOP_SETMETHOD gets its own PIC kind so the IC can apply method-write
// specific handling.
3391 ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
3392 ? ic::PICInfo::SETMETHOD
3394 PICGenInfo pic(kind, op, usePropCache);
3397 RESERVE_IC_SPACE(masm);
3398 RESERVE_OOL_SPACE(stubcc.masm);
3400 /* Guard that the type is an object. */
3402 if (!lhs->isTypeKnown()) {
3403 RegisterID reg = frame.tempRegForType(lhs);
3406 /* Start the hot path where it's easy to patch it. */
3407 pic.fastPathStart = masm.label();
3408 Jump j = masm.testObject(Assembler::NotEqual, reg);
3410 pic.typeCheck = stubcc.linkExit(j, Uses(2));
// Non-object LHS: out-of-line direct call to the set stub.
3413 stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
3415 OOL_STUBCALL(STRICT_VARIANT(stubs::SetName));
3417 OOL_STUBCALL(STRICT_VARIANT(stubs::SetPropNoCache));
3418 typeCheck = stubcc.masm.jump();
3419 pic.hasTypeCheck = true;
3421 pic.fastPathStart = masm.label();
3422 pic.hasTypeCheck = false;
3423 pic.typeReg = Registers::ReturnReg;
3426 /* Get the object into a mutable register. */
3427 RegisterID objReg = frame.copyDataIntoReg(lhs);
3428 pic.objReg = objReg;
3430 /* Get info about the RHS and pin it. */
// Pin the RHS so allocating shapeReg below cannot evict its registers.
3432 frame.pinEntry(rhs, vr);
3435 RegisterID shapeReg = frame.allocReg();
3436 pic.shapeReg = shapeReg;
3438 frame.unpinEntry(vr);
3440 /* Guard on shape. */
3441 masm.loadShape(objReg, shapeReg);
3442 pic.shapeGuard = masm.label();
// INVALID_SHAPE forces a first-run miss; the IC patches the real shape in.
3443 DataLabel32 inlineShapeData;
3444 Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
3445 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
3447 Label afterInlineShapeJump = masm.label();
3451 pic.slowPathStart = stubcc.linkExit(j, Uses(2));
3454 passICAddress(&pic);
3455 pic.slowPathCall = OOL_STUBCALL(ic::SetProp);
3460 Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
3463 /* Store RHS into object slot. */
// Dummy offset (1 << 24); real offset patched through the labels below.
3464 Address slot(objReg, 1 << 24);
3465 DataLabel32 inlineValueStore = masm.storeValueWithAddressOffsetPatch(vr, slot);
3466 pic.fastPathRejoin = masm.label();
3468 frame.freeReg(objReg);
3469 frame.freeReg(shapeReg);
3471 /* "Pop under", taking out object (LHS) and leaving RHS. */
3474 /* Finish slow path. */
3476 if (pic.hasTypeCheck)
3477 typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
3478 stubcc.rejoin(Changes(1));
3481 RETURN_IF_OOM(false);
3483 SetPropLabels &labels = pic.setPropLabels();
3484 labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData);
3485 labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel, vr);
3486 labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore, vr);
3487 labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump);
// Compile JSOP_NAME through a NAME PIC. There is no inline fast path: the
// in-line code is just a patchable jump that initially goes to the
// out-of-line ic::Name call and is later redirected to a generated stub.
3494 mjit::Compiler::jsop_name(JSAtom *atom)
3496 PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC), true);
3498 RESERVE_IC_SPACE(masm);
3500 pic.shapeReg = frame.allocReg();
3501 pic.objReg = frame.allocReg();
3502 pic.typeReg = Registers::ReturnReg;
3504 pic.hasTypeCheck = false;
3505 pic.fastPathStart = masm.label();
3507 /* There is no inline implementation, so we always jump to the slow path or to a stub. */
3508 pic.shapeGuard = masm.label();
3509 Jump inlineJump = masm.jump();
3511 RESERVE_OOL_SPACE(stubcc.masm);
3512 pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
3514 passICAddress(&pic);
3515 pic.slowPathCall = OOL_STUBCALL(ic::Name);
// Stubs (and the slow path) rejoin here with the result in the two regs.
3518 pic.fastPathRejoin = masm.label();
3520 /* Initialize op labels. */
3521 ScopeNameLabels &labels = pic.scopeNameLabels();
3522 labels.setInlineJump(masm, pic.fastPathStart, inlineJump);
3524 frame.pushRegs(pic.shapeReg, pic.objReg);
3526 stubcc.rejoin(Changes(1));
// Compile JSOP_GETXPROP through an XNAME PIC. Like jsop_name there is no
// inline implementation; unlike it, the base object is on the stack, so a
// known-not-object base falls back to a plain jsop_getprop.
3532 mjit::Compiler::jsop_xname(JSAtom *atom)
3534 PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC), true);
3536 FrameEntry *fe = frame.peek(-1);
// Base definitely isn't an object: the XNAME IC can't help, use GETPROP.
3537 if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
3538 return jsop_getprop(atom);
// Base type unknown at compile time: guard it is an object, else slow path.
3541 if (!fe->isTypeKnown()) {
3542 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
3543 stubcc.linkExit(notObject, Uses(1));
3546 RESERVE_IC_SPACE(masm);
3548 pic.shapeReg = frame.allocReg();
3549 pic.objReg = frame.copyDataIntoReg(fe);
3550 pic.typeReg = Registers::ReturnReg;
3552 pic.hasTypeCheck = false;
3553 pic.fastPathStart = masm.label();
3555 /* There is no inline implementation, so we always jump to the slow path or to a stub. */
3556 pic.shapeGuard = masm.label();
3557 Jump inlineJump = masm.jump();
3559 RESERVE_OOL_SPACE(stubcc.masm);
3560 pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(1));
3562 passICAddress(&pic);
3563 pic.slowPathCall = OOL_STUBCALL(ic::XName);
3567 pic.fastPathRejoin = masm.label();
3569 RETURN_IF_OOM(false);
3571 /* Initialize op labels. */
3572 ScopeNameLabels &labels = pic.scopeNameLabels();
3573 labels.setInlineJumpOffset(masm.differenceBetween(pic.fastPathStart, inlineJump));
3576 frame.pushRegs(pic.shapeReg, pic.objReg);
3578 stubcc.rejoin(Changes(1));
// Compile JSOP_BINDNAME through a BIND PIC. The inline fast path loads the
// frame's scope-chain object and succeeds only when that object has a NULL
// parent (i.e. it is the global); otherwise the patchable branch exits to
// the ic::BindName slow path.
3585 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
3587 PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC), usePropCache);
3589 // This code does not check the frame flags to see if scopeChain has been
3590 // set. Rather, it relies on the up-front analysis statically determining
3591 // whether BINDNAME can be used, which reifies the scope chain at the
3593 JS_ASSERT(analysis->usesScopeChain());
3595 pic.shapeReg = frame.allocReg();
3596 pic.objReg = frame.allocReg();
3597 pic.typeReg = Registers::ReturnReg;
3599 pic.hasTypeCheck = false;
3601 RESERVE_IC_SPACE(masm);
3602 pic.fastPathStart = masm.label();
// Note: 'parent' is formed against pic.objReg before objReg is loaded; it
// is only dereferenced after the loadPtr below fills objReg.
3604 Address parent(pic.objReg, offsetof(JSObject, parent));
3605 masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
3607 pic.shapeGuard = masm.label();
// Non-null parent => scope chain head is not the global: take slow path.
3608 Jump inlineJump = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(parent), ImmPtr(0));
3610 RESERVE_OOL_SPACE(stubcc.masm);
3611 pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
3613 passICAddress(&pic);
3614 pic.slowPathCall = OOL_STUBCALL(ic::BindName);
3618 pic.fastPathRejoin = masm.label();
3620 /* Initialize op labels. */
3621 BindNameLabels &labels = pic.bindNameLabels();
3622 labels.setInlineJump(masm, pic.shapeGuard, inlineJump);
3624 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
3625 frame.freeReg(pic.shapeReg);
3627 stubcc.rejoin(Changes(1));
3632 #else /* !JS_POLYIC */
// Non-PIC (JS_POLYIC disabled) JSOP_NAME: always call the stubs::Name
// interpreter helper.
3635 mjit::Compiler::jsop_name(JSAtom *atom)
3637 prepareStubCall(Uses(0));
3638 INLINE_STUBCALL(stubs::Name);
// Non-PIC JSOP_GETXPROP: degrade to an ordinary property get.
3643 mjit::Compiler::jsop_xname(JSAtom *atom)
3645 return jsop_getprop(atom);
// Non-PIC property get: always take the slow stub-call path.
3649 mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck, bool usePropCache)
3651 jsop_getprop_slow(atom, usePropCache);
// Non-PIC CALLPROP: always take the slow stub-call path.
3656 mjit::Compiler::jsop_callprop(JSAtom *atom)
3658 return jsop_callprop_slow(atom);
// Non-PIC property set: always take the slow stub-call path.
3662 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
3664 jsop_setprop_slow(atom, usePropCache);
// Non-PIC JSOP_BINDNAME: inline the "scope chain head is the global"
// check (parent == NULL); otherwise out-of-line call BindName or
// BindNameNoCache depending on usePropCache.
3669 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
3671 RegisterID reg = frame.allocReg();
3672 Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
3673 masm.loadPtr(scopeChain, reg);
3675 Address address(reg, offsetof(JSObject, parent));
// Non-null parent => not the global object: go to the OOL stub.
3677 Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));
3679 stubcc.linkExit(j, Uses(0));
3682 OOL_STUBCALL(stubs::BindName);
// No-cache variant needs the atom passed explicitly in ArgReg1.
3684 stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
3685 OOL_STUBCALL(stubs::BindNameNoCache);
3688 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3690 stubcc.rejoin(Changes(1));
// Compile JSOP_THIS. For non-strict function code, 'this' may need boxing:
// if its type is not statically known, test it and call stubs::This
// out-of-line for the non-object case, after which 'this' is known to be
// an object for the rest of the compilation.
3695 mjit::Compiler::jsop_this()
3700 * In strict mode code, we don't wrap 'this'.
3701 * In direct-call eval code, we wrapped 'this' before entering the eval.
3702 * In global code, 'this' is always an object.
3704 if (fun && !script->strictModeCode) {
3705 FrameEntry *thisFe = frame.peek(-1);
3706 if (!thisFe->isTypeKnown()) {
3707 Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
3708 stubcc.linkExit(notObj, Uses(1));
3710 OOL_STUBCALL(stubs::This);
3711 stubcc.rejoin(Changes(1));
3713 // Now we know that |this| is an object.
3715 frame.learnThisIsObject();
3719 JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
// Compile global-name increment/decrement ops (JSOP_GNAMEINC and friends)
// as getgname / integer add-or-sub / setgname sequences when MONOIC is
// available; otherwise fall back to the generic stub. Two shapes: when the
// original value is unobserved (pre-ops, or result immediately popped) the
// simple get/adjust/set form is used; post-ops must keep the pre-value on
// the stack, so the adjustment is applied and then undone.
3724 mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
3726 JSAtom *atom = script->getAtom(index);
3728 #if defined JS_MONOIC
3729 jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
// Result unobserved if the very next op is a non-jump-target POP.
3730 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
// amt is negated because the adjustment is emitted as a SUB below.
3731 int amt = (op == JSOP_GNAMEINC || op == JSOP_INCGNAME) ? -1 : 1;
3733 if (pop || (op == JSOP_INCGNAME || op == JSOP_DECGNAME)) {
3734 /* These cases are easy, the original value is not observed. */
3736 jsop_getgname(index);
3739 frame.push(Int32Value(amt));
3742 /* Use sub since it calls ValueToNumber instead of string concat. */
3743 jsop_binary(JSOP_SUB, stubs::Sub);
3758 jsop_setgname(atom, false);
3764 /* The pre-value is observed, making this more tricky. */
3766 jsop_getgname(index);
3775 frame.push(Int32Value(-amt));
3778 jsop_binary(JSOP_ADD, stubs::Add);
3785 // N N+1 OBJ N+1 OBJ
3793 jsop_setgname(atom, false);
// When the value is popped anyway, skip emitting the POP separately.
3801 PC += JSOP_POP_LENGTH;
// Non-MONOIC fallback: one generic stub call does the whole inc/dec.
3803 prepareStubCall(Uses(0));
3804 masm.move(ImmPtr(atom), Registers::ArgReg1);
3805 INLINE_STUBCALL(stub);
3809 PC += JSOP_GNAMEINC_LENGTH;
// Compile scoped-name increment/decrement ops (JSOP_NAMEINC and friends)
// analogously to jsop_gnameinc, but through the POLYIC name/setprop path:
// NAME lookup, integer adjust, BINDNAME + SETPROP to store back.
3813 mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
3815 JSAtom *atom = script->getAtom(index);
3816 #if defined JS_POLYIC
3817 jsbytecode *next = &PC[JSOP_NAMEINC_LENGTH];
// Result unobserved if the very next op is a non-jump-target POP.
3818 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
// Negated: the adjustment is emitted as a SUB below.
3819 int amt = (op == JSOP_NAMEINC || op == JSOP_INCNAME) ? -1 : 1;
3821 if (pop || (op == JSOP_INCNAME || op == JSOP_DECNAME)) {
3822 /* These cases are easy, the original value is not observed. */
3827 frame.push(Int32Value(amt));
3830 /* Use sub since it calls ValueToNumber instead of string concat. */
3831 jsop_binary(JSOP_SUB, stubs::Sub);
3834 jsop_bindname(atom, false);
3846 if (!jsop_setprop(atom, false))
3853 /* The pre-value is observed, making this more tricky. */
3864 frame.push(Int32Value(-amt));
3867 jsop_binary(JSOP_ADD, stubs::Add);
3870 jsop_bindname(atom, false);
3874 // N N+1 OBJ N+1 OBJ
3882 if (!jsop_setprop(atom, false))
3891 PC += JSOP_POP_LENGTH;
// Non-POLYIC fallback: one generic stub call does the whole inc/dec.
3893 prepareStubCall(Uses(0));
3894 masm.move(ImmPtr(atom), Registers::ArgReg1);
3895 INLINE_STUBCALL(stub);
3899 PC += JSOP_NAMEINC_LENGTH;
// Compile property increment/decrement ops (JSOP_PROPINC and friends).
// The fast form (GETPROP / adjust / SETPROP) is only attempted when the
// base is an object or of unknown type; a known non-object base takes
// the generic stub, which handles coercion.
3904 mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
3906 JSAtom *atom = script->getAtom(index);
3907 #if defined JS_POLYIC
3908 FrameEntry *objFe = frame.peek(-1);
3909 if (!objFe->isTypeKnown() || objFe->getKnownType() == JSVAL_TYPE_OBJECT) {
3910 jsbytecode *next = &PC[JSOP_PROPINC_LENGTH];
// Result unobserved if the very next op is a non-jump-target POP.
3911 bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
// Negated: the adjustment is emitted as a SUB below.
3912 int amt = (op == JSOP_PROPINC || op == JSOP_INCPROP) ? -1 : 1;
3914 if (pop || (op == JSOP_INCPROP || op == JSOP_DECPROP)) {
3915 /* These cases are easy, the original value is not observed. */
3920 if (!jsop_getprop(atom))
3924 frame.push(Int32Value(amt));
3927 /* Use sub since it calls ValueToNumber instead of string concat. */
3928 jsop_binary(JSOP_SUB, stubs::Sub);
3931 if (!jsop_setprop(atom, false))
3938 /* The pre-value is observed, making this more tricky. */
3943 if (!jsop_getprop(atom))
3953 frame.push(Int32Value(-amt));
3956 jsop_binary(JSOP_ADD, stubs::Add);
3963 // OBJ N N+1 OBJ N+1
3965 if (!jsop_setprop(atom, false))
3976 PC += JSOP_POP_LENGTH;
// Fallback: generic stub consumes the base object from the stack.
3980 prepareStubCall(Uses(1));
3981 masm.move(ImmPtr(atom), Registers::ArgReg1);
3982 INLINE_STUBCALL(stub);
3987 PC += JSOP_PROPINC_LENGTH;
// Compile JSOP_ITER. Fast path: reuse the compartment's most recently
// used native iterator when (a) this is a plain for-in (JSITER_ENUMERATE),
// (b) the iterated value is an object, (c) the cached iterator is non-null,
// inactive, and (d) its recorded shapes match the object and its prototype,
// with the proto-proto being NULL. Any failed check exits to stubs::Iter.
3992 mjit::Compiler::iter(uintN flags)
3994 FrameEntry *fe = frame.peek(-1);
3997 * Stub the call if this is not a simple 'for in' loop or if the iterated
3998 * value is known to not be an object.
4000 if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
4001 prepareStubCall(Uses(1));
4002 masm.move(Imm32(flags), Registers::ArgReg1);
4003 INLINE_STUBCALL(stubs::Iter);
// Unknown type: guard that the iterated value is an object.
4009 if (!fe->isTypeKnown()) {
4010 Jump notObject = frame.testObject(Assembler::NotEqual, fe);
4011 stubcc.linkExit(notObject, Uses(1));
4014 RegisterID reg = frame.tempRegForData(fe);
4017 RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
4018 RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
4019 RegisterID T1 = frame.allocReg();
4020 RegisterID T2 = frame.allocReg();
4021 frame.unpinReg(reg);
4023 /* Fetch the most recent iterator. */
4024 masm.loadPtr(&script->compartment->nativeIterCache.last, ioreg);
4026 /* Test for NULL. */
4027 Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
4028 stubcc.linkExit(nullIterator, Uses(1));
4030 /* Get NativeIterator from iter obj. */
4031 masm.loadObjPrivate(ioreg, nireg);
4033 /* Test for active iterator. */
4034 Address flagsAddr(nireg, offsetof(NativeIterator, flags));
4035 masm.load32(flagsAddr, T1);
4036 Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1,
4037 Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE));
4038 stubcc.linkExit(activeIterator, Uses(1));
4040 /* Compare shape of object with iterator. */
4041 masm.loadShape(reg, T1);
// shapes_array[0] is compared against the object's own shape...
4042 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
4043 masm.load32(Address(T2, 0), T2);
4044 Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
4045 stubcc.linkExit(mismatchedObject, Uses(1));
4047 /* Compare shape of object's prototype with iterator. */
4048 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
4049 masm.loadShape(T1, T1);
// ...and shapes_array[1] against the prototype's shape.
4050 masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
4051 masm.load32(Address(T2, sizeof(uint32)), T2);
4052 Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
4053 stubcc.linkExit(mismatchedProto, Uses(1));
4056 * Compare object's prototype's prototype with NULL. The last native
4057 * iterator will always have a prototype chain length of one
4058 * (i.e. it must be a plain object), so we do not need to generate
4061 masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
4062 masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
4063 Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
4064 stubcc.linkExit(overlongChain, Uses(1));
4066 /* Found a match with the most recent iterator. Hooray! */
4068 /* Mark iterator as active. */
4069 masm.storePtr(reg, Address(nireg, offsetof(NativeIterator, obj)));
4070 masm.load32(flagsAddr, T1);
4071 masm.or32(Imm32(JSITER_ACTIVE), T1);
4072 masm.store32(T1, flagsAddr);
4074 /* Chain onto the active iterator stack. */
4075 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
4076 masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
4077 masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
4078 masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));
4080 frame.freeReg(nireg);
// Slow path: build an iterator via stubs::Iter with the same flags.
4085 stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
4086 OOL_STUBCALL(stubs::Iter);
4088 /* Push the iterator object. */
4090 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);
4092 stubcc.rejoin(Changes(1));
4098 * This big nasty function emits a fast-path for native iterators, producing
4099 * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
// Compile the next-value fetch for a native iterator (see comment above).
// Fast path requires: the object is a js_IteratorClass instance, it is a
// key (not JSITER_FOREACH/value) iterator, and the current jsid is a
// string. On success the cursor is advanced and the string pushed;
// otherwise stubs::IterNext is called out-of-line.
4102 mjit::Compiler::iterNext()
4104 FrameEntry *fe = frame.peek(-1);
4105 RegisterID reg = frame.tempRegForData(fe);
4107 /* Is it worth trying to pin this longer? Prolly not. */
4109 RegisterID T1 = frame.allocReg();
4110 frame.unpinReg(reg);
// Guard the stack value is actually an Iterator object.
4113 Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4114 stubcc.linkExit(notFast, Uses(1));
4116 /* Get private from iter obj. */
4117 masm.loadObjPrivate(reg, T1);
4119 RegisterID T3 = frame.allocReg();
4120 RegisterID T4 = frame.allocReg();
4122 /* Test for a value iterator, which could come through an Iterator object. */
4123 masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
4124 notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
4125 stubcc.linkExit(notFast, Uses(1));
4127 RegisterID T2 = frame.allocReg();
4130 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
4132 /* Test if the jsid is a string. */
4133 masm.loadPtr(T2, T3);
// String jsids have zero type-tag bits; any tag bit set means not fast.
4135 masm.andPtr(Imm32(JSID_TYPE_MASK), T4);
4136 notFast = masm.branchTestPtr(Assembler::NonZero, T4, T4);
4137 stubcc.linkExit(notFast, Uses(1));
4139 /* It's safe to increase the cursor now. */
4140 masm.addPtr(Imm32(sizeof(jsid)), T2, T4);
4141 masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));
4148 OOL_STUBCALL(stubs::IterNext);
// The jsid payload in T3 is pushed as a string value.
4150 frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);
4152 /* Join with the stub call. */
4153 stubcc.rejoin(Changes(1));
// Compile JSOP_MOREITER fused with the following IFNE/IFNEX. The fast path
// compares props_cursor against props_end to decide whether the loop
// continues; the slow path calls stubs::IterMore and branches on its
// boolean return. Both paths feed jumpAndTrace for the loop back-edge.
4157 mjit::Compiler::iterMore()
4159 FrameEntry *fe = frame.peek(-1);
4160 RegisterID reg = frame.tempRegForData(fe);
4163 RegisterID T1 = frame.allocReg();
4164 frame.unpinReg(reg);
// Guard the stack value is actually an Iterator object.
4167 Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4168 stubcc.linkExitForBranch(notFast);
4170 /* Get private from iter obj. */
4171 masm.loadObjPrivate(reg, T1);
4173 /* Test that the iterator supports fast iteration. */
4174 notFast = masm.branchTest32(Assembler::NonZero, Address(T1, offsetof(NativeIterator, flags)),
4175 Imm32(JSITER_FOREACH));
4176 stubcc.linkExitForBranch(notFast);
4178 /* Get props_cursor, test */
4179 RegisterID T2 = frame.allocReg();
// Branching out of the op: sync everything before the compare/jump.
4180 frame.syncAndForgetEverything();
4181 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
4182 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
// cursor < end => more properties remain: take the loop branch.
4183 Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
4185 jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
4186 JSOp next = JSOp(*target);
4187 JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);
4189 target += (next == JSOP_IFNE)
4190 ? GET_JUMP_OFFSET(target)
4191 : GET_JUMPX_OFFSET(target);
4194 OOL_STUBCALL(stubs::IterMore);
4195 Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
4196 Registers::ReturnReg);
// Skip past both the MOREITER and the fused conditional jump.
4198 PC += JSOP_MOREITER_LENGTH;
4199 PC += js_CodeSpec[next].length;
4201 stubcc.rejoin(Changes(1));
4203 return jumpAndTrace(jFast, target, &j);
// Compile JSOP_ENDITER. The fast path handles a plain enumerate iterator:
// clear its ACTIVE flag, rewind the property cursor, and unlink it from
// the context's active-enumerator list. Anything else exits to
// stubs::EndIter.
4207 mjit::Compiler::iterEnd()
4209 FrameEntry *fe= frame.peek(-1);
4210 RegisterID reg = frame.tempRegForData(fe);
4213 RegisterID T1 = frame.allocReg();
4214 frame.unpinReg(reg);
// Guard the stack value is actually an Iterator object.
4217 Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
4218 stubcc.linkExit(notIterator, Uses(1));
4220 /* Get private from iter obj. */
4221 masm.loadObjPrivate(reg, T1);
4223 RegisterID T2 = frame.allocReg();
4226 Address flagAddr(T1, offsetof(NativeIterator, flags));
4227 masm.loadPtr(flagAddr, T2);
4229 /* Test for a normal enumerate iterator. */
4230 Jump notEnumerate = masm.branchTest32(Assembler::Zero, T2, Imm32(JSITER_ENUMERATE));
4231 stubcc.linkExit(notEnumerate, Uses(1));
4233 /* Clear active bit. */
4234 masm.and32(Imm32(~JSITER_ACTIVE), T2);
4235 masm.storePtr(T2, flagAddr);
4237 /* Reset property cursor. */
4238 masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
4239 masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
4241 /* Advance enumerators list. */
4242 masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
4243 masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
4244 masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
4250 OOL_STUBCALL(stubs::EndIter);
4254 stubcc.rejoin(Changes(1));
// Element increment/decrement: always via the generic stub, consuming
// the object and index from the stack.
4258 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
4260 prepareStubCall(Uses(2));
4261 INLINE_STUBCALL(stub);
// Slow path for global-name get: call the GetGlobalName stub.
4267 mjit::Compiler::jsop_getgname_slow(uint32 index)
4269 prepareStubCall(Uses(0));
4270 INLINE_STUBCALL(stubs::GetGlobalName);
// Compile JSOP_BINDGNAME. For compile-and-go scripts the global object is
// a compile-time constant and is pushed directly; otherwise a stub call
// resolves it at runtime.
4275 mjit::Compiler::jsop_bindgname()
4277 if (script->compileAndGo && globalObj) {
4278 frame.push(ObjectValue(*globalObj));
4282 /* :TODO: this is slower than it needs to be. */
4283 prepareStubCall(Uses(0));
4284 INLINE_STUBCALL(stubs::BindGlobalName);
4285 frame.takeReg(Registers::ReturnReg);
4286 frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
// Compile JSOP_GETGNAME. Special-cases the undefined/NaN/Infinity atoms as
// constants, then (with MONOIC) emits a global-name IC: a patchable shape
// guard on the global object followed by a patchable slot load. The slot
// offset (1 << 24) is a placeholder patched by the IC when it learns the
// real slot.
4290 mjit::Compiler::jsop_getgname(uint32 index)
4292 /* Optimize undefined, NaN and Infinity. */
4293 JSAtom *atom = script->getAtom(index);
4294 if (atom == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
4295 frame.push(UndefinedValue());
4298 if (atom == cx->runtime->atomState.NaNAtom) {
4299 frame.push(cx->runtime->NaNValue);
4302 if (atom == cx->runtime->atomState.InfinityAtom) {
4303 frame.push(cx->runtime->positiveInfinityValue);
4306 #if defined JS_MONOIC
// The global object was pushed by the preceding BINDGNAME.
4309 FrameEntry *fe = frame.peek(-1);
4310 JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);
4312 GetGlobalNameICInfo ic;
4313 RESERVE_IC_SPACE(masm);
4317 ic.usePropertyCache = true;
4319 ic.fastPathStart = masm.label();
4320 if (fe->isConstant()) {
// Constant global: load its shape from a baked-in address.
4321 JSObject *obj = &fe->getValue().toObject();
4323 JS_ASSERT(obj->isNative());
4325 objReg = frame.allocReg();
4327 masm.load32FromImm(&obj->objShape, objReg);
// INVALID_SHAPE guarantees the guard initially fails until patched.
4328 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
4329 Imm32(int32(JSObjectMap::INVALID_SHAPE)), ic.shape);
4330 masm.move(ImmPtr(obj), objReg);
4332 objReg = frame.ownRegForData(fe);
4334 RegisterID reg = frame.allocReg();
4336 masm.loadShape(objReg, reg);
4337 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
4338 Imm32(int32(JSObjectMap::INVALID_SHAPE)), ic.shape);
4341 stubcc.linkExit(shapeGuard, Uses(0));
4345 ic.slowPathCall = OOL_STUBCALL(ic::GetGlobalName);
4347 /* Garbage value. */
4348 uint32 slot = 1 << 24;
4350 masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
4351 Address address(objReg, slot);
4353 /* Allocate any register other than objReg. */
4354 RegisterID treg = frame.allocReg();
4355 /* After dreg is loaded, it's safe to clobber objReg. */
4356 RegisterID dreg = objReg;
4358 ic.load = masm.loadValueWithAddressOffsetPatch(address, treg, dreg);
4360 frame.pushRegs(treg, dreg);
4362 stubcc.rejoin(Changes(1));
4364 getGlobalNames.append(ic);
// Non-MONOIC fallback.
4367 jsop_getgname_slow(index);
4372 * Generate just the epilogue code that is specific to callgname. The rest
4373 * is shared with getgname.
// Emit the CALLGNAME-specific epilogue (the implicit |this| push); see
// the comment above. Non-compile-and-go scripts use the interpreter-like
// stub; otherwise the common case -- a function whose parent is this
// script's global -- pushes undefined inline.
4376 mjit::Compiler::jsop_callgname_epilogue()
4379 * This slow path does the same thing as the interpreter.
4381 if (!script->compileAndGo) {
4382 prepareStubCall(Uses(1));
4383 INLINE_STUBCALL(stubs::PushImplicitThisForGlobal);
4388 /* Fast path for known-not-an-object callee. */
4389 FrameEntry *fval = frame.peek(-1);
4390 if (fval->isNotType(JSVAL_TYPE_OBJECT)) {
// Non-object callee: implicit |this| is undefined.
4391 frame.push(UndefinedValue());
4396 * Optimized version. This inlines the common case, calling a
4397 * (non-proxied) function that has the same global as the current
4398 * script. To make the code simpler, we:
4399 * 1. test the stronger property that the callee's parent is
4400 * equal to the global of the current script, and
4401 * 2. bake in the global of the current script, which is why
4402 * this optimized path requires compile-and-go.
4405 /* If the callee is not an object, jump to the inline fast path. */
4406 MaybeRegisterID typeReg = frame.maybePinType(fval);
4407 RegisterID objReg = frame.copyDataIntoReg(fval);
4410 if (!fval->isType(JSVAL_TYPE_OBJECT)) {
4411 isNotObj = frame.testObject(Assembler::NotEqual, fval);
4412 frame.maybeUnpinReg(typeReg);
4416 * If the callee is not a function, jump to OOL slow path.
4418 Jump notFunction = masm.testFunction(Assembler::NotEqual, objReg);
4419 stubcc.linkExit(notFunction, Uses(1));
4422 * If the callee's parent is not equal to the global, jump to
4425 masm.loadPtr(Address(objReg, offsetof(JSObject, parent)), objReg);
4426 Jump globalMismatch = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(globalObj));
4427 stubcc.linkExit(globalMismatch, Uses(1));
4428 frame.freeReg(objReg);
4430 /* OOL stub call path. */
4432 OOL_STUBCALL(stubs::PushImplicitThisForGlobal);
// Inline join point: both object and non-object cases push undefined.
4435 if (isNotObj.isSet())
4436 isNotObj.getJump().linkTo(masm.label(), &masm);
4437 frame.pushUntypedValue(UndefinedValue());
4439 stubcc.rejoin(Changes(1));
// Slow path for global-name set: stub call, choosing the cached or
// no-cache variant, each further split by strict mode via STRICT_VARIANT.
4443 mjit::Compiler::jsop_setgname_slow(JSAtom *atom, bool usePropertyCache)
4445 prepareStubCall(Uses(2));
4446 masm.move(ImmPtr(atom), Registers::ArgReg1);
4447 if (usePropertyCache)
4448 INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalName));
4450 INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalNameNoCache));
// Compile JSOP_SETGNAME. With MONOIC, emit a set-global-name IC: a
// patchable shape guard on the global followed by a patchable value store
// whose slot offset (1 << 24) is a placeholder patched once the IC learns
// the real slot. Otherwise fall back to jsop_setgname_slow.
4456 mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache)
4458 #if defined JS_MONOIC
4459 FrameEntry *objFe = frame.peek(-2);
4460 FrameEntry *fe = frame.peek(-1);
4461 JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);
4463 SetGlobalNameICInfo ic;
// Pin the RHS value while the guard/store code is emitted.
4465 frame.pinEntry(fe, ic.vr);
4468 RESERVE_IC_SPACE(masm);
4469 ic.fastPathStart = masm.label();
4470 if (objFe->isConstant()) {
// Constant global: shapeReg can alias objReg since the object pointer
// is rematerialized from an immediate after the shape check.
4471 JSObject *obj = &objFe->getValue().toObject();
4472 JS_ASSERT(obj->isNative());
4474 ic.objReg = frame.allocReg();
4475 ic.shapeReg = ic.objReg;
4478 masm.load32FromImm(&obj->objShape, ic.shapeReg);
// INVALID_SHAPE guarantees the guard initially fails until patched.
4479 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg,
4480 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
4482 masm.move(ImmPtr(obj), ic.objReg);
4484 ic.objReg = frame.copyDataIntoReg(objFe);
4485 ic.shapeReg = frame.allocReg();
4486 ic.objConst = false;
4488 masm.loadShape(ic.objReg, ic.shapeReg);
4489 shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg,
4490 Imm32(int32(JSObjectMap::INVALID_SHAPE)),
4492 frame.freeReg(ic.shapeReg);
4494 ic.shapeGuardJump = shapeGuard;
4495 ic.slowPathStart = stubcc.linkExit(shapeGuard, Uses(2));
4499 ic.slowPathCall = OOL_STUBCALL(ic::SetGlobalName);
4501 /* Garbage value. */
4502 uint32 slot = 1 << 24;
4504 ic.usePropertyCache = usePropertyCache;
4506 masm.loadPtr(Address(ic.objReg, offsetof(JSObject, slots)), ic.objReg);
4507 Address address(ic.objReg, slot);
// Emit the store in whichever form matches how the RHS is represented:
// full constant, known-type + data register, or type + data registers.
4509 if (ic.vr.isConstant()) {
4510 ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.value(), address);
4511 } else if (ic.vr.isTypeKnown()) {
4512 ic.store = masm.storeValueWithAddressOffsetPatch(ImmType(ic.vr.knownType()),
4513 ic.vr.dataReg(), address);
4515 ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.typeReg(), ic.vr.dataReg(), address);
4518 frame.freeReg(ic.objReg);
4519 frame.unpinEntry(ic.vr);
4522 stubcc.rejoin(Changes(1));
4524 ic.fastPathRejoin = masm.label();
4525 setGlobalNames.append(ic);
// Non-MONOIC fallback.
4527 jsop_setgname_slow(atom, usePropertyCache);
// Slow path for element set: strict-aware SetElem stub over obj/id/value.
4532 mjit::Compiler::jsop_setelem_slow()
4534 prepareStubCall(Uses(3));
4535 INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem));
// Slow path for element get: GetElem stub over obj/id.
4541 mjit::Compiler::jsop_getelem_slow()
4543 prepareStubCall(Uses(2));
4544 INLINE_STUBCALL(stubs::GetElem);
// JSOP_UNBRAND: always via the Unbrand stub on the top-of-stack object.
4550 mjit::Compiler::jsop_unbrand()
4552 prepareStubCall(Uses(1));
4553 INLINE_STUBCALL(stubs::Unbrand);
// Compile JSOP_INSTANCEOF. Fast path: both operands are objects, the RHS
// is a non-bound function with an object .prototype; the emitted loop then
// walks the LHS prototype chain comparing against that prototype. All
// other cases (non-object operands, bound functions, primitive prototype)
// go through stubs::InstanceOf / stubs::FastInstanceOf.
4557 mjit::Compiler::jsop_instanceof()
4559 FrameEntry *lhs = frame.peek(-2);
4560 FrameEntry *rhs = frame.peek(-1);
4562 // The fast path applies only when both operands are objects.
4563 if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
4564 prepareStubCall(Uses(2));
4565 INLINE_STUBCALL(stubs::InstanceOf);
4567 frame.takeReg(Registers::ReturnReg);
4568 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
// RHS type unknown: guard it is an object and a function.
4572 MaybeJump firstSlow;
4573 if (!rhs->isTypeKnown()) {
4574 Jump j = frame.testObject(Assembler::NotEqual, rhs);
4575 stubcc.linkExit(j, Uses(2));
4576 RegisterID reg = frame.tempRegForData(rhs);
4577 j = masm.testFunction(Assembler::NotEqual, reg);
4578 stubcc.linkExit(j, Uses(2));
4581 /* Test for bound functions. */
4582 RegisterID obj = frame.tempRegForData(rhs);
4583 Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
4584 Imm32(JSObject::BOUND_FUNCTION));
4586 stubcc.linkExit(isBound, Uses(2));
4588 OOL_STUBCALL(stubs::InstanceOf);
4589 firstSlow = stubcc.masm.jump();
4593 /* This is sadly necessary because the error case needs the object. */
// Fetch rhs.prototype (the "class prototype") onto the stack.
4596 if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false))
4599 /* Primitive prototypes are invalid. */
4600 rhs = frame.peek(-1);
4601 Jump j = frame.testPrimitive(Assembler::Equal, rhs);
4602 stubcc.linkExit(j, Uses(3));
4604 /* Allocate registers up front, because of branchiness. */
4605 obj = frame.copyDataIntoReg(lhs);
4606 RegisterID proto = frame.copyDataIntoReg(rhs);
4607 RegisterID temp = frame.allocReg();
// Primitive LHS is never an instance: result false.
4610 if (!lhs->isTypeKnown())
4611 isFalse = frame.testPrimitive(Assembler::Equal, lhs);
4613 Address protoAddr(obj, offsetof(JSObject, proto));
4614 Label loop = masm.label();
4616 /* Walk prototype chain, break out on NULL or hit. */
4617 masm.loadPayload(protoAddr, obj);
4618 Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
4619 Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
4620 isTrue.linkTo(loop, &masm);
4621 masm.move(Imm32(1), temp);
4622 isTrue = masm.jump();
// False exits converge here; temp holds the boolean result payload.
4624 if (isFalse.isSet())
4625 isFalse.getJump().linkTo(masm.label(), &masm);
4626 isFalse2.linkTo(masm.label(), &masm);
4627 masm.move(Imm32(0), temp);
4628 isTrue.linkTo(masm.label(), &masm);
4630 frame.freeReg(proto);
4634 OOL_STUBCALL(stubs::FastInstanceOf);
4637 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);
4639 if (firstSlow.isSet())
4640 firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
4641 stubcc.rejoin(Changes(1));
// Compile an eval invocation: sync/kill all registers (eval can observe
// and mutate the whole frame), call stubs::Eval with argc, and pop the
// callee, |this|, and arguments.
4646 mjit::Compiler::emitEval(uint32 argc)
4648 /* Check for interrupts on function call */
4649 interruptCheckHelper();
// argc + 2 covers the arguments plus callee and |this|.
4651 frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
4652 prepareStubCall(Uses(argc + 2));
4653 masm.move(Imm32(argc), Registers::ArgReg1);
4654 INLINE_STUBCALL(stubs::Eval);
4655 frame.popn(argc + 2);
// JSOP_ARGUMENTS: always via the Arguments stub.
4660 mjit::Compiler::jsop_arguments()
4662 prepareStubCall(Uses(0));
4663 INLINE_STUBCALL(stubs::Arguments);
// Compile JSOP_NEWINIT / JSOP_NEWARRAY / JSOP_NEWOBJECT: decode whether an
// array or object literal is being initialized (plus element count or base
// object), call the matching stub, and push the initializer object.
4667 mjit::Compiler::jsop_newinit()
4671 JSObject *baseobj = NULL;
// NEWINIT encodes the prototype kind in its immediate byte.
4674 isArray = (PC[1] == JSProto_Array);
4678 count = GET_UINT24(PC);
4680 case JSOP_NEWOBJECT:
4682 baseobj = script->getObject(fullAtomIndex(PC));
4685 JS_NOT_REACHED("Bad op");
4689 prepareStubCall(Uses(0));
// Arrays pass the element count; objects pass the template base object.
4691 masm.move(Imm32(count), Registers::ArgReg1);
4692 INLINE_STUBCALL(stubs::NewInitArray);
4694 masm.move(ImmPtr(baseobj), Registers::ArgReg1);
4695 INLINE_STUBCALL(stubs::NewInitObject);
4697 frame.takeReg(Registers::ReturnReg);
4698 frame.pushInitializerObject(Registers::ReturnReg, *PC == JSOP_NEWARRAY, baseobj);
4702 * Note: This function emits tracer hooks into the OOL path. This means if
4703 * it is used in the middle of an in-progress slow path, the stream will be
4704 * hopelessly corrupted. Take care to only call this before linkExits() and
// Emit a (possibly traced) jump to |target|, wiring the optional slow-path
// jump |slow| to the same place. When tracing applies (backward jump to a
// TRACE/NOTRACE op with a valid trace-IC index), a TraceICInfo is recorded
// and an OOL path decrements the loop counter and calls InvokeTracer; see
// the warning comment above about OOL stream ordering.
4708 mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slow)
4710 // XXX refactor this little bit
4712 if (!jumpInScript(j, target))
4716 if (!stubcc.jumpInScript(*slow, target))
// Tracing applies only to backward jumps landing on TRACE/NOTRACE.
4720 if (!addTraceHints || target >= PC ||
4721 (JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
4723 || GET_UINT16(target) == BAD_TRACEIC_INDEX
// Not traceable: plain jumps for both fast and slow paths.
4727 if (!jumpInScript(j, target))
4729 if (slow && !stubcc.jumpInScript(*slow, target))
4737 ic.initialized = true;
4738 ic.stubEntry = stubcc.masm.label();
4739 ic.jumpTarget = target;
4742 ic.slowTraceHint = *slow;
4744 uint16 index = GET_UINT16(target);
// Grow the trace-IC vector on demand to hold this index.
4745 if (traceICs.length() <= index)
4746 if (!traceICs.resize(index+1))
4750 Label traceStart = stubcc.masm.label();
4753 * We make a trace IC even if the trace is currently disabled, in case it is
4754 * enabled later, but set up the jumps so that InvokeTracer is initially skipped.
4756 if (JSOp(*target) == JSOP_TRACE) {
4757 stubcc.linkExitDirect(j, traceStart);
4759 slow->linkTo(traceStart, &stubcc.masm);
4761 if (!jumpInScript(j, target))
4763 if (slow && !stubcc.jumpInScript(*slow, target))
4768 ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4769 traceICs[index] = ic;
// Count down the loop counter; only invoke the tracer when it hits zero.
4771 Jump nonzero = stubcc.masm.branchSub32(Assembler::NonZero, Imm32(1),
4772 Address(Registers::ArgReg1,
4773 offsetof(TraceICInfo, loopCounter)));
4774 stubcc.jumpInScript(nonzero, target);
4777 /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
4779 jsbytecode* pc = PC;
4782 OOL_STUBCALL(stubs::InvokeTracer);
// NULL return => no trace executed: jump to target; otherwise the return
// value is a code pointer to jump into after restoring frame registers.
4787 Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4788 Registers::ReturnReg);
4789 if (!stubcc.jumpInScript(no, target))
4791 restoreFrameRegs(stubcc.masm);
4792 stubcc.masm.jump(Registers::ReturnReg);
// Compile JSOP_ENTERBLOCK: call stubs::EnterBlock for the block object,
// grow the tracked frame by the block's stack defs, and mark slots the
// block flags as closed-over.
4798 mjit::Compiler::enterBlock(JSObject *obj)
4800 // If this is an exception entry point, then jsl_InternalThrow has set
4801 // VMFrame::fp to the correct fp for the entry point. We need to copy
4802 // that value here to FpReg so that FpReg also has the correct sp.
4803 // Otherwise, we would simply be using a stale FpReg value.
4804 // Additionally, we check the interrupt flag to allow interrupting
4805 // deeply nested exception handling.
4806 if (analysis->getCode(PC).exceptionEntry) {
4807 restoreFrameRegs(masm);
4808 interruptCheckHelper();
4811 uint32 oldFrameDepth = frame.localSlots();
4813 /* For now, don't bother doing anything for this opcode. */
4814 frame.syncAndForgetEverything();
4815 masm.move(ImmPtr(obj), Registers::ArgReg1);
4816 uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
4817 INLINE_STUBCALL(stubs::EnterBlock);
4818 frame.enterBlock(n);
4820 uintN base = JSSLOT_FREE(&js_BlockClass);
4821 uintN count = OBJ_BLOCK_COUNT(cx, obj);
4822 uintN limit = base + count;
// A true boolean in a block slot marks that local as closed over.
4823 for (uintN slot = base, i = 0; slot < limit; slot++, i++) {
4824 const Value &v = obj->getSlotRef(slot);
4825 if (v.isBoolean() && v.toBoolean())
4826 frame.setClosedVar(oldFrameDepth + i);
// Compile JSOP_LEAVEBLOCK: call stubs::LeaveBlock with the block object
// and shrink the tracked frame by the block's stack uses.
4831 mjit::Compiler::leaveBlock()
4834 * Note: After bug 535912, we can pass the block obj directly, inline
4835 * PutBlockObject, and do away with the muckiness in PutBlockObject.
4837 uint32 n = js_GetVariableStackUses(JSOP_LEAVEBLOCK, PC);
4838 JSObject *obj = script->getObject(fullAtomIndex(PC + UINT16_LEN));
4839 prepareStubCall(Uses(n));
4840 masm.move(ImmPtr(obj), Registers::ArgReg1);
4841 INLINE_STUBCALL(stubs::LeaveBlock);
4842 frame.leaveBlock(n);
4845 // Creates the new object expected for constructors, and places it in |thisv|.
4846 // It is broken down into the following operations:
4848 // GETPROP "prototype"
4851 // call js_CreateThisFromFunctionWithProto(...)
// Create the |this| object for a constructor call (see comment above):
// fetch callee.prototype, NULL it out on the OOL path if it is not an
// object, then call stubs::CreateThis with the proto in ArgReg1.
4854 mjit::Compiler::constructThis()
4856 JS_ASSERT(isConstructing);
4861 // Get callee.prototype.
4862 if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false))
4865 // Reach into the proto Value and grab a register for its data.
4866 FrameEntry *protoFe = frame.peek(-1);
4867 RegisterID protoReg = frame.ownRegForData(protoFe);
4869 // Now, get the type. If it's not an object, set protoReg to NULL.
4870 Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
4871 stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
4872 stubcc.masm.move(ImmPtr(NULL), protoReg);
4873 stubcc.crossJump(stubcc.masm.jump(), masm.label());
4875 // Done with the protoFe.
4878 prepareStubCall(Uses(0));
4879 if (protoReg != Registers::ArgReg1)
4880 masm.move(protoReg, Registers::ArgReg1);
4881 INLINE_STUBCALL(stubs::CreateThis);
4882 frame.freeReg(protoReg);
// Compile JSOP_TABLESWITCH. Small int32-typed switches get an indirect
// jump through a patchable jump table (with a stub fallback for the
// not-int case); everything else -- unknown/non-int discriminant or more
// than 256 cases -- calls stubs::TableSwitch and jumps to its returned
// code pointer. Not implemented on ARM (no jump(BaseIndex)).
4887 mjit::Compiler::jsop_tableswitch(jsbytecode *pc)
4889 #if defined JS_CPU_ARM
4890 JS_NOT_REACHED("Implement jump(BaseIndex) for ARM");
4893 jsbytecode *originalPC = pc;
// Decode default target and the [low, high] case range.
4895 uint32 defaultTarget = GET_JUMP_OFFSET(pc);
4896 pc += JUMP_OFFSET_LEN;
4898 jsint low = GET_JUMP_OFFSET(pc);
4899 pc += JUMP_OFFSET_LEN;
4900 jsint high = GET_JUMP_OFFSET(pc);
4901 pc += JUMP_OFFSET_LEN;
4902 int numJumps = high + 1 - low;
4903 JS_ASSERT(numJumps >= 0);
4906 * If there are no cases, this is a no-op. The default case immediately
4907 * follows in the bytecode and is always taken.
4909 if (numJumps == 0) {
4914 FrameEntry *fe = frame.peek(-1);
// Too many cases or a known-non-int discriminant: full stub path.
4915 if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
4916 frame.syncAndForgetEverything();
4917 masm.move(ImmPtr(originalPC), Registers::ArgReg1);
4919 /* prepareStubCall() is not needed due to forgetEverything() */
4920 INLINE_STUBCALL(stubs::TableSwitch);
// The stub returns the address to jump to.
4922 masm.jump(Registers::ReturnReg);
4927 if (fe->isConstant()) {
4928 JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
4929 dataReg = frame.allocReg();
4930 masm.move(Imm32(fe->getValue().toInt32()), dataReg);
4932 dataReg = frame.copyDataIntoReg(fe);
4935 RegisterID reg = frame.allocReg();
4936 frame.syncAndForgetEverything();
// Unknown type: verify at runtime that the value is an int32.
4939 if (!fe->isType(JSVAL_TYPE_INT32))
4940 notInt = masm.testInt32(Assembler::NotEqual, frame.addressOf(fe));
4943 jt.offsetIndex = jumpTableOffsets.length();
// Table base address is patched in later; reg receives it.
4944 jt.label = masm.moveWithPatch(ImmPtr(NULL), reg);
4945 jumpTables.append(jt);
// Record one script offset per case; out-of-range entries fall back to
// the default target.
4947 for (int i = 0; i < numJumps; i++) {
4948 uint32 target = GET_JUMP_OFFSET(pc);
4950 target = defaultTarget;
4951 uint32 offset = (originalPC + target) - script->code;
4952 jumpTableOffsets.append(offset);
4953 pc += JUMP_OFFSET_LEN;
// Rebase the discriminant to [0, numJumps) and bounds-check it;
// unsigned AboveOrEqual also catches values below 'low'.
4956 masm.sub32(Imm32(low), dataReg);
4957 Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
4958 BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
4959 masm.jump(jumpTarget);
4961 if (notInt.isSet()) {
4962 stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
4964 stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
4965 OOL_STUBCALL(stubs::TableSwitch);
4966 stubcc.masm.jump(Registers::ReturnReg);
4969 return jumpAndTrace(defaultCase, originalPC + defaultTarget);
// Slow-path compile of JSOP_CALLELEM: unconditionally call the CallElem
// stub, which consumes the two stack operands.
4974 mjit::Compiler::jsop_callelem_slow()
4976 prepareStubCall(Uses(2));
4977 INLINE_STUBCALL(stubs::CallElem);
// Compile JSOP_FORPROP: store the current for-in iteration value into
// the property |atom| of the object on the stack. The interleaved
// comments trace the operand-stack layout before/after each step.
4984 mjit::Compiler::jsop_forprop(JSAtom *atom)
4987 // After: ITER OBJ ITER
4990 // Before: ITER OBJ ITER
4991 // After: ITER OBJ ITER VALUE
4994 // Before: ITER OBJ ITER VALUE
4995 // After: ITER OBJ VALUE
4998 // Before: ITER OBJ VALUE
4999 // After: ITER VALUE
// NOTE(review): the |false| second argument's meaning should be confirmed
// against jsop_setprop's declaration (not visible in this view).
5000 jsop_setprop(atom, false);
5002 // Before: ITER VALUE
// Compile JSOP_FORNAME: bind the scope object for |atom| so the for-in
// iteration value can be stored through it.
5008 mjit::Compiler::jsop_forname(JSAtom *atom)
5011 // After: ITER SCOPEOBJ
// NOTE(review): the |false| second argument's meaning should be confirmed
// against jsop_bindname's declaration (not visible in this view).
5012 jsop_bindname(atom, false);
5017 mjit::Compiler::jsop_forgname(JSAtom *atom)
5020 // After: ITER GLOBAL
5023 // Before: ITER GLOBAL
5024 // After: ITER GLOBAL ITER
5027 // Before: ITER GLOBAL ITER
5028 // After: ITER GLOBAL ITER VALUE
5031 // Before: ITER GLOBAL ITER VALUE
5032 // After: ITER GLOBAL VALUE
5035 // Before: ITER GLOBAL VALUE
5036 // After: ITER VALUE
5037 jsop_setgname(atom, false);
5039 // Before: ITER VALUE