Imported Upstream version 1.0.0
[platform/upstream/js.git] / js / src / methodjit / Compiler.cpp
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2  * vim: set ts=4 sw=4 et tw=99:
3  *
4  * ***** BEGIN LICENSE BLOCK *****
5  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6  *
7  * The contents of this file are subject to the Mozilla Public License Version
8  * 1.1 (the "License"); you may not use this file except in compliance with
9  * the License. You may obtain a copy of the License at
10  * http://www.mozilla.org/MPL/
11  *
12  * Software distributed under the License is distributed on an "AS IS" basis,
13  * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14  * for the specific language governing rights and limitations under the
15  * License.
16  *
17  * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18  * May 28, 2008.
19  *
20  * The Initial Developer of the Original Code is
21  *   Brendan Eich <brendan@mozilla.org>
22  *
23  * Contributor(s):
24  *   David Anderson <danderson@mozilla.com>
25  *   David Mandelin <dmandelin@mozilla.com>
26  *   Jan de Mooij <jandemooij@gmail.com>
27  *
28  * Alternatively, the contents of this file may be used under the terms of
29  * either of the GNU General Public License Version 2 or later (the "GPL"),
30  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31  * in which case the provisions of the GPL or the LGPL are applicable instead
32  * of those above. If you wish to allow use of your version of this file only
33  * under the terms of either the GPL or the LGPL, and not to allow others to
34  * use your version of this file under the terms of the MPL, indicate your
35  * decision by deleting the provisions above and replace them with the notice
36  * and other provisions required by the GPL or the LGPL. If you do not delete
37  * the provisions above, a recipient may use your version of this file under
38  * the terms of any one of the MPL, the GPL or the LGPL.
39  *
40  * ***** END LICENSE BLOCK ***** */
41
42 #include "MethodJIT.h"
43 #include "jsnum.h"
44 #include "jsbool.h"
45 #include "jsemit.h"
46 #include "jsiter.h"
47 #include "Compiler.h"
48 #include "StubCalls.h"
49 #include "MonoIC.h"
50 #include "PolyIC.h"
51 #include "ICChecker.h"
52 #include "Retcon.h"
53 #include "assembler/jit/ExecutableAllocator.h"
54 #include "assembler/assembler/LinkBuffer.h"
55 #include "FrameState-inl.h"
56 #include "jsobjinlines.h"
57 #include "jsscriptinlines.h"
58 #include "InlineFrameAssembler.h"
59 #include "jscompartment.h"
60 #include "jsobjinlines.h"
61 #include "jsopcodeinlines.h"
62 #include "jshotloop.h"
63
64 #include "jsautooplen.h"
65
66 using namespace js;
67 using namespace js::mjit;
68 #if defined(JS_POLYIC) || defined(JS_MONOIC)
69 using namespace js::mjit::ic;
70 #endif
71
/*
 * Return |retval| from the current function if any compilation buffer has
 * hit an out-of-memory condition: a CompilerAllocPolicy-backed vector
 * (recorded in oomInVector), the fast-path assembler (masm), or the
 * out-of-line stub assembler (stubcc.masm).
 */
#define RETURN_IF_OOM(retval)                                   \
    JS_BEGIN_MACRO                                              \
        if (oomInVector || masm.oom() || stubcc.masm.oom())     \
            return retval;                                      \
    JS_END_MACRO
77
#if defined(JS_METHODJIT_SPEW)
/*
 * Opcode-name table indexed by JSOp value, generated from jsopcode.tbl.
 * Compiled in only for spew (diagnostic logging) builds.
 */
static const char *OpcodeNames[] = {
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
# include "jsopcode.tbl"
# undef OPDEF
};
#endif
85
/*
 * Construct a method-JIT compiler for the script executing in |fp|.
 * All IC/patch vectors share a CompilerAllocPolicy tied to this compiler,
 * so any failed vector append is recorded in oomInVector and later caught
 * by RETURN_IF_OOM / CHECK_STATUS.
 */
mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp)
  : BaseCompiler(cx),
    fp(fp),
    script(fp->script()),
    scopeChain(&fp->scopeChain()),
    globalObj(scopeChain->getGlobal()),
    /* Eval frames may carry a function, but are not compiled as one. */
    fun(fp->isFunctionFrame() && !fp->isEvalFrame()
        ? fp->fun()
        : NULL),
    isConstructing(fp->isConstructing()),
    /* jumpMap/savedTraps are allocated lazily; freed in the destructor. */
    analysis(NULL), jumpMap(NULL), savedTraps(NULL),
    frame(cx, script, fun, masm),
    branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
#if defined JS_MONOIC
    getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
    setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
    callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
    equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
    traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
#endif
#if defined JS_POLYIC
    pics(CompilerAllocPolicy(cx, *thisFromCtor())),
    getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
    setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
#endif
    callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
    callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
    doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
    jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
    jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
    stubcc(cx, *thisFromCtor(), frame, script),
    debugMode_(cx->compartment->debugMode),
#if defined JS_TRACER
    addTraceHints(cx->traceJitEnabled),
#endif
    oomInVector(false),
    applyTricks(NoApplyTricks)
{
}
125
126 CompileStatus
127 mjit::Compiler::compile()
128 {
129     JS_ASSERT_IF(isConstructing, !script->jitCtor);
130     JS_ASSERT_IF(!isConstructing, !script->jitNormal);
131
132     JITScript **jit = isConstructing ? &script->jitCtor : &script->jitNormal;
133     void **checkAddr = isConstructing
134                        ? &script->jitArityCheckCtor
135                        : &script->jitArityCheckNormal;
136
137     CompileStatus status = performCompilation(jit);
138     if (status == Compile_Okay) {
139         // Global scripts don't have an arity check entry. That's okay, we
140         // just need a pointer so the VM can quickly decide whether this
141         // method can be JIT'd or not. Global scripts cannot be IC'd, since
142         // they have no functions, so there is no danger.
143         *checkAddr = (*jit)->arityCheckEntry
144                      ? (*jit)->arityCheckEntry
145                      : (*jit)->invokeEntry;
146     } else {
147         *checkAddr = JS_UNJITTABLE_SCRIPT;
148     }
149
150     return status;
151 }
152
/*
 * Evaluate a compilation stage and propagate any non-Okay status to the
 * caller.  If the failure was caused by OOM in a vector or assembler,
 * report it on |cx| before returning.
 */
#define CHECK_STATUS(expr)                                           \
    JS_BEGIN_MACRO                                                   \
        CompileStatus status_ = (expr);                              \
        if (status_ != Compile_Okay) {                               \
            if (oomInVector || masm.oom() || stubcc.masm.oom())      \
                js_ReportOutOfMemory(cx);                            \
            return status_;                                          \
        }                                                            \
    JS_END_MACRO
162
/*
 * Drive the full compilation pipeline for this script:
 * bytecode analysis -> frame-state init -> prologue -> method body ->
 * epilogue -> serialization into a JITScript (finishThisUp).
 * On success *jitp holds the new JITScript.
 */
CompileStatus
mjit::Compiler::performCompilation(JITScript **jitp)
{
    JaegerSpew(JSpew_Scripts, "compiling script (file \"%s\") (line \"%d\") (length \"%d\")\n",
               script->filename, script->lineno, script->length);

    /* Bytecode analysis lives on the stack for the duration of compilation. */
    analyze::Script analysis;
    PodZero(&analysis);

    analysis.analyze(cx, script);

    if (analysis.OOM()) {
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }
    if (analysis.failed()) {
        JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
        return Compile_Abort;
    }

    /*
     * NOTE(review): this->analysis points at the stack-local above, so it is
     * only valid while performCompilation is on the stack — confirm no use
     * after return.
     */
    this->analysis = &analysis;

    if (!frame.init()) {
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }

    /* One machine-code label per bytecode offset; freed in ~Compiler. */
    jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
    if (!jumpMap) {
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }
#ifdef DEBUG
    /* Invalid labels let debug asserts catch reads of unset entries. */
    for (uint32 i = 0; i < script->length; i++)
        jumpMap[i] = Label();
#endif

#ifdef JS_METHODJIT_SPEW
    Profiler prof;
    prof.start();
#endif

    /* Initialize PC early so stub calls in the prologue can be fallible. */
    PC = script->code;

#ifdef JS_METHODJIT
    script->debugMode = debugMode();
#endif

    /* Tell the frame state which vars/args escape via closures. */
    for (uint32 i = 0; i < script->nClosedVars; i++)
        frame.setClosedVar(script->getClosedVar(i));
    for (uint32 i = 0; i < script->nClosedArgs; i++)
        frame.setClosedArg(script->getClosedArg(i));

    /* Each stage returns early (via CHECK_STATUS) on failure or OOM. */
    CHECK_STATUS(generatePrologue());
    CHECK_STATUS(generateMethod());
    CHECK_STATUS(generateEpilogue());
    CHECK_STATUS(finishThisUp(jitp));

#ifdef JS_METHODJIT_SPEW
    prof.stop();
    JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
#endif

    JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%ld\")\n",
               (*jitp)->code.m_code.executableAddress(), (*jitp)->code.m_size);

    return Compile_Okay;
}
232
233 #undef CHECK_STATUS
234
mjit::Compiler::~Compiler()
{
    /*
     * Both arrays start out NULL (see the constructor) and may never have
     * been allocated; presumably cx->free accepts NULL like free(3) —
     * matches how it is used throughout this file.
     */
    cx->free(jumpMap);
    cx->free(savedTraps);
}
240
241 CompileStatus JS_NEVER_INLINE
242 mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
243 {
244     JS_ASSERT(cx->fp() == fp);
245
246 #if JS_HAS_SHARP_VARS
247     if (fp->script()->hasSharps)
248         return Compile_Abort;
249 #endif
250
251     // Ensure that constructors have at least one slot.
252     if (fp->isConstructing() && !fp->script()->nslots)
253         fp->script()->nslots++;
254
255     Compiler cc(cx, fp);
256
257     return cc.compile();
258 }
259
260 bool
261 mjit::Compiler::loadOldTraps(const Vector<CallSite> &sites)
262 {
263     savedTraps = (bool *)cx->calloc(sizeof(bool) * script->length);
264     if (!savedTraps)
265         return false;
266     
267     for (size_t i = 0; i < sites.length(); i++) {
268         const CallSite &site = sites[i];
269         if (site.isTrap())
270             savedTraps[site.pcOffset] = true;
271     }
272
273     return true;
274 }
275
/*
 * Emit the function prologue.  For function frames this lays out three
 * entry points:
 *   #1 invokeLabel (no function): frame fully built by the interpreter.
 *   #2 invokeLabel (function):    frame built, argc already matches nargs.
 *   #3 arityLabel (out-of-line):  argc may differ; fixes up and jumps to #2.
 * It also emits the stack-limit check, undefined-initializes locals,
 * creates the call object for heavyweight functions, and loads the scope
 * chain when the script needs it.
 */
CompileStatus
mjit::Compiler::generatePrologue()
{
    /* Entry point #1. */
    invokeLabel = masm.label();

    /*
     * If there is no function, then this can only be called via JaegerShot(),
     * which expects an existing frame to be initialized like the interpreter.
     */
    if (fun) {
        /* Jump over the function prologue; linked to the end below. */
        Jump j = masm.jump();

        /*
         * Entry point #2: The caller has partially constructed a frame, and
         * either argc >= nargs or the arity check has corrected the frame.
         */
        invokeLabel = masm.label();

        Label fastPath = masm.label();

        /* Store this early on so slow paths can access it. */
        masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));

        {
            /*
             * Entry point #3: The caller has partially constructed a frame,
             * but argc might be != nargs, so an arity check might be called.
             *
             * This loops back to entry point #2.
             */
            arityLabel = stubcc.masm.label();
            Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
                                                 Imm32(fun->nargs));
            stubcc.crossJump(argMatch, fastPath);

            /* FixupArity expects argc in ArgReg1. */
            if (JSParamReg_Argc != Registers::ArgReg1)
                stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);

            /* Slow path - call the arity check function. Returns new fp. */
            stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
            stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
            OOL_STUBCALL(stubs::FixupArity);
            stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
            stubcc.crossJump(stubcc.masm.jump(), fastPath);
        }

        /*
         * Guard that there is enough stack space. Note we include the size of
         * a second frame, to ensure we can create a frame from call sites.
         */
        masm.addPtr(Imm32((script->nslots + VALUES_PER_STACK_FRAME * 2) * sizeof(Value)),
                    JSFrameReg,
                    Registers::ReturnReg);
        Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
                                         FrameAddress(offsetof(VMFrame, stackLimit)));

        /* If the stack check fails... */
        {
            stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
            OOL_STUBCALL(stubs::HitStackQuota);
            stubcc.crossJump(stubcc.masm.jump(), masm.label());
        }

        /*
         * Set locals to undefined, as in initCallFrameLatePrologue.
         * Skip locals which aren't closed and are known to be defined before used,
         * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
         */
        for (uint32 i = 0; i < script->nfixed; i++) {
            if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
                /* Locals live immediately after the frame header. */
                Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
                masm.storeValue(UndefinedValue(), local);
            }
        }

        /* Create the call object. */
        if (fun->isHeavyweight()) {
            prepareStubCall(Uses(0));
            INLINE_STUBCALL(stubs::GetCallObject);
        }

        /* Land the entry-point-#1 jump after the function-only prologue. */
        j.linkTo(masm.label(), &masm);

        if (analysis->usesScopeChain() && !fun->isHeavyweight()) {
            /*
             * Load the scope chain into the frame if necessary.  The scope chain
             * is always set for global and eval frames, and will have been set by
             * GetCallObject for heavyweight function frames.
             */
            RegisterID t0 = Registers::ReturnReg;
            Jump hasScope = masm.branchTest32(Assembler::NonZero,
                                              FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
            masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
            masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
            masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
            hasScope.linkTo(masm.label(), &masm);
        }
    }

    if (isConstructing)
        constructThis();

    /* Notify the debugger/probes that script execution is beginning. */
    if (debugMode() || Probes::callTrackingActive(cx))
        INLINE_STUBCALL(stubs::EnterScript);

    return Compile_Okay;
}
383
/*
 * Epilogue stage of the pipeline.  Currently no work is required; kept as
 * a hook so performCompilation's stage sequence stays uniform.
 */
CompileStatus
mjit::Compiler::generateEpilogue()
{
    return Compile_Okay;
}
389
/*
 * Serialize the finished compilation into executable memory and a
 * JITScript.  Steps: link pending branches; copy fast (masm) and slow
 * (stubcc) code into one executable allocation; carve a single calloc'd
 * buffer into the JITScript header plus its variable-length sections
 * (nmap, MONOIC/POLYIC tables, call sites), fixing up every IC with
 * final code addresses; patch doubles and jump tables; then mark the
 * region executable.  On success *jitp owns the result.
 */
CompileStatus
mjit::Compiler::finishThisUp(JITScript **jitp)
{
    /* Any OOM during code generation invalidates everything below. */
    RETURN_IF_OOM(Compile_Error);

    /* Resolve forward branches now that all bytecode labels are known. */
    for (size_t i = 0; i < branchPatches.length(); i++) {
        Label label = labelOf(branchPatches[i].pc);
        branchPatches[i].jump.linkTo(label, &masm);
    }

#ifdef JS_CPU_ARM
    /* ARM buffers pending constant pools; flush before measuring size. */
    masm.forceFlushConstantPool();
    stubcc.masm.forceFlushConstantPool();
#endif
    JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %u, Slow code (stubcc) size = %u.\n", masm.size(), stubcc.size());

    /* Executable region: fast code, slow code, doubles, jump tables. */
    size_t totalSize = masm.size() +
                       stubcc.size() +
                       doubleList.length() * sizeof(double) +
                       jumpTableOffsets.length() * sizeof(void *);

    JSC::ExecutablePool *execPool = getExecPool(script, totalSize);
    if (!execPool) {
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }

    uint8 *result = (uint8 *)execPool->alloc(totalSize);
    if (!result) {
        execPool->release();
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }
    JSC::ExecutableAllocator::makeWritable(result, totalSize);
    masm.executableCopy(result);
    stubcc.masm.executableCopy(result + masm.size());

    /* LinkBuffers translate assembler labels into final addresses. */
    JSC::LinkBuffer fullCode(result, totalSize);
    JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());

    /* Count live safe points to size the pc -> ncode map. */
    size_t nNmapLive = 0;
    for (size_t i = 0; i < script->length; i++) {
        analyze::Bytecode *opinfo = analysis->maybeCode(i);
        if (opinfo && opinfo->safePoint)
            nNmapLive++;
    }

    /* Please keep in sync with JITScript::scriptDataSize! */
    size_t totalBytes = sizeof(JITScript) +
                        sizeof(NativeMapEntry) * nNmapLive +
#if defined JS_MONOIC
                        sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
                        sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
                        sizeof(ic::CallICInfo) * callICs.length() +
                        sizeof(ic::EqualityICInfo) * equalityICs.length() +
                        sizeof(ic::TraceICInfo) * traceICs.length() +
#endif
#if defined JS_POLYIC
                        sizeof(ic::PICInfo) * pics.length() +
                        sizeof(ic::GetElementIC) * getElemICs.length() +
                        sizeof(ic::SetElementIC) * setElemICs.length() +
#endif
                        sizeof(CallSite) * callSites.length();

    /* calloc: every section below starts zero-initialized. */
    uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
    if (!cursor) {
        execPool->release();
        js_ReportOutOfMemory(cx);
        return Compile_Error;
    }

    JITScript *jit = new(cursor) JITScript;
    cursor += sizeof(JITScript);

    jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
    jit->invokeEntry = result;
    jit->singleStepMode = script->singleStepMode;
    if (fun) {
        jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
        jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
    }

    /*
     * WARNING: mics(), callICs() et al depend on the ordering of these
     * variable-length sections.  See JITScript's declaration for details.
     */

    /* Build the pc -> ncode mapping. */
    NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
    jit->nNmapPairs = nNmapLive;
    cursor += sizeof(NativeMapEntry) * jit->nNmapPairs;
    size_t ix = 0;
    if (jit->nNmapPairs > 0) {
        for (size_t i = 0; i < script->length; i++) {
            analyze::Bytecode *opinfo = analysis->maybeCode(i);
            if (opinfo && opinfo->safePoint) {
                Label L = jumpMap[i];
                JS_ASSERT(L.isValid());
                jitNmap[ix].bcOff = i;
                jitNmap[ix].ncode = (uint8 *)(result + masm.distanceOf(L));
                ix++;
            }
        }
    }
    JS_ASSERT(ix == jit->nNmapPairs);

#if defined JS_MONOIC
    /* Copy GetGlobalName ICs, recording the load/store offset for each. */
    ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
    jit->nGetGlobalNames = getGlobalNames.length();
    cursor += sizeof(ic::GetGlobalNameIC) * jit->nGetGlobalNames;
    for (size_t i = 0; i < jit->nGetGlobalNames; i++) {
        ic::GetGlobalNameIC &to = getGlobalNames_[i];
        GetGlobalNameICInfo &from = getGlobalNames[i];
        from.copyTo(to, fullCode, stubCode);

        /* Asserts after narrowing stores verify the offset fits the field. */
        int offset = fullCode.locationOf(from.load) - to.fastPathStart;
        to.loadStoreOffset = offset;
        JS_ASSERT(to.loadStoreOffset == offset);

        stubCode.patch(from.addrLabel, &to);
    }

    /* Copy SetGlobalName ICs with their shape-guard and rejoin offsets. */
    ic::SetGlobalNameIC *setGlobalNames_ = (ic::SetGlobalNameIC *)cursor;
    jit->nSetGlobalNames = setGlobalNames.length();
    cursor += sizeof(ic::SetGlobalNameIC) * jit->nSetGlobalNames;
    for (size_t i = 0; i < jit->nSetGlobalNames; i++) {
        ic::SetGlobalNameIC &to = setGlobalNames_[i];
        SetGlobalNameICInfo &from = setGlobalNames[i];
        from.copyTo(to, fullCode, stubCode);
        to.slowPathStart = stubCode.locationOf(from.slowPathStart);

        int offset = fullCode.locationOf(from.store).labelAtOffset(0) -
                     to.fastPathStart;
        to.loadStoreOffset = offset;
        JS_ASSERT(to.loadStoreOffset == offset);

        to.hasExtraStub = 0;
        to.objConst = from.objConst;
        to.shapeReg = from.shapeReg;
        to.objReg = from.objReg;
        to.vr = from.vr;

        offset = fullCode.locationOf(from.shapeGuardJump) -
                 to.fastPathStart;
        to.inlineShapeJump = offset;
        JS_ASSERT(to.inlineShapeJump == offset);

        offset = fullCode.locationOf(from.fastPathRejoin) -
                 to.fastPathStart;
        to.fastRejoinOffset = offset;
        JS_ASSERT(to.fastRejoinOffset == offset);

        stubCode.patch(from.addrLabel, &to);
    }

    /*
     * Copy call ICs.  Hot-path offsets are relative to funGuard; the
     * out-of-line offsets are relative to slowPathStart.
     */
    ic::CallICInfo *jitCallICs = (ic::CallICInfo *)cursor;
    jit->nCallICs = callICs.length();
    cursor += sizeof(ic::CallICInfo) * jit->nCallICs;
    for (size_t i = 0; i < jit->nCallICs; i++) {
        jitCallICs[i].reset();
        jitCallICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
        jitCallICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
        jitCallICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);

        /* Compute the hot call offset. */
        uint32 offset = fullCode.locationOf(callICs[i].hotJump) -
                        fullCode.locationOf(callICs[i].funGuard);
        jitCallICs[i].hotJumpOffset = offset;
        JS_ASSERT(jitCallICs[i].hotJumpOffset == offset);

        /* Compute the join point offset. */
        offset = fullCode.locationOf(callICs[i].joinPoint) -
                 fullCode.locationOf(callICs[i].funGuard);
        jitCallICs[i].joinPointOffset = offset;
        JS_ASSERT(jitCallICs[i].joinPointOffset == offset);

        /* Compute the OOL call offset. */
        offset = stubCode.locationOf(callICs[i].oolCall) -
                 stubCode.locationOf(callICs[i].slowPathStart);
        jitCallICs[i].oolCallOffset = offset;
        JS_ASSERT(jitCallICs[i].oolCallOffset == offset);

        /* Compute the OOL jump offset. */
        offset = stubCode.locationOf(callICs[i].oolJump) -
                 stubCode.locationOf(callICs[i].slowPathStart);
        jitCallICs[i].oolJumpOffset = offset;
        JS_ASSERT(jitCallICs[i].oolJumpOffset == offset);

        /* Compute the start of the OOL IC call. */
        offset = stubCode.locationOf(callICs[i].icCall) -
                 stubCode.locationOf(callICs[i].slowPathStart);
        jitCallICs[i].icCallOffset = offset;
        JS_ASSERT(jitCallICs[i].icCallOffset == offset);

        /* Compute the slow join point offset. */
        offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
                 stubCode.locationOf(callICs[i].slowPathStart);
        jitCallICs[i].slowJoinOffset = offset;
        JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);

        /* Compute the join point offset for continuing on the hot path. */
        offset = stubCode.locationOf(callICs[i].hotPathLabel) -
                 stubCode.locationOf(callICs[i].funGuard);
        jitCallICs[i].hotPathOffset = offset;
        JS_ASSERT(jitCallICs[i].hotPathOffset == offset);

        jitCallICs[i].pc = callICs[i].pc;
        jitCallICs[i].frameSize = callICs[i].frameSize;
        jitCallICs[i].funObjReg = callICs[i].funObjReg;
        jitCallICs[i].funPtrReg = callICs[i].funPtrReg;
        stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
        stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
    }

    /* Copy equality ICs, resolving each jump target through jumpMap. */
    ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
    jit->nEqualityICs = equalityICs.length();
    cursor += sizeof(ic::EqualityICInfo) * jit->nEqualityICs;
    for (size_t i = 0; i < jit->nEqualityICs; i++) {
        uint32 offs = uint32(equalityICs[i].jumpTarget - script->code);
        JS_ASSERT(jumpMap[offs].isValid());
        jitEqualityICs[i].target = fullCode.locationOf(jumpMap[offs]);
        jitEqualityICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
        jitEqualityICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
        jitEqualityICs[i].stub = equalityICs[i].stub;
        jitEqualityICs[i].lvr = equalityICs[i].lvr;
        jitEqualityICs[i].rvr = equalityICs[i].rvr;
        jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
        jitEqualityICs[i].cond = equalityICs[i].cond;
        if (equalityICs[i].jumpToStub.isSet())
            jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
        jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);

        stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
    }

    /* Copy trace ICs; uninitialized entries are skipped wholesale. */
    ic::TraceICInfo *jitTraceICs = (ic::TraceICInfo *)cursor;
    jit->nTraceICs = traceICs.length();
    cursor += sizeof(ic::TraceICInfo) * jit->nTraceICs;
    for (size_t i = 0; i < jit->nTraceICs; i++) {
        jitTraceICs[i].initialized = traceICs[i].initialized;
        if (!traceICs[i].initialized)
            continue;

        uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
        JS_ASSERT(jumpMap[offs].isValid());
        jitTraceICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
        jitTraceICs[i].jumpTarget = fullCode.locationOf(jumpMap[offs]);
        jitTraceICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
        jitTraceICs[i].traceData = NULL;
#ifdef DEBUG
        jitTraceICs[i].jumpTargetPC = traceICs[i].jumpTarget;
#endif
        jitTraceICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
        if (traceICs[i].slowTraceHint.isSet())
            jitTraceICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
#ifdef JS_TRACER
        jitTraceICs[i].loopCounterStart = GetHotloop(cx);
#endif
        /*
         * NOTE(review): loopCounterStart is only assigned under JS_TRACER;
         * in non-tracer builds it is the zero from the calloc above —
         * confirm this is the intended counter baseline.
         */
        jitTraceICs[i].loopCounter = jitTraceICs[i].loopCounterStart
            - cx->compartment->backEdgeCount(traceICs[i].jumpTarget);

        stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
    }
#endif /* JS_MONOIC */

    /* Point call-return patches at their join labels in the fast path. */
    for (size_t i = 0; i < callPatches.length(); i++) {
        CallPatchInfo &patch = callPatches[i];

        if (patch.hasFastNcode)
            fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
        if (patch.hasSlowNcode)
            stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
    }

#ifdef JS_POLYIC
    /* Copy GetElement ICs. */
    ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
    jit->nGetElems = getElemICs.length();
    cursor += sizeof(ic::GetElementIC) * jit->nGetElems;
    for (size_t i = 0; i < jit->nGetElems; i++) {
        ic::GetElementIC &to = jitGetElems[i];
        GetElementICInfo &from = getElemICs[i];

        /* Placement-new: run the IC's constructor in the calloc'd slot. */
        new (&to) ic::GetElementIC();
        from.copyTo(to, fullCode, stubCode);

        to.typeReg = from.typeReg;
        to.objReg = from.objReg;
        to.idRemat = from.id;

        if (from.typeGuard.isSet()) {
            int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
                                  fullCode.locationOf(from.fastPathStart);
            to.inlineTypeGuard = inlineTypeGuard;
            JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
        }
        int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
                               fullCode.locationOf(from.fastPathStart);
        to.inlineClaspGuard = inlineClaspGuard;
        JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);

        stubCode.patch(from.paramAddr, &to);
    }

    /* Copy SetElement ICs. */
    ic::SetElementIC *jitSetElems = (ic::SetElementIC *)cursor;
    jit->nSetElems = setElemICs.length();
    cursor += sizeof(ic::SetElementIC) * jit->nSetElems;
    for (size_t i = 0; i < jit->nSetElems; i++) {
        ic::SetElementIC &to = jitSetElems[i];
        SetElementICInfo &from = setElemICs[i];

        new (&to) ic::SetElementIC();
        from.copyTo(to, fullCode, stubCode);

        to.strictMode = script->strictModeCode;
        to.vr = from.vr;
        to.objReg = from.objReg;
        to.objRemat = from.objRemat.toInt32();
        JS_ASSERT(to.objRemat == from.objRemat.toInt32());

        /* Keys are either a compile-time constant index or a register. */
        to.hasConstantKey = from.key.isConstant();
        if (from.key.isConstant())
            to.keyValue = from.key.index();
        else
            to.keyReg = from.key.reg();

        int inlineClaspGuard = fullCode.locationOf(from.claspGuard) -
                               fullCode.locationOf(from.fastPathStart);
        to.inlineClaspGuard = inlineClaspGuard;
        JS_ASSERT(to.inlineClaspGuard == inlineClaspGuard);

        int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
                               fullCode.locationOf(from.fastPathStart);
        to.inlineHoleGuard = inlineHoleGuard;
        JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);

        CheckIsStubCall(to.slowPathCall.labelAtOffset(0));

        to.volatileMask = from.volatileMask;
        JS_ASSERT(to.volatileMask == from.volatileMask);

        stubCode.patch(from.paramAddr, &to);
    }

    /* Copy PICs (property-access polymorphic inline caches). */
    ic::PICInfo *jitPics = (ic::PICInfo *)cursor;
    jit->nPICs = pics.length();
    cursor += sizeof(ic::PICInfo) * jit->nPICs;
    for (size_t i = 0; i < jit->nPICs; i++) {
        new (&jitPics[i]) ic::PICInfo();
        pics[i].copyTo(jitPics[i], fullCode, stubCode);
        pics[i].copySimpleMembersTo(jitPics[i]);

        jitPics[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
                                masm.distanceOf(pics[i].fastPathStart);
        JS_ASSERT(jitPics[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
                                           masm.distanceOf(pics[i].fastPathStart));
        jitPics[i].shapeRegHasBaseShape = true;
        jitPics[i].pc = pics[i].pc;

        /* The union member used depends on the PIC kind. */
        if (pics[i].kind == ic::PICInfo::SET ||
            pics[i].kind == ic::PICInfo::SETMETHOD) {
            jitPics[i].u.vr = pics[i].vr;
        } else if (pics[i].kind != ic::PICInfo::NAME) {
            if (pics[i].hasTypeCheck) {
                int32 distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
                                 stubcc.masm.distanceOf(pics[i].slowPathStart);
                JS_ASSERT(distance <= 0);
                jitPics[i].u.get.typeCheckOffset = distance;
            }
        }
        stubCode.patch(pics[i].paramAddr, &jitPics[i]);
    }
#endif

    /* Link fast and slow paths together. */
    stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());

    /* Patch all double references. */
    size_t doubleOffset = masm.size() + stubcc.size();
    double *doubleVec = (double *)(result + doubleOffset);
    for (size_t i = 0; i < doubleList.length(); i++) {
        DoublePatch &patch = doubleList[i];
        doubleVec[i] = patch.d;
        if (patch.ool)
            stubCode.patch(patch.label, &doubleVec[i]);
        else
            fullCode.patch(patch.label, &doubleVec[i]);
    }

    /* Generate jump tables. */
    void **jumpVec = (void **)(doubleVec + doubleList.length());

    for (size_t i = 0; i < jumpTableOffsets.length(); i++) {
        uint32 offset = jumpTableOffsets[i];
        JS_ASSERT(jumpMap[offset].isValid());
        jumpVec[i] = (void *)(result + masm.distanceOf(jumpMap[offset]));
    }

    /* Patch jump table references. */
    for (size_t i = 0; i < jumpTables.length(); i++) {
        JumpTable &jumpTable = jumpTables[i];
        fullCode.patch(jumpTable.label, &jumpVec[jumpTable.offsetIndex]);
    }

    /* Patch all outgoing calls. */
    masm.finalize(fullCode);
    stubcc.masm.finalize(stubCode);

    /* Flip the region to executable and flush icaches where needed. */
    JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
    JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());

    /* Build the table of call sites. */
    CallSite *jitCallSites = (CallSite *)cursor;
    jit->nCallSites = callSites.length();
    cursor += sizeof(CallSite) * jit->nCallSites;
    for (size_t i = 0; i < jit->nCallSites; i++) {
        CallSite &to = jitCallSites[i];
        InternalCallSite &from = callSites[i];
        /* OOL sites live after the fast code, hence the masm.size() bias. */
        uint32 codeOffset = from.ool
                            ? masm.size() + from.returnOffset
                            : from.returnOffset;
        to.initialize(codeOffset, from.pc - script->code, from.id);
    }

    /* Every section must have been consumed exactly. */
    JS_ASSERT(size_t(cursor - (uint8*)jit) == totalBytes);

    *jitp = jit;

    /* We tolerate a race in the stats. */
    cx->runtime->mjitMemoryUsed += totalSize + totalBytes;

    return Compile_Okay;
}
822
823 class SrcNoteLineScanner {
824     ptrdiff_t offset;
825     jssrcnote *sn;
826
827 public:
828     SrcNoteLineScanner(jssrcnote *sn) : offset(SN_DELTA(sn)), sn(sn) {}
829
830     bool firstOpInLine(ptrdiff_t relpc) {
831         while ((offset < relpc) && !SN_IS_TERMINATOR(sn)) {
832             sn = SN_NEXT(sn);
833             offset += SN_DELTA(sn);
834         }
835
836         while ((offset == relpc) && !SN_IS_TERMINATOR(sn)) {
837             JSSrcNoteType type = (JSSrcNoteType) SN_TYPE(sn);
838             if (type == SRC_SETLINE || type == SRC_NEWLINE)
839                 return true;
840
841             sn = SN_NEXT(sn);
842             offset += SN_DELTA(sn);
843         }
844
845         return false;
846     }
847 };
848
#ifdef DEBUG
/*
 * SPEW_OPCODE: when the JSpew_JSOps spew channel is active, print the
 * current frame stack depth and a disassembly of the opcode at PC.
 * Expands to nothing in non-DEBUG builds.
 */
#define SPEW_OPCODE()                                                         \
    JS_BEGIN_MACRO                                                            \
        if (IsJaegerSpewChannelActive(JSpew_JSOps)) {                         \
            JaegerSpew(JSpew_JSOps, "    %2d ", frame.stackDepth());          \
            js_Disassemble1(cx, script, PC, PC - script->code,                \
                            JS_TRUE, stdout);                                 \
        }                                                                     \
    JS_END_MACRO;
#else
#define SPEW_OPCODE()
#endif /* DEBUG */
861
/*
 * Case bracketing for the opcode switch in generateMethod(). END_CASE
 * advances PC by the opcode's fixed length and breaks out of the switch.
 * Opcodes with variable or manually-managed lengths advance PC themselves
 * and leave the case with a bare break before END_CASE is reached.
 */
#define BEGIN_CASE(name)        case name:
#define END_CASE(name)                      \
    JS_BEGIN_MACRO                          \
        PC += name##_LENGTH;                \
    JS_END_MACRO;                           \
    break;
868
869 CompileStatus
870 mjit::Compiler::generateMethod()
871 {
872     mjit::AutoScriptRetrapper trapper(cx, script);
873     SrcNoteLineScanner scanner(script->notes());
874
875     for (;;) {
876         JSOp op = JSOp(*PC);
877         int trap = stubs::JSTRAP_NONE;
878         if (op == JSOP_TRAP) {
879             if (!trapper.untrap(PC))
880                 return Compile_Error;
881             op = JSOp(*PC);
882             trap |= stubs::JSTRAP_TRAP;
883         }
884         if (script->singleStepMode && scanner.firstOpInLine(PC - script->code))
885             trap |= stubs::JSTRAP_SINGLESTEP;
886
887         analyze::Bytecode *opinfo = analysis->maybeCode(PC);
888
889         if (!opinfo) {
890             if (op == JSOP_STOP)
891                 break;
892             if (js_CodeSpec[op].length != -1)
893                 PC += js_CodeSpec[op].length;
894             else
895                 PC += js_GetVariableBytecodeLength(PC);
896             continue;
897         }
898
899         frame.setInTryBlock(opinfo->inTryBlock);
900         if (opinfo->jumpTarget || trap) {
901             frame.syncAndForgetEverything(opinfo->stackDepth);
902             opinfo->safePoint = true;
903         }
904         jumpMap[uint32(PC - script->code)] = masm.label();
905
906         SPEW_OPCODE();
907         JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
908
909         if (trap) {
910             prepareStubCall(Uses(0));
911             masm.move(Imm32(trap), Registers::ArgReg1);
912             Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap));
913             InternalCallSite site(masm.callReturnOffset(cl), PC,
914                                   CallSite::MAGIC_TRAP_ID, true, false);
915             addCallSite(site);
916         } else if (savedTraps && savedTraps[PC - script->code]) {
917             // Normally when we patch return addresses, we have generated the
918             // same exact code at that site. For example, patching a stub call's
919             // return address will resume at the same stub call.
920             //
921             // In the case we're handling here, we could potentially be
922             // recompiling to remove a trap, and therefore we won't generate
923             // a call to the trap. However, we could be re-entering from that
924             // trap. The callsite will be missing, and fixing the stack will
925             // fail! Worse, we can't just put a label here, because on some
926             // platforms the stack needs to be adjusted when returning from
927             // the old trap call.
928             //
929             // To deal with this, we add a small bit of code in the OOL path
930             // that will adjust the stack and jump back into the script.
931             // Note that this uses MAGIC_TRAP_ID, which is necessary for
932             // repatching to detect the callsite as identical to the return
933             // address.
934             //
935             // Unfortunately, this means that if a bytecode is ever trapped,
936             // we will always generate a CallSite (either Trapped or not) for
937             // every debug recompilation of the script thereafter. The reason
938             // is that MAGIC_TRAP_ID callsites always propagate to the next
939             // recompilation. That's okay, and not worth fixing - it's a small
940             // amount of memory.
941             uint32 offset = stubcc.masm.distanceOf(stubcc.masm.label());
942             if (Assembler::ReturnStackAdjustment) {
943                 stubcc.masm.addPtr(Imm32(Assembler::ReturnStackAdjustment),
944                                    Assembler::stackPointerRegister);
945             }
946             stubcc.crossJump(stubcc.masm.jump(), masm.label());
947
948             InternalCallSite site(offset, PC, CallSite::MAGIC_TRAP_ID, false, true);
949             addCallSite(site);
950         }
951
952     /**********************
953      * BEGIN COMPILER OPS *
954      **********************/ 
955
956         switch (op) {
957           BEGIN_CASE(JSOP_NOP)
958           END_CASE(JSOP_NOP)
959
960           BEGIN_CASE(JSOP_PUSH)
961             frame.push(UndefinedValue());
962           END_CASE(JSOP_PUSH)
963
964           BEGIN_CASE(JSOP_POPV)
965           BEGIN_CASE(JSOP_SETRVAL)
966           {
967             RegisterID reg = frame.allocReg();
968             masm.load32(FrameFlagsAddress(), reg);
969             masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
970             masm.store32(reg, FrameFlagsAddress());
971             frame.freeReg(reg);
972
973             FrameEntry *fe = frame.peek(-1);
974             frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
975             frame.pop();
976           }
977           END_CASE(JSOP_POPV)
978
979           BEGIN_CASE(JSOP_RETURN)
980             emitReturn(frame.peek(-1));
981           END_CASE(JSOP_RETURN)
982
983           BEGIN_CASE(JSOP_GOTO)
984           {
985             /* :XXX: this isn't really necessary if we follow the branch. */
986             frame.syncAndForgetEverything();
987             Jump j = masm.jump();
988             if (!jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC)))
989                 return Compile_Error;
990           }
991           END_CASE(JSOP_GOTO)
992
993           BEGIN_CASE(JSOP_IFEQ)
994           BEGIN_CASE(JSOP_IFNE)
995             if (!jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC)))
996                 return Compile_Error;
997           END_CASE(JSOP_IFNE)
998
999           BEGIN_CASE(JSOP_ARGUMENTS)
1000             /*
1001              * For calls of the form 'f.apply(x, arguments)' we can avoid
1002              * creating an args object by having ic::SplatApplyArgs pull
1003              * directly from the stack. To do this, we speculate here that
1004              * 'apply' actually refers to js_fun_apply. If this is not true,
1005              * the slow path in JSOP_FUNAPPLY will create the args object.
1006              */
1007             if (canUseApplyTricks())
1008                 applyTricks = LazyArgsObj;
1009             else
1010                 jsop_arguments();
1011             frame.pushSynced();
1012           END_CASE(JSOP_ARGUMENTS)
1013
1014           BEGIN_CASE(JSOP_FORARG)
1015             iterNext();
1016             frame.storeArg(GET_SLOTNO(PC), true);
1017             frame.pop();
1018           END_CASE(JSOP_FORARG)
1019
1020           BEGIN_CASE(JSOP_FORLOCAL)
1021             iterNext();
1022             frame.storeLocal(GET_SLOTNO(PC), true);
1023             frame.pop();
1024           END_CASE(JSOP_FORLOCAL)
1025
1026           BEGIN_CASE(JSOP_DUP)
1027             frame.dup();
1028           END_CASE(JSOP_DUP)
1029
1030           BEGIN_CASE(JSOP_DUP2)
1031             frame.dup2();
1032           END_CASE(JSOP_DUP2)
1033
1034           BEGIN_CASE(JSOP_BITOR)
1035           BEGIN_CASE(JSOP_BITXOR)
1036           BEGIN_CASE(JSOP_BITAND)
1037             jsop_bitop(op);
1038           END_CASE(JSOP_BITAND)
1039
1040           BEGIN_CASE(JSOP_LT)
1041           BEGIN_CASE(JSOP_LE)
1042           BEGIN_CASE(JSOP_GT)
1043           BEGIN_CASE(JSOP_GE)
1044           BEGIN_CASE(JSOP_EQ)
1045           BEGIN_CASE(JSOP_NE)
1046           {
1047             /* Detect fusions. */
1048             jsbytecode *next = &PC[JSOP_GE_LENGTH];
1049             JSOp fused = JSOp(*next);
1050             if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
1051                 fused = JSOP_NOP;
1052
1053             /* Get jump target, if any. */
1054             jsbytecode *target = NULL;
1055             if (fused != JSOP_NOP)
1056                 target = next + GET_JUMP_OFFSET(next);
1057
1058             BoolStub stub = NULL;
1059             switch (op) {
1060               case JSOP_LT:
1061                 stub = stubs::LessThan;
1062                 break;
1063               case JSOP_LE:
1064                 stub = stubs::LessEqual;
1065                 break;
1066               case JSOP_GT:
1067                 stub = stubs::GreaterThan;
1068                 break;
1069               case JSOP_GE:
1070                 stub = stubs::GreaterEqual;
1071                 break;
1072               case JSOP_EQ:
1073                 stub = stubs::Equal;
1074                 break;
1075               case JSOP_NE:
1076                 stub = stubs::NotEqual;
1077                 break;
1078               default:
1079                 JS_NOT_REACHED("WAT");
1080                 break;
1081             }
1082
1083             FrameEntry *rhs = frame.peek(-1);
1084             FrameEntry *lhs = frame.peek(-2);
1085
1086             /* Check for easy cases that the parser does not constant fold. */
1087             if (lhs->isConstant() && rhs->isConstant()) {
1088                 /* Primitives can be trivially constant folded. */
1089                 const Value &lv = lhs->getValue();
1090                 const Value &rv = rhs->getValue();
1091
1092                 if (lv.isPrimitive() && rv.isPrimitive()) {
1093                     bool result = compareTwoValues(cx, op, lv, rv);
1094
1095                     frame.pop();
1096                     frame.pop();
1097
1098                     if (!target) {
1099                         frame.push(Value(BooleanValue(result)));
1100                     } else {
1101                         if (fused == JSOP_IFEQ)
1102                             result = !result;
1103
1104                         /* Branch is never taken, don't bother doing anything. */
1105                         if (result) {
1106                             frame.syncAndForgetEverything();
1107                             Jump j = masm.jump();
1108                             if (!jumpAndTrace(j, target))
1109                                 return Compile_Error;
1110                         }
1111                     }
1112                 } else {
1113                     if (!emitStubCmpOp(stub, target, fused))
1114                         return Compile_Error;
1115                 }
1116             } else {
1117                 /* Anything else should go through the fast path generator. */
1118                 if (!jsop_relational(op, stub, target, fused))
1119                     return Compile_Error;
1120             }
1121
1122             /* Advance PC manually. */
1123             JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
1124             JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
1125             JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
1126             JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
1127             JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
1128
1129             PC += JSOP_GE_LENGTH;
1130             if (fused != JSOP_NOP) {
1131                 SPEW_OPCODE();
1132                 PC += JSOP_IFNE_LENGTH;
1133             }
1134             break;
1135           }
1136           END_CASE(JSOP_GE)
1137
1138           BEGIN_CASE(JSOP_LSH)
1139             jsop_bitop(op);
1140           END_CASE(JSOP_LSH)
1141
1142           BEGIN_CASE(JSOP_RSH)
1143             jsop_rsh();
1144           END_CASE(JSOP_RSH)
1145
1146           BEGIN_CASE(JSOP_URSH)
1147             jsop_bitop(op);
1148           END_CASE(JSOP_URSH)
1149
1150           BEGIN_CASE(JSOP_ADD)
1151             jsop_binary(op, stubs::Add);
1152           END_CASE(JSOP_ADD)
1153
1154           BEGIN_CASE(JSOP_SUB)
1155             jsop_binary(op, stubs::Sub);
1156           END_CASE(JSOP_SUB)
1157
1158           BEGIN_CASE(JSOP_MUL)
1159             jsop_binary(op, stubs::Mul);
1160           END_CASE(JSOP_MUL)
1161
1162           BEGIN_CASE(JSOP_DIV)
1163             jsop_binary(op, stubs::Div);
1164           END_CASE(JSOP_DIV)
1165
1166           BEGIN_CASE(JSOP_MOD)
1167             jsop_mod();
1168           END_CASE(JSOP_MOD)
1169
1170           BEGIN_CASE(JSOP_NOT)
1171             jsop_not();
1172           END_CASE(JSOP_NOT)
1173
1174           BEGIN_CASE(JSOP_BITNOT)
1175           {
1176             FrameEntry *top = frame.peek(-1);
1177             if (top->isConstant() && top->getValue().isPrimitive()) {
1178                 int32_t i;
1179                 ValueToECMAInt32(cx, top->getValue(), &i);
1180                 i = ~i;
1181                 frame.pop();
1182                 frame.push(Int32Value(i));
1183             } else {
1184                 jsop_bitnot();
1185             }
1186           }
1187           END_CASE(JSOP_BITNOT)
1188
1189           BEGIN_CASE(JSOP_NEG)
1190           {
1191             FrameEntry *top = frame.peek(-1);
1192             if (top->isConstant() && top->getValue().isPrimitive()) {
1193                 double d;
1194                 ValueToNumber(cx, top->getValue(), &d);
1195                 d = -d;
1196                 frame.pop();
1197                 frame.push(NumberValue(d));
1198             } else {
1199                 jsop_neg();
1200             }
1201           }
1202           END_CASE(JSOP_NEG)
1203
1204           BEGIN_CASE(JSOP_POS)
1205             jsop_pos();
1206           END_CASE(JSOP_POS)
1207
1208           BEGIN_CASE(JSOP_DELNAME)
1209           {
1210             uint32 index = fullAtomIndex(PC);
1211             JSAtom *atom = script->getAtom(index);
1212
1213             prepareStubCall(Uses(0));
1214             masm.move(ImmPtr(atom), Registers::ArgReg1);
1215             INLINE_STUBCALL(stubs::DelName);
1216             frame.pushSynced();
1217           }
1218           END_CASE(JSOP_DELNAME)
1219
1220           BEGIN_CASE(JSOP_DELPROP)
1221           {
1222             uint32 index = fullAtomIndex(PC);
1223             JSAtom *atom = script->getAtom(index);
1224
1225             prepareStubCall(Uses(1));
1226             masm.move(ImmPtr(atom), Registers::ArgReg1);
1227             INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp));
1228             frame.pop();
1229             frame.pushSynced();
1230           }
1231           END_CASE(JSOP_DELPROP) 
1232
1233           BEGIN_CASE(JSOP_DELELEM)
1234             prepareStubCall(Uses(2));
1235             INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem));
1236             frame.popn(2);
1237             frame.pushSynced();
1238           END_CASE(JSOP_DELELEM)
1239
1240           BEGIN_CASE(JSOP_TYPEOF)
1241           BEGIN_CASE(JSOP_TYPEOFEXPR)
1242             jsop_typeof();
1243           END_CASE(JSOP_TYPEOF)
1244
1245           BEGIN_CASE(JSOP_VOID)
1246             frame.pop();
1247             frame.push(UndefinedValue());
1248           END_CASE(JSOP_VOID)
1249
1250           BEGIN_CASE(JSOP_INCNAME)
1251             if (!jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC)))
1252                 return Compile_Error;
1253             break;
1254           END_CASE(JSOP_INCNAME)
1255
1256           BEGIN_CASE(JSOP_INCGNAME)
1257             jsop_gnameinc(op, STRICT_VARIANT(stubs::IncGlobalName), fullAtomIndex(PC));
1258             break;
1259           END_CASE(JSOP_INCGNAME)
1260
1261           BEGIN_CASE(JSOP_INCPROP)
1262             if (!jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC)))
1263                 return Compile_Error;
1264             break;
1265           END_CASE(JSOP_INCPROP)
1266
1267           BEGIN_CASE(JSOP_INCELEM)
1268             jsop_eleminc(op, STRICT_VARIANT(stubs::IncElem));
1269           END_CASE(JSOP_INCELEM)
1270
1271           BEGIN_CASE(JSOP_DECNAME)
1272             if (!jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC)))
1273                 return Compile_Error;
1274             break;
1275           END_CASE(JSOP_DECNAME)
1276
1277           BEGIN_CASE(JSOP_DECGNAME)
1278             jsop_gnameinc(op, STRICT_VARIANT(stubs::DecGlobalName), fullAtomIndex(PC));
1279             break;
1280           END_CASE(JSOP_DECGNAME)
1281
1282           BEGIN_CASE(JSOP_DECPROP)
1283             if (!jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC)))
1284                 return Compile_Error;
1285             break;
1286           END_CASE(JSOP_DECPROP)
1287
1288           BEGIN_CASE(JSOP_DECELEM)
1289             jsop_eleminc(op, STRICT_VARIANT(stubs::DecElem));
1290           END_CASE(JSOP_DECELEM)
1291
1292           BEGIN_CASE(JSOP_NAMEINC)
1293             if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC)))
1294                 return Compile_Error;
1295             break;
1296           END_CASE(JSOP_NAMEINC)
1297
1298           BEGIN_CASE(JSOP_GNAMEINC)
1299             jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameInc), fullAtomIndex(PC));
1300             break;
1301           END_CASE(JSOP_GNAMEINC)
1302
1303           BEGIN_CASE(JSOP_PROPINC)
1304             if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC)))
1305                 return Compile_Error;
1306             break;
1307           END_CASE(JSOP_PROPINC)
1308
1309           BEGIN_CASE(JSOP_ELEMINC)
1310             jsop_eleminc(op, STRICT_VARIANT(stubs::ElemInc));
1311           END_CASE(JSOP_ELEMINC)
1312
1313           BEGIN_CASE(JSOP_NAMEDEC)
1314             if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC)))
1315                 return Compile_Error;
1316             break;
1317           END_CASE(JSOP_NAMEDEC)
1318
1319           BEGIN_CASE(JSOP_GNAMEDEC)
1320             jsop_gnameinc(op, STRICT_VARIANT(stubs::GlobalNameDec), fullAtomIndex(PC));
1321             break;
1322           END_CASE(JSOP_GNAMEDEC)
1323
1324           BEGIN_CASE(JSOP_PROPDEC)
1325             if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC)))
1326                 return Compile_Error;
1327             break;
1328           END_CASE(JSOP_PROPDEC)
1329
1330           BEGIN_CASE(JSOP_ELEMDEC)
1331             jsop_eleminc(op, STRICT_VARIANT(stubs::ElemDec));
1332           END_CASE(JSOP_ELEMDEC)
1333
1334           BEGIN_CASE(JSOP_GETTHISPROP)
1335             /* Push thisv onto stack. */
1336             jsop_this();
1337             if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1338                 return Compile_Error;
1339           END_CASE(JSOP_GETTHISPROP);
1340
1341           BEGIN_CASE(JSOP_GETARGPROP)
1342             /* Push arg onto stack. */
1343             frame.pushArg(GET_SLOTNO(PC));
1344             if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN]))))
1345                 return Compile_Error;
1346           END_CASE(JSOP_GETARGPROP)
1347
1348           BEGIN_CASE(JSOP_GETLOCALPROP)
1349             frame.pushLocal(GET_SLOTNO(PC));
1350             if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN]))))
1351                 return Compile_Error;
1352           END_CASE(JSOP_GETLOCALPROP)
1353
1354           BEGIN_CASE(JSOP_GETPROP)
1355             if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
1356                 return Compile_Error;
1357           END_CASE(JSOP_GETPROP)
1358
1359           BEGIN_CASE(JSOP_LENGTH)
1360             if (!jsop_length())
1361                 return Compile_Error;
1362           END_CASE(JSOP_LENGTH)
1363
1364           BEGIN_CASE(JSOP_GETELEM)
1365             if (!jsop_getelem(false))
1366                 return Compile_Error;
1367           END_CASE(JSOP_GETELEM)
1368
1369           BEGIN_CASE(JSOP_SETELEM)
1370           {
1371             jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
1372             bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
1373             if (!jsop_setelem(pop))
1374                 return Compile_Error;
1375           }
1376           END_CASE(JSOP_SETELEM);
1377
1378           BEGIN_CASE(JSOP_CALLNAME)
1379             prepareStubCall(Uses(0));
1380             masm.move(Imm32(fullAtomIndex(PC)), Registers::ArgReg1);
1381             INLINE_STUBCALL(stubs::CallName);
1382             frame.pushSynced();
1383             frame.pushSynced();
1384           END_CASE(JSOP_CALLNAME)
1385
1386           BEGIN_CASE(JSOP_EVAL)
1387           {
1388             JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
1389             emitEval(GET_ARGC(PC));
1390             JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
1391           }
1392           END_CASE(JSOP_EVAL)
1393
1394           BEGIN_CASE(JSOP_CALL)
1395           BEGIN_CASE(JSOP_FUNAPPLY)
1396           BEGIN_CASE(JSOP_FUNCALL)
1397           {
1398             JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
1399             inlineCallHelper(GET_ARGC(PC), false);
1400             JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
1401           }
1402           END_CASE(JSOP_CALL)
1403
1404           BEGIN_CASE(JSOP_NAME)
1405             jsop_name(script->getAtom(fullAtomIndex(PC)));
1406           END_CASE(JSOP_NAME)
1407
1408           BEGIN_CASE(JSOP_DOUBLE)
1409           {
1410             uint32 index = fullAtomIndex(PC);
1411             double d = script->getConst(index).toDouble();
1412             frame.push(Value(DoubleValue(d)));
1413           }
1414           END_CASE(JSOP_DOUBLE)
1415
1416           BEGIN_CASE(JSOP_STRING)
1417           {
1418             JSAtom *atom = script->getAtom(fullAtomIndex(PC));
1419             JSString *str = ATOM_TO_STRING(atom);
1420             frame.push(Value(StringValue(str)));
1421           }
1422           END_CASE(JSOP_STRING)
1423
1424           BEGIN_CASE(JSOP_ZERO)
1425             frame.push(Valueify(JSVAL_ZERO));
1426           END_CASE(JSOP_ZERO)
1427
1428           BEGIN_CASE(JSOP_ONE)
1429             frame.push(Valueify(JSVAL_ONE));
1430           END_CASE(JSOP_ONE)
1431
1432           BEGIN_CASE(JSOP_NULL)
1433             frame.push(NullValue());
1434           END_CASE(JSOP_NULL)
1435
1436           BEGIN_CASE(JSOP_THIS)
1437             jsop_this();
1438           END_CASE(JSOP_THIS)
1439
1440           BEGIN_CASE(JSOP_FALSE)
1441             frame.push(Value(BooleanValue(false)));
1442           END_CASE(JSOP_FALSE)
1443
1444           BEGIN_CASE(JSOP_TRUE)
1445             frame.push(Value(BooleanValue(true)));
1446           END_CASE(JSOP_TRUE)
1447
1448           BEGIN_CASE(JSOP_OR)
1449           BEGIN_CASE(JSOP_AND)
1450             if (!jsop_andor(op, PC + GET_JUMP_OFFSET(PC)))
1451                 return Compile_Error;
1452           END_CASE(JSOP_AND)
1453
1454           BEGIN_CASE(JSOP_TABLESWITCH)
1455 #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
1456             frame.syncAndForgetEverything();
1457             masm.move(ImmPtr(PC), Registers::ArgReg1);
1458
1459             /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1460             INLINE_STUBCALL(stubs::TableSwitch);
1461             frame.pop();
1462
1463             masm.jump(Registers::ReturnReg);
1464 #else
1465             if (!jsop_tableswitch(PC))
1466                 return Compile_Error;
1467 #endif
1468             PC += js_GetVariableBytecodeLength(PC);
1469             break;
1470           END_CASE(JSOP_TABLESWITCH)
1471
1472           BEGIN_CASE(JSOP_LOOKUPSWITCH)
1473             frame.syncAndForgetEverything();
1474             masm.move(ImmPtr(PC), Registers::ArgReg1);
1475
1476             /* prepareStubCall() is not needed due to syncAndForgetEverything() */
1477             INLINE_STUBCALL(stubs::LookupSwitch);
1478             frame.pop();
1479
1480             masm.jump(Registers::ReturnReg);
1481             PC += js_GetVariableBytecodeLength(PC);
1482             break;
1483           END_CASE(JSOP_LOOKUPSWITCH)
1484
1485           BEGIN_CASE(JSOP_STRICTEQ)
1486             jsop_stricteq(op);
1487           END_CASE(JSOP_STRICTEQ)
1488
1489           BEGIN_CASE(JSOP_STRICTNE)
1490             jsop_stricteq(op);
1491           END_CASE(JSOP_STRICTNE)
1492
1493           BEGIN_CASE(JSOP_ITER)
1494             if (!iter(PC[1]))
1495                 return Compile_Error;
1496           END_CASE(JSOP_ITER)
1497
1498           BEGIN_CASE(JSOP_MOREITER)
1499             /* At the byte level, this is always fused with IFNE or IFNEX. */
1500             if (!iterMore())
1501                 return Compile_Error;
1502             break;
1503           END_CASE(JSOP_MOREITER)
1504
1505           BEGIN_CASE(JSOP_ENDITER)
1506             iterEnd();
1507           END_CASE(JSOP_ENDITER)
1508
1509           BEGIN_CASE(JSOP_POP)
1510             frame.pop();
1511           END_CASE(JSOP_POP)
1512
1513           BEGIN_CASE(JSOP_NEW)
1514           {
1515             JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
1516             inlineCallHelper(GET_ARGC(PC), true);
1517             JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
1518           }
1519           END_CASE(JSOP_NEW)
1520
1521           BEGIN_CASE(JSOP_GETARG)
1522           BEGIN_CASE(JSOP_CALLARG)
1523           {
1524             frame.pushArg(GET_SLOTNO(PC));
1525             if (op == JSOP_CALLARG)
1526                 frame.push(UndefinedValue());
1527           }
1528           END_CASE(JSOP_GETARG)
1529
1530           BEGIN_CASE(JSOP_BINDGNAME)
1531             jsop_bindgname();
1532           END_CASE(JSOP_BINDGNAME)
1533
1534           BEGIN_CASE(JSOP_SETARG)
1535           {
1536             jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1537             bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
1538             frame.storeArg(GET_SLOTNO(PC), pop);
1539             if (pop) {
1540                 frame.pop();
1541                 PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
1542                 break;
1543             }
1544           }
1545           END_CASE(JSOP_SETARG)
1546
1547           BEGIN_CASE(JSOP_GETLOCAL)
1548           {
1549             uint32 slot = GET_SLOTNO(PC);
1550             frame.pushLocal(slot);
1551           }
1552           END_CASE(JSOP_GETLOCAL)
1553
1554           BEGIN_CASE(JSOP_SETLOCAL)
1555           {
1556             jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
1557             bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
1558             frame.storeLocal(GET_SLOTNO(PC), pop);
1559             if (pop) {
1560                 frame.pop();
1561                 PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
1562                 break;
1563             }
1564           }
1565           END_CASE(JSOP_SETLOCAL)
1566
1567           BEGIN_CASE(JSOP_SETLOCALPOP)
1568             frame.storeLocal(GET_SLOTNO(PC), true);
1569             frame.pop();
1570           END_CASE(JSOP_SETLOCALPOP)
1571
1572           BEGIN_CASE(JSOP_UINT16)
1573             frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
1574           END_CASE(JSOP_UINT16)
1575
1576           BEGIN_CASE(JSOP_NEWINIT)
1577             jsop_newinit();
1578           END_CASE(JSOP_NEWINIT)
1579
1580           BEGIN_CASE(JSOP_NEWARRAY)
1581             jsop_newinit();
1582           END_CASE(JSOP_NEWARRAY)
1583
1584           BEGIN_CASE(JSOP_NEWOBJECT)
1585             jsop_newinit();
1586           END_CASE(JSOP_NEWOBJECT)
1587
1588           BEGIN_CASE(JSOP_ENDINIT)
1589           END_CASE(JSOP_ENDINIT)
1590
1591           BEGIN_CASE(JSOP_INITMETHOD)
1592             jsop_initmethod();
1593             frame.pop();
1594           END_CASE(JSOP_INITMETHOD)
1595
1596           BEGIN_CASE(JSOP_INITPROP)
1597             jsop_initprop();
1598             frame.pop();
1599           END_CASE(JSOP_INITPROP)
1600
1601           BEGIN_CASE(JSOP_INITELEM)
1602             jsop_initelem();
1603             frame.popn(2);
1604           END_CASE(JSOP_INITELEM)
1605
1606           BEGIN_CASE(JSOP_INCARG)
1607           BEGIN_CASE(JSOP_DECARG)
1608           BEGIN_CASE(JSOP_ARGINC)
1609           BEGIN_CASE(JSOP_ARGDEC)
1610           {
1611             jsbytecode *next = &PC[JSOP_ARGINC_LENGTH];
1612             bool popped = false;
1613             if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1614                 popped = true;
1615             jsop_arginc(op, GET_SLOTNO(PC), popped);
1616             PC += JSOP_ARGINC_LENGTH;
1617             if (popped)
1618                 PC += JSOP_POP_LENGTH;
1619             break;
1620           }
1621           END_CASE(JSOP_ARGDEC)
1622
1623           BEGIN_CASE(JSOP_INCLOCAL)
1624           BEGIN_CASE(JSOP_DECLOCAL)
1625           BEGIN_CASE(JSOP_LOCALINC)
1626           BEGIN_CASE(JSOP_LOCALDEC)
1627           {
1628             jsbytecode *next = &PC[JSOP_LOCALINC_LENGTH];
1629             bool popped = false;
1630             if (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next))
1631                 popped = true;
1632             /* These manually advance the PC. */
1633             jsop_localinc(op, GET_SLOTNO(PC), popped);
1634             PC += JSOP_LOCALINC_LENGTH;
1635             if (popped)
1636                 PC += JSOP_POP_LENGTH;
1637             break;
1638           }
1639           END_CASE(JSOP_LOCALDEC)
1640
1641           BEGIN_CASE(JSOP_FORNAME)
1642             jsop_forname(script->getAtom(fullAtomIndex(PC)));
1643           END_CASE(JSOP_FORNAME)
1644
1645           BEGIN_CASE(JSOP_FORGNAME)
1646             jsop_forgname(script->getAtom(fullAtomIndex(PC)));
1647           END_CASE(JSOP_FORGNAME)
1648
1649           BEGIN_CASE(JSOP_FORPROP)
1650             jsop_forprop(script->getAtom(fullAtomIndex(PC)));
1651           END_CASE(JSOP_FORPROP)
1652
1653           BEGIN_CASE(JSOP_FORELEM)
1654             // This opcode is for the decompiler; it is succeeded by an
1655             // ENUMELEM, which performs the actual array store.
1656             iterNext();
1657           END_CASE(JSOP_FORELEM)
1658
1659           BEGIN_CASE(JSOP_BINDNAME)
1660             jsop_bindname(script->getAtom(fullAtomIndex(PC)), true);
1661           END_CASE(JSOP_BINDNAME)
1662
1663           BEGIN_CASE(JSOP_SETPROP)
1664             if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1665                 return Compile_Error;
1666           END_CASE(JSOP_SETPROP)
1667
1668           BEGIN_CASE(JSOP_SETNAME)
1669           BEGIN_CASE(JSOP_SETMETHOD)
1670             if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
1671                 return Compile_Error;
1672           END_CASE(JSOP_SETNAME)
1673
1674           BEGIN_CASE(JSOP_THROW)
1675             prepareStubCall(Uses(1));
1676             INLINE_STUBCALL(stubs::Throw);
1677             frame.pop();
1678           END_CASE(JSOP_THROW)
1679
1680           BEGIN_CASE(JSOP_IN)
1681             prepareStubCall(Uses(2));
1682             INLINE_STUBCALL(stubs::In);
1683             frame.popn(2);
1684             frame.takeReg(Registers::ReturnReg);
1685             frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
1686           END_CASE(JSOP_IN)
1687
1688           BEGIN_CASE(JSOP_INSTANCEOF)
1689             if (!jsop_instanceof())
1690                 return Compile_Error;
1691           END_CASE(JSOP_INSTANCEOF)
1692
1693           BEGIN_CASE(JSOP_EXCEPTION)
1694             prepareStubCall(Uses(0));
1695             INLINE_STUBCALL(stubs::Exception);
1696             frame.pushSynced();
1697           END_CASE(JSOP_EXCEPTION)
1698
1699           BEGIN_CASE(JSOP_LINENO)
1700           END_CASE(JSOP_LINENO)
1701
1702           BEGIN_CASE(JSOP_ENUMELEM)
1703             // Normally, SETELEM transforms the stack
1704             //  from: OBJ ID VALUE
1705             //  to:   VALUE
1706             //
1707             // Here, the stack transition is
1708             //  from: VALUE OBJ ID
1709             //  to:
1710             // So we make the stack look like a SETELEM, and re-use it.
1711
1712             // Before: VALUE OBJ ID
1713             // After:  VALUE OBJ ID VALUE
1714             frame.dupAt(-3);
1715
1716             // Before: VALUE OBJ ID VALUE
1717             // After:  VALUE VALUE
1718             if (!jsop_setelem(true))
1719                 return Compile_Error;
1720
1721             // Before: VALUE VALUE
1722             // After:
1723             frame.popn(2);
1724           END_CASE(JSOP_ENUMELEM)
1725
1726           BEGIN_CASE(JSOP_BLOCKCHAIN)
1727           END_CASE(JSOP_BLOCKCHAIN)
1728
1729           BEGIN_CASE(JSOP_NULLBLOCKCHAIN)
1730           END_CASE(JSOP_NULLBLOCKCHAIN)
1731
1732           BEGIN_CASE(JSOP_CONDSWITCH)
1733             /* No-op for the decompiler. */
1734           END_CASE(JSOP_CONDSWITCH)
1735
1736           BEGIN_CASE(JSOP_DEFFUN)
1737           {
1738             uint32 index = fullAtomIndex(PC);
1739             JSFunction *innerFun = script->getFunction(index);
1740
1741             if (fun && script->bindings.hasBinding(cx, innerFun->atom))
1742                 frame.syncAndForgetEverything();
1743
1744             prepareStubCall(Uses(0));
1745             masm.move(ImmPtr(innerFun), Registers::ArgReg1);
1746             INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun));
1747           }
1748           END_CASE(JSOP_DEFFUN)
1749
1750           BEGIN_CASE(JSOP_DEFVAR)
1751           BEGIN_CASE(JSOP_DEFCONST)
1752           {
1753             uint32 index = fullAtomIndex(PC);
1754             JSAtom *atom = script->getAtom(index);
1755
1756             prepareStubCall(Uses(0));
1757             masm.move(ImmPtr(atom), Registers::ArgReg1);
1758             INLINE_STUBCALL(stubs::DefVarOrConst);
1759           }
1760           END_CASE(JSOP_DEFVAR)
1761
1762           BEGIN_CASE(JSOP_SETCONST)
1763           {
1764             uint32 index = fullAtomIndex(PC);
1765             JSAtom *atom = script->getAtom(index);
1766
1767             if (fun && script->bindings.hasBinding(cx, atom))
1768                 frame.syncAndForgetEverything();
1769
1770             prepareStubCall(Uses(1));
1771             masm.move(ImmPtr(atom), Registers::ArgReg1);
1772             INLINE_STUBCALL(stubs::SetConst);
1773           }
1774           END_CASE(JSOP_SETCONST)
1775
1776           BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
1777           {
1778             uint32 slot = GET_SLOTNO(PC);
1779             JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1780             prepareStubCall(Uses(frame.frameSlots()));
1781             masm.move(ImmPtr(fun), Registers::ArgReg1);
1782             INLINE_STUBCALL(stubs::DefLocalFun_FC);
1783             frame.takeReg(Registers::ReturnReg);
1784             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1785             frame.storeLocal(slot, true);
1786             frame.pop();
1787           }
1788           END_CASE(JSOP_DEFLOCALFUN_FC)
1789
1790           BEGIN_CASE(JSOP_LAMBDA)
1791           {
1792             JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1793
1794             JSObjStubFun stub = stubs::Lambda;
1795             uint32 uses = 0;
1796
1797             jsbytecode *pc2 = AdvanceOverBlockchainOp(PC + JSOP_LAMBDA_LENGTH);
1798             JSOp next = JSOp(*pc2);
1799             
1800             if (next == JSOP_INITMETHOD) {
1801                 stub = stubs::LambdaForInit;
1802             } else if (next == JSOP_SETMETHOD) {
1803                 stub = stubs::LambdaForSet;
1804                 uses = 1;
1805             } else if (fun->joinable()) {
1806                 if (next == JSOP_CALL) {
1807                     stub = stubs::LambdaJoinableForCall;
1808                     uses = frame.frameSlots();
1809                 } else if (next == JSOP_NULL) {
1810                     stub = stubs::LambdaJoinableForNull;
1811                 }
1812             }
1813
1814             prepareStubCall(Uses(uses));
1815             masm.move(ImmPtr(fun), Registers::ArgReg1);
1816
1817             if (stub == stubs::Lambda) {
1818                 INLINE_STUBCALL(stub);
1819             } else {
1820                 jsbytecode *savedPC = PC;
1821                 PC = pc2;
1822                 INLINE_STUBCALL(stub);
1823                 PC = savedPC;
1824             }
1825
1826             frame.takeReg(Registers::ReturnReg);
1827             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1828           }
1829           END_CASE(JSOP_LAMBDA)
1830
1831           BEGIN_CASE(JSOP_TRY)
1832             frame.syncAndForgetEverything();
1833           END_CASE(JSOP_TRY)
1834
1835           BEGIN_CASE(JSOP_GETFCSLOT)
1836           BEGIN_CASE(JSOP_CALLFCSLOT)
1837           {
1838             uintN index = GET_UINT16(PC);
1839
1840             // Load the callee's payload into a register.
1841             frame.pushCallee();
1842             RegisterID reg = frame.copyDataIntoReg(frame.peek(-1));
1843             frame.pop();
1844
1845             // obj->getFlatClosureUpvars()
1846             masm.loadPtr(Address(reg, offsetof(JSObject, slots)), reg);
1847             Address upvarAddress(reg, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS * sizeof(Value));
1848             masm.loadPrivate(upvarAddress, reg);
1849             // push ((Value *) reg)[index]
1850             frame.freeReg(reg);
1851             frame.push(Address(reg, index * sizeof(Value)));
1852             if (op == JSOP_CALLFCSLOT)
1853                 frame.push(UndefinedValue());
1854           }
1855           END_CASE(JSOP_CALLFCSLOT)
1856
1857           BEGIN_CASE(JSOP_ARGSUB)
1858             prepareStubCall(Uses(0));
1859             masm.move(Imm32(GET_ARGNO(PC)), Registers::ArgReg1);
1860             INLINE_STUBCALL(stubs::ArgSub);
1861             frame.pushSynced();
1862           END_CASE(JSOP_ARGSUB)
1863
1864           BEGIN_CASE(JSOP_ARGCNT)
1865             prepareStubCall(Uses(0));
1866             INLINE_STUBCALL(stubs::ArgCnt);
1867             frame.pushSynced();
1868           END_CASE(JSOP_ARGCNT)
1869
1870           BEGIN_CASE(JSOP_DEFLOCALFUN)
1871           {
1872             uint32 slot = GET_SLOTNO(PC);
1873             JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
1874             prepareStubCall(Uses(0));
1875             masm.move(ImmPtr(fun), Registers::ArgReg1);
1876             INLINE_STUBCALL(stubs::DefLocalFun);
1877             frame.takeReg(Registers::ReturnReg);
1878             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1879             frame.storeLocal(slot, true);
1880             frame.pop();
1881           }
1882           END_CASE(JSOP_DEFLOCALFUN)
1883
1884           BEGIN_CASE(JSOP_RETRVAL)
1885             emitReturn(NULL);
1886           END_CASE(JSOP_RETRVAL)
1887
1888           BEGIN_CASE(JSOP_GETGNAME)
1889           BEGIN_CASE(JSOP_CALLGNAME)
1890             jsop_getgname(fullAtomIndex(PC));
1891             if (op == JSOP_CALLGNAME)
1892                 jsop_callgname_epilogue();
1893           END_CASE(JSOP_GETGNAME)
1894
1895           BEGIN_CASE(JSOP_SETGNAME)
1896             jsop_setgname(script->getAtom(fullAtomIndex(PC)), true);
1897           END_CASE(JSOP_SETGNAME)
1898
1899           BEGIN_CASE(JSOP_REGEXP)
1900           {
1901             JSObject *regex = script->getRegExp(fullAtomIndex(PC));
1902             prepareStubCall(Uses(0));
1903             masm.move(ImmPtr(regex), Registers::ArgReg1);
1904             INLINE_STUBCALL(stubs::RegExp);
1905             frame.takeReg(Registers::ReturnReg);
1906             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1907           }
1908           END_CASE(JSOP_REGEXP)
1909
1910           BEGIN_CASE(JSOP_OBJECT)
1911           {
1912             JSObject *object = script->getObject(fullAtomIndex(PC));
1913             RegisterID reg = frame.allocReg();
1914             masm.move(ImmPtr(object), reg);
1915             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
1916           }
1917           END_CASE(JSOP_OBJECT)
1918
1919           BEGIN_CASE(JSOP_CALLPROP)
1920             if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
1921                 return Compile_Error;
1922           END_CASE(JSOP_CALLPROP)
1923
1924           BEGIN_CASE(JSOP_UINT24)
1925             frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
1926           END_CASE(JSOP_UINT24)
1927
1928           BEGIN_CASE(JSOP_CALLELEM)
1929             jsop_getelem(true);
1930           END_CASE(JSOP_CALLELEM)
1931
1932           BEGIN_CASE(JSOP_STOP)
1933             /* Safe point! */
1934             emitReturn(NULL);
1935             goto done;
1936           END_CASE(JSOP_STOP)
1937
1938           BEGIN_CASE(JSOP_GETXPROP)
1939             if (!jsop_xname(script->getAtom(fullAtomIndex(PC))))
1940                 return Compile_Error;
1941           END_CASE(JSOP_GETXPROP)
1942
1943           BEGIN_CASE(JSOP_ENTERBLOCK)
1944             enterBlock(script->getObject(fullAtomIndex(PC)));
1945           END_CASE(JSOP_ENTERBLOCK);
1946
1947           BEGIN_CASE(JSOP_LEAVEBLOCK)
1948             leaveBlock();
1949           END_CASE(JSOP_LEAVEBLOCK)
1950
1951           BEGIN_CASE(JSOP_CALLLOCAL)
1952             frame.pushLocal(GET_SLOTNO(PC));
1953             frame.push(UndefinedValue());
1954           END_CASE(JSOP_CALLLOCAL)
1955
1956           BEGIN_CASE(JSOP_INT8)
1957             frame.push(Value(Int32Value(GET_INT8(PC))));
1958           END_CASE(JSOP_INT8)
1959
1960           BEGIN_CASE(JSOP_INT32)
1961             frame.push(Value(Int32Value(GET_INT32(PC))));
1962           END_CASE(JSOP_INT32)
1963
1964           BEGIN_CASE(JSOP_HOLE)
1965             frame.push(MagicValue(JS_ARRAY_HOLE));
1966           END_CASE(JSOP_HOLE)
1967
1968           BEGIN_CASE(JSOP_LAMBDA_FC)
1969           {
1970             JSFunction *fun = script->getFunction(fullAtomIndex(PC));
1971             prepareStubCall(Uses(frame.frameSlots()));
1972             masm.move(ImmPtr(fun), Registers::ArgReg1);
1973             INLINE_STUBCALL(stubs::FlatLambda);
1974             frame.takeReg(Registers::ReturnReg);
1975             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
1976           }
1977           END_CASE(JSOP_LAMBDA_FC)
1978
1979           BEGIN_CASE(JSOP_TRACE)
1980           BEGIN_CASE(JSOP_NOTRACE)
1981           {
1982             if (analysis->jumpTarget(PC))
1983                 interruptCheckHelper();
1984           }
1985           END_CASE(JSOP_TRACE)
1986
1987           BEGIN_CASE(JSOP_DEBUGGER)
1988             prepareStubCall(Uses(0));
1989             masm.move(ImmPtr(PC), Registers::ArgReg1);
1990             INLINE_STUBCALL(stubs::Debugger);
1991           END_CASE(JSOP_DEBUGGER)
1992
1993           BEGIN_CASE(JSOP_UNBRAND)
1994             jsop_unbrand();
1995           END_CASE(JSOP_UNBRAND)
1996
1997           BEGIN_CASE(JSOP_UNBRANDTHIS)
1998             jsop_this();
1999             jsop_unbrand();
2000             frame.pop();
2001           END_CASE(JSOP_UNBRANDTHIS)
2002
2003           BEGIN_CASE(JSOP_GETGLOBAL)
2004           BEGIN_CASE(JSOP_CALLGLOBAL)
2005             jsop_getglobal(GET_SLOTNO(PC));
2006             if (op == JSOP_CALLGLOBAL)
2007                 frame.push(UndefinedValue());
2008           END_CASE(JSOP_GETGLOBAL)
2009
2010           default:
2011            /* Sorry, this opcode isn't implemented yet. */
2012 #ifdef JS_METHODJIT_SPEW
2013             JaegerSpew(JSpew_Abort, "opcode %s not handled yet (%s line %d)\n", OpcodeNames[op],
2014                        script->filename, js_PCToLineNumber(cx, script, PC));
2015 #endif
2016             return Compile_Abort;
2017         }
2018
2019     /**********************
2020      *  END COMPILER OPS  *
2021      **********************/ 
2022
2023 #ifdef DEBUG
2024         frame.assertValidRegisterState();
2025 #endif
2026     }
2027
2028   done:
2029     return Compile_Okay;
2030 }
2031
2032 #undef END_CASE
2033 #undef BEGIN_CASE
2034
2035 JSC::MacroAssembler::Label
2036 mjit::Compiler::labelOf(jsbytecode *pc)
2037 {
2038     uint32 offs = uint32(pc - script->code);
2039     JS_ASSERT(jumpMap[offs].isValid());
2040     return jumpMap[offs];
2041 }
2042
uint32
mjit::Compiler::fullAtomIndex(jsbytecode *pc)
{
    /*
     * Decode the atom/object index operand at |pc|. Indexes are currently
     * the raw slot number; no INDEXBASE bias is applied (the adjusted form
     * is kept below, disabled, in case INDEXBASE support is ever enabled).
     */
    return GET_SLOTNO(pc);

    /* If we ever enable INDEXBASE garbage, use this below. */
#if 0
    return GET_SLOTNO(pc) + (atoms - script->atomMap.vector);
#endif
}
2053
2054 bool
2055 mjit::Compiler::knownJump(jsbytecode *pc)
2056 {
2057     return pc < PC;
2058 }
2059
2060 void *
2061 mjit::Compiler::findCallSite(const CallSite &callSite)
2062 {
2063     JS_ASSERT(callSite.pcOffset < script->length);
2064
2065     JITScript *jit = script->getJIT(fp->isConstructing());
2066     uint8* ilPath = (uint8 *)jit->code.m_code.executableAddress();
2067     uint8* oolPath = ilPath + masm.size();
2068
2069     for (uint32 i = 0; i < callSites.length(); i++) {
2070         InternalCallSite &cs = callSites[i];
2071         if (cs.pc == script->code + callSite.pcOffset && cs.id == callSite.id) {
2072             if (cs.ool)
2073                 return oolPath + cs.returnOffset;
2074             return ilPath + cs.returnOffset;
2075         }
2076     }
2077
2078     /* We have no idea where to patch up to. */
2079     JS_NOT_REACHED("Call site vanished.");
2080     return NULL;
2081 }
2082
2083 bool
2084 mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
2085 {
2086     JS_ASSERT(pc >= script->code && uint32(pc - script->code) < script->length);
2087
2088     if (pc < PC) {
2089         j.linkTo(jumpMap[uint32(pc - script->code)], &masm);
2090         return true;
2091     }
2092     return branchPatches.append(BranchPatch(j, pc));
2093 }
2094
2095 void
2096 mjit::Compiler::jsop_getglobal(uint32 index)
2097 {
2098     JS_ASSERT(globalObj);
2099     uint32 slot = script->getGlobalSlot(index);
2100
2101     RegisterID reg = frame.allocReg();
2102     Address address = masm.objSlotRef(globalObj, reg, slot);
2103     frame.freeReg(reg);
2104     frame.push(address);
2105 }
2106
2107 void
2108 mjit::Compiler::emitFinalReturn(Assembler &masm)
2109 {
2110     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
2111     masm.jump(Registers::ReturnReg);
2112 }
2113
// Emits code to load a return value of the frame into the scripted-ABI
// type & data register pair. If the return value is in fp->rval, then |fe|
// is NULL. Otherwise, |fe| contains the return value.
//
// If reading from fp->rval, |undefined| is loaded optimistically, before
// checking if fp->rval is set in the frame flags and loading that instead.
//
// Otherwise, if |masm| is the inline path, it is loaded as efficiently as
// the FrameState can manage. If |masm| is the OOL path, the value is simply
// loaded from its slot in the frame, since the caller has guaranteed it's
// been synced.
//
void
mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
{
    RegisterID typeReg = JSReturnReg_Type;
    RegisterID dataReg = JSReturnReg_Data;

    if (fe) {
        // If using the OOL assembler, the caller signifies that the |fe| is
        // synced, but not to rely on its register state.
        if (masm != &this->masm) {
            if (fe->isConstant()) {
                // Constant: materialize the value directly into the ABI pair.
                stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
            } else {
                // Non-constant: reload from the entry's synced stack slot.
                Address rval(frame.addressOf(fe));
                if (fe->isTypeKnown()) {
                    // Known type: load payload only, then set the type tag
                    // as an immediate to save a memory load.
                    stubcc.masm.loadPayload(rval, dataReg);
                    stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
                } else {
                    stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
                }
            }
        } else {
            // Inline path: FrameState knows the entry's current location
            // (register or memory) and emits the cheapest load.
            frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
        }
    } else {
         // Load a return value from POPV or SETRVAL into the return registers,
         // otherwise return undefined.
        masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
        if (analysis->usesReturnValue()) {
            // Optimistic |undefined| above is overwritten by fp->rval only
            // if the frame flags say an rval was actually stored.
            Jump rvalClear = masm->branchTest32(Assembler::Zero,
                                               FrameFlagsAddress(),
                                               Imm32(JSFRAME_HAS_RVAL));
            Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
            masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
            rvalClear.linkTo(masm->label(), masm);
        }
    }
}
2164
// This ensures that constructor return values are an object. If a non-object
// is returned, either explicitly or implicitly, the newly created object is
// loaded out of the frame. Otherwise, the explicitly returned object is kept.
//
void
mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
{
    JS_ASSERT(isConstructing);

    // |ool| distinguishes the out-of-line path, where the FrameState's
    // register tracking cannot be relied upon.
    bool ool = (masm != &this->masm);
    Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));

    // We can just load |thisv| if either of the following is true:
    //  (1) There is no explicit return value, AND fp->rval is not used.
    //  (2) There is an explicit return value, and it's known to be primitive.
    if ((!fe && !analysis->usesReturnValue()) ||
        (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
    {
        if (ool)
            masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
        else
            frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
        return;
    }

    // If the type is known to be an object, just load the return value as normal.
    if (fe && fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
        loadReturnValue(masm, fe);
        return;
    }

    // There's a return value, and its type is unknown. Test the type and load
    // |thisv| if necessary.
    loadReturnValue(masm, fe);
    Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
    masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
    j.linkTo(masm->label(), masm);
}
2203
2204 // Loads the return value into the scripted ABI register pair, such that JS
2205 // semantics in constructors are preserved.
2206 //
2207 void
2208 mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
2209 {
2210     if (isConstructing)
2211         fixPrimitiveReturn(masm, fe);
2212     else
2213         loadReturnValue(masm, fe);
2214 }
2215
// Emits the full return sequence: optional debug/probe notification,
// activation-object teardown (call/args objects), loading the return value
// into the ABI registers, and the final jump through fp->ncode. |fe| is the
// returned entry (top of stack) or NULL to return fp->rval/undefined.
void
mjit::Compiler::emitReturn(FrameEntry *fe)
{
    JS_ASSERT_IF(!fun, JSOp(*PC) == JSOP_STOP);

    /* Only the top of the stack can be returned. */
    JS_ASSERT_IF(fe, fe == frame.peek(-1));

    if (debugMode() || Probes::callTrackingActive(cx)) {
        // Notify the debugger/probes that the script is being left.
        prepareStubCall(Uses(0));
        INLINE_STUBCALL(stubs::LeaveScript);
    }

    /*
     * If there's a function object, deal with the fact that it can escape.
     * Note that after we've placed the call object, all tracked state can
     * be thrown away. This will happen anyway because the next live opcode
     * (if any) must have an incoming edge.
     *
     * However, it's an optimization to throw it away early - the tracker
     * won't be spilled on further exits or join points.
     */
    if (fun) {
        if (fun->isHeavyweight()) {
            /* There will always be a call object. */
            prepareStubCall(Uses(fe ? 1 : 0));
            INLINE_STUBCALL(stubs::PutActivationObjects);
        } else {
            /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
            Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                             Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
                                             Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
            stubcc.linkExit(putObjs, Uses(frame.frameSlots()));

            stubcc.leave();
            OOL_STUBCALL(stubs::PutActivationObjects);

            // The OOL path must emit its own return value load and final
            // jump, since it does not rejoin the inline path below.
            emitReturnValue(&stubcc.masm, fe);
            emitFinalReturn(stubcc.masm);
        }
    } else {
        if (fp->isEvalFrame() && script->strictModeCode) {
            /* There will always be a call object. */
            prepareStubCall(Uses(fe ? 1 : 0));
            INLINE_STUBCALL(stubs::PutStrictEvalCallObject);
        }
    }

    emitReturnValue(&masm, fe);
    emitFinalReturn(masm);
    // All tracked frame state is dead past a return.
    frame.discardFrame();
}
2268
// Sync and kill the frame's register state ahead of an inline stub call,
// keeping |uses| live stack entries addressable. Must precede emitStubCall.
void
mjit::Compiler::prepareStubCall(Uses uses)
{
    JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
    frame.syncAndKill(Registers(Registers::TempRegs), uses);
    JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
}
2276
2277 JSC::MacroAssembler::Call
2278 mjit::Compiler::emitStubCall(void *ptr)
2279 {
2280     JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");
2281     Call cl = masm.fallibleVMCall(ptr, PC, frame.stackDepth() + script->nfixed);
2282     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
2283     return cl;
2284 }
2285
// Emit an inline poll of the runtime interrupt flag, with an out-of-line
// path that (on JS_THREADSAFE) re-checks this thread's own flag before
// calling stubs::Interrupt.
void
mjit::Compiler::interruptCheckHelper()
{
    RegisterID reg = frame.allocReg();

    /*
     * Bake in and test the address of the interrupt counter for the runtime.
     * This is faster than doing two additional loads for the context's
     * thread data, but will cause this thread to run slower if there are
     * pending interrupts on some other thread.  For non-JS_THREADSAFE builds
     * we can skip this, as there is only one flag to poll.
     */
#ifdef JS_THREADSAFE
    void *interrupt = (void*) &cx->runtime->interruptCounter;
#else
    void *interrupt = (void*) &JS_THREAD_DATA(cx)->interruptFlags;
#endif

#if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
    Jump jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
#else
    /* Handle processors that can't load from absolute addresses. */
    masm.move(ImmPtr(interrupt), reg);
    Jump jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
#endif

    // Divert to the out-of-line path when the flag/counter is nonzero.
    stubcc.linkExitDirect(jump, stubcc.masm.label());

#ifdef JS_THREADSAFE
    /*
     * Do a slightly slower check for an interrupt on this thread.
     * We don't want this thread to slow down excessively if the pending
     * interrupt is on another thread.
     */
    stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
    stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
    Address flag(reg, offsetof(JSThread, data.interruptFlags));
    Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
#endif

    // Sync the frame on the OOL path and call the interrupt stub, then
    // rejoin the inline code.
    frame.sync(stubcc.masm, Uses(0));
    stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Interrupt);
    stubcc.rejoin(Changes(0));

#ifdef JS_THREADSAFE
    // False alarm (another thread's interrupt): skip the stub call entirely.
    stubcc.linkRejoin(noInterrupt);
#endif

    frame.freeReg(reg);
}
2337
2338 void
2339 mjit::Compiler::addReturnSite(Label joinPoint, uint32 id)
2340 {
2341     InternalCallSite site(masm.distanceOf(joinPoint), PC, id, false, false);
2342     addCallSite(site);
2343 }
2344
// Emit a call that does not use a call IC: invoke the Uncached{Call,New}
// stub, then either jump into the returned native code (fast path) or
// rejoin via the out-of-line path when the callee is not compiled.
void
mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
{
    CallPatchInfo callPatch;

    RegisterID r0 = Registers::ReturnReg;
    VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;

    // The callee + |this| account for the extra 2 uses beyond argc.
    frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stub);

    // The stub returns the native code entry, or NULL if not compiled.
    Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);

    // Patch the new frame's ncode with the (not-yet-known) return address.
    masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
    callPatch.hasFastNcode = true;
    callPatch.fastNcodePatch =
        masm.storePtrWithPatch(ImmPtr(NULL),
                               Address(JSFrameReg, JSStackFrame::offsetOfncode()));

    masm.jump(r0);
    callPatch.joinPoint = masm.label();
    addReturnSite(callPatch.joinPoint, __LINE__);
    // Back in the caller: restore JSFrameReg from the callee's prev link.
    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);

    // Replace callee/this/args with the return value, now in the ABI pair.
    frame.popn(argc + 2);
    frame.takeReg(JSReturnReg_Type);
    frame.takeReg(JSReturnReg_Data);
    frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);

    // Interpreted (not compiled) callees rejoin here after the stub ran them.
    stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
    stubcc.rejoin(Changes(0));
    callPatches.append(callPatch);
}
2380
2381 static bool
2382 IsLowerableFunCallOrApply(jsbytecode *pc)
2383 {
2384 #ifdef JS_MONOIC
2385     return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
2386            (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
2387 #else
2388     return false;
2389 #endif
2390 }
2391
// Emit the guards for a speculatively-lowered f.call/f.apply: verify the
// original callee really is the native js_fun_call/js_fun_apply, and emit
// an uncached-call fallback path for when the speculation fails.
void
mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedArgc,
                                          FrameEntry *origCallee, FrameEntry *origThis,
                                          MaybeRegisterID origCalleeType, RegisterID origCalleeData,
                                          MaybeRegisterID origThisType, RegisterID origThisData,
                                          Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
{
    JS_ASSERT(IsLowerableFunCallOrApply(PC));

    /*
     * if (origCallee.isObject() &&
     *     origCallee.toObject().isFunction &&
     *     origCallee.toObject().getFunctionPrivate() == js_fun_{call,apply})
     */
    MaybeJump isObj;
    if (origCalleeType.isSet())
        isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
    Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData);
    masm.loadObjPrivate(origCalleeData, origCalleeData);
    Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
    Jump isNative = masm.branchPtr(Assembler::NotEqual,
                                   Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
                                   ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));

    /*
     * If speculation fails, we can't use the ic, since it is compiled on the
     * assumption that speculation succeeds. Instead, just do an uncached call.
     */
    {
        if (isObj.isSet())
            stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
        stubcc.linkExitDirect(isFun, stubcc.masm.label());
        stubcc.linkExitDirect(isNative, stubcc.masm.label());

        // When the lazy-arguments trick was applied, the real arguments
        // object must be materialized first, deepening the frame by one.
        int32 frameDepthAdjust;
        if (applyTricks == LazyArgsObj) {
            OOL_STUBCALL(stubs::Arguments);
            frameDepthAdjust = +1;
        } else {
            frameDepthAdjust = 0;
        }

        stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
        JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
        OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
                           frame.localSlots() + frameDepthAdjust);
        JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");

        RegisterID r0 = Registers::ReturnReg;
        Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);

        stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
        Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
        uncachedCallPatch->hasSlowNcode = true;
        uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);

        stubcc.masm.jump(r0);
        // NOTE(review): the return site label is taken from the inline |masm|
        // while this slow path is emitted into |stubcc.masm| — presumably the
        // callee returns to the inline join point; confirm against MonoIC.cpp.
        addReturnSite(masm.label(), __LINE__);

        notCompiled.linkTo(stubcc.masm.label(), &stubcc.masm);

        /*
         * inlineCallHelper will link uncachedCallSlowRejoin to the join point
         * at the end of the ic. At that join point, the return value of the
         * call is assumed to be in registers, so load them before jumping.
         */
        JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
        Address rval = frame.addressOf(origCallee);  /* vp[0] == rval */
        stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
        *uncachedCallSlowRejoin = stubcc.masm.jump();
        JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
    }

    /*
     * For simplicity, we don't statically specialize calls to
     * ic::SplatApplyArgs based on applyTricks. Rather, this state is
     * communicated dynamically through the VMFrame.
     */
    if (*PC == JSOP_FUNAPPLY) {
        masm.store32(Imm32(applyTricks == LazyArgsObj),
                     FrameAddress(offsetof(VMFrame, u.call.lazyArgsObj)));
    }
}
2475
2476 /* This predicate must be called before the current op mutates the FrameState. */
2477 bool
2478 mjit::Compiler::canUseApplyTricks()
2479 {
2480     JS_ASSERT(*PC == JSOP_ARGUMENTS);
2481     jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
2482     return *nextpc == JSOP_FUNAPPLY &&
2483            IsLowerableFunCallOrApply(nextpc) &&
2484            !analysis->jumpTarget(nextpc) &&
2485            !debugMode();
2486 }
2487
/* See MonoIC.cpp, CallCompiler for more information on call ICs. */
/*
 * Emit the inline-cached call path for a call or construct site.
 * 'callImmArgc' is the argc encoded in the bytecode; 'callingNew' selects
 * construct semantics. With JS_MONOIC disabled (or when the callee/this
 * cannot be cached), this degrades to an uncached call.
 */
void
mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    /*
     * Under the lazy-arguments-object trick the JSOP_ARGUMENTS push is
     * undone and the call is treated as having a single argument.
     */
    int32 speculatedArgc;
    if (applyTricks == LazyArgsObj) {
        frame.pop();
        speculatedArgc = 1;
    } else {
        speculatedArgc = callImmArgc;
    }

    FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
    FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));

    /* 'this' does not need to be synced for constructing. */
    if (callingNew)
        frame.discardFe(origThis);

    /*
     * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
     * going to call js_fun_{call,apply}. Normally, this call would go through
     * js::Invoke to ultimately call 'this'. We can do much better by having
     * the callIC cache and call 'this' directly. However, if it turns out that
     * we are not actually calling js_fun_call, the callIC must act as normal.
     */
    bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);

    /*
     * Currently, constant values are not functions, so don't even try to
     * optimize. This lets us assume that callee/this have regs below.
     */
#ifdef JS_MONOIC
    if (debugMode() ||
        origCallee->isConstant() || origCallee->isNotType(JSVAL_TYPE_OBJECT) ||
        (lowerFunCallOrApply &&
         (origThis->isConstant() || origThis->isNotType(JSVAL_TYPE_OBJECT)))) {
#endif
        if (applyTricks == LazyArgsObj) {
            /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
            jsop_arguments();
            frame.pushSynced();
        }
        emitUncachedCall(callImmArgc, callingNew);
        return;
#ifdef JS_MONOIC
    }

    /* Initialized by both branches below. */
    CallGenInfo     callIC(PC);
    CallPatchInfo   callPatch;
    MaybeRegisterID icCalleeType; /* type to test for function-ness */
    RegisterID      icCalleeData; /* data to call */
    Address         icRvalAddr;   /* return slot on slow-path rejoin */

    /*
     * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
     * following labels (as used in finishThisUp):
     *  - funGuard -> hotJump
     *  - funGuard -> joinPoint
     *  - funGuard -> hotPathLabel
     *  - slowPathStart -> oolCall
     *  - slowPathStart -> oolJump
     *  - slowPathStart -> icCall
     *  - slowPathStart -> slowJoinPoint
     * Because the call ICs are fairly long (compared to PICs), we don't reserve the space in each
     * path until the first usage of funGuard (for the in-line path) or slowPathStart (for the
     * out-of-line path).
     */

    /* Initialized only on lowerFunCallOrApply branch. */
    Jump            uncachedCallSlowRejoin;
    CallPatchInfo   uncachedCallPatch;

    {
        MaybeRegisterID origCalleeType, maybeOrigCalleeData;
        RegisterID origCalleeData;

        /* Get the callee in registers. */
        frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
        origCalleeData = maybeOrigCalleeData.reg();
        PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);

        if (lowerFunCallOrApply) {
            MaybeRegisterID origThisType, maybeOrigThisData;
            RegisterID origThisData;
            {
                /* Get thisv in registers. */
                frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
                origThisData = maybeOrigThisData.reg();
                PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);

                /* Leaves pinned regs untouched. */
                frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
            }

            checkCallApplySpeculation(callImmArgc, speculatedArgc,
                                      origCallee, origThis,
                                      origCalleeType, origCalleeData,
                                      origThisType, origThisData,
                                      &uncachedCallSlowRejoin, &uncachedCallPatch);

            /* When lowering, the IC caches 'this' (the real target), not f. */
            icCalleeType = origThisType;
            icCalleeData = origThisData;
            icRvalAddr = frame.addressOf(origThis);

            /*
             * For f.call(), since we compile the ic under the (checked)
             * assumption that call == js_fun_call, we still have a static
             * frame size. For f.apply(), the frame size depends on the dynamic
             * length of the array passed to apply.
             */
            if (*PC == JSOP_FUNCALL)
                callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc - 1);
            else
                callIC.frameSize.initDynamic();
        } else {
            /* Leaves pinned regs untouched. */
            frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));

            icCalleeType = origCalleeType;
            icCalleeData = origCalleeData;
            icRvalAddr = frame.addressOf(origCallee);
            callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc);
        }
    }

    /* Test the type if necessary. Failing this always takes a really slow path. */
    MaybeJump notObjectJump;
    if (icCalleeType.isSet())
        notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());

    /*
     * For an optimized apply, keep icCalleeData and funPtrReg in a
     * callee-saved registers for the subsequent ic::SplatApplyArgs call.
     */
    Registers tempRegs;
    if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
        RegisterID x = tempRegs.takeRegInMask(Registers::SavedRegs);
        masm.move(icCalleeData, x);
        icCalleeData = x;
    } else {
        tempRegs.takeReg(icCalleeData);
    }
    RegisterID funPtrReg = tempRegs.takeRegInMask(Registers::SavedRegs);

    /* Reserve space just before initialization of funGuard. */
    RESERVE_IC_SPACE(masm);

    /*
     * Guard on the callee identity. This misses on the first run. If the
     * callee is scripted, compiled/compilable, and argc == nargs, then this
     * guard is patched, and the compiled code address is baked in.
     */
    Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
    callIC.funJump = j;

    /* Reserve space just before initialization of slowPathStart. */
    RESERVE_OOL_SPACE(stubcc.masm);

    Jump rejoin1, rejoin2;
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        stubcc.linkExitDirect(j, stubcc.masm.label());
        callIC.slowPathStart = stubcc.masm.label();

        /*
         * Test if the callee is even a function. If this doesn't match, we
         * take a _really_ slow path later.
         */
        Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData);

        /* Test if the function is scripted. */
        RegisterID tmp = tempRegs.takeAnyReg();
        stubcc.masm.loadObjPrivate(icCalleeData, funPtrReg);
        stubcc.masm.load16(Address(funPtrReg, offsetof(JSFunction, flags)), tmp);
        stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
        Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
        tempRegs.putReg(tmp);

        /*
         * N.B. After this call, the frame will have a dynamic frame size.
         * Check after the function is known not to be a native so that the
         * catch-all/native path has a static depth.
         */
        if (callIC.frameSize.isDynamic())
            OOL_STUBCALL(ic::SplatApplyArgs);

        /*
         * No-op jump that gets patched by ic::New/Call to the stub generated
         * by generateFullCallStub.
         */
        Jump toPatch = stubcc.masm.jump();
        toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
        callIC.oolJump = toPatch;
        callIC.icCall = stubcc.masm.label();

        /*
         * At this point the function is definitely scripted, so we try to
         * compile it and patch either funGuard/funJump or oolJump. This code
         * is only executed once.
         */
        callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
        void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
        if (callIC.frameSize.isStatic())
            callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.localSlots());
        else
            callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);

        callIC.funObjReg = icCalleeData;
        callIC.funPtrReg = funPtrReg;

        /*
         * The IC call either returns NULL, meaning call completed, or a
         * function pointer to jump to. Caveat: Must restore JSFrameReg
         * because a new frame has been pushed.
         */
        rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                            Registers::ReturnReg);
        if (callIC.frameSize.isStatic())
            stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
        else
            stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
        stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
        callPatch.hasSlowNcode = true;
        callPatch.slowNcodePatch =
            stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
                                          Address(JSFrameReg, JSStackFrame::offsetOfncode()));
        stubcc.masm.jump(Registers::ReturnReg);

        /*
         * This ool path is the catch-all for everything but scripted function
         * callees. For native functions, ic::NativeNew/NativeCall will repatch
         * funGuard/funJump with a fast call stub. All other cases
         * (non-function callable objects and invalid callees) take the slow
         * path through js::Invoke.
         */
        if (notObjectJump.isSet())
            stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
        notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
        isNative.linkTo(stubcc.masm.label(), &stubcc.masm);

        callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
        OOL_STUBCALL(callingNew ? ic::NativeNew : ic::NativeCall);

        rejoin2 = stubcc.masm.jump();
    }

    /*
     * If the call site goes to a closure over the same function, it will
     * generate an out-of-line stub that joins back here.
     */
    callIC.hotPathLabel = masm.label();

    uint32 flags = 0;
    if (callingNew)
        flags |= JSFRAME_CONSTRUCTING;

    InlineFrameAssembler inlFrame(masm, callIC, flags);
    callPatch.hasFastNcode = true;
    callPatch.fastNcodePatch = inlFrame.assemble(NULL);

    callIC.hotJump = masm.jump();
    callIC.joinPoint = callPatch.joinPoint = masm.label();
    addReturnSite(callPatch.joinPoint, __LINE__);
    if (lowerFunCallOrApply)
        uncachedCallPatch.joinPoint = callIC.joinPoint;
    /* The callee popped our frame; reload the previous frame pointer. */
    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);

    /*
     * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
     * in the in-line path so we can check the IC space now.
     */
    CHECK_IC_SPACE();

    frame.popn(speculatedArgc + 2);
    frame.takeReg(JSReturnReg_Type);
    frame.takeReg(JSReturnReg_Data);
    frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);

    /*
     * Now that the frame state is set, generate the rejoin path. Note that, if
     * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
     * value has been placed at vp[1] which is not the stack address associated
     * with frame.peek(-1).
     */
    callIC.slowJoinPoint = stubcc.masm.label();
    rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
    rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
    JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
    stubcc.masm.loadValueAsComponents(icRvalAddr, JSReturnReg_Type, JSReturnReg_Data);
    stubcc.crossJump(stubcc.masm.jump(), masm.label());
    JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");

    CHECK_OOL_SPACE();

    if (lowerFunCallOrApply)
        stubcc.crossJump(uncachedCallSlowRejoin, masm.label());

    callICs.append(callIC);
    callPatches.append(callPatch);
    if (lowerFunCallOrApply)
        callPatches.append(uncachedCallPatch);

    applyTricks = NoApplyTricks;
#endif
}
2798
2799 /*
2800  * This function must be called immediately after any instruction which could
2801  * cause a new JSStackFrame to be pushed and could lead to a new debug trap
2802  * being set. This includes any API callbacks and any scripted or native call.
2803  */
2804 void
2805 mjit::Compiler::addCallSite(const InternalCallSite &site)
2806 {
2807     callSites.append(site);
2808 }
2809
/* Reload JSFrameReg from VMFrame::regs.fp after a call may have clobbered it. */
void
mjit::Compiler::restoreFrameRegs(Assembler &masm)
{
    masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
}
2815
2816 bool
2817 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
2818 {
2819     JS_ASSERT(lhs.isPrimitive());
2820     JS_ASSERT(rhs.isPrimitive());
2821
2822     if (lhs.isString() && rhs.isString()) {
2823         int32 cmp;
2824         CompareStrings(cx, lhs.toString(), rhs.toString(), &cmp);
2825         switch (op) {
2826           case JSOP_LT:
2827             return cmp < 0;
2828           case JSOP_LE:
2829             return cmp <= 0;
2830           case JSOP_GT:
2831             return cmp > 0;
2832           case JSOP_GE:
2833             return cmp >= 0;
2834           case JSOP_EQ:
2835             return cmp == 0;
2836           case JSOP_NE:
2837             return cmp != 0;
2838           default:
2839             JS_NOT_REACHED("NYI");
2840         }
2841     } else {
2842         double ld, rd;
2843         
2844         /* These should be infallible w/ primitives. */
2845         ValueToNumber(cx, lhs, &ld);
2846         ValueToNumber(cx, rhs, &rd);
2847         switch(op) {
2848           case JSOP_LT:
2849             return ld < rd;
2850           case JSOP_LE:
2851             return ld <= rd;
2852           case JSOP_GT:
2853             return ld > rd;
2854           case JSOP_GE:
2855             return ld >= rd;
2856           case JSOP_EQ: /* fall through */
2857           case JSOP_NE:
2858             /* Special case null/undefined/void comparisons. */
2859             if (lhs.isNullOrUndefined()) {
2860                 if (rhs.isNullOrUndefined())
2861                     return op == JSOP_EQ;
2862                 return op == JSOP_NE;
2863             }
2864             if (rhs.isNullOrUndefined())
2865                 return op == JSOP_NE;
2866
2867             /* Normal return. */
2868             return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
2869           default:
2870             JS_NOT_REACHED("NYI");
2871         }
2872     }
2873
2874     JS_NOT_REACHED("NYI");
2875     return false;
2876 }
2877
2878 bool
2879 mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
2880 {
2881     prepareStubCall(Uses(2));
2882     INLINE_STUBCALL(stub);
2883     frame.pop();
2884     frame.pop();
2885
2886     if (!target) {
2887         frame.takeReg(Registers::ReturnReg);
2888         frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
2889         return true;
2890     }
2891
2892     JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
2893     frame.syncAndForgetEverything();
2894     Assembler::Condition cond = (fused == JSOP_IFEQ)
2895                                 ? Assembler::Zero
2896                                 : Assembler::NonZero;
2897     Jump j = masm.branchTest32(cond, Registers::ReturnReg,
2898                                Registers::ReturnReg);
2899     return jumpAndTrace(j, target);
2900 }
2901
2902 void
2903 mjit::Compiler::jsop_setprop_slow(JSAtom *atom, bool usePropCache)
2904 {
2905     prepareStubCall(Uses(2));
2906     masm.move(ImmPtr(atom), Registers::ArgReg1);
2907     if (usePropCache)
2908         INLINE_STUBCALL(STRICT_VARIANT(stubs::SetName));
2909     else
2910         INLINE_STUBCALL(STRICT_VARIANT(stubs::SetPropNoCache));
2911     JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
2912     frame.shimmy(1);
2913 }
2914
2915 void
2916 mjit::Compiler::jsop_getprop_slow(JSAtom *atom, bool usePropCache)
2917 {
2918     prepareStubCall(Uses(1));
2919     if (usePropCache) {
2920         INLINE_STUBCALL(stubs::GetProp);
2921     } else {
2922         masm.move(ImmPtr(atom), Registers::ArgReg1);
2923         INLINE_STUBCALL(stubs::GetPropNoCache);
2924     }
2925     frame.pop();
2926     frame.pushSynced();
2927 }
2928
/*
 * Slow-path callprop: call the CallProp stub directly. Pops the object and
 * pushes two synced slots (the callee and thisv produced by the stub).
 */
bool
mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
{
    prepareStubCall(Uses(1));
    masm.move(ImmPtr(atom), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::CallProp);
    frame.pop();
    /* CALLPROP leaves two values on the stack; both come synced from the stub. */
    frame.pushSynced();
    frame.pushSynced();
    return true;
}
2940
2941 bool
2942 mjit::Compiler::jsop_length()
2943 {
2944     FrameEntry *top = frame.peek(-1);
2945
2946     if (top->isTypeKnown() && top->getKnownType() == JSVAL_TYPE_STRING) {
2947         if (top->isConstant()) {
2948             JSString *str = top->getValue().toString();
2949             Value v;
2950             v.setNumber(uint32(str->length()));
2951             frame.pop();
2952             frame.push(v);
2953         } else {
2954             RegisterID str = frame.ownRegForData(top);
2955             masm.loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), str);
2956             masm.urshift32(Imm32(JSString::LENGTH_SHIFT), str);
2957             frame.pop();
2958             frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
2959         }
2960         return true;
2961     }
2962
2963 #if defined JS_POLYIC
2964     return jsop_getprop(cx->runtime->atomState.lengthAtom);
2965 #else
2966     prepareStubCall(Uses(1));
2967     INLINE_STUBCALL(stubs::Length);
2968     frame.pop();
2969     frame.pushSynced();
2970     return true;
2971 #endif
2972 }
2973
2974 #ifdef JS_MONOIC
/* Emit a patchable move of the (not-yet-known) IC address into ArgReg1. */
void
mjit::Compiler::passMICAddress(GlobalNameICInfo &ic)
{
    ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
2980 #endif
2981
2982 #if defined JS_POLYIC
/* Emit a patchable move of the (not-yet-known) PIC address into ArgReg1. */
void
mjit::Compiler::passICAddress(BaseICInfo *ic)
{
    ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
2988
/*
 * Compile a property get through a polymorphic inline cache (PIC). The
 * inline path guards on object type (optional) and shape, then loads the
 * slot from a patchable address; misses go to ic::GetProp out of line.
 */
bool
mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck, bool usePropCache)
{
    FrameEntry *top = frame.peek(-1);

    /* If the incoming type will never PIC, take slow path. */
    if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
        JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
                     top->getKnownType() != JSVAL_TYPE_STRING);
        jsop_getprop_slow(atom, usePropCache);
        return true;
    }

    /*
     * These two must be loaded first. The objReg because the string path
     * wants to read it, and the shapeReg because it could cause a spill that
     * the string path wouldn't sink back.
     */
    RegisterID objReg = Registers::ReturnReg;
    RegisterID shapeReg = Registers::ReturnReg;
    if (atom == cx->runtime->atomState.lengthAtom) {
        objReg = frame.copyDataIntoReg(top);
        shapeReg = frame.allocReg();
    }

    RESERVE_IC_SPACE(masm);

    PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC), usePropCache);

    /* Guard that the type is an object. */
    Label typeCheck;
    if (doTypeCheck && !top->isTypeKnown()) {
        RegisterID reg = frame.tempRegForType(top);
        pic.typeReg = reg;

        /* Start the hot path where it's easy to patch it. */
        pic.fastPathStart = masm.label();
        Jump j = masm.testObject(Assembler::NotEqual, reg);
        typeCheck = masm.label();
        RETURN_IF_OOM(false);

        pic.typeCheck = stubcc.linkExit(j, Uses(1));
        pic.hasTypeCheck = true;
    } else {
        pic.fastPathStart = masm.label();
        pic.hasTypeCheck = false;
        pic.typeReg = Registers::ReturnReg;
    }

    /* Non-'length' gets allocate their registers here, after the type check. */
    if (atom != cx->runtime->atomState.lengthAtom) {
        objReg = frame.copyDataIntoReg(top);
        shapeReg = frame.allocReg();
    }

    pic.shapeReg = shapeReg;
    pic.atom = atom;

    /* Guard on shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();

    /* INVALID_SHAPE guarantees a first-run miss; the IC patches in real shapes. */
    DataLabel32 inlineShapeLabel;
    Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
                                    Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                    inlineShapeLabel);
    Label inlineShapeJump = masm.label();

    RESERVE_OOL_SPACE(stubcc.masm);
    pic.slowPathStart = stubcc.linkExit(j, Uses(1));

    stubcc.leave();
    passICAddress(&pic);
    pic.slowPathCall = OOL_STUBCALL(ic::GetProp);
    CHECK_OOL_SPACE();

    /* Load the base slot address. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
                                                               objReg);

    /* Copy the slot value to the expression stack. */
    /* 1 << 24 is a dummy displacement; the real offset is patched in later. */
    Address slot(objReg, 1 << 24);
    frame.pop();

    Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
    pic.fastPathRejoin = masm.label();

    RETURN_IF_OOM(false);

    /* Initialize op labels. */
    GetPropLabels &labels = pic.getPropLabels();
    labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
    labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeLabel);

    labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
    if (pic.hasTypeCheck)
        labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
#ifdef JS_CPU_X64
    labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
#else
    labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
#endif

    pic.objReg = objReg;
    frame.pushRegs(shapeReg, objReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
    return true;
}
3099
/*
 * Compile a generic (unknown-type) callprop through a PIC. In addition to
 * the GETPROP-style guards and slot load, this stores the receiver into the
 * thisv stack slot before the guards, so the upcoming call sees it.
 */
bool
mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
{
    FrameEntry *top = frame.peek(-1);

    /*
     * These two must be loaded first. The objReg because the string path
     * wants to read it, and the shapeReg because it could cause a spill that
     * the string path wouldn't sink back.
     */
    RegisterID objReg = frame.copyDataIntoReg(top);
    RegisterID shapeReg = frame.allocReg();

    PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);

    pic.pc = PC;

    /* Guard that the type is an object. */
    pic.typeReg = frame.copyTypeIntoReg(top);

    RESERVE_IC_SPACE(masm);

    /* Start the hot path where it's easy to patch it. */
    pic.fastPathStart = masm.label();

    /*
     * Guard that the value is an object. This part needs some extra gunk
     * because the leave() after the shape guard will emit a jump from this
     * path to the final call. We need a label in between that jump, which
     * will be the target of patched jumps in the PIC.
     */
    Jump typeCheckJump = masm.testObject(Assembler::NotEqual, pic.typeReg);
    Label typeCheck = masm.label();
    RETURN_IF_OOM(false);

    pic.typeCheck = stubcc.linkExit(typeCheckJump, Uses(1));
    pic.hasTypeCheck = true;
    pic.objReg = objReg;
    pic.shapeReg = shapeReg;
    pic.atom = atom;

    /*
     * Store the type and object back. Don't bother keeping them in registers,
     * since a sync will be needed for the upcoming call.
     */
    uint32 thisvSlot = frame.localSlots();
    Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));

#if defined JS_NUNBOX32
    masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
#elif defined JS_PUNBOX64
    /* On punboxed platforms the type/payload are merged into one word. */
    masm.orPtr(pic.objReg, pic.typeReg);
    masm.storePtr(pic.typeReg, thisv);
#endif

    frame.freeReg(pic.typeReg);

    /* Guard on shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();

    /* INVALID_SHAPE guarantees a first-run miss; the IC patches in real shapes. */
    DataLabel32 inlineShapeLabel;
    Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
                           Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                           inlineShapeLabel);
    Label inlineShapeJump = masm.label();

    /* Slow path. */
    RESERVE_OOL_SPACE(stubcc.masm);
    pic.slowPathStart = stubcc.linkExit(j, Uses(1));
    stubcc.leave();
    passICAddress(&pic);
    pic.slowPathCall = OOL_STUBCALL(ic::CallProp);
    CHECK_OOL_SPACE();

    /* Adjust the frame. None of this will generate code. */
    frame.pop();
    frame.pushRegs(shapeReg, objReg);
    frame.pushSynced();

    /* Load the base slot address. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
                                                               objReg);

    /* Copy the slot value to the expression stack. */
    /* 1 << 24 is a dummy displacement; the real offset is patched in later. */
    Address slot(objReg, 1 << 24);

    Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
    pic.fastPathRejoin = masm.label();

    RETURN_IF_OOM(false);

    /* 
     * Initialize op labels. We use GetPropLabels here because we have the same patching
     * requirements for CallProp.
     */
    GetPropLabels &labels = pic.getPropLabels();
    labels.setDslotsLoadOffset(masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
    labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
    labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
    labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
#ifdef JS_CPU_X64
    labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
#else
    labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
#endif

    stubcc.rejoin(Changes(2));
    pics.append(pic);

    return true;
}
3212
/*
 * Compile callprop on a known-string receiver: look the property up on
 * String.prototype (baked in, valid because of compileAndGo), then arrange
 * the stack as [callee, thisv] with the original string as thisv.
 */
bool
mjit::Compiler::jsop_callprop_str(JSAtom *atom)
{
    if (!script->compileAndGo) {
        /* Without compileAndGo we cannot bake in String.prototype. */
        jsop_callprop_slow(atom);
        return true; 
    }

    /*
     * Bake in String.prototype. This is safe because of compileAndGo.
     * We must pass an explicit scope chain only because JSD calls into
     * here via the recompiler with a dummy context, and we need to use
     * the global object for the script we are now compiling.
     */
    JSObject *obj;
    if (!js_GetClassPrototype(cx, &fp->scopeChain(), JSProto_String, &obj))
        return false;

    /* Force into a register because getprop won't expect a constant. */
    RegisterID reg = frame.allocReg();

    masm.move(ImmPtr(obj), reg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);

    /* Get the property. */
    if (!jsop_getprop(atom))
        return false;

    /* Perform a swap. */
    frame.dup2();
    frame.shift(-3);
    frame.shift(-1);

    /* 4) Test if the function can take a primitive. */
    /* funFe is only referenced by JS_ASSERT, which compiles away in release. */
#ifdef DEBUG
    FrameEntry *funFe = frame.peek(-2);
#endif
    JS_ASSERT(!funFe->isTypeKnown());

    /*
     * See bug 584579 - need to forget string type, since wrapping could
     * create an object. forgetType() alone is not valid because it cannot be
     * used on copies or constants.
     */
    RegisterID strReg;
    FrameEntry *strFe = frame.peek(-1);
    if (strFe->isConstant()) {
        strReg = frame.allocReg();
        masm.move(ImmPtr(strFe->getValue().toString()), strReg);
    } else {
        strReg = frame.ownRegForData(strFe);
    }
    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_STRING, strReg);
    frame.forgetType(frame.peek(-1));

    return true;
}
3271
bool
mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
{
    /*
     * Compile JSOP_CALLPROP when the lhs is statically known to be an object:
     * emit a CALL PIC with no type guard, an inline shape-guarded slot load,
     * and an OOL path that calls ic::CallProp.
     */
    FrameEntry *top = frame.peek(-1);

    PICGenInfo pic(ic::PICInfo::CALL, JSOp(*PC), true);

    JS_ASSERT(top->isTypeKnown());
    JS_ASSERT(top->getKnownType() == JSVAL_TYPE_OBJECT);

    RESERVE_IC_SPACE(masm);

    pic.pc = PC;
    pic.fastPathStart = masm.label();
    pic.hasTypeCheck = false;
    pic.typeReg = Registers::ReturnReg;

    RegisterID objReg = frame.copyDataIntoReg(top);
    RegisterID shapeReg = frame.allocReg();

    pic.shapeReg = shapeReg;
    pic.atom = atom;

    /* Guard on shape. INVALID_SHAPE guarantees the first run misses and
     * enters the IC, which then patches in the real shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();

    DataLabel32 inlineShapeLabel;
    Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
                           Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                           inlineShapeLabel);
    Label inlineShapeJump = masm.label();

    /* Slow path. */
    RESERVE_OOL_SPACE(stubcc.masm);
    pic.slowPathStart = stubcc.linkExit(j, Uses(1));
    stubcc.leave();
    passICAddress(&pic);
    pic.slowPathCall = OOL_STUBCALL(ic::CallProp);
    CHECK_OOL_SPACE();

    /* Load the base slot address. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
                                                               objReg);

    /* Copy the slot value to the expression stack. The 1 << 24 offset is a
     * placeholder the IC patches with the real slot offset. */
    Address slot(objReg, 1 << 24);

    Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);

    pic.fastPathRejoin = masm.label();
    pic.objReg = objReg;

    /*
     * 1) Dup the |this| object.
     * 2) Push the property value onto the stack.
     * 3) Move the value below the dup'd |this|, uncopying it. This could
     * generate code, thus the fastPathRejoin label being prior. This is safe
     * as a stack transition, because JSOP_CALLPROP has JOF_TMPSLOT. It is
     * also safe for correctness, because if we know the LHS is an object, it
     * is the resulting vp[1].
     */
    frame.dup();
    frame.pushRegs(shapeReg, objReg);
    frame.shift(-2);

    /* 
     * Assert correctness of hardcoded offsets.
     * No type guard: type is asserted.
     */
    RETURN_IF_OOM(false);

    /* Record patch offsets so the IC can rewrite the inline path later. */
    GetPropLabels &labels = pic.getPropLabels();
    labels.setDslotsLoadOffset(masm.differenceBetween(pic.fastPathRejoin, dslotsLoadLabel));
    labels.setInlineShapeOffset(masm.differenceBetween(pic.shapeGuard, inlineShapeLabel));
    labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
#ifdef JS_CPU_X64
    labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
#else
    labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
#endif

    stubcc.rejoin(Changes(2));
    pics.append(pic);

    return true;
}
3359
3360 bool
3361 mjit::Compiler::jsop_callprop(JSAtom *atom)
3362 {
3363     FrameEntry *top = frame.peek(-1);
3364
3365     /* If the incoming type will never PIC, take slow path. */
3366     if (top->isTypeKnown() && top->getKnownType() != JSVAL_TYPE_OBJECT) {
3367         if (top->getKnownType() == JSVAL_TYPE_STRING)
3368             return jsop_callprop_str(atom);
3369         return jsop_callprop_slow(atom);
3370     }
3371
3372     if (top->isTypeKnown())
3373         return jsop_callprop_obj(atom);
3374     return jsop_callprop_generic(atom);
3375 }
3376
bool
mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
{
    /*
     * Compile JSOP_SETPROP/JSOP_SETMETHOD with a SET PIC: optional type
     * guard, shape-guarded inline store into the object's slots, and an
     * OOL path calling ic::SetProp. Leaves only the rhs on the stack.
     */
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    /* If the incoming type will never PIC, take slow path. */
    if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
        jsop_setprop_slow(atom, usePropCache);
        return true;
    }

    JSOp op = JSOp(*PC);

    ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
                             ? ic::PICInfo::SETMETHOD
                             : ic::PICInfo::SET;
    PICGenInfo pic(kind, op, usePropCache);
    pic.atom = atom;

    RESERVE_IC_SPACE(masm);
    RESERVE_OOL_SPACE(stubcc.masm);

    /* Guard that the type is an object. */
    Jump typeCheck;
    if (!lhs->isTypeKnown()) {
        RegisterID reg = frame.tempRegForType(lhs);
        pic.typeReg = reg;

        /* Start the hot path where it's easy to patch it. */
        pic.fastPathStart = masm.label();
        Jump j = masm.testObject(Assembler::NotEqual, reg);

        pic.typeCheck = stubcc.linkExit(j, Uses(2));
        stubcc.leave();

        /* Non-objects bypass the IC entirely and call the generic stub. */
        stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
        if (usePropCache)
            OOL_STUBCALL(STRICT_VARIANT(stubs::SetName));
        else
            OOL_STUBCALL(STRICT_VARIANT(stubs::SetPropNoCache));
        typeCheck = stubcc.masm.jump();
        pic.hasTypeCheck = true;
    } else {
        pic.fastPathStart = masm.label();
        pic.hasTypeCheck = false;
        pic.typeReg = Registers::ReturnReg;
    }

    /* Get the object into a mutable register. */
    RegisterID objReg = frame.copyDataIntoReg(lhs);
    pic.objReg = objReg;

    /* Get info about the RHS and pin it so allocReg below can't evict it. */
    ValueRemat vr;
    frame.pinEntry(rhs, vr);
    pic.vr = vr;

    RegisterID shapeReg = frame.allocReg();
    pic.shapeReg = shapeReg;

    frame.unpinEntry(vr);

    /* Guard on shape. INVALID_SHAPE always misses on the first run so the
     * IC gets a chance to patch in the real shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();
    DataLabel32 inlineShapeData;
    Jump j = masm.branch32WithPatch(Assembler::NotEqual, shapeReg,
                                    Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                    inlineShapeData);
    Label afterInlineShapeJump = masm.label();

    /* Slow path. */
    {
        pic.slowPathStart = stubcc.linkExit(j, Uses(2));

        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::SetProp);
        CHECK_OOL_SPACE();
    }

    /* Load dslots. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, offsetof(JSObject, slots)),
                                                       objReg);

    /* Store RHS into object slot. 1 << 24 is a placeholder offset patched
     * by the IC with the real slot offset. */
    Address slot(objReg, 1 << 24);
    DataLabel32 inlineValueStore = masm.storeValueWithAddressOffsetPatch(vr, slot);
    pic.fastPathRejoin = masm.label();

    frame.freeReg(objReg);
    frame.freeReg(shapeReg);

    /* "Pop under", taking out object (LHS) and leaving RHS. */
    frame.shimmy(1);

    /* Finish slow path. */
    {
        if (pic.hasTypeCheck)
            typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
        stubcc.rejoin(Changes(1));
    }

    RETURN_IF_OOM(false);

    /* Record patch offsets for the IC. */
    SetPropLabels &labels = pic.setPropLabels();
    labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData);
    labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel, vr);
    labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore, vr);
    labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump);

    pics.append(pic);
    return true;
}
3492
void
mjit::Compiler::jsop_name(JSAtom *atom)
{
    /*
     * Compile JSOP_NAME with a NAME PIC. The inline path is just a jump
     * (there is no inline lookup); the IC later patches it to point at
     * generated scope-walking stubs, falling back to ic::Name.
     */
    PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC), true);

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.atom = atom;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::Name);
        CHECK_OOL_SPACE();
    }
    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJump(masm, pic.fastPathStart, inlineJump);

    /* Result arrives in (shapeReg, objReg) as a type/data pair. */
    frame.pushRegs(pic.shapeReg, pic.objReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
}
3530
bool
mjit::Compiler::jsop_xname(JSAtom *atom)
{
    /*
     * Compile JSOP_GETXPROP (name lookup on an explicit object) with an
     * XNAME PIC. Falls back to a plain getprop when the operand is known
     * to not be an object.
     */
    PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC), true);

    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
        return jsop_getprop(atom);
    }

    if (!fe->isTypeKnown()) {
        /* Unknown type: guard it is an object before entering the IC. */
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.copyDataIntoReg(fe);
    pic.typeReg = Registers::ReturnReg;
    pic.atom = atom;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(1));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::XName);
        CHECK_OOL_SPACE();
    }

    pic.fastPathRejoin = masm.label();

    RETURN_IF_OOM(false);

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJumpOffset(masm.differenceBetween(pic.fastPathStart, inlineJump));

    /* Replace the object operand with the looked-up value. */
    frame.pop();
    frame.pushRegs(pic.shapeReg, pic.objReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
    return true;
}
3583
void
mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
{
    /*
     * Compile JSOP_BINDNAME with a BIND PIC. Fast path: if the scope chain
     * head has no parent it is the global object and can be used directly;
     * otherwise fall to ic::BindName to walk the scope chain.
     */
    PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC), usePropCache);

    // This code does not check the frame flags to see if scopeChain has been
    // set. Rather, it relies on the up-front analysis statically determining
    // whether BINDNAME can be used, which reifies the scope chain at the
    // prologue.
    JS_ASSERT(analysis->usesScopeChain());

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.atom = atom;
    pic.hasTypeCheck = false;

    RESERVE_IC_SPACE(masm);
    pic.fastPathStart = masm.label();

    Address parent(pic.objReg, offsetof(JSObject, parent));
    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);

    /* Non-null parent means we are not at the global; take the slow path. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(parent), ImmPtr(0));
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::BindName);
        CHECK_OOL_SPACE();
    }

    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    BindNameLabels &labels = pic.bindNameLabels();
    labels.setInlineJump(masm, pic.shapeGuard, inlineJump);

    /* The bound object is always an object; shapeReg was only scratch. */
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
    frame.freeReg(pic.shapeReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
}
3631
3632 #else /* !JS_POLYIC */
3633
void
mjit::Compiler::jsop_name(JSAtom *atom)
{
    /* Non-PIC build: JSOP_NAME always calls the stubs::Name stub. */
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Name);
    frame.pushSynced();
}
3641
bool
mjit::Compiler::jsop_xname(JSAtom *atom)
{
    /* Non-PIC build: JSOP_GETXPROP compiles as an ordinary getprop. */
    return jsop_getprop(atom);
}
3647
bool
mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck, bool usePropCache)
{
    /* Non-PIC build: always take the slow getprop path. */
    jsop_getprop_slow(atom, usePropCache);
    return true;
}
3654
bool
mjit::Compiler::jsop_callprop(JSAtom *atom)
{
    /* Non-PIC build: always take the slow callprop path. */
    return jsop_callprop_slow(atom);
}
3660
bool
mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
{
    /* Non-PIC build: always take the slow setprop path. */
    jsop_setprop_slow(atom, usePropCache);
    return true;
}
3667
void
mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
{
    /*
     * Non-PIC BINDNAME: inline fast path succeeds only when the scope chain
     * head has a null parent (i.e. it is the global object); otherwise call
     * a BindName stub out of line.
     */
    RegisterID reg = frame.allocReg();
    Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
    masm.loadPtr(scopeChain, reg);

    Address address(reg, offsetof(JSObject, parent));

    /* Non-null parent => not the global; go to the stub. */
    Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));

    stubcc.linkExit(j, Uses(0));
    stubcc.leave();
    if (usePropCache) {
        OOL_STUBCALL(stubs::BindName);
    } else {
        /* The no-cache stub needs the atom passed explicitly. */
        stubcc.masm.move(ImmPtr(atom), Registers::ArgReg1);
        OOL_STUBCALL(stubs::BindNameNoCache);
    }

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);

    stubcc.rejoin(Changes(1));
}
3692 #endif
3693
void
mjit::Compiler::jsop_this()
{
    /*
     * Compile JSOP_THIS: push |this|, and in non-strict function code make
     * sure it is an object, calling stubs::This out of line to wrap
     * primitives.
     */
    frame.pushThis();

    /* 
     * In strict mode code, we don't wrap 'this'.
     * In direct-call eval code, we wrapped 'this' before entering the eval.
     * In global code, 'this' is always an object.
     */
    if (fun && !script->strictModeCode) {
        FrameEntry *thisFe = frame.peek(-1);
        if (!thisFe->isTypeKnown()) {
            Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
            stubcc.linkExit(notObj, Uses(1));
            stubcc.leave();
            OOL_STUBCALL(stubs::This);
            stubcc.rejoin(Changes(1));

            // Now we know that |this| is an object.
            frame.pop();
            frame.learnThisIsObject();
            frame.pushThis();
        }

        JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
    }
}
3722
void
mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
{
    /*
     * Compile global-name increment/decrement ops (GNAMEINC/INCGNAME/
     * GNAMEDEC/DECGNAME) as get + arithmetic + set sequences. The stack
     * comments on the right track the abstract stack after each step.
     * Consumes the following JSOP_POP when the result is unobserved.
     */
    JSAtom *atom = script->getAtom(index);

#if defined JS_MONOIC
    jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
    bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
    /* amt is negated because the op below is SUB: V - (-1) == V + 1. */
    int amt = (op == JSOP_GNAMEINC || op == JSOP_INCGNAME) ? -1 : 1;

    if (pop || (op == JSOP_INCGNAME || op == JSOP_DECGNAME)) {
        /* These cases are easy, the original value is not observed. */

        jsop_getgname(index);
        // V

        frame.push(Int32Value(amt));
        // V 1

        /* Use sub since it calls ValueToNumber instead of string concat. */
        jsop_binary(JSOP_SUB, stubs::Sub);
        // N+1

        jsop_bindgname();
        // V+1 OBJ

        frame.dup2();
        // V+1 OBJ V+1 OBJ

        frame.shift(-3);
        // OBJ OBJ V+1

        frame.shift(-1);
        // OBJ V+1

        jsop_setgname(atom, false);
        // V+1

        if (pop)
            frame.pop();
    } else {
        /* The pre-value is observed, making this more tricky. */

        jsop_getgname(index);
        // V

        jsop_pos();
        // N

        frame.dup();
        // N N

        frame.push(Int32Value(-amt));
        // N N 1

        jsop_binary(JSOP_ADD, stubs::Add);
        // N N+1

        jsop_bindgname();
        // N N+1 OBJ

        frame.dup2();
        // N N+1 OBJ N+1 OBJ

        frame.shift(-3);
        // N OBJ OBJ N+1

        frame.shift(-1);
        // N OBJ N+1

        jsop_setgname(atom, false);
        // N N+1

        frame.pop();
        // N
    }

    /* We compiled the trailing POP ourselves; skip over it. */
    if (pop)
        PC += JSOP_POP_LENGTH;
#else
    prepareStubCall(Uses(0));
    masm.move(ImmPtr(atom), Registers::ArgReg1);
    INLINE_STUBCALL(stub);
    frame.pushSynced();
#endif

    PC += JSOP_GNAMEINC_LENGTH;
}
3811
bool
mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
{
    /*
     * Compile scoped-name increment/decrement ops (NAMEINC/INCNAME/
     * NAMEDEC/DECNAME) as name + arithmetic + bindname + setprop sequences.
     * Mirrors jsop_gnameinc; see the stack comments for the transitions.
     * Consumes the following JSOP_POP when the result is unobserved.
     */
    JSAtom *atom = script->getAtom(index);
#if defined JS_POLYIC
    jsbytecode *next = &PC[JSOP_NAMEINC_LENGTH];
    bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
    /* amt is negated because the op below is SUB: V - (-1) == V + 1. */
    int amt = (op == JSOP_NAMEINC || op == JSOP_INCNAME) ? -1 : 1;

    if (pop || (op == JSOP_INCNAME || op == JSOP_DECNAME)) {
        /* These cases are easy, the original value is not observed. */

        jsop_name(atom);
        // V

        frame.push(Int32Value(amt));
        // V 1

        /* Use sub since it calls ValueToNumber instead of string concat. */
        jsop_binary(JSOP_SUB, stubs::Sub);
        // N+1

        jsop_bindname(atom, false);
        // V+1 OBJ

        frame.dup2();
        // V+1 OBJ V+1 OBJ

        frame.shift(-3);
        // OBJ OBJ V+1

        frame.shift(-1);
        // OBJ V+1

        if (!jsop_setprop(atom, false))
            return false;
        // V+1

        if (pop)
            frame.pop();
    } else {
        /* The pre-value is observed, making this more tricky. */

        jsop_name(atom);
        // V

        jsop_pos();
        // N

        frame.dup();
        // N N

        frame.push(Int32Value(-amt));
        // N N 1

        jsop_binary(JSOP_ADD, stubs::Add);
        // N N+1

        jsop_bindname(atom, false);
        // N N+1 OBJ

        frame.dup2();
        // N N+1 OBJ N+1 OBJ

        frame.shift(-3);
        // N OBJ OBJ N+1

        frame.shift(-1);
        // N OBJ N+1

        if (!jsop_setprop(atom, false))
            return false;
        // N N+1

        frame.pop();
        // N
    }

    /* We compiled the trailing POP ourselves; skip over it. */
    if (pop)
        PC += JSOP_POP_LENGTH;
#else
    prepareStubCall(Uses(0));
    masm.move(ImmPtr(atom), Registers::ArgReg1);
    INLINE_STUBCALL(stub);
    frame.pushSynced();
#endif

    PC += JSOP_NAMEINC_LENGTH;
    return true;
}
3902
bool
mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
{
    /*
     * Compile property increment/decrement ops (PROPINC/INCPROP/PROPDEC/
     * DECPROP) as getprop + arithmetic + setprop sequences when the base is
     * (or may be) an object; otherwise call the generic stub. Consumes the
     * following JSOP_POP when the result is unobserved.
     */
    JSAtom *atom = script->getAtom(index);
#if defined JS_POLYIC
    FrameEntry *objFe = frame.peek(-1);
    if (!objFe->isTypeKnown() || objFe->getKnownType() == JSVAL_TYPE_OBJECT) {
        jsbytecode *next = &PC[JSOP_PROPINC_LENGTH];
        bool pop = (JSOp(*next) == JSOP_POP) && !analysis->jumpTarget(next);
        /* amt is negated because the op below is SUB: V - (-1) == V + 1. */
        int amt = (op == JSOP_PROPINC || op == JSOP_INCPROP) ? -1 : 1;

        if (pop || (op == JSOP_INCPROP || op == JSOP_DECPROP)) {
            /* These cases are easy, the original value is not observed. */

            frame.dup();
            // OBJ OBJ

            if (!jsop_getprop(atom))
                return false;
            // OBJ V

            frame.push(Int32Value(amt));
            // OBJ V 1

            /* Use sub since it calls ValueToNumber instead of string concat. */
            jsop_binary(JSOP_SUB, stubs::Sub);
            // OBJ V+1

            if (!jsop_setprop(atom, false))
                return false;
            // V+1

            if (pop)
                frame.pop();
        } else {
            /* The pre-value is observed, making this more tricky. */

            frame.dup();
            // OBJ OBJ 

            if (!jsop_getprop(atom))
                return false;
            // OBJ V

            jsop_pos();
            // OBJ N

            frame.dup();
            // OBJ N N

            frame.push(Int32Value(-amt));
            // OBJ N N 1

            jsop_binary(JSOP_ADD, stubs::Add);
            // OBJ N N+1

            frame.dupAt(-3);
            // OBJ N N+1 OBJ

            frame.dupAt(-2);
            // OBJ N N+1 OBJ N+1

            if (!jsop_setprop(atom, false))
                return false;
            // OBJ N N+1 N+1

            frame.popn(2);
            // OBJ N

            frame.shimmy(1);
            // N
        }
        /* We compiled the trailing POP ourselves; skip over it. */
        if (pop)
            PC += JSOP_POP_LENGTH;
    } else
#endif
    {
        /* Known non-object base: generic stub, replacing the operand. */
        prepareStubCall(Uses(1));
        masm.move(ImmPtr(atom), Registers::ArgReg1);
        INLINE_STUBCALL(stub);
        frame.pop();
        frame.pushSynced();
    }

    PC += JSOP_PROPINC_LENGTH;
    return true;
}
3990
bool
mjit::Compiler::iter(uintN flags)
{
    /*
     * Compile JSOP_ITER. Fast path: reuse the compartment's most recently
     * used native iterator if it is inactive and its recorded shapes still
     * match the object and its prototype chain; otherwise call stubs::Iter.
     */
    FrameEntry *fe = frame.peek(-1);

    /*
     * Stub the call if this is not a simple 'for in' loop or if the iterated
     * value is known to not be an object.
     */
    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(1));
        masm.move(Imm32(flags), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::Iter);
        frame.pop();
        frame.pushSynced();
        return true;
    }

    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    RegisterID reg = frame.tempRegForData(fe);

    /* Pin reg so the scratch allocations below cannot evict it. */
    frame.pinReg(reg);
    RegisterID ioreg = frame.allocReg();  /* Will hold iterator JSObject */
    RegisterID nireg = frame.allocReg();  /* Will hold NativeIterator */
    RegisterID T1 = frame.allocReg();
    RegisterID T2 = frame.allocReg();
    frame.unpinReg(reg);

    /* Fetch the most recent iterator. */
    masm.loadPtr(&script->compartment->nativeIterCache.last, ioreg);

    /* Test for NULL. */
    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
    stubcc.linkExit(nullIterator, Uses(1));

    /* Get NativeIterator from iter obj. */
    masm.loadObjPrivate(ioreg, nireg);

    /* Test for active iterator. */
    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
    masm.load32(flagsAddr, T1);
    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1,
                                            Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE));
    stubcc.linkExit(activeIterator, Uses(1));

    /* Compare shape of object with iterator. */
    masm.loadShape(reg, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.load32(Address(T2, 0), T2);
    Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedObject, Uses(1));

    /* Compare shape of object's prototype with iterator. */
    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
    masm.loadShape(T1, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.load32(Address(T2, sizeof(uint32)), T2);
    Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedProto, Uses(1));

    /*
     * Compare object's prototype's prototype with NULL. The last native
     * iterator will always have a prototype chain length of one
     * (i.e. it must be a plain object), so we do not need to generate
     * a loop here.
     */
    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
    masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
    /* NOTE(review): branchPtr with NonZero on (T1, T1) reads as a non-null
     * test on T1 — confirm against the macro assembler's semantics. */
    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
    stubcc.linkExit(overlongChain, Uses(1));

    /* Found a match with the most recent iterator. Hooray! */

    /* Mark iterator as active. */
    masm.storePtr(reg, Address(nireg, offsetof(NativeIterator, obj)));
    masm.load32(flagsAddr, T1);
    masm.or32(Imm32(JSITER_ACTIVE), T1);
    masm.store32(T1, flagsAddr);

    /* Chain onto the active iterator stack. */
    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));

    frame.freeReg(nireg);
    frame.freeReg(T1);
    frame.freeReg(T2);

    /* Slow path: call the generic iterator stub. */
    stubcc.leave();
    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Iter);

    /* Push the iterator object. */
    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);

    stubcc.rejoin(Changes(1));

    return true;
}
4096
4097 /*
4098  * This big nasty function emits a fast-path for native iterators, producing
4099  * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
4100  */
void
mjit::Compiler::iterNext()
{
    /*
     * Compile the fast path of JSOP_ITERNEXT: for a native key-iterator
     * whose current id is a string, push that string and bump the cursor;
     * anything else bails to stubs::IterNext.
     */
    FrameEntry *fe = frame.peek(-1);
    RegisterID reg = frame.tempRegForData(fe);

    /* Is it worth trying to pin this longer? Prolly not. */
    frame.pinReg(reg);
    RegisterID T1 = frame.allocReg();
    frame.unpinReg(reg);

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
    stubcc.linkExit(notFast, Uses(1));

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, T1);

    RegisterID T3 = frame.allocReg();
    RegisterID T4 = frame.allocReg();

    /* Test for a value iterator, which could come through an Iterator object. */
    masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
    notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
    stubcc.linkExit(notFast, Uses(1));

    RegisterID T2 = frame.allocReg();

    /* Get cursor. */
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);

    /* Test if the jsid is a string: its type-tag bits must all be zero. */
    masm.loadPtr(T2, T3);
    masm.move(T3, T4);
    masm.andPtr(Imm32(JSID_TYPE_MASK), T4);
    notFast = masm.branchTestPtr(Assembler::NonZero, T4, T4);
    stubcc.linkExit(notFast, Uses(1));

    /* It's safe to increase the cursor now. */
    masm.addPtr(Imm32(sizeof(jsid)), T2, T4);
    masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));

    frame.freeReg(T4);
    frame.freeReg(T1);
    frame.freeReg(T2);

    stubcc.leave();
    OOL_STUBCALL(stubs::IterNext);

    /* T3 still holds the string jsid; push it as the string payload. */
    frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);

    /* Join with the stub call. */
    stubcc.rejoin(Changes(1));
}
4155
bool
mjit::Compiler::iterMore()
{
    /*
     * Compile JSOP_MOREITER fused with the following IFNE/IFNEX: branch
     * directly to the loop target when the native iterator's cursor has not
     * reached props_end, and advance PC past both fused ops.
     */
    FrameEntry *fe = frame.peek(-1);
    RegisterID reg = frame.tempRegForData(fe);

    frame.pinReg(reg);
    RegisterID T1 = frame.allocReg();
    frame.unpinReg(reg);

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
    stubcc.linkExitForBranch(notFast);

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, T1);

    /* Test that the iterator supports fast iteration. */
    notFast = masm.branchTest32(Assembler::NonZero, Address(T1, offsetof(NativeIterator, flags)),
                                Imm32(JSITER_FOREACH));
    stubcc.linkExitForBranch(notFast);

    /* Get props_cursor, test. Sync first: we are about to branch. */
    RegisterID T2 = frame.allocReg();
    frame.syncAndForgetEverything();
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
    Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);

    /* Decode the fused conditional jump to find the branch target. */
    jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
    JSOp next = JSOp(*target);
    JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);

    target += (next == JSOP_IFNE)
              ? GET_JUMP_OFFSET(target)
              : GET_JUMPX_OFFSET(target);

    /* Slow path: stub returns its truthiness in ReturnReg. */
    stubcc.leave();
    OOL_STUBCALL(stubs::IterMore);
    Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
                                      Registers::ReturnReg);

    PC += JSOP_MOREITER_LENGTH;
    PC += js_CodeSpec[next].length;

    stubcc.rejoin(Changes(1));

    return jumpAndTrace(jFast, target, &j);
}
4205
void
mjit::Compiler::iterEnd()
{
    /*
     * Compile JSOP_ENDITER's fast path for a plain enumerate iterator:
     * clear its active bit, rewind the property cursor, and unlink it from
     * cx->enumerators. Anything else bails to stubs::EndIter.
     */
    FrameEntry *fe= frame.peek(-1);
    RegisterID reg = frame.tempRegForData(fe);

    frame.pinReg(reg);
    RegisterID T1 = frame.allocReg();
    frame.unpinReg(reg);

    /* Test clasp */
    Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, &js_IteratorClass);
    stubcc.linkExit(notIterator, Uses(1));

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, T1);

    RegisterID T2 = frame.allocReg();

    /* Load flags. */
    Address flagAddr(T1, offsetof(NativeIterator, flags));
    masm.loadPtr(flagAddr, T2);

    /* Test for a normal enumerate iterator. */
    Jump notEnumerate = masm.branchTest32(Assembler::Zero, T2, Imm32(JSITER_ENUMERATE));
    stubcc.linkExit(notEnumerate, Uses(1));

    /* Clear active bit. */
    masm.and32(Imm32(~JSITER_ACTIVE), T2);
    masm.storePtr(T2, flagAddr);

    /* Reset property cursor. */
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
    masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));

    /* Advance enumerators list: cx->enumerators = ni->next. */
    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
    masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
    masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));

    frame.freeReg(T1);
    frame.freeReg(T2);

    stubcc.leave();
    OOL_STUBCALL(stubs::EndIter);

    frame.pop();

    stubcc.rejoin(Changes(1));
}
4256
/*
 * JSOP_ELEMINC family (obj[id]++ and friends): no inline fast path — call
 * the interpreter stub, pop obj and id, and push the result the stub synced
 * to the stack. |op| is currently unused; the chosen |stub| already encodes
 * the exact operation.
 */
void
mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stub);
    frame.popn(2);
    frame.pushSynced();
}
4265
/*
 * Slow path for JSOP_GETGNAME: fetch a global name via the GetGlobalName
 * stub and push its synced result. Note |index| is unused in this path.
 */
void
mjit::Compiler::jsop_getgname_slow(uint32 index)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::GetGlobalName);
    frame.pushSynced();
}
4273
/*
 * JSOP_BINDGNAME: push the object a subsequent global-name access operates
 * on. For compile-and-go scripts the global object is known at compile time
 * and can be pushed as a constant; otherwise call the BindGlobalName stub.
 */
void
mjit::Compiler::jsop_bindgname()
{
    if (script->compileAndGo && globalObj) {
        frame.push(ObjectValue(*globalObj));
        return;
    }

    /* :TODO: this is slower than it needs to be. */
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::BindGlobalName);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
}
4288
/*
 * JSOP_GETGNAME: read a global variable. The names "undefined", "NaN" and
 * "Infinity" are folded to constants up front. With JS_MONOIC, emits a
 * shape-guarded monomorphic IC that loads the value directly from the
 * global object's slots array; ic::GetGlobalName patches the shape and the
 * slot offset at runtime. Without MONOIC, falls back to the slow stub path.
 */
void
mjit::Compiler::jsop_getgname(uint32 index)
{
    /* Optimize undefined, NaN and Infinity. */
    JSAtom *atom = script->getAtom(index);
    if (atom == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
        frame.push(UndefinedValue());
        return;
    }
    if (atom == cx->runtime->atomState.NaNAtom) {
        frame.push(cx->runtime->NaNValue);
        return;
    }
    if (atom == cx->runtime->atomState.InfinityAtom) {
        frame.push(cx->runtime->positiveInfinityValue);
        return;
    }
#if defined JS_MONOIC
    /* Push the global object (constant for compile-and-go scripts). */
    jsop_bindgname();

    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);

    GetGlobalNameICInfo ic;
    RESERVE_IC_SPACE(masm);
    RegisterID objReg;
    Jump shapeGuard;

    ic.usePropertyCache = true;

    ic.fastPathStart = masm.label();
    if (fe->isConstant()) {
        /* Known global: compare its shape word against a patchable imm. */
        JSObject *obj = &fe->getValue().toObject();
        frame.pop();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        masm.load32FromImm(&obj->objShape, objReg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)), ic.shape);
        masm.move(ImmPtr(obj), objReg);
    } else {
        /* Global held in a register: load its shape and guard on it. */
        objReg = frame.ownRegForData(fe);
        frame.pop();
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)), ic.shape);
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(0));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::GetGlobalName);

    /* Garbage value; the real slot offset is patched in by the IC. */
    uint32 slot = 1 << 24;

    masm.loadPtr(Address(objReg, offsetof(JSObject, slots)), objReg);
    Address address(objReg, slot);

    /* Allocate any register other than objReg. */
    RegisterID treg = frame.allocReg();
    /* After dreg is loaded, it's safe to clobber objReg. */
    RegisterID dreg = objReg;

    ic.load = masm.loadValueWithAddressOffsetPatch(address, treg, dreg);

    frame.pushRegs(treg, dreg);

    stubcc.rejoin(Changes(1));

    getGlobalNames.append(ic);

#else
    jsop_getgname_slow(index);
#endif
}
4370
4371 /*
4372  * Generate just the epilogue code that is specific to callgname. The rest
4373  * is shared with getgname.
4374  */
void
mjit::Compiler::jsop_callgname_epilogue()
{
    /*
     * Push the implicit |this| for the upcoming call. This slow path does
     * the same thing as the interpreter; the fast path below only applies
     * to compile-and-go scripts where the global can be baked in.
     */
    if (!script->compileAndGo) {
        prepareStubCall(Uses(1));
        INLINE_STUBCALL(stubs::PushImplicitThisForGlobal);
        frame.pushSynced();
        return;
    }

    /* Fast path for known-not-an-object callee: |this| is undefined. */
    FrameEntry *fval = frame.peek(-1);
    if (fval->isNotType(JSVAL_TYPE_OBJECT)) {
        frame.push(UndefinedValue());
        return;
    }

    /*
     * Optimized version. This inlines the common case, calling a
     * (non-proxied) function that has the same global as the current
     * script. To make the code simpler, we:
     *      1. test the stronger property that the callee's parent is
     *         equal to the global of the current script, and
     *      2. bake in the global of the current script, which is why
     *         this optimized path requires compile-and-go.
     */

    /* If the callee is not an object, jump to the inline fast path. */
    MaybeRegisterID typeReg = frame.maybePinType(fval);
    RegisterID objReg = frame.copyDataIntoReg(fval);

    MaybeJump isNotObj;
    if (!fval->isType(JSVAL_TYPE_OBJECT)) {
        isNotObj = frame.testObject(Assembler::NotEqual, fval);
        frame.maybeUnpinReg(typeReg);
    }

    /*
     * If the callee is not a function, jump to OOL slow path.
     */
    Jump notFunction = masm.testFunction(Assembler::NotEqual, objReg);
    stubcc.linkExit(notFunction, Uses(1));

    /*
     * If the callee's parent is not equal to the global, jump to
     * OOL slow path.
     */
    masm.loadPtr(Address(objReg, offsetof(JSObject, parent)), objReg);
    Jump globalMismatch = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(globalObj));
    stubcc.linkExit(globalMismatch, Uses(1));
    frame.freeReg(objReg);

    /* OOL stub call path. */
    stubcc.leave();
    OOL_STUBCALL(stubs::PushImplicitThisForGlobal);

    /* Fast path: push undefined as the implicit |this|. */
    if (isNotObj.isSet())
        isNotObj.getJump().linkTo(masm.label(), &masm);
    frame.pushUntypedValue(UndefinedValue());

    stubcc.rejoin(Changes(1));
}
4441
/*
 * Slow path for JSOP_SETGNAME: assign to a global name through the
 * strictness-aware SetGlobalName stub, optionally bypassing the property
 * cache. Consumes the object and value, pushes the synced result.
 */
void
mjit::Compiler::jsop_setgname_slow(JSAtom *atom, bool usePropertyCache)
{
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(atom), Registers::ArgReg1);
    if (usePropertyCache)
        INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalName));
    else
        INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalNameNoCache));
    frame.popn(2);
    frame.pushSynced();
}
4454
/*
 * JSOP_SETGNAME: assign to a global variable. With JS_MONOIC, emits a
 * shape-guarded monomorphic IC that stores straight into the global's slots
 * array (slot offset patched by ic::SetGlobalName); without MONOIC, calls
 * the slow stub path.
 */
void
mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache)
{
#if defined JS_MONOIC
    FrameEntry *objFe = frame.peek(-2);
    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);

    SetGlobalNameICInfo ic;

    /* Pin the value being stored so the IC knows where it lives. */
    frame.pinEntry(fe, ic.vr);
    Jump shapeGuard;

    RESERVE_IC_SPACE(masm);
    ic.fastPathStart = masm.label();
    if (objFe->isConstant()) {
        /* Known global: compare its shape word against a patchable imm. */
        JSObject *obj = &objFe->getValue().toObject();
        JS_ASSERT(obj->isNative());

        ic.objReg = frame.allocReg();
        ic.shapeReg = ic.objReg;
        ic.objConst = true;

        masm.load32FromImm(&obj->objShape, ic.shapeReg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                            ic.shape);
        masm.move(ImmPtr(obj), ic.objReg);
    } else {
        /* Global held in a register: load its shape and guard on it. */
        ic.objReg = frame.copyDataIntoReg(objFe);
        ic.shapeReg = frame.allocReg();
        ic.objConst = false;

        masm.loadShape(ic.objReg, ic.shapeReg);
        shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, ic.shapeReg,
                                            Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                            ic.shape);
        frame.freeReg(ic.shapeReg);
    }
    ic.shapeGuardJump = shapeGuard;
    ic.slowPathStart = stubcc.linkExit(shapeGuard, Uses(2));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::SetGlobalName);

    /* Garbage value; the real slot offset is patched in by the IC. */
    uint32 slot = 1 << 24;

    ic.usePropertyCache = usePropertyCache;

    masm.loadPtr(Address(ic.objReg, offsetof(JSObject, slots)), ic.objReg);
    Address address(ic.objReg, slot);

    /* Emit the store in whichever form matches the pinned value. */
    if (ic.vr.isConstant()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.value(), address);
    } else if (ic.vr.isTypeKnown()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ImmType(ic.vr.knownType()),
                                                          ic.vr.dataReg(), address);
    } else {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.typeReg(), ic.vr.dataReg(), address);
    }

    frame.freeReg(ic.objReg);
    frame.unpinEntry(ic.vr);
    /* Drop the object, leaving the assigned value as the result. */
    frame.shimmy(1);

    stubcc.rejoin(Changes(1));

    ic.fastPathRejoin = masm.label();
    setGlobalNames.append(ic);
#else
    jsop_setgname_slow(atom, usePropertyCache);
#endif
}
4530
/*
 * Slow path for JSOP_SETELEM: obj[id] = value via the strictness-aware
 * SetElem stub. Consumes obj, id and value; pushes the synced result.
 */
void
mjit::Compiler::jsop_setelem_slow()
{
    prepareStubCall(Uses(3));
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem));
    frame.popn(3);
    frame.pushSynced();
}
4539
/*
 * Slow path for JSOP_GETELEM: fetch obj[id] via the GetElem stub.
 * Consumes obj and id; pushes the synced result.
 */
void
mjit::Compiler::jsop_getelem_slow()
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::GetElem);
    frame.popn(2);
    frame.pushSynced();
}
4548
/*
 * JSOP_UNBRAND: call the Unbrand stub on the object on top of the stack.
 * The operand is left in place (no pop/push).
 */
void
mjit::Compiler::jsop_unbrand()
{
    prepareStubCall(Uses(1));
    INLINE_STUBCALL(stubs::Unbrand);
}
4555
/*
 * JSOP_INSTANCEOF: emit an inline prototype-chain walk for the common case
 * (both operands objects, rhs a non-bound function). Anything else — a
 * non-object operand, bound function, or primitive .prototype — goes out of
 * line to stubs::InstanceOf / stubs::FastInstanceOf. Returns false on OOM
 * propagated from jsop_getprop.
 */
bool
mjit::Compiler::jsop_instanceof()
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    // The fast path applies only when both operands are objects.
    if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(2));
        INLINE_STUBCALL(stubs::InstanceOf);
        frame.popn(2);
        frame.takeReg(Registers::ReturnReg);
        frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
        return true;
    }

    // If the rhs type is unknown, guard that it is a function object.
    MaybeJump firstSlow;
    if (!rhs->isTypeKnown()) {
        Jump j = frame.testObject(Assembler::NotEqual, rhs);
        stubcc.linkExit(j, Uses(2));
        RegisterID reg = frame.tempRegForData(rhs);
        j = masm.testFunction(Assembler::NotEqual, reg);
        stubcc.linkExit(j, Uses(2));
    }

    /* Test for bound functions, which need the generic stub. */
    RegisterID obj = frame.tempRegForData(rhs);
    Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
                                     Imm32(JSObject::BOUND_FUNCTION));
    {
        stubcc.linkExit(isBound, Uses(2));
        stubcc.leave();
        OOL_STUBCALL(stubs::InstanceOf);
        firstSlow = stubcc.masm.jump();
    }


    /* This is sadly necessary because the error case needs the object. */
    frame.dup();

    /* Fetch rhs.prototype, which the chain walk compares against. */
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false))
        return false;

    /* Primitive prototypes are invalid. */
    rhs = frame.peek(-1);
    Jump j = frame.testPrimitive(Assembler::Equal, rhs);
    stubcc.linkExit(j, Uses(3));

    /* Allocate registers up front, because of branchiness. */
    obj = frame.copyDataIntoReg(lhs);
    RegisterID proto = frame.copyDataIntoReg(rhs);
    RegisterID temp = frame.allocReg();

    /* A primitive lhs is simply "false", not an error. */
    MaybeJump isFalse;
    if (!lhs->isTypeKnown())
        isFalse = frame.testPrimitive(Assembler::Equal, lhs);

    Address protoAddr(obj, offsetof(JSObject, proto));
    Label loop = masm.label();

    /* Walk prototype chain, break out on NULL or hit. */
    masm.loadPayload(protoAddr, obj);
    Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
    Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
    isTrue.linkTo(loop, &masm);
    masm.move(Imm32(1), temp);
    isTrue = masm.jump();

    /* False exits land here and load 0; the true exit skips over it. */
    if (isFalse.isSet())
        isFalse.getJump().linkTo(masm.label(), &masm);
    isFalse2.linkTo(masm.label(), &masm);
    masm.move(Imm32(0), temp);
    isTrue.linkTo(masm.label(), &masm);

    frame.freeReg(proto);
    frame.freeReg(obj);

    stubcc.leave();
    OOL_STUBCALL(stubs::FastInstanceOf);

    frame.popn(3);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);

    if (firstSlow.isSet())
        firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
    stubcc.rejoin(Changes(1));
    return true;
}
4644
/*
 * JSOP_EVAL: call eval with |argc| arguments through the Eval stub. All
 * registers are synced and killed first since eval can do anything to the
 * frame. Consumes callee, |this| and the args (argc + 2 slots); pushes the
 * synced result.
 */
void
mjit::Compiler::emitEval(uint32 argc)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::Eval);
    frame.popn(argc + 2);
    frame.pushSynced();
}
4658
/*
 * JSOP_ARGUMENTS: call the Arguments stub. The stack result handling is
 * done by the caller of this helper (nothing is pushed here).
 */
void
mjit::Compiler::jsop_arguments()
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Arguments);
}
4665
4666 void
4667 mjit::Compiler::jsop_newinit()
4668 {
4669     bool isArray;
4670     unsigned count = 0;
4671     JSObject *baseobj = NULL;
4672     switch (*PC) {
4673       case JSOP_NEWINIT:
4674         isArray = (PC[1] == JSProto_Array);
4675         break;
4676       case JSOP_NEWARRAY:
4677         isArray = true;
4678         count = GET_UINT24(PC);
4679         break;
4680       case JSOP_NEWOBJECT:
4681         isArray = false;
4682         baseobj = script->getObject(fullAtomIndex(PC));
4683         break;
4684       default:
4685         JS_NOT_REACHED("Bad op");
4686         return;
4687     }
4688
4689     prepareStubCall(Uses(0));
4690     if (isArray) {
4691         masm.move(Imm32(count), Registers::ArgReg1);
4692         INLINE_STUBCALL(stubs::NewInitArray);
4693     } else {
4694         masm.move(ImmPtr(baseobj), Registers::ArgReg1);
4695         INLINE_STUBCALL(stubs::NewInitObject);
4696     }
4697     frame.takeReg(Registers::ReturnReg);
4698     frame.pushInitializerObject(Registers::ReturnReg, *PC == JSOP_NEWARRAY, baseobj);
4699 }
4700
4701 /*
4702  * Note: This function emits tracer hooks into the OOL path. This means if
4703  * it is used in the middle of an in-progress slow path, the stream will be
4704  * hopelessly corrupted. Take care to only call this before linkExits() and
4705  * after rejoin()s.
4706  */
/*
 * Emit the jump for a backward branch, optionally wiring in a trace IC so
 * the tracer can be invoked at loop headers. |j| is the inline-path jump,
 * |slow| an optional OOL-path jump to the same target. Returns false on OOM.
 */
bool
mjit::Compiler::jumpAndTrace(Jump j, jsbytecode *target, Jump *slow)
{
    // XXX refactor this little bit
#ifndef JS_TRACER
    /* No tracer compiled in: just emit plain in-script jumps. */
    if (!jumpInScript(j, target))
        return false;

    if (slow) {
        if (!stubcc.jumpInScript(*slow, target))
            return false;
    }
#else
    /*
     * Skip tracing when hints are off, the branch is forward, the target is
     * not a (NO)TRACE op, or (with MONOIC) the trace IC index is invalid.
     */
    if (!addTraceHints || target >= PC ||
        (JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
#ifdef JS_MONOIC
        || GET_UINT16(target) == BAD_TRACEIC_INDEX
#endif
        )
    {
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
        return true;
    }

# if JS_MONOIC
    TraceGenInfo ic;

    ic.initialized = true;
    ic.stubEntry = stubcc.masm.label();
    ic.jumpTarget = target;
    ic.traceHint = j;
    if (slow)
        ic.slowTraceHint = *slow;

    /* Grow the trace IC vector to hold this index if needed. */
    uint16 index = GET_UINT16(target);
    if (traceICs.length() <= index)
        if (!traceICs.resize(index+1))
            return false;
# endif

    Label traceStart = stubcc.masm.label();

    /*
     * We make a trace IC even if the trace is currently disabled, in case it is
     * enabled later, but set up the jumps so that InvokeTracer is initially skipped.
     */
    if (JSOp(*target) == JSOP_TRACE) {
        stubcc.linkExitDirect(j, traceStart);
        if (slow)
            slow->linkTo(traceStart, &stubcc.masm);
    } else {
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
    }

# if JS_MONOIC
    ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
    traceICs[index] = ic;

    /* Count down the loop counter; skip the tracer until it hits zero. */
    Jump nonzero = stubcc.masm.branchSub32(Assembler::NonZero, Imm32(1),
                                           Address(Registers::ArgReg1,
                                                   offsetof(TraceICInfo, loopCounter)));
    stubcc.jumpInScript(nonzero, target);
# endif

    /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
    {
        jsbytecode* pc = PC;
        PC = target;

        OOL_STUBCALL(stubs::InvokeTracer);

        PC = pc;
    }

    /* NULL return from InvokeTracer: just continue at the jump target. */
    Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                        Registers::ReturnReg);
    if (!stubcc.jumpInScript(no, target))
        return false;
    restoreFrameRegs(stubcc.masm);
    stubcc.masm.jump(Registers::ReturnReg);
#endif
    return true;
}
4796
4797 void
4798 mjit::Compiler::enterBlock(JSObject *obj)
4799 {
4800     // If this is an exception entry point, then jsl_InternalThrow has set
4801     // VMFrame::fp to the correct fp for the entry point. We need to copy
4802     // that value here to FpReg so that FpReg also has the correct sp.
4803     // Otherwise, we would simply be using a stale FpReg value.
4804     // Additionally, we check the interrupt flag to allow interrupting
4805     // deeply nested exception handling.
4806     if (analysis->getCode(PC).exceptionEntry) {
4807         restoreFrameRegs(masm);
4808         interruptCheckHelper();
4809     }
4810
4811     uint32 oldFrameDepth = frame.localSlots();
4812
4813     /* For now, don't bother doing anything for this opcode. */
4814     frame.syncAndForgetEverything();
4815     masm.move(ImmPtr(obj), Registers::ArgReg1);
4816     uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
4817     INLINE_STUBCALL(stubs::EnterBlock);
4818     frame.enterBlock(n);
4819
4820     uintN base = JSSLOT_FREE(&js_BlockClass);
4821     uintN count = OBJ_BLOCK_COUNT(cx, obj);
4822     uintN limit = base + count;
4823     for (uintN slot = base, i = 0; slot < limit; slot++, i++) {
4824         const Value &v = obj->getSlotRef(slot);
4825         if (v.isBoolean() && v.toBoolean())
4826             frame.setClosedVar(oldFrameDepth + i);
4827     }
4828 }
4829
4830 void
4831 mjit::Compiler::leaveBlock()
4832 {
4833     /*
4834      * Note: After bug 535912, we can pass the block obj directly, inline
4835      * PutBlockObject, and do away with the muckiness in PutBlockObject.
4836      */
4837     uint32 n = js_GetVariableStackUses(JSOP_LEAVEBLOCK, PC);
4838     JSObject *obj = script->getObject(fullAtomIndex(PC + UINT16_LEN));
4839     prepareStubCall(Uses(n));
4840     masm.move(ImmPtr(obj), Registers::ArgReg1);
4841     INLINE_STUBCALL(stubs::LeaveBlock);
4842     frame.leaveBlock(n);
4843 }
4844
4845 // Creates the new object expected for constructors, and places it in |thisv|.
4846 // It is broken down into the following operations:
4847 //   CALLEE
4848 //   GETPROP "prototype"
4849 //   IFPRIMTOP:
4850 //       NULL
4851 //   call js_CreateThisFromFunctionWithProto(...)
4852 //
bool
mjit::Compiler::constructThis()
{
    JS_ASSERT(isConstructing);

    // Load the callee.
    frame.pushCallee();

    // Get callee.prototype.
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false))
        return false;

    // Reach into the proto Value and grab a register for its data.
    FrameEntry *protoFe = frame.peek(-1);
    RegisterID protoReg = frame.ownRegForData(protoFe);

    // Now, get the type. If it's not an object, set protoReg to NULL.
    // The OOL path just nulls the register and jumps back inline.
    Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
    stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
    stubcc.masm.move(ImmPtr(NULL), protoReg);
    stubcc.crossJump(stubcc.masm.jump(), masm.label());

    // Done with the protoFe.
    frame.pop();

    // Call CreateThis with the (possibly NULL) proto in ArgReg1.
    prepareStubCall(Uses(0));
    if (protoReg != Registers::ArgReg1)
        masm.move(protoReg, Registers::ArgReg1);
    INLINE_STUBCALL(stubs::CreateThis);
    frame.freeReg(protoReg);
    return true;
}
4885
/*
 * JSOP_TABLESWITCH: emit an inline jump table for an int32 switchee when the
 * case count is small (<= 256); otherwise, or when the operand is known not
 * to be an int32, call the TableSwitch stub and jump to the address it
 * returns. Returns false on OOM.
 */
bool
mjit::Compiler::jsop_tableswitch(jsbytecode *pc)
{
#if defined JS_CPU_ARM
    JS_NOT_REACHED("Implement jump(BaseIndex) for ARM");
    return true;
#else
    jsbytecode *originalPC = pc;

    /* Decode the default target and the low/high case bounds. */
    uint32 defaultTarget = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;

    jsint low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    jsint high = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int numJumps = high + 1 - low;
    JS_ASSERT(numJumps >= 0);

    /*
     * If there are no cases, this is a no-op. The default case immediately
     * follows in the bytecode and is always taken.
     */
    if (numJumps == 0) {
        frame.pop();
        return true;
    }

    /* Fully slow path: non-int operand or too many cases for a table. */
    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
        frame.syncAndForgetEverything();
        masm.move(ImmPtr(originalPC), Registers::ArgReg1);

        /* prepareStubCall() is not needed due to forgetEverything() */
        INLINE_STUBCALL(stubs::TableSwitch);
        frame.pop();
        masm.jump(Registers::ReturnReg);
        return true;
    }

    /* Materialize the switchee's int32 payload into dataReg. */
    RegisterID dataReg;
    if (fe->isConstant()) {
        JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
        dataReg = frame.allocReg();
        masm.move(Imm32(fe->getValue().toInt32()), dataReg);
    } else {
        dataReg = frame.copyDataIntoReg(fe);
    }

    RegisterID reg = frame.allocReg();
    frame.syncAndForgetEverything();

    /* If the type is not known to be int32, test it at runtime. */
    MaybeJump notInt;
    if (!fe->isType(JSVAL_TYPE_INT32))
        notInt = masm.testInt32(Assembler::NotEqual, frame.addressOf(fe));

    /* The table base address is patched in after code is finalized. */
    JumpTable jt;
    jt.offsetIndex = jumpTableOffsets.length();
    jt.label = masm.moveWithPatch(ImmPtr(NULL), reg);
    jumpTables.append(jt);

    /* Record each case's bytecode offset (0 means use the default). */
    for (int i = 0; i < numJumps; i++) {
        uint32 target = GET_JUMP_OFFSET(pc);
        if (!target)
            target = defaultTarget;
        uint32 offset = (originalPC + target) - script->code;
        jumpTableOffsets.append(offset);
        pc += JUMP_OFFSET_LEN;
    }
    /* Rebase the switchee to a zero-based table index and range-check it. */
    if (low != 0)
        masm.sub32(Imm32(low), dataReg);
    Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
    BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
    masm.jump(jumpTarget);

    /* OOL path for the non-int32 case: use the TableSwitch stub. */
    if (notInt.isSet()) {
        stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
        stubcc.leave();
        stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
        OOL_STUBCALL(stubs::TableSwitch);
        stubcc.masm.jump(Registers::ReturnReg);
    }
    frame.pop();
    return jumpAndTrace(defaultCase, originalPC + defaultTarget);
#endif
}
4972
/*
 * Slow path for JSOP_CALLELEM: fetch obj[id] as a callee via the CallElem
 * stub. Consumes obj and id; pushes the two synced results the stub leaves
 * on the stack (the callee and the implicit |this|).
 */
void
mjit::Compiler::jsop_callelem_slow()
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::CallElem);
    frame.popn(2);
    frame.pushSynced();
    frame.pushSynced();
}
4982
/*
 * JSOP_FORPROP: advance the for-in iterator and store the produced value
 * into obj's property |atom|. The stack diagrams below track each step.
 */
void
mjit::Compiler::jsop_forprop(JSAtom *atom)
{
    // Before: ITER OBJ
    // After:  ITER OBJ ITER
    frame.dupAt(-2);

    // Before: ITER OBJ ITER 
    // After:  ITER OBJ ITER VALUE
    iterNext();

    // Before: ITER OBJ ITER VALUE
    // After:  ITER OBJ VALUE
    frame.shimmy(1);

    // Before: ITER OBJ VALUE
    // After:  ITER VALUE
    jsop_setprop(atom, false);

    // Before: ITER VALUE
    // After:  ITER
    frame.pop();
}
5006
/*
 * JSOP_FORNAME: bind the scope object for |atom|, then delegate to
 * jsop_forprop to advance the iterator and perform the assignment.
 */
void
mjit::Compiler::jsop_forname(JSAtom *atom)
{
    // Before: ITER
    // After:  ITER SCOPEOBJ
    jsop_bindname(atom, false);
    jsop_forprop(atom);
}
5015
/*
 * JSOP_FORGNAME: advance the for-in iterator and assign the produced value
 * to the global |atom|. Mirrors jsop_forprop but binds the global object
 * and uses jsop_setgname for the store.
 */
void
mjit::Compiler::jsop_forgname(JSAtom *atom)
{
    // Before: ITER
    // After:  ITER GLOBAL
    jsop_bindgname();

    // Before: ITER GLOBAL
    // After:  ITER GLOBAL ITER
    frame.dupAt(-2);

    // Before: ITER GLOBAL ITER 
    // After:  ITER GLOBAL ITER VALUE
    iterNext();

    // Before: ITER GLOBAL ITER VALUE
    // After:  ITER GLOBAL VALUE
    frame.shimmy(1);

    // Before: ITER GLOBAL VALUE
    // After:  ITER VALUE
    jsop_setgname(atom, false);

    // Before: ITER VALUE
    // After:  ITER
    frame.pop();
}
5043