Fix TIVI-504 (backport of trac.webkit.org/changeset/144137)
[profile/ivi/webkit-efl.git] / Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlineMethods.h"
#include "Heap.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)

PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure* trampolines)
{
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
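    // For example (a sketch, assuming the usual JSVALUE64 immediate encoding where
    // TagTypeNumber == 0xFFFF000000000000): a length of 5 becomes the EncodedJSValue
    // 0xFFFF000000000005, i.e. the zero-extended int32 with the number tag ORed in.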
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
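    // Note (our reading of FunctionExecutable): m_numParametersForCall stays negative
    // until the executable has been compiled for call, so this one signed compare
    // doubles as a "has JIT code" check and routes us to the compile path when needed.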
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

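    // Why 16 - sizeof(void*): the call into this thunk pushed an 8-byte return
    // address, so rsp is currently 8 (mod 16); dropping another 8 bytes makes rsp
    // 16-byte aligned at the host call below, as the SysV x86-64 ABI expects.
    // (A sketch of the arithmetic, assuming the SysV ABI; the code is unchanged.)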
    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction)
{
    return CodeRef::createSelfManagedCodeRef(globalData->jitStubs->ctiNativeCall());
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
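            // A note on the Trusted/untrusted split below (our reading of the macro
            // assembler): plain ImmPtr immediates are candidates for JIT constant
            // blinding, so attacker-influenceable number constants go through ImmPtr,
            // while engine-created cell pointers can safely stay TrustedImmPtr.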
            if (!getConstantOperand(src).isNumber())
                storePtr(TrustedImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
            storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);
    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    comparePtr(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
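    // Cells can still read as undefined: objects whose Structure carries the
    // MasqueradesAsUndefined flag (e.g. document.all in WebKit) must report true
    // here, so we answer from the type-info flags rather than hardcoding false.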
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
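    // A sketch of the trick, assuming the usual JSVALUE64 immediates (ValueFalse == 0x06,
    // ValueTrue == 0x07): xoring with ValueFalse maps the two booleans to 0x00 and 0x01,
    // and no other value lands in {0, 1}, so testing against ~1 yields 1 exactly for booleans.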
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    testPtr(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
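    // Under this encoding (assuming TagTypeNumber == 0xFFFF000000000000) every number,
    // integer or offset double, has at least one of the top 16 bits set, and no other
    // value does, so a single AND against tagTypeNumberRegister classifies numbers.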
    testPtr(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
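    // Worked through (a sketch, assuming ValueFalse == 0x06 and ValueTrue == 0x07):
    // false ^ 0x06 == 0x00 and true ^ 0x06 == 0x01; anything with a bit set outside
    // the low bit was not a boolean and takes the slow case. Xoring the survivor with
    // 0x07 then gives 0x07 (true) or 0x06 (false) - the logically inverted boolean.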
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
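    // The masking below folds the two cases into one compare (a sketch, assuming the
    // usual immediates: null == 0x02, undefined == 0x0A, TagBitUndefined == 0x08):
    // clearing TagBitUndefined maps undefined onto null, so a single compare against
    // the encoded null matches both.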
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

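    // The loop below re-validates the cached prototype chain (our reading of
    // JSPropertyNameIterator): the iterator snapshotted a null-terminated Structure*
    // vector when it was created, and we walk the live prototype chain comparing
    // structures entry by entry; any mismatch means a prototype may have changed
    // shape, so we fall back to asking the object via cti_has_property.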
    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        comparePtr(Equal, regT1, regT0, regT0);
    else
        comparePtr(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
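    // On entry here the throw machinery has already unwound to the handler's frame
    // and (as we read it) left that frame pointer in regT0, so we adopt it as the
    // call frame, then fetch and clear the pending exception from the global data.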
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
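    // The stub call below resolves the scrutinee against the jump table and returns
    // the machine-code address to branch to (the default target on a miss, as we read
    // the stub), so the indirect jump(regT0) dispatches the switch. Growing ctiOffsets
    // up front gives the later linking pass one slot per bytecode branch offset.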
1103
1104     JITStubCall stubCall(this, cti_op_switch_imm);
1105     stubCall.addArgument(scrutinee, regT2);
1106     stubCall.addArgument(TrustedImm32(tableIndex));
1107     stubCall.call();
1108     jump(regT0);
1109 }
1110
1111 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1112 {
1113     unsigned tableIndex = currentInstruction[1].u.operand;
1114     unsigned defaultOffset = currentInstruction[2].u.operand;
1115     unsigned scrutinee = currentInstruction[3].u.operand;
1116
1117     // create jump table for switch destinations, track this switch statement.
1118     SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1119     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
1120     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1121
1122     JITStubCall stubCall(this, cti_op_switch_char);
1123     stubCall.addArgument(scrutinee, regT2);
1124     stubCall.addArgument(TrustedImm32(tableIndex));
1125     stubCall.call();
1126     jump(regT0);
1127 }
1128
1129 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1130 {
1131     unsigned tableIndex = currentInstruction[1].u.operand;
1132     unsigned defaultOffset = currentInstruction[2].u.operand;
1133     unsigned scrutinee = currentInstruction[3].u.operand;
1134
1135     // create jump table for switch destinations, track this switch statement.
1136     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1137     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
1138
1139     JITStubCall stubCall(this, cti_op_switch_string);
1140     stubCall.addArgument(scrutinee, regT2);
1141     stubCall.addArgument(TrustedImm32(tableIndex));
1142     stubCall.call();
1143     jump(regT0);
1144 }
1145
1146 void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
1147 {
1148     JITStubCall stubCall(this, cti_op_throw_reference_error);
1149     if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
1150         stubCall.addArgument(TrustedImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
1151     else
1152         stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
1153     stubCall.call();
1154 }
1155
1156 void JIT::emit_op_debug(Instruction* currentInstruction)
1157 {
1158 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1159     UNUSED_PARAM(currentInstruction);
1160     breakpoint();
1161 #else
1162     JITStubCall stubCall(this, cti_op_debug);
1163     stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
1164     stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
1165     stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
1166     stubCall.call();
1167 #endif
1168 }
1169
1170 void JIT::emit_op_eq_null(Instruction* currentInstruction)
1171 {
1172     unsigned dst = currentInstruction[1].u.operand;
1173     unsigned src1 = currentInstruction[2].u.operand;
1174
1175     emitGetVirtualRegister(src1, regT0);
1176     Jump isImmediate = emitJumpIfNotJSCell(regT0);
1177
1178     loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1179     test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1180
1181     Jump wasNotImmediate = jump();
1182
1183     isImmediate.link(this);
1184
1185     andPtr(TrustedImm32(~TagBitUndefined), regT0);
1186     comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);
1187
1188     wasNotImmediate.link(this);
1189
1190     emitTagAsBoolImmediate(regT0);
1191     emitPutVirtualRegister(dst);
1192
1193 }
1194
1195 void JIT::emit_op_neq_null(Instruction* currentInstruction)
1196 {
1197     unsigned dst = currentInstruction[1].u.operand;
1198     unsigned src1 = currentInstruction[2].u.operand;
1199
1200     emitGetVirtualRegister(src1, regT0);
1201     Jump isImmediate = emitJumpIfNotJSCell(regT0);
1202
1203     loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
1204     test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);
1205
1206     Jump wasNotImmediate = jump();
1207
1208     isImmediate.link(this);
1209
1210     andPtr(TrustedImm32(~TagBitUndefined), regT0);
1211     comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);
1212
1213     wasNotImmediate.link(this);
1214
1215     emitTagAsBoolImmediate(regT0);
1216     emitPutVirtualRegister(dst);
1217 }
1218
1219 void JIT::emit_op_enter(Instruction*)
1220 {
1221     // Even though CTI doesn't use them, we initialize our constant
1222     // registers to zap stale pointers, to avoid unnecessarily prolonging
1223     // object lifetime and increasing GC pressure.
1224     size_t count = m_codeBlock->m_numVars;
1225     for (size_t j = 0; j < count; ++j)
1226         emitInitRegister(j);
1227
1228 }
1229
1230 void JIT::emit_op_create_activation(Instruction* currentInstruction)
1231 {
1232     unsigned dst = currentInstruction[1].u.operand;
1233     
1234     Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
1235     JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
1236     emitPutVirtualRegister(dst);
1237     activationCreated.link(this);
1238 }
1239
1240 void JIT::emit_op_create_arguments(Instruction* currentInstruction)
1241 {
1242     unsigned dst = currentInstruction[1].u.operand;
1243
1244     Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
1245     JITStubCall(this, cti_op_create_arguments).call();
1246     emitPutVirtualRegister(dst);
1247     emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
1248     argsCreated.link(this);
1249 }
1250
1251 void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
1252 {
1253     unsigned dst = currentInstruction[1].u.operand;
1254
1255     storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
1256 }
1257
1258 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1259 {
1260     emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);
1261
1262     emitJumpSlowCaseIfNotJSCell(regT1);
1263     if (shouldEmitProfiling()) {
1264         loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
1265         emitValueProfilingSite();
1266     }
1267     addSlowCase(branchPtr(Equal, Address(regT1, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
1268 }
1269
1270 void JIT::emit_op_create_this(Instruction* currentInstruction)
1271 {
1272     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1273     loadPtr(Address(regT0, JSFunction::offsetOfCachedInheritorID()), regT2);
1274     addSlowCase(branchTestPtr(Zero, regT2));
1275     
1276     // now regT2 contains the inheritorID, which is the structure that the newly
1277     // allocated object will have.
1278     
1279     emitAllocateJSFinalObject(regT2, regT0, regT1);
1280     emitPutVirtualRegister(currentInstruction[1].u.operand);
1281 }
1282
1283 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1284 {
1285     linkSlowCase(iter); // doesn't have an inheritor ID
1286     linkSlowCase(iter); // allocation failed
1287     JITStubCall stubCall(this, cti_op_create_this);
1288     stubCall.call(currentInstruction[1].u.operand);
1289 }
1290
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}


// Slow cases

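// A note on the protocol used below: every linkSlowCase(iter) consumes the
// next jump recorded by an addSlowCase() in the matching fast path, in
// emission order, so the count and order of the links must mirror the fast
// path exactly.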
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImmPtr(bitwise_cast<void*>(JSValue::encode(jsUndefined()))), regT0); // Seed the value profile for the undefined case.
    Jump isNotUndefined = branchPtr(NotEqual, regT1, TrustedImmPtr(JSValue::encode(jsUndefined())));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImmPtr(bitwise_cast<void*>(JSValue::encode(m_globalData->stringStructure.get()))), regT0); // Seed the value profile for the string case.
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0); // Undo the fast path's xor, restoring the original operand for the stub.
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // Inverted: jfalse reuses the jtrue stub and branches on a false result.
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0); // neq is eq with the low bit of the boolean result flipped.
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

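// compileOpStrictEq registers three slow cases on the fast path (assuming
// the usual JSVALUE64 encoding): both operands are cells, the left operand
// is a double, and the right operand is a double. The three links below
// consume them in that order.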
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Note: no increment here; eval call sites are not linked, so they do not consume a CallLinkInfo.
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

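// op_get_arguments_length fast path: while no arguments object has been
// materialized, the length is simply the ArgumentCount slot of the call
// frame header minus one, since that count includes |this|.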
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

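// op_get_argument_by_val fast path: the index is bumped past |this|,
// bounds-checked against ArgumentCount, and then negated, because arguments
// live at negative offsets from the current call frame; the BaseIndex load
// below addresses them relative to thisArgumentOffset().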
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    loadPtr(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

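// Slow path for the above: the first slow case fires when an arguments
// object already exists, so creation is skipped; the other two (non-integer
// index, index out of range) first materialize the arguments object. Both
// routes then redo the access as a generic get_by_val.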
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

#endif // USE(JSVALUE64)

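// op_resolve_global_dynamic guards a cached global resolve against
// dynamically injected scopes: it walks |skip| scope chain nodes, checking
// that each is still a plain activation by structure; any mismatch bails out
// to a full cti_op_resolve in the slow path.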
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--)
        linkSlowCase(iter);
    JITStubCall resolveStubCall(this, cti_op_resolve);
    resolveStubCall.addArgument(TrustedImmPtr(ident));
    resolveStubCall.call(dst);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

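// op_new_func: a nonzero operand 3 marks a lazily initialized function
// declaration, in which case the destination register may already hold the
// function and the allocation below is skipped.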
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* executable = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    emitAllocateJSFunction(executable, regT2, regT0, regT1);

    emitStoreCell(dst, regT0);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emitSlow_op_new_func(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);

    // We only inline the allocation of anonymous function expressions.
    // If we want to be able to allocate a named function expression, we would
    // need to be able to do inline allocation of a JSStaticScopeObject.
    if (executable->name().isNull()) {
        emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
        emitAllocateJSFunction(executable, regT2, regT0, regT1);
        emitStoreCell(currentInstruction[1].u.operand, regT0);
        return;
    }

    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_func_exp(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    if (!executable->name().isNull())
        return;
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

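// op_new_array: arrays whose backing store would be oversize for the copied
// space always go through the stub; otherwise the array is allocated inline,
// and the stub is kept as the slow path for allocation failure.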
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length))) {
        JITStubCall stubCall(this, cti_op_new_array);
        stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
        stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
        stubCall.call(currentInstruction[1].u.operand);
        return;
    }
    int dst = currentInstruction[1].u.operand;
    int values = currentInstruction[2].u.operand;

    emitAllocateJSArray(values, length, regT0, regT1, regT2);
    emitStoreCell(dst, regT0);
}

void JIT::emitSlow_op_new_array(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // If the allocation would be oversize, we will already have made the proper
    // stub call above in emit_op_new_array.
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length)))
        return;
    linkSlowCase(iter); // Not enough space in CopiedSpace for storage.
    linkSlowCase(iter); // Not enough space in MarkedSpace for cell.

    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)