/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlineMethods.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
{
    // (2) The second function provides fast property access for string length.
    Label stringLengthBegin = align();

    // Check that regT0 is a string.
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));

    // Checks out okay! - get the length from the JSString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(LessThan, regT0, TrustedImm32(0));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
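    // (A note on the boxing below, as a sketch of the JSVALUE64 encoding: an integer
    // JSValue is the 32-bit payload with the high TagTypeNumber bits set, so
    // emitFastArithIntToImmNoCheck can tag regT0 with a plain OR. No overflow check
    // is needed because a string length is always a non-negative int32.)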
    emitFastArithIntToImmNoCheck(regT0, regT0);
    ret();

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
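    // (How lazy linking works, roughly: an unlinked call site first lands here. The
    // cti_vm_lazyLinkCall stub below compiles the callee if necessary, patches the
    // originating call site to call the callee directly, and returns the callee's
    // entry point in regT0, which we then jump into; later calls from the same site
    // bypass this trampoline entirely.)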
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(emitJumpIfNotJSCell(regT0));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
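    // (m_numParametersForCall stays negative until the executable has been compiled
    // for calling, so this branch doubles as a "has code block?" test: if the count
    // is already >= 0 we can skip straight to the compiled entry point.)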
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(emitJumpIfNotJSCell(regT0));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
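    // (A null CodeBlock marks this frame as a non-JS frame; the exception and
    // stack-walking machinery uses that to tell host frames apart from JS frames.)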
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);
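    // (LinkBuffer copies the code generated above into executable memory; the
    // patchBuffer.link() calls resolve each recorded Call to its C stub, and
    // trampolineAt() below yields the final, relocated address of each Label.)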

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = patchBuffer.finalizeCode();
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.
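    // (On x86-64 the stack must be 16-byte aligned at each call; the call into this
    // thunk pushed an 8-byte return address, so dropping another 16 - sizeof(void*)
    // == 8 bytes restores 16-byte alignment for the host function call below.)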

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
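    // (The MIPS o32 ABI requires the caller to reserve home space for the four
    // argument registers a0-a3 even when the arguments are passed in registers,
    // hence the fixed 16-byte reservation.)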
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0.
    move(callFrameRegister, MIPSRegisters::a0);

    // Call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception.
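    // (Host functions report exceptions by storing into globalData->exception rather
    // than by unwinding, so the thunk must poll that slot after every native call.)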
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception.
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction)
{
    return CodeRef::createSelfManagedCodeRef(globalData->jitStubs->ctiNativeCall());
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
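            // (TrustedImmPtr vs. ImmPtr: "trusted" immediates are known not to be
            // attacker-controlled and are emitted as-is, while untrusted Imm values
            // may be blinded by the assembler. Numeric constants can encode
            // attacker-chosen bit patterns, so they take the untrusted path.)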
            if (!getConstantOperand(src).isNumber())
                storePtr(TrustedImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
            storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // FIXME: this check is only needed because the JSC API allows HasInstance to be overridden; we should deprecate this.
    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    comparePtr(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    test8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
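    // (ValueFalse and ValueTrue differ only in the low bit, so after xoring with
    // ValueFalse a genuine boolean leaves only bit 0 possibly set; testing against
    // ~1 then checks that no other bits survive.)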
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    testPtr(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
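    // (Under the JSVALUE64 encoding every number - immediate int or boxed double -
    // has at least one of the high TagTypeNumber bits set, so a single mask against
    // tagTypeNumberRegister suffices as the is-number test.)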
    testPtr(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &(m_codeBlock->globalResolveInfo(currentIndex));

    // Check Structure of global object
    move(TrustedImmPtr(globalObject), regT0);
    move(TrustedImmPtr(resolveInfoAddress), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, JSCell::structureOffset()))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_propertyStorage)), regT0);
    load32(Address(regT2, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
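    // (Undefined and null differ only in the TagBitUndefined bit, so masking that
    // bit off maps undefined onto null and lets one comparison cover both values.)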
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell.get();
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(JSValue(ptr)))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
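    // (Both operands carry the TagTypeNumber tag; xor cancels it, so the result must
    // be retagged below. Contrast op_bitor, where or-ing preserves the tag and no
    // retag is needed.)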
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    storePtr(tagTypeNumberRegister, payloadFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
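    // (A cell pointer has all tag bits clear, so the or of the two values looks like
    // a cell only when both operands are cells; that is the one case - e.g. two
    // strings - where a raw pointer comparison is not a valid strict-equality test.)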
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        comparePtr(Equal, regT1, regT0, regT0);
    else
        comparePtr(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), TrustedImm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
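    // (The unwind machinery arrives here with the catching frame's ExecState in
    // regT0; adopt it as the current call frame before reading the exception out of
    // the global data and clearing the exception slot.)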
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(TrustedImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
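    // (The stub looks the scrutinee up in the jump table and returns the machine-code
    // address of the matching case - or of the default target - in regT0, so the
    // switch ends in a single indirect jump.)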

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_reference_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    test8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(TrustedImm32(~TagBitUndefined), regT0);
    comparePtr(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(TrustedImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfCachedInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // now regT2 contains the inheritorID, which is the structure that the newly
    // allocated object will have.
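    // (The cached inheritor ID is the Structure for objects whose prototype is this
    // function's .prototype; it is filled in lazily, so a zero value simply means
    // "not cached yet" and takes the slow path above.)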
    emitAllocateJSFinalObject(regT2, regT0, regT1);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalScopeChain()->globalThis.get();
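    // (In sloppy mode an undefined or null `this` is replaced with the global this
    // object; anything else - e.g. a string primitive - is converted by the
    // cti_op_convert_this stub below.)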

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImmPtr(bitwise_cast<void*>(JSValue::encode(jsUndefined()))), regT0);
    Jump isNotUndefined = branchPtr(NotEqual, regT1, TrustedImmPtr(JSValue::encode(jsUndefined())));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImmPtr(bitwise_cast<void*>(JSValue::encode(m_globalData->stringStructure.get()))), regT0);
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVal = currentInstruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
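    // (The ArgumentCount slot in the call frame header counts `this` as well, so the
    // visible arguments.length is one less.)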
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));
1537 signExtend32ToPtr(regT1, regT1);
1538 loadPtr(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
1539 emitValueProfilingSite();
1540 emitPutVirtualRegister(dst, regT0);
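// Slow path: the first slow case fires when an arguments object already
// exists, so we can go straight to a generic get_by_val on it. The other two
// (non-integer index, index out of range) must materialize the arguments
// object first via cti_op_create_arguments.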
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

#endif // USE(JSVALUE64)

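// op_resolve_global_dynamic resolves a global property when intervening
// scope nodes may have to be skipped. Each skipped node is checked against
// the activation structure; any other kind of scope object (e.g. one pushed
// by "with") takes the slow path, since it could shadow the global property.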
void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
{
    int skip = currentInstruction[5].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);

    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
        addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
    }
    emit_op_resolve_global(currentInstruction, true);
}

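// Slow path: the fast path added one slow case per skipped scope node, so
// each loop iteration below links one of them and falls back to a full
// cti_op_resolve. The final slow case corresponds to reaching the global
// object but missing its resolve cache.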
void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
    int skip = currentInstruction[5].u.operand;
    while (skip--) {
        linkSlowCase(iter);
        JITStubCall resolveStubCall(this, cti_op_resolve);
        resolveStubCall.addArgument(TrustedImmPtr(ident));
        resolveStubCall.call(dst);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
    }
    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

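// op_new_func allocates the JSFunction inline. A nonzero third operand marks
// a lazily initialized declaration: if the destination register is already
// populated, the function was created earlier and allocation is skipped.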
void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTestPtr(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* executable = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    emitAllocateJSFunction(executable, regT2, regT0, regT1);

    emitStoreCell(dst, regT0);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emitSlow_op_new_func(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);

    // We only inline the allocation of anonymous function expressions.
    // If we wanted to allocate a named function expression inline, we would
    // also need to be able to do inline allocation of a JSStaticScopeObject.
    if (executable->name().isNull()) {
        emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
        emitAllocateJSFunction(executable, regT2, regT0, regT1);
        emitStoreCell(currentInstruction[1].u.operand, regT0);
        return;
    }

    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_func_exp(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    FunctionExecutable* executable = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    if (!executable->name().isNull())
        return;
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(executable));
    stubCall.call(currentInstruction[1].u.operand);
}

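// op_new_array only allocates inline when the backing store fits in a normal
// CopiedSpace block; oversize allocations go straight to the stub, which is
// why the slow-case handler below can ignore them entirely.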
void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length))) {
        JITStubCall stubCall(this, cti_op_new_array);
        stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
        stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
        stubCall.call(currentInstruction[1].u.operand);
        return;
    }
    int dst = currentInstruction[1].u.operand;
    int values = currentInstruction[2].u.operand;

    emitAllocateJSArray(values, length, regT0, regT1, regT2);
    emitStoreCell(dst, regT0);
}

void JIT::emitSlow_op_new_array(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // If the allocation would be oversize, we will have already made the proper
    // stub call in emit_op_new_array, so there are no slow cases to link here.
    int length = currentInstruction[3].u.operand;
    if (CopiedSpace::isOversize(JSArray::storageSize(length)))
        return;
    linkSlowCase(iter); // Not enough space in CopiedSpace for storage.
    linkSlowCase(iter); // Not enough space in MarkedSpace for cell.

    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)