2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "CodeBlock.h"
33 #include "GCAwareJITStubRoutine.h"
34 #include "Interpreter.h"
35 #include "JITInlineMethods.h"
36 #include "JITStubCall.h"
38 #include "JSFunction.h"
39 #include "JSPropertyNameIterator.h"
40 #include "JSVariableObject.h"
41 #include "LinkBuffer.h"
42 #include "RepatchBuffer.h"
43 #include "ResultType.h"
44 #include "SamplingTool.h"
// put_by_index: store `value` on `base` at a constant integer index.
// Always compiled as a call to the cti_op_put_by_index stub — no inline fast path.
54 void JIT::emit_op_put_by_index(Instruction* currentInstruction)
56 unsigned base = currentInstruction[1].u.operand;
57 unsigned property = currentInstruction[2].u.operand;
58 unsigned value = currentInstruction[3].u.operand;
60 JITStubCall stubCall(this, cti_op_put_by_index);
61 stubCall.addArgument(base);
62 stubCall.addArgument(TrustedImm32(property)); // index is a compile-time constant, passed as an immediate
63 stubCall.addArgument(value);
// NOTE(review): the embedded original-line numbering skips 64-66 here, so the
// stubCall.call() and closing brace appear to have been dropped from this capture —
// verify against the upstream file.
// put_getter_setter: install a getter/setter pair for a named property on `base`.
// Always compiled as a call to the cti_op_put_getter_setter stub.
67 void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
69 unsigned base = currentInstruction[1].u.operand;
70 unsigned property = currentInstruction[2].u.operand;
71 unsigned getter = currentInstruction[3].u.operand;
72 unsigned setter = currentInstruction[4].u.operand;
74 JITStubCall stubCall(this, cti_op_put_getter_setter);
75 stubCall.addArgument(base);
76 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property))); // property name from the code block's identifier table
77 stubCall.addArgument(getter);
78 stubCall.addArgument(setter);
// NOTE(review): numbering skips 79-81 — the stubCall.call() and closing brace
// appear missing from this capture.
// del_by_id: delete a named property from `base`, storing the boolean result in `dst`.
// Always compiled as a call to the cti_op_del_by_id stub.
82 void JIT::emit_op_del_by_id(Instruction* currentInstruction)
84 unsigned dst = currentInstruction[1].u.operand;
85 unsigned base = currentInstruction[2].u.operand;
86 unsigned property = currentInstruction[3].u.operand;
88 JITStubCall stubCall(this, cti_op_del_by_id);
89 stubCall.addArgument(base);
90 stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
// NOTE(review): numbering skips 91-93 — presumably stubCall.call(dst) and the
// closing brace were dropped from this capture; verify upstream.
// method_check: optimistically cache a method load. Emits inline structure checks on
// the base object and its prototype, plus a patchable immediate-load of the cached
// function, falling back to a regular get_by_id when the checks fail. The op_method_check
// always precedes a get_by_id in bytecode; this function compiles both and skips the
// get_by_id in the main loop.
94 void JIT::emit_op_method_check(Instruction* currentInstruction)
96 // Assert that the following instruction is a get_by_id.
97 ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id
98 || m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id_out_of_line);
100 currentInstruction += OPCODE_LENGTH(op_method_check);
102 // Do the method check - check the object & its prototype's structure inline (this is the common case).
103 m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
104 MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
106 int dst = currentInstruction[1].u.operand;
107 int base = currentInstruction[2].u.operand;
109 emitLoad(base, regT1, regT0);
110 emitJumpSlowCaseIfNotJSCell(base, regT1);
// The following sequence must have a fixed layout so RepatchBuffer can patch the
// structure pointers and function constant in place later.
112 BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
114 Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
115 DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
116 Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
118 // This will be relinked to load the function without doing a load.
119 DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
121 END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
// Fast path result: payload (the function) is already in regT0; set the cell tag.
123 move(TrustedImm32(JSValue::CellTag), regT1);
126 // Link the failure cases here.
127 structureCheck.link(this);
128 protoStructureCheck.link(this);
130 // Do a regular(ish) get_by_id (the slow case will be linked to
131 // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
132 compileGetByIdHotPath();
135 emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
136 emitStore(dst, regT1, regT0);
137 map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
139 // We've already generated the following get_by_id, so make sure it's skipped over.
140 m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
142 m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
// NOTE(review): original-line numbering is discontinuous throughout (e.g. 123->126,
// 132->135) — a success-path jump and the closing brace appear dropped from this capture.
// Slow path for method_check: compiles the get_by_id slow case, but linked against
// cti_op_get_by_id_method_check (isMethodCheck = true) so failed checks are recorded
// against the method-check patch info. Advances past the fused get_by_id, mirroring
// the hot path.
145 void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
147 currentInstruction += OPCODE_LENGTH(op_method_check);
149 int dst = currentInstruction[1].u.operand;
150 int base = currentInstruction[2].u.operand;
151 int ident = currentInstruction[3].u.operand;
153 compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
154 emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
156 // We've already generated the following get_by_id, so make sure it's skipped over.
157 m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
// Generates a shared thunk for get_by_val on a JSString receiver: bounds-checks the
// index, loads the character (8-bit or 16-bit path), and maps Latin-1 characters to
// the pre-allocated single-character string cells. On any failure it returns 0 in
// regT0; the caller null-checks the result and falls through to the generic slow path.
// Entry: regT0 = string cell, regT2 = int32 index. Exit: regT0 = JSString* (or 0),
// regT1 = CellTag.
160 JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
// NOTE(review): numbering skips 161-163 — the local assembler ("jit") and the
// failures/is16Bit/cont8Bit JumpList declarations are missing from this capture.
164 failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(globalData->stringStructure.get())));
166 // Load string length to regT1, and start the process of loading the data pointer into regT0
167 jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
168 jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
169 failures.append(jit.branchTest32(Zero, regT0)); // rope strings have a null value pointer — bail
171 // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
172 failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
174 // Load the character
177 // Load the string flags
178 jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
179 jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
180 is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
181 jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0); // 8-bit (Latin-1) character load
182 cont8Bit.append(jit.jump());
184 jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0); // 16-bit (UTF-16) character load
// Only characters < 0x100 have pre-allocated small strings; anything else is a failure.
188 failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
189 jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
190 jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
191 jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
// Failure path: return 0 so the caller can detect the miss.
195 jit.move(TrustedImm32(0), regT0);
198 LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
199 return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
// get_by_val fast path: inline-caches the common case of an int32 index into a
// JSArray's contiguous vector. Slow cases (non-int index, non-cell base, non-array
// class, out-of-bounds index, hole) are deferred to emitSlow_op_get_by_val.
202 void JIT::emit_op_get_by_val(Instruction* currentInstruction)
204 unsigned dst = currentInstruction[1].u.operand;
205 unsigned base = currentInstruction[2].u.operand;
206 unsigned property = currentInstruction[3].u.operand;
208 emitLoad2(base, regT1, regT0, property, regT3, regT2); // base -> regT1:regT0, property -> regT3:regT2
210 addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag))); // property must be int32
211 emitJumpSlowCaseIfNotJSCell(base, regT1);
212 addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info))); // base must be a JSArray
214 loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
215 addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset()))); // bounds check
// JSVALUE32_64: each vector slot is 8 bytes (tag + payload), hence TimesEight.
217 load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
218 load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
219 addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag))); // hole — take slow path
221 emitValueProfilingSite();
222 emitStore(dst, regT1, regT0);
223 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
// get_by_val slow path. Links the four slow cases from the hot path; for a string
// base with an int32 index it first tries the shared string-character thunk before
// falling back to the generic cti_op_get_by_val stub.
226 void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
228 unsigned dst = currentInstruction[1].u.operand;
229 unsigned base = currentInstruction[2].u.operand;
230 unsigned property = currentInstruction[3].u.operand;
232 linkSlowCase(iter); // property int32 check
233 linkSlowCaseIfNotJSCell(iter, base); // base cell check
235 Jump nonCell = jump();
236 linkSlowCase(iter); // base array check
237 Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get()));
238 emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
239 Jump failed = branchTestPtr(Zero, regT0); // thunk returns 0 on miss
240 emitStore(dst, regT1, regT0);
241 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val)); // success — rejoin the hot path
243 notString.link(this);
// NOTE(review): numbering skips 244-245 — failed.link / nonCell.link appear
// missing from this capture.
246 linkSlowCase(iter); // vector length check
247 linkSlowCase(iter); // empty value
249 JITStubCall stubCall(this, cti_op_get_by_val);
250 stubCall.addArgument(base);
251 stubCall.addArgument(property);
// NOTE(review): numbering skips 252-253 — presumably stubCall.call(dst).
254 emitValueProfilingSite();
// put_by_val fast path: int32 index store into a JSArray's vector, within the
// allocated vector length. Handles storing into a hole by bumping m_numValuesInVector
// and, if needed, m_length, then looping back to perform the store.
257 void JIT::emit_op_put_by_val(Instruction* currentInstruction)
259 unsigned base = currentInstruction[1].u.operand;
260 unsigned property = currentInstruction[2].u.operand;
261 unsigned value = currentInstruction[3].u.operand;
263 emitLoad2(base, regT1, regT0, property, regT3, regT2); // base -> regT1:regT0, property -> regT3:regT2
265 addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag))); // property must be int32
266 emitJumpSlowCaseIfNotJSCell(base, regT1);
267 addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info))); // base must be a JSArray
268 addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset()))); // in-vector bounds check
// Unconditional barrier: the value isn't loaded yet, so we can't filter immediates here.
270 emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
271 loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
273 Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
275 Label storeResult(this);
276 emitLoad(value, regT1, regT0);
277 store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
278 store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
// Hole store: account for the newly-populated slot, and grow m_length if the index
// is at or beyond the current length.
282 add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
283 branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
285 add32(TrustedImm32(1), regT2, regT0); // new length = index + 1
286 store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
287 jump().linkTo(storeResult, this);
// NOTE(review): numbering skips 279-281 (expected: end-jump and empty.link(this)) —
// verify against the upstream file.
// put_by_val slow path: links all four hot-path slow cases and falls back to the
// generic cti_op_put_by_val stub.
292 void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
294 unsigned base = currentInstruction[1].u.operand;
295 unsigned property = currentInstruction[2].u.operand;
296 unsigned value = currentInstruction[3].u.operand;
298 linkSlowCase(iter); // property int32 check
299 linkSlowCaseIfNotJSCell(iter, base); // base cell check
300 linkSlowCase(iter); // base not array check
301 linkSlowCase(iter); // in vector check
303 JITStubCall stubPutByValCall(this, cti_op_put_by_val);
304 stubPutByValCall.addArgument(base);
305 stubPutByValCall.addArgument(property);
306 stubPutByValCall.addArgument(value);
307 stubPutByValCall.call();
// get_by_id: loads the base, guards that it's a cell, then emits the patchable
// inline-cache hot path (compileGetByIdHotPath) and stores/profiles the result.
310 void JIT::emit_op_get_by_id(Instruction* currentInstruction)
312 int dst = currentInstruction[1].u.operand;
313 int base = currentInstruction[2].u.operand;
315 emitLoad(base, regT1, regT0);
316 emitJumpSlowCaseIfNotJSCell(base, regT1);
317 compileGetByIdHotPath();
318 emitValueProfilingSite();
319 emitStore(dst, regT1, regT0);
320 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
// Emits the patchable get_by_id inline cache: a structure check against a
// to-be-patched structure pointer, a convertible out-of-line-storage load, and two
// compact-offset loads (payload then tag). All labels are recorded in
// m_propertyAccessCompilationInfo so the repatching code can rewrite them later.
// Expects the base cell payload in regT0; leaves the result in regT1:regT0.
323 void JIT::compileGetByIdHotPath()
325 // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
326 // Additionally, for get_by_id we need patch the offset of the branch to the slow case (we patch this to jump
327 // to array-length / prototype access trampolines), and finally we also record the property-map access offset as a label
328 // to jump back to if one of these trampolines finds a match.
330 BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
332 Label hotPathBegin(this);
334 DataLabelPtr structureToCompare;
335 PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
336 addSlowCase(structureCheck);
// The storage load can be patched to a no-op when the cached property is inline.
338 ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT2);
339 DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
340 DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
342 Label putResult(this);
344 END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
346 m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
// get_by_id slow path: delegates to compileGetByIdSlowCase with the property
// identifier, then profiles the stub-call result.
349 void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
351 int dst = currentInstruction[1].u.operand;
352 int base = currentInstruction[2].u.operand;
353 int ident = currentInstruction[3].u.operand;
355 compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
356 emitValueProfilingSite();
// Emits the get_by_id slow-case call. The call location is recorded (slowCaseInfo)
// so that trampolines generated later can compute the slow-case entry from the
// call-return address. `isMethodCheck` selects the method-check variant of the stub.
359 void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
361 // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
362 // so that we only need track one pointer into the slow case code - we track a pointer to the location
363 // of the call (which we can use to look up the patch information), but should a array-length or
364 // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
365 // the distance from the call to the head of the slow case.
366 linkSlowCaseIfNotJSCell(iter, base);
// NOTE(review): numbering skips 367-368 — a linkSlowCase(iter) for the structure
// check appears missing from this capture.
369 BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
371 Label coldPathBegin(this);
372 JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
373 stubCall.addArgument(regT1, regT0); // base value (tag, payload) already in registers
374 stubCall.addArgument(TrustedImmPtr(ident));
375 Call call = stubCall.call(dst);
377 END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
379 // Track the location of the call; this will be used to recover patch information.
380 m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
// put_by_id hot path: a patchable structure check followed by patchable payload/tag
// stores into the (possibly out-of-line) property storage. Labels are recorded for
// later repatching; the identifier is only needed on the slow path.
383 void JIT::emit_op_put_by_id(Instruction* currentInstruction)
385 // In order to be able to patch both the Structure, and the object offset, we store one pointer,
386 // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
387 // such that the Structure & offset are always at the same distance from this.
389 int base = currentInstruction[1].u.operand;
390 int value = currentInstruction[3].u.operand;
392 emitLoad2(base, regT1, regT0, value, regT3, regT2); // base -> regT1:regT0, value -> regT3:regT2
394 emitJumpSlowCaseIfNotJSCell(base, regT1);
396 BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
398 Label hotPathBegin(this);
400 // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
401 DataLabelPtr structureToCompare;
402 addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
404 ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT1);
405 DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
406 DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
408 END_UNINTERRUPTED_SEQUENCE(sequencePutById);
// Value is known here, so immediates can be filtered out of the barrier.
410 emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
412 m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
// put_by_id slow path: calls the direct or normal put stub, recording the call so
// the transition/replace repatch code can find it later.
415 void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
417 int base = currentInstruction[1].u.operand;
418 int ident = currentInstruction[2].u.operand;
419 int direct = currentInstruction[8].u.operand; // put_by_id_direct flag lives in operand 8
421 linkSlowCaseIfNotJSCell(iter, base);
// NOTE(review): numbering skips 422-423 — a linkSlowCase(iter) for the structure
// check appears missing from this capture.
424 JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
425 stubCall.addArgument(base);
426 stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
427 stubCall.addArgument(regT3, regT2); // value (tag, payload) still in registers from the hot path
428 Call call = stubCall.call();
430 // Track the location of the call; this will be used to recover patch information.
431 m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
434 // Compile a store into an object's property storage. May overwrite base.
// For out-of-line offsets, first redirects `base` at the out-of-line storage;
// inline offsets store relative to the object cell itself.
435 void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
437 if (isOutOfLineOffset(cachedOffset))
438 loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
439 emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
442 // Compile a load from an object's property storage. May overwrite base.
// Inline offsets load directly off the cell; out-of-line offsets first load the
// storage pointer into the payload register (safe: it is overwritten last).
443 void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
445 if (isInlineOffset(cachedOffset)) {
446 emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
// NOTE(review): numbering skips 447-449 — an early return and closing brace appear
// missing from this capture.
450 RegisterID temp = resultPayload;
451 loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), temp);
452 emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
// Compile a load of a cached property from a known (compile-time constant) JSObject.
// Inline offsets use the object's absolute slot address; out-of-line offsets go
// through the storage pointer. resultTag is used as scratch and loaded last.
455 void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
457 if (isInlineOffset(cachedOffset)) {
458 move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
459 load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
460 load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
// NOTE(review): numbering skips 461-463 — an early return and closing brace appear
// missing from this capture.
464 loadPtr(base->addressOfOutOfLineStorage(), resultTag);
465 load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
466 load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
// Generates a stub for a put_by_id that transitions the object from oldStructure to
// newStructure: verifies the old structure and the whole prototype chain, optionally
// reallocates out-of-line property storage via a stub call, writes the new structure
// pointer, stores the value, and links the caller's return address to the stub.
// Failure cases tail-call back to the generic put_by_id slow path.
469 void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
471 // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
472 #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
473 // For MIPS, we don't add sizeof(void*) to the stack offset.
474 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
475 // For MIPS, we don't add sizeof(void*) to the stack offset.
476 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
// NOTE(review): #else/#endif lines around the two preprocessor arms are missing
// from this capture (numbering skips 477, 480-481) — verify upstream.
478 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
479 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
482 JumpList failureCases;
483 failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
484 failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
485 testPrototype(oldStructure->storedPrototype(), failureCases);
488 // Verify that nothing in the prototype chain has a setter for this property.
489 for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
490 testPrototype((*it)->storedPrototype(), failureCases);
493 // If we succeed in all of our checks, and the code was optimizable, then make sure we
494 // decrement the rare case counter.
495 #if ENABLE(VALUE_PROFILER)
496 if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
// NOTE(review): numbering skips 497-498 (expected: the sub32 of the counter) —
// the decrement operation preceding this address operand is missing from this capture.
499 AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
503 // Reallocate property storage if needed.
505 bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
506 if (willNeedStorageRealloc) {
507 // This trampoline is called like a JIT stub; before we can call again we need to
508 // remove the return address from the stack, to prevent the stack from becoming misaligned.
509 preserveReturnAddressAfterCall(regT3);
511 JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
512 stubCall.skipArgument(); // base
513 stubCall.skipArgument(); // ident
514 stubCall.skipArgument(); // value
515 stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
516 stubCall.addArgument(TrustedImmPtr(newStructure));
517 stubCall.call(regT0);
519 restoreReturnAddressBeforeReturn(regT3);
// Reload the base value: the realloc stub call clobbered regT0/regT1.
521 #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
522 // For MIPS, we don't add sizeof(void*) to the stack offset.
523 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
524 // For MIPS, we don't add sizeof(void*) to the stack offset.
525 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
527 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
528 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
532 emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
534 storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
// Load the value operand (args[2]) from the JIT stack frame, then store it at the
// cached offset in the (possibly reallocated) property storage.
535 #if CPU(MIPS) || CPU(SH4) || CPU(ARM)
536 // For MIPS, we don't add sizeof(void*) to the stack offset.
537 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
538 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
540 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
541 load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
543 compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
// Success path returns to the caller; the failure cases below tail-call the
// generic fail stub instead.
547 ASSERT(!failureCases.empty());
548 failureCases.link(this);
549 restoreArgumentReferenceForTrampoline();
550 Call failureCall = tailRecursiveCall();
552 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
554 patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
556 if (willNeedStorageRealloc) {
557 ASSERT(m_calls.size() == 1);
558 patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
// GC-aware routine: keeps the stub alive while a realloc call may be on the stack.
561 stubInfo->stubRoutine = createJITStubRoutine(
564 ("Baseline put_by_id transition stub for CodeBlock %p, return point %p",
565 m_codeBlock, returnAddress.value())),
567 m_codeBlock->ownerExecutable(),
568 willNeedStorageRealloc,
570 RepatchBuffer repatchBuffer(m_codeBlock);
571 repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
// Patches a get_by_id inline cache for a self (own-property) access: writes the
// expected structure, toggles the out-of-line storage load on or off, and patches
// the payload/tag displacement offsets. Also relinks the slow-path caller so a
// second miss goes to the self-fail stub rather than re-patching.
574 void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
576 RepatchBuffer repatchBuffer(codeBlock);
578 // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
579 // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
580 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
582 // Patch the offset into the property map to load from, then patch the Structure to look for.
583 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
584 repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
585 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
586 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
// Patches a put_by_id inline cache for a replace (no structure transition): writes
// the expected structure, toggles the storage load, and patches the payload/tag
// store offsets. A second miss relinks to the generic put stub.
589 void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
591 RepatchBuffer repatchBuffer(codeBlock);
593 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
594 // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
595 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
597 // Patch the offset into the property map to load from, then patch the Structure to look for.
598 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
599 repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
600 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
601 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
// Generates an out-of-line stub that services get_by_id of "length" on a JSArray:
// checks the class info, loads m_length from the array storage, guards that it fits
// in an int32, then jumps back into the hot path at putResult. The hot path's
// structure-check jump is repatched to enter this stub.
604 void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
606 StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
608 // regT0 holds a JSCell*
611 Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));
613 // Checks out okay! - get the length from the storage
614 loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
615 load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
// Lengths above INT_MAX can't be represented as an int32 JSValue — bail to slow path.
617 Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
619 move(TrustedImm32(JSValue::Int32Tag), regT1);
620 Jump success = jump();
// NOTE(review): numbering skips 618 — the move of regT2 into the result payload
// register appears missing from this capture.
622 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
624 // Use the patch information to link the failure cases back to the original slow case routine.
625 CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
626 patchBuffer.link(failureCases1, slowCaseBegin);
627 patchBuffer.link(failureCases2, slowCaseBegin);
629 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
630 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
632 // Track the stub we have created so that it will be deleted later.
633 stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
635 ("Baseline get_by_id array length stub for CodeBlock %p, return point %p",
636 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
637 stubInfo->patch.baseline.u.get.putResult).executableAddress()));
639 // Finally patch the jump to slow case back in the hot path to jump here instead.
640 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
641 RepatchBuffer repatchBuffer(m_codeBlock);
642 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
644 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
645 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
// Generates a stub for a get_by_id that finds the property on the direct prototype:
// checks both the base's and the prototype's structures, then loads the value
// directly — or, for Getter/Custom cached slots, calls the corresponding cti stub.
// On success it jumps back into the hot path at putResult; failures return to the
// original slow case. The slow-path caller is relinked to the proto-list stub so
// further misses build a polymorphic list.
648 void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
650 // regT0 holds a JSCell*
652 // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
653 // referencing the prototype object - let's speculatively load its table nice and early!)
654 JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
656 Jump failureCases1 = checkStructure(regT0, structure);
658 // Check the prototype object's Structure had not changed.
659 move(TrustedImmPtr(protoObject), regT3);
660 Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
662 bool needsStubLink = false;
// Dispatch on the kind of cached slot: JS getter, native custom getter, or plain value.
664 if (slot.cachedPropertyType() == PropertySlot::Getter) {
665 needsStubLink = true;
666 compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset); // load the getter function
667 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
668 stubCall.addArgument(regT1);
669 stubCall.addArgument(regT0);
670 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
672 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
673 needsStubLink = true;
674 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
675 stubCall.addArgument(TrustedImmPtr(protoObject));
676 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
677 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
678 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
// NOTE(review): numbering skips 671 and 679-680 — the stubCall.call() statements
// and the else branch line appear missing from this capture.
681 compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
683 Jump success = jump();
685 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
687 // Use the patch information to link the failure cases back to the original slow case routine.
688 CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
689 patchBuffer.link(failureCases1, slowCaseBegin);
690 patchBuffer.link(failureCases2, slowCaseBegin);
692 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
693 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
// Link any stub calls (getter/custom paths) recorded during generation.
696 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
698 patchBuffer.link(iter->from, FunctionPtr(iter->to));
702 // Track the stub we have created so that it will be deleted later.
703 stubInfo->stubRoutine = createJITStubRoutine(
706 ("Baseline get_by_id proto stub for CodeBlock %p, return point %p",
707 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
708 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
710 m_codeBlock->ownerExecutable(),
713 // Finally patch the jump to slow case back in the hot path to jump here instead.
714 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
715 RepatchBuffer repatchBuffer(m_codeBlock);
716 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
718 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
719 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
723 void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
725 // regT0 holds a JSCell*
726 Jump failureCase = checkStructure(regT0, structure);
727 bool needsStubLink = false;
728 bool isDirect = false;
729 if (slot.cachedPropertyType() == PropertySlot::Getter) {
730 needsStubLink = true;
731 compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
732 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
733 stubCall.addArgument(regT1);
734 stubCall.addArgument(regT0);
735 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
737 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
738 needsStubLink = true;
739 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
740 stubCall.addArgument(regT0);
741 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
742 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
743 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
747 compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
750 Jump success = jump();
752 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
754 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
756 patchBuffer.link(iter->from, FunctionPtr(iter->to));
759 // Use the patch information to link the failure cases back to the original slow case routine.
760 CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
762 lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
764 patchBuffer.link(failureCase, lastProtoBegin);
766 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
767 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
769 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
772 ("Baseline get_by_id self list stub for CodeBlock %p, return point %p",
773 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
774 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
776 m_codeBlock->ownerExecutable(),
779 polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
781 // Finally patch the jump to slow case back in the hot path to jump here instead.
782 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
783 RepatchBuffer repatchBuffer(m_codeBlock);
784 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
787 void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
789 // regT0 holds a JSCell*
791 // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
792 // referencing the prototype object - let's speculatively load it's table nice and early!)
793 JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
795 // Check eax is an object of the right Structure.
796 Jump failureCases1 = checkStructure(regT0, structure);
798 // Check the prototype object's Structure had not changed.
799 move(TrustedImmPtr(protoObject), regT3);
800 Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
802 bool needsStubLink = false;
803 bool isDirect = false;
804 if (slot.cachedPropertyType() == PropertySlot::Getter) {
805 needsStubLink = true;
806 compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
807 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
808 stubCall.addArgument(regT1);
809 stubCall.addArgument(regT0);
810 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
812 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
813 needsStubLink = true;
814 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
815 stubCall.addArgument(TrustedImmPtr(protoObject));
816 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
817 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
818 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
822 compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
825 Jump success = jump();
827 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
829 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
831 patchBuffer.link(iter->from, FunctionPtr(iter->to));
834 // Use the patch information to link the failure cases back to the original slow case routine.
835 CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
836 patchBuffer.link(failureCases1, lastProtoBegin);
837 patchBuffer.link(failureCases2, lastProtoBegin);
839 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
840 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
842 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
845 ("Baseline get_by_id proto list stub for CodeBlock %p, return point %p",
846 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
847 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
849 m_codeBlock->ownerExecutable(),
852 prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
854 // Finally patch the jump to slow case back in the hot path to jump here instead.
855 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
856 RepatchBuffer repatchBuffer(m_codeBlock);
857 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
860 void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
862 // regT0 holds a JSCell*
865 JumpList bucketsOfFail;
867 // Check eax is an object of the right Structure.
868 bucketsOfFail.append(checkStructure(regT0, structure));
870 Structure* currStructure = structure;
871 WriteBarrier<Structure>* it = chain->head();
872 JSObject* protoObject = 0;
873 for (unsigned i = 0; i < count; ++i, ++it) {
874 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
875 currStructure = it->get();
876 testPrototype(protoObject, bucketsOfFail);
880 bool needsStubLink = false;
881 bool isDirect = false;
882 if (slot.cachedPropertyType() == PropertySlot::Getter) {
883 needsStubLink = true;
884 compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
885 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
886 stubCall.addArgument(regT1);
887 stubCall.addArgument(regT0);
888 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
890 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
891 needsStubLink = true;
892 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
893 stubCall.addArgument(TrustedImmPtr(protoObject));
894 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
895 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
896 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
900 compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
903 Jump success = jump();
905 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
907 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
909 patchBuffer.link(iter->from, FunctionPtr(iter->to));
912 // Use the patch information to link the failure cases back to the original slow case routine.
913 CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
915 patchBuffer.link(bucketsOfFail, lastProtoBegin);
917 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
918 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
920 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
923 ("Baseline get_by_id chain list stub for CodeBlock %p, return point %p",
924 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
925 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
927 m_codeBlock->ownerExecutable(),
930 // Track the stub we have created so that it will be deleted later.
931 prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
933 // Finally patch the jump to slow case back in the hot path to jump here instead.
934 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
935 RepatchBuffer repatchBuffer(m_codeBlock);
936 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
939 void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
941 // regT0 holds a JSCell*
944 JumpList bucketsOfFail;
946 // Check eax is an object of the right Structure.
947 bucketsOfFail.append(checkStructure(regT0, structure));
949 Structure* currStructure = structure;
950 WriteBarrier<Structure>* it = chain->head();
951 JSObject* protoObject = 0;
952 for (unsigned i = 0; i < count; ++i, ++it) {
953 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
954 currStructure = it->get();
955 testPrototype(protoObject, bucketsOfFail);
959 bool needsStubLink = false;
960 if (slot.cachedPropertyType() == PropertySlot::Getter) {
961 needsStubLink = true;
962 compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
963 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
964 stubCall.addArgument(regT1);
965 stubCall.addArgument(regT0);
966 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
968 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
969 needsStubLink = true;
970 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
971 stubCall.addArgument(TrustedImmPtr(protoObject));
972 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
973 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
974 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
977 compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
978 Jump success = jump();
980 LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
982 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
984 patchBuffer.link(iter->from, FunctionPtr(iter->to));
987 // Use the patch information to link the failure cases back to the original slow case routine.
988 patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
990 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
991 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
993 // Track the stub we have created so that it will be deleted later.
994 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
997 ("Baseline get_by_id chain stub for CodeBlock %p, return point %p",
998 m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
999 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
1001 m_codeBlock->ownerExecutable(),
1003 stubInfo->stubRoutine = stubRoutine;
1005 // Finally patch the jump to slow case back in the hot path to jump here instead.
1006 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
1007 RepatchBuffer repatchBuffer(m_codeBlock);
1008 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
1010 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
1011 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
1014 void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
1016 ASSERT(sizeof(JSValue) == 8);
1018 if (finalObjectMode == MayBeFinal) {
1019 Jump isInline = branch32(LessThan, offset, TrustedImm32(inlineStorageCapacity));
1020 loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
1023 isInline.link(this);
1024 addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), base);
1027 #if !ASSERT_DISABLED
1028 Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(inlineStorageCapacity));
1030 isOutOfLine.link(this);
1032 loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
1035 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), resultPayload);
1036 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), resultTag);
1039 void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
1041 unsigned dst = currentInstruction[1].u.operand;
1042 unsigned base = currentInstruction[2].u.operand;
1043 unsigned property = currentInstruction[3].u.operand;
1044 unsigned expected = currentInstruction[4].u.operand;
1045 unsigned iter = currentInstruction[5].u.operand;
1046 unsigned i = currentInstruction[6].u.operand;
1048 emitLoad2(property, regT1, regT0, base, regT3, regT2);
1049 emitJumpSlowCaseIfNotJSCell(property, regT1);
1050 addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
1051 // Property registers are now available as the property is known
1052 emitJumpSlowCaseIfNotJSCell(base, regT3);
1053 emitLoadPayload(iter, regT1);
1055 // Test base's structure
1056 loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
1057 addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
1058 load32(addressFor(i), regT3);
1059 sub32(TrustedImm32(1), regT3);
1060 addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
1061 add32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_offsetBase)), regT3);
1062 compileGetDirectOffset(regT2, regT1, regT0, regT3);
1064 emitStore(dst, regT1, regT0);
1065 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
1068 void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1070 unsigned dst = currentInstruction[1].u.operand;
1071 unsigned base = currentInstruction[2].u.operand;
1072 unsigned property = currentInstruction[3].u.operand;
1074 linkSlowCaseIfNotJSCell(iter, property);
1076 linkSlowCaseIfNotJSCell(iter, base);
1080 JITStubCall stubCall(this, cti_op_get_by_val);
1081 stubCall.addArgument(base);
1082 stubCall.addArgument(property);
1086 void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
1088 int dst = currentInstruction[1].u.operand;
1089 int index = currentInstruction[2].u.operand;
1090 int skip = currentInstruction[3].u.operand;
1092 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
1093 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1094 ASSERT(skip || !checkTopLevel);
1095 if (checkTopLevel && skip--) {
1096 Jump activationNotCreated;
1098 activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
1099 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
1100 activationNotCreated.link(this);
1103 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
1105 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
1106 loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);
1108 emitLoad(index, regT1, regT0, regT2);
1109 emitValueProfilingSite();
1110 emitStore(dst, regT1, regT0);
1111 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
1114 void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
1116 int index = currentInstruction[1].u.operand;
1117 int skip = currentInstruction[2].u.operand;
1118 int value = currentInstruction[3].u.operand;
1120 emitLoad(value, regT1, regT0);
1122 emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
1123 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1124 ASSERT(skip || !checkTopLevel);
1125 if (checkTopLevel && skip--) {
1126 Jump activationNotCreated;
1128 activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
1129 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
1130 activationNotCreated.link(this);
1133 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
1134 loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
1136 loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
1137 emitStore(index, regT1, regT0, regT3);
1138 emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1141 void JIT::emit_op_get_global_var(Instruction* currentInstruction)
1143 int dst = currentInstruction[1].u.operand;
1144 WriteBarrier<Unknown>* registerPointer = currentInstruction[2].u.registerPointer;
1146 load32(registerPointer->tagPointer(), regT1);
1147 load32(registerPointer->payloadPointer(), regT0);
1148 emitValueProfilingSite();
1149 emitStore(dst, regT1, regT0);
1150 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
1153 void JIT::emit_op_put_global_var(Instruction* currentInstruction)
1155 WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
1156 int value = currentInstruction[2].u.operand;
1158 JSGlobalObject* globalObject = m_codeBlock->globalObject();
1160 emitLoad(value, regT1, regT0);
1162 if (Heap::isWriteBarrierEnabled()) {
1163 move(TrustedImmPtr(globalObject), regT2);
1165 emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1168 store32(regT1, registerPointer->tagPointer());
1169 store32(regT0, registerPointer->payloadPointer());
1170 map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
1173 void JIT::emit_op_put_global_var_check(Instruction* currentInstruction)
1175 WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
1176 int value = currentInstruction[2].u.operand;
1178 JSGlobalObject* globalObject = m_codeBlock->globalObject();
1180 emitLoad(value, regT1, regT0);
1182 addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));
1184 if (Heap::isWriteBarrierEnabled()) {
1185 move(TrustedImmPtr(globalObject), regT2);
1186 emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1189 store32(regT1, registerPointer->tagPointer());
1190 store32(regT0, registerPointer->payloadPointer());
1194 void JIT::emitSlow_op_put_global_var_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1198 JITStubCall stubCall(this, cti_op_put_global_var_check);
1199 stubCall.addArgument(regT1, regT0);
1200 stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
1204 void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
1206 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
1207 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
1208 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
1209 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
1210 repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
1213 void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
1215 if (isDirectPutById(stubInfo))
1216 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
1218 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
1219 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1));
1220 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
1221 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
1226 #endif // USE(JSVALUE32_64)
1227 #endif // ENABLE(JIT)