/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

32 #include "CodeBlock.h"
33 #include "JITInlineMethods.h"
34 #include "JITStubCall.h"
36 #include "JSFunction.h"
37 #include "JSPropertyNameIterator.h"
38 #include "Interpreter.h"
39 #include "LinkBuffer.h"
40 #include "RepatchBuffer.h"
41 #include "ResultType.h"
42 #include "SamplingTool.h"
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(Imm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned function = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_put_getter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(function);
    stubCall.call();
}

void JIT::emit_op_put_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned function = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_put_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(function);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

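// op_method_check precedes a get_by_id and optimistically caches a method lookup: the
// base object's Structure, its prototype's Structure, and the function fetched are all
// planted as patchable constants. Once linked for a concrete method, the match path
// materializes the function with no memory loads at all; on a structure mismatch we
// fall through into the regular get_by_id hot path generated below.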
void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);

    currentInstruction += OPCODE_LENGTH(op_method_check);

    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));

    // This will be relinked to load the function without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);

    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    move(TrustedImm32(JSValue::CellTag), regT1);
    Jump match = jump();

    ASSERT_JIT_OFFSET_UNUSED(protoObj, differenceBetween(info.structureToCompare, protoObj), patchOffsetMethodCheckProtoObj);
    ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoStructureToCompare), patchOffsetMethodCheckProtoStruct);
    ASSERT_JIT_OFFSET_UNUSED(putFunction, differenceBetween(info.structureToCompare, putFunction), patchOffsetMethodCheckPutFunction);

    // Link the failure cases here.
    structureCheck.link(this);
    protoStructureCheck.link(this);

    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath();

    match.link(this);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
    emitValueProfilingSite(SubsequentProfilingSite);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

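// Shared out-of-line stub for get_by_val on a JSString base. It is written to fail
// soft: on any miss (wrong vptr, an unflattened rope, an out-of-bounds index, or a
// character with no cached single-character string) it returns with regT0 == 0, and
// the caller null-checks the result (see emitSlow_op_get_by_val).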
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0), TrustedImmPtr(globalData->jsStringVPtr)));

    // Load string length to regT1, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large.
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));

    // Load the character.
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags.
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe.
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*globalData, &jit);
    return patchBuffer.finalizeCode();
}

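// In JSVALUE32_64 a JSValue is an 8-byte tag/payload pair, so array elements are
// addressed with a TimesEight scale and read as two separate 32-bit loads. A tag of
// EmptyValueTag marks a hole in the vector and diverts us to the slow case.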
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
    emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);

    emitValueProfilingSite(SubsequentProfilingSite);
}

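// put_by_val's fast path mirrors get_by_val: the index must be an int32, the base a
// JSArray, and the index within the allocated vector. Storing into a hole additionally
// bumps m_numValuesInVector and, when writing past m_length, grows the reported length
// before looping back to perform the store.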
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);

    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();

    empty.link(this);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);

    end.link(this);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    stubPutByValCall.call();
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath();
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath()
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also set the property-map access offset as a label
    // to jump back to if one of these trampolines finds a match.
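    // The patchable sequence below is, in rough outline (a sketch; the exact layout is
    // what the per-architecture patchOffsetGetById* constants assert):
    //     cmp  [base + structureOffset], <structure>    ; patchable Structure*
    //     jne  slowCase                                 ; later repointed at stubs
    //     mov  storage, [base + propertyStorage]
    //     mov  payload, [storage + <offset1>]           ; patchable displacement
    //     mov  tag,     [storage + <offset2>]           ; patchable displacement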

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    DataLabelPtr structureToCompare;
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdBranchToSlowCase);

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel1, differenceBetween(hotPathBegin, displacementLabel1), patchOffsetGetByIdPropertyMapOffset1);
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel2, differenceBetween(hotPathBegin, displacementLabel2), patchOffsetGetByIdPropertyMapOffset2);

    Label putResult(this);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, putResult), patchOffsetGetByIdPutResult);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite(SubsequentProfilingSite);
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

#ifndef NDEBUG
    Label coldPathBegin(this);
#endif
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);

    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);

    ASSERT_JIT_OFFSET(differenceBetween(coldPathBegin, call), patchOffsetGetByIdSlowCaseCall);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store a single pointer,
    // 'hotPathBegin', to just after the point where the arguments have been loaded into registers, and we
    // generate code such that the Structure & offset are always at the same distance from it.

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad2(base, regT1, regT0, value, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetPutByIdStructure);

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    ASSERT_JIT_OFFSET_UNUSED(displacementLabel1, differenceBetween(hotPathBegin, displacementLabel1), patchOffsetPutByIdPropertyMapOffset1);
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel2, differenceBetween(hotPathBegin, displacementLabel2), patchOffsetPutByIdPropertyMapOffset2);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

// Compile a store into an object's property storage. May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    emitStore(offset, valueTag, valuePayload, base);
}

// Compile a load from an object's property storage. May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), temp);
    emitLoad(offset, resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    loadPtr(base->addressOfPropertyStorage(), resultTag);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

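// Generates the stub for a put_by_id that transitions the base from oldStructure to
// newStructure. In outline: re-check the old Structure and (for non-direct puts) every
// prototype on the chain, reallocate the property storage if its capacity changed,
// write-barrier the base, store the new Structure*, store the value, and return. Any
// failed check tail-calls back into the generic put_by_id stub.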
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM the return address is not on the stack, so don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);

    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    // Reallocate property storage if needed.
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImm32(newStructure->propertyStorageCapacity()));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM the return address is not on the stack, so don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM the return address is not on the stack, so don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_globalData, this);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

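// For a self access no stub is needed at all: the inline fast path is rewritten in
// place with the real Structure* and the payload/tag offsets into the property map.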
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

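// Array.length gets a tiny out-of-line stub: check the JSArray vptr, load m_length
// from the ArrayStorage, and bail to the slow case if it exceeds INT_MAX, since the
// fast path can only return an Int32-tagged value.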
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // regT0 holds a JSCell*

    // Check that the base cell is a JSArray.
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);

    Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

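// The proto/chain stubs below all follow one template: check the Structure of the base
// (and of each prototype involved), then either read the cached slot directly or call
// out through a getter / custom-getter stub, and finally jump back into the hot path
// at the instruction that stores the result. Failure jumps are linked either to the
// original slow case or, for the polymorphic list variants, to the previous stub.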
void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - so let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;

    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

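// Polymorphic self-list variant: reads come from the base object itself, and the
// failure case of each new stub is linked to the previously generated stub (or to the
// slow case for the first entry in the list).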
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - so let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check that the base object has the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that the base object has the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that the base object has the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));

    // On success return back to the hot path code, at the point where it will perform the store to dst for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

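// Variant of compileGetDirectOffset for a property offset that is only known at
// runtime (held in a register); used by op_get_by_pname below. The compile-time
// offset overloads appear earlier in this file.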
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset)
{
    ASSERT(sizeof(JSValue) == 8);

    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known.
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);

    // Test base's structure.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT2, regT1, regT0, regT3);

    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

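// Scoped variable access walks 'skip' links down the ScopeChainNode list and then
// indexes into the resulting variable object's registers. When the code block has a
// lazily created top-level activation, the first hop is skipped while the activation
// register still holds the empty value.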
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);
    move(TrustedImmPtr(globalObject), regT2);

    emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

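// The reset functions return a patched get_by_id / put_by_id site to its initial
// state: the call is relinked to the plain cti_op_get_by_id / cti_op_put_by_id stub,
// the inline Structure* is set to an unreachable value (-1) so the inline check always
// fails, and the patched offsets are zeroed.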
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)