Fix TIVI-504 (backport of trac.webkit.org/changeset/144137)
profile/ivi/webkit-efl.git: Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

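// Editorial note (not in the original source): everything in this file assumes the
// JSVALUE32_64 value representation, in which a JSValue is eight bytes: a 32-bit
// payload (cell pointer, int32, boolean, ...) plus a 32-bit tag (JSValue::CellTag,
// JSValue::Int32Tag, JSValue::EmptyValueTag, ...) that discriminates it. A rough
// sketch of the layout the loads and stores below index into:
//
//     // illustrative only; the real definition lives in JSValue.h
//     union EncodedValueDescriptor {
//         struct {
//             int32_t payload;
//             int32_t tag;
//         } asBits; // payload precedes tag on little-endian targets
//     };
//
// This is why property and array accesses in this file always move a tag/payload
// register pair (conventionally regT1/regT0) rather than a single register.
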
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned getter = currentInstruction[3].u.operand;
    unsigned setter = currentInstruction[4].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(getter);
    stubCall.addArgument(setter);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id
        || m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id_out_of_line);
    
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    
    // This will be relinked to load the function without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    move(TrustedImm32(JSValue::CellTag), regT1);
    Jump match = jump();
    
    // Link the failure cases here.
    structureCheck.link(this);
    protoStructureCheck.link(this);
    
    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath();
    
    match.link(this);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);

    m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
}
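
// Editorial sketch (not in the original source): when a later cache fill succeeds,
// the repatching code rewrites the patchable immediates planted in the sequence
// above, conceptually (names are illustrative, simplified):
//
//     repatchBuffer.repatch(info.structureToCompare, cachedStructure);
//     repatchBuffer.repatch(protoObj, prototypeObject);
//     repatchBuffer.repatch(protoStructureToCompare, prototypeStructure);
//     repatchBuffer.repatch(putFunction, cachedFunction);
//
// after which the hot path materializes the cached JSFunction with two structure
// checks and no property load at all.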

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(globalData->stringStructure.get())));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}
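
// Editorial note (not in the original source): the stub above fast-paths only
// one-character results, indexing globalData->smallStrings' single-character
// string cache instead of allocating a new JSString. Anything it cannot handle
// (a non-string base, a rope with a null value pointer, an out-of-range index,
// or a code unit >= 0x100) returns null in regT0, and the caller falls through
// to the generic cti_op_get_by_val stub.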

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
}
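
// Editorial sketch (not in the original source): with TimesEight scaling, the two
// element loads above compute
//
//     tag(i)     = storage + OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + i * 8
//                          + OBJECT_OFFSETOF(JSValue, u.asBits.tag)
//     payload(i) = storage + OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + i * 8
//                          + OBJECT_OFFSETOF(JSValue, u.asBits.payload)
//
// and an element whose tag equals JSValue::EmptyValueTag is a hole, which bails to
// the slow case rather than returning the raw slot.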

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get()));
    emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);

    emitValueProfilingSite();
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);
    
    end.link(this);
}
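
// Editorial note (not in the original source): the empty-slot path above handles two
// cases. Filling a hole below the current m_length only increments
// m_numValuesInVector; storing at or past m_length (while still inside the vector,
// per the earlier vectorLengthOffset check) additionally writes index + 1 into
// m_length before looping back to storeResult to perform the actual store.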

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check
    
    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    stubPutByValCall.call();
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath();
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath()
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
    
    Label hotPathBegin(this);
    
    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    
    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
    
    Label putResult(this);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
}
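
// Editorial sketch (not in the original source): because the sequence above is
// emitted uninterrupted, every patchable site sits at a fixed offset from
// hotPathBegin, which is what lets patchGetByIdSelf() below cache a hit in place,
// conceptually:
//
//     repatch(structureToCompare, cachedStructure);       // accept this Structure
//     setLoadInstructionIsActive(propertyStorageLoad, isOutOfLineOffset(offset));
//     repatch(displacementLabel1, payloadDisplacement);   // retarget the two loads
//     repatch(displacementLabel2, tagDisplacement);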

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
    
    Label coldPathBegin(this);
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);
    
    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // ('hotPathBegin') to just after the point where the arguments have been loaded into registers,
    // and we generate code such that the Structure & offset are always at the same distance from it.
    
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
    
    Label hotPathBegin(this);
    
    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    
    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::offsetOfOutOfLineStorage()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
    
    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }
    
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }
    
    loadPtr(base->addressOfOutOfLineStorage(), resultTag);
    load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInOutOfLineStorage(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}
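
// Editorial note (not in the original source): a PropertyOffset encodes both storage
// kinds. Inline offsets name slots allocated directly inside the JSObject cell;
// out-of-line offsets name slots in the separately allocated array reached through
// JSObject::offsetOfOutOfLineStorage(). The helpers above first normalize the base
// pointer for the storage kind, then let indexRelativeToBase() /
// offsetInOutOfLineStorage() turn the abstract offset into a concrete displacement.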

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);
    
    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif
    
    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);
        
        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
    
    ret();
    
    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
    
    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }
    
    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline put_by_id transition stub for CodeBlock %p, return point %p",
             m_codeBlock, returnAddress.value())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        willNeedStorageRealloc,
        newStructure);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
}
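
// Editorial note (not in the original source): the transition stub built above runs
// in place of the patched put_by_id. On a full match it optionally grows out-of-line
// storage via cti_op_put_by_id_transition_realloc, installs the new Structure
// pointer, stores the value at the cached offset, and returns directly to the
// caller; any failed check instead tail-calls back into the generic
// cti_op_put_by_id(_direct)_fail path.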

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
    
    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
    
    // Patch the offset into the property map to store to, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
    
    // regT0 holds a JSCell*
    
    // Check for array.
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));
    
    // Checks out okay! - get the length from the storage.
    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    
    Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline get_by_id array length stub for CodeBlock %p, return point %p",
         m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
             stubInfo->patch.baseline.u.get.putResult).executableAddress()));
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto stub for CodeBlock %p, return point %p",
             m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
                 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}


void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    
    patchBuffer.link(failureCase, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id self list stub for CodeBlock %p, return point %p",
             m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
                 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    // Check that regT0 holds an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto list stub for CodeBlock %p, return point %p",
             m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
                 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check that regT0 holds an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
    
    patchBuffer.link(bucketsOfFail, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain list stub for CodeBlock %p, return point %p",
             m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
                 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    
    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check that regT0 holds an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain stub for CodeBlock %p, return point %p",
             m_codeBlock, stubInfo->hotPathBegin.labelAtOffset(
                 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_globalData,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    stubInfo->stubRoutine = stubRoutine;
    
    // Finally patch the jump to the slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);
    
    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(inlineStorageCapacity));
        loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
#if !ASSERT_DISABLED
        Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(inlineStorageCapacity));
        breakpoint();
        isOutOfLine.link(this);
#endif
        loadPtr(Address(base, JSObject::offsetOfOutOfLineStorage()), base);
        neg32(offset);
    }
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)), resultTag);
}
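
// Editorial note (not in the original source): both branches above funnel into one
// shared pair of BaseIndex loads at
//     base + offset * 8 + (inlineStorageCapacity - 2) * sizeof(EncodedJSValue)
// The inline path rebases 'base' into the object's inline storage and keeps the
// offset non-negative, while the out-of-line path points 'base' at the external
// storage and negates the offset, matching the direction in which out-of-line
// PropertyOffsets index that storage; this avoids a branch at the loads themselves.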

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;
    
    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);
    
    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    add32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_offsetBase)), regT3);
    compileGetDirectOffset(regT2, regT1, regT0, regT3);
    
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}
1067
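// Slow path for get_by_pname: if any of the five fast-path checks above
// failed (property not a cell, payload mismatch, base not a cell, structure
// mismatch, or index out of the cacheable range), fall back to a generic
// cti_op_get_by_val stub call.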
void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

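// Reads a variable from an enclosing scope: walk 'skip' links down the scope
// chain (allowing for a top-level activation that may not have been created
// yet), then load the value's tag/payload from the scope object's register
// array.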
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

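// Writes a variable in an enclosing scope: same scope-chain walk as
// op_get_scoped_var, followed by the store and a write barrier on the scope
// object.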
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

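// Reads a global variable through a WriteBarrier<Unknown>* baked directly
// into the instruction stream, so no global-object lookup is needed at run
// time.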
void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    WriteBarrier<Unknown>* registerPointer = currentInstruction[2].u.registerPointer;

    load32(registerPointer->tagPointer(), regT1);
    load32(registerPointer->payloadPointer(), regT0);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

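// Writes a global variable through the baked-in register pointer. The write
// barrier on the global object is emitted before the store, and only when
// the heap currently requires barriers.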
void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

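// Like op_put_global_var, but guarded: if the byte at the instruction's
// predicatePointer is nonzero, the store takes the slow path below instead
// (presumably so the stub can notify anything observing this variable).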
void JIT::emit_op_put_global_var_check(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    unmap();
}

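// Slow path for the guarded store: the value is still live in regT1/regT0
// from the fast path, so pass it to cti_op_put_global_var_check along with
// the instruction's fourth operand.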
void JIT::emitSlow_op_put_global_var_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_put_global_var_check);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
}

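// Returns a patched get_by_id inline cache to its unpatched state: relink the
// slow call to the generic C function, repatch the inline structure comparand
// to a value no real Structure* can equal (-1), zero both displacement
// labels, and point the structure-check jump back at the cold path.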
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

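// As above, for put_by_id; the only difference is choosing the direct or
// non-direct generic C function based on how the original put was emitted.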
void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)