Fix TIVI-504 (backport of trac.webkit.org/changeset/144137)
[profile/ivi/webkit-efl.git] Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "Interpreter.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

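// A note on the value representation assumed throughout this file: under
// USE(JSVALUE32_64) every JSValue is an 8-byte tag/payload pair, roughly:
//
//     union {
//         struct { int32_t payload; int32_t tag; } asBits; // payload first on little-endian targets
//     } u;
//
// Cells carry JSValue::CellTag, int32s carry JSValue::Int32Tag, and unused
// array slots carry JSValue::EmptyValueTag. This is why values move through
// tag/payload register pairs (typically regT1/regT0) and why indexed accesses
// below scale by TimesEight.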
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned getter = currentInstruction[3].u.operand;
    unsigned setter = currentInstruction[4].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(getter);
    stubCall.addArgument(setter);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);
    
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
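    // BEGIN/END_UNINTERRUPTED_SEQUENCE pin down the exact machine-code layout
    // of the instructions they bracket (no constant-pool flushes or other
    // assembler interruptions), so the structure pointers and the function
    // constant emitted below sit at fixed, patchable offsets.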
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    
    // This will be repatched to load the cached function directly, without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    move(TrustedImm32(JSValue::CellTag), regT1);
    Jump match = jump();
    
    // Link the failure cases here.
    structureCheck.link(this);
    protoStructureCheck.link(this);
    
    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath();
    
    match.link(this);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);

    m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
    emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
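    // Only single characters in the Latin-1 range are cached in the global
    // smallStrings table; code points >= 0x100 fall back to the slow path.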
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
    return patchBuffer.finalizeCode();
}

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
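    // The check above sends holes (EmptyValueTag) to the slow path, where the
    // lookup can consult the prototype chain.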
    
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);

    emitValueProfilingSite();
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
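    // Writing into a hole is the uncommon case: besides storing the value we
    // must bump m_numValuesInVector, and grow m_length when the index is at
    // or beyond the current length; that bookkeeping happens below before
    // looping back to storeResult.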
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);
    
    end.link(this);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check
    
    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    stubPutByValCall.call();
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath();
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath()
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as a
    // label to jump back to if one of these trampolines finds a match.
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
    
    Label hotPathBegin(this);
    
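    // patchGetByIdDefaultStructure is an impossible structure pointer, so the
    // check below always fails (taking the slow case) until the first cache
    // fill repatches it with a real Structure*.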
    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    
    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
    
    Label putResult(this);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, displacementLabel1, displacementLabel2, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
    
    Label coldPathBegin(this);
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);
    
    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.
    
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
    
    Label hotPathBegin(this);
    
    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    
    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
    
    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, displacementLabel1, displacementLabel2));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    emitStore(offset, valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    RegisterID temp = resultPayload;
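    // Reusing resultPayload as the temporary is safe: emitLoad reads the tag
    // before the payload, so the storage pointer held in temp is not clobbered
    // until the final load (resultTag and resultPayload are distinct registers).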
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), temp);
    emitLoad(offset, resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    loadPtr(base->addressOfPropertyStorage(), resultTag);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);
    
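    // For a normal (non-direct) put we must guard the entire prototype chain:
    // if any prototype later gained a setter or a read-only version of this
    // property, the cached transition would be unsound, so any structure
    // change along the chain fails the stub.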
    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif
    
    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);
        
        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
    
    ret();
    
    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
    
    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }
    
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
    
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
    
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
    
    // regT0 holds a JSCell*
    
    // Check for array
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));
    
    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    
    Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    
    patchBuffer.link(failureCase, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    // Check regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    CodeRef stubRoutine = patchBuffer.finalizeCode();

    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    
    patchBuffer.link(bucketsOfFail, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    
    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
    
    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset)
{
    ASSERT(sizeof(JSValue) == 8);
    
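    // offset is a property index; each slot is an 8-byte tag/payload pair,
    // hence the TimesEight scaling in the loads below.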
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;
    
    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);
    
    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
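    // Convert the one-biased for-in loop counter to a zero-based slot index
    // before the bounds check against the iterator's cacheable slot count.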
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT2, regT1, regT0, regT3);
    
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
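    // A function that needs a full scope chain may not have created its
    // activation yet; in that case the activation is not on the scope chain,
    // so the first hop must be skipped (the EmptyValueTag check below detects
    // the missing activation).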
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);
    move(TrustedImmPtr(globalObject), regT2);

    emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)