Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "Interpreter.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

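// A note on the value representation used throughout this file: under
// USE(JSVALUE32_64) a JSValue is an 8-byte tag/payload pair, and the JIT
// keeps the two halves in separate 32-bit registers. By convention below,
// loads place the payload in regT0 and the tag in regT1 (regT2/regT3 for a
// second value), e.g.:
//
//     emitLoad(base, regT1, regT0); // regT1 <- tag, regT0 <- payload
//
// A cell (object or string) is identified by tag == JSValue::CellTag, in
// which case the payload holds the JSCell*.
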
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(Imm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned function = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_getter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(function);
    stubCall.call();
}

void JIT::emit_op_put_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned function = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_put_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(function);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);
    
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    
    // This will be relinked to load the function without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
    
    move(TrustedImm32(JSValue::CellTag), regT1);
    Jump match = jump();
    
    ASSERT_JIT_OFFSET_UNUSED(protoObj, differenceBetween(info.structureToCompare, protoObj), patchOffsetMethodCheckProtoObj);
    ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoStructureToCompare), patchOffsetMethodCheckProtoStruct);
    ASSERT_JIT_OFFSET_UNUSED(putFunction, differenceBetween(info.structureToCompare, putFunction), patchOffsetMethodCheckPutFunction);
    
    // Link the failure cases here.
    structureCheck.link(this);
    protoStructureCheck.link(this);
    
    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath();
    
    match.link(this);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}
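
// A sketch of what the method-check fast path above amounts to, in
// illustrative pseudo-C (these names are not real helpers):
//
//     if (base->structure() == patchedStructure            // patched later
//         && proto->structure() == patchedProtoStructure)  // patched later
//         result = patchedFunction;                        // patched later
//     else
//         result = get_by_id(base, ident);                 // generic lookup
//
// Until the cache is primed, the three patchable constants hold dummy values
// (patchGetByIdDefaultStructure and 0), so both structure checks fail and
// execution falls through to the regular get_by_id path.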

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
    emitValueProfilingSite(SubsequentProfilingSite);
    
    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0), TrustedImmPtr(globalData->jsStringVPtr)));
    
    // Load string length to regT1, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));
    
    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large.
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
    
    // Load the character.
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags.
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);
    
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe.
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();
    
    LinkBuffer patchBuffer(*globalData, &jit);
    return patchBuffer.finalizeCode();
}
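
// In outline, the stub above implements the following (hedged pseudo-C;
// regT0 holds the string cell and regT2 the index on entry, and the caller
// null-checks the result):
//
//     if (!isJSString(cell) || !cell->value() || index >= length)
//         return 0;
//     UChar c = is8Bit ? data8[index] : data16[index];
//     if (c >= 0x100)
//         return 0;
//     return globalData->smallStrings.singleCharacterStrings()[c];
//
// Only characters with a preallocated small-string cell are handled inline;
// everything else falls back to the generic get_by_val slow case.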

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));
    
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));
    
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
}
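
// Addressing note for the two loads above: each ArrayStorage vector slot is
// a full 8-byte JSValue, so BaseIndex(regT3, regT2, TimesEight, ...) computes
//
//     storage + index * 8 + offsetof(ArrayStorage, m_vector)
//         + (tag or payload sub-offset within the JSValue)
//
// i.e. one scaled-index load each for the tag word and the payload word.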

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsStringVPtr));
    emitNakedCall(m_globalData->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);

    emitValueProfilingSite(SubsequentProfilingSite);
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, property, regT3, regT2);
    
    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr)));
    addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, JSArray::vectorLengthOffset())));

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
    
    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();
    
    empty.link(this);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
    
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    jump().linkTo(storeResult, this);
    
    end.link(this);
}
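
// The empty-slot path above handles a store into a hole that is still inside
// the allocated vector: it bumps m_numValuesInVector, and if the index is at
// or beyond m_length it also grows m_length to index + 1 before looping back
// to storeResult. Stores beyond the vector length itself were already
// diverted to the slow case by the vectorLengthOffset() check.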

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    
    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check
    
    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    stubPutByValCall.call();
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath();
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath()
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
    
    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;
    
    DataLabelPtr structureToCompare;
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdBranchToSlowCase);
    
    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel1, differenceBetween(hotPathBegin, displacementLabel1), patchOffsetGetByIdPropertyMapOffset1);
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel2, differenceBetween(hotPathBegin, displacementLabel2), patchOffsetGetByIdPropertyMapOffset2);
    
    Label putResult(this);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, putResult), patchOffsetGetByIdPutResult);
    
    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
}
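
// Layout of the patchable fast path emitted above, relative to hotPathBegin
// (the ASSERT_JIT_OFFSET checks pin each distance to a fixed patchOffset*
// constant so the patch routines further down can find every site later):
//
//     + patchOffsetGetByIdStructure           Structure* immediate to compare
//     + patchOffsetGetByIdBranchToSlowCase    branch taken on structure miss
//     + patchOffsetGetByIdPropertyMapOffset1  payload load displacement
//     + patchOffsetGetByIdPropertyMapOffset2  tag load displacement
//     + patchOffsetGetByIdPutResult           label the trampolines jump back to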

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;
    
    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite(SubsequentProfilingSite);
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
    
#ifndef NDEBUG
    Label coldPathBegin(this);
#endif
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);
    
    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);
    
    ASSERT_JIT_OFFSET(differenceBetween(coldPathBegin, call), patchOffsetGetByIdSlowCaseCall);
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // ('hotPathBegin') to just after the arguments have been loaded into registers, and we generate code
    // such that the Structure & offset are always at the same distance from this.
    
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    
    emitLoad2(base, regT1, regT0, value, regT3, regT2);
    
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    
    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
    
    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;
    
    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetPutByIdStructure);
    
    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag
    
    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
    
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel1, differenceBetween(hotPathBegin, displacementLabel1), patchOffsetPutByIdPropertyMapOffset1);
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel2, differenceBetween(hotPathBegin, displacementLabel2), patchOffsetPutByIdPropertyMapOffset2);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();
    
    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    emitStore(offset, valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    int offset = cachedOffset;
    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), temp);
    emitLoad(offset, resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
    loadPtr(base->addressOfPropertyStorage(), resultTag);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, cachedOffset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}
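
// Worked example of the offset arithmetic in the JSObject* variant above:
// property storage is an array of WriteBarrier<Unknown>, 8 bytes per entry
// under JSVALUE32_64, so for cachedOffset == 3 the loads read
//
//     storage + 3 * 8 + offsetof(payload)  -> resultPayload
//     storage + 3 * 8 + offsetof(tag)      -> resultTag
//
// The tag is loaded last deliberately: resultTag doubles as the storage base
// register until the final load clobbers it.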

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases);
    
    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases);
    }

    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);
        
        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImm32(newStructure->propertyStorageCapacity()));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);
    
    ret();
    
    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    
    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
    
    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }
    
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}
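
// In order, the transition stub just compiled performs: a cell/structure
// check on the base, structure checks over the whole prototype chain (for
// non-direct puts), an optional call out to grow the property storage when
// the capacity changes, the write barrier, the store of the new Structure*,
// and finally the store of the value at cachedOffset. Any failed check
// tail-calls back to the generic C stub via failureCall.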

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
    
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);
    
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
    
    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}
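
// Both patch routines above follow the same protocol: first relink the slow
// call so any subsequent miss goes to a generic stub (we only ever patch
// once), then overwrite the Structure* immediate and the two displacement
// labels in the hot path. The displacement is in bytes from the start of the
// property storage: sizeof(JSValue) * cachedOffset plus the tag or payload
// sub-offset. Note that get_by_id plants DataLabelCompact loads while
// put_by_id plants DataLabel32 stores, which is why the accessors differ.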

void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
    
    // regT0 holds a JSCell*
    
    // Check for array.
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0), TrustedImmPtr(m_globalData->jsArrayVPtr));
    
    // Checks out okay! - get the length from the storage.
    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    
    Jump failureCases2 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
    
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!).
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}


void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    
    patchBuffer.link(failureCase, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!).
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    
    // Check that regT0 holds an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);
    
    // Check that the prototype object's Structure has not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }
    
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
    
    CodeRef stubRoutine = patchBuffer.finalizeCode();

    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check that regT0 holds an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    
    patchBuffer.link(bucketsOfFail, lastProtoBegin);
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
    
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    
    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);
    
    JumpList bucketsOfFail;
    
    // Check that regT0 holds an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));
    
    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);
    
    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();
    
    LinkBuffer patchBuffer(*m_globalData, this);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
    
    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
    
    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;
    
    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
    
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset)
{
    ASSERT(sizeof(JSValue) == 8);
    
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}
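
// This register-indexed variant is the building block for get_by_pname
// below: the property offset arrives in a register rather than as a
// compile-time constant, so each half of the value is read at
// storage + offset * 8 (TimesEight matching the ASSERT that
// sizeof(JSValue) == 8) plus the tag or payload sub-offset.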

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;
    
    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known.
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);
    
    // Test base's structure.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT2, regT1, regT0, regT3);
    
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    
    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);
    
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}
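
// Sketch of the scope walk just compiled (put_scoped_var below mirrors it):
// starting from the call frame's ScopeChain pointer, follow the `next` link
// `skip` times - skipping the top-level activation only if it has actually
// been created - then read register `index` out of the resulting
// JSVariableObject's registers array.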

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->m_registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite(FirstProfilingSite);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);
    move(TrustedImmPtr(globalObject), regT2);

    emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), reinterpret_cast<void*>(-1));
    // The put_by_id hot path plants DataLabel32 stores (see emit_op_put_by_id), so reset them as DataLabel32, not DataLabelCompact.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)