/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "GetterSetter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "Interpreter.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {
#if USE(JSVALUE64)

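// Shared stub for the string[index] case of get_by_val. On entry the JSString
// cell is in regT0 and the int32 index in regT1; on success the matching
// single-character string is returned in regT0. Any case we cannot handle here
// (wrong cell type, null value pointer, out-of-bounds index, or a character
// code >= 0x100) returns 0 so the caller can fall through to the slow path.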
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info)));

    // Load string length to regT2, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplFlagsOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*globalData, &jit);
    return patchBuffer.finalizeCode();
}

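// Fast path for get_by_val: dst(r) base(r) property(r). Handles only the case
// where base is a JSArray and property is an int32 index within the array's
// vector that holds a non-empty value; everything else bails to the slow case.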
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);

    // This is technically incorrect - we're zero-extending an int32.  On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was signed, since m_vectorLength is always less than intmax (as the total allocation
    // size is always less than 4GB).  As such, zero extending will have been correct (and extending the value
    // to 64 bits is necessary, since it's used in the address calculation).  We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0);
    addSlowCase(branchTestPtr(Zero, regT0));

    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSString::s_info));
    emitNakedCall(CodeLocationLabel(m_globalData->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);

    emitValueProfilingSite(SubsequentProfilingSite);
}

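// Loads the value at the given index of base's property storage into result.
// 'offset' is a register holding the index, in units of pointer-sized slots.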
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch)
{
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), scratch);
    loadPtr(BaseIndex(scratch, offset, ScalePtr, 0), result);
}

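// Fast path for get_by_pname (property access inside for-in). Valid only when
// the property register still holds the expected name, the base's Structure
// matches the iterator's cached Structure, and the iteration index is within
// the iterator's cacheable slots; otherwise we fall back to the get_by_val stub.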
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitGetVirtualRegister(property, regT0);
    addSlowCase(branchPtr(NotEqual, regT0, addressFor(expected)));
    emitGetVirtualRegisters(base, regT0, iter, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    compileGetDirectOffset(regT0, regT0, regT3, regT1);

    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

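// Fast path for put_by_val: base(r) property(r) value(r). Handles a JSArray
// base with an int32 index inside the vector length. A store to a hole bumps
// m_numValuesInVector (and m_length, if the index is past the current length)
// before storing; anything else bails to the slow case.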
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info)));
    addSlowCase(branch32(AboveOrEqual, regT1, Address(regT0, JSArray::vectorLengthOffset())));

    loadPtr(Address(regT0, JSArray::storageOffset()), regT2);
    Jump empty = branchTestPtr(Zero, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    storePtr(regT3, BaseIndex(regT2, regT1, ScalePtr, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    Jump end = jump();

    empty.link(this);
    add32(TrustedImm32(1), Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    emitWriteBarrier(regT0, regT3, regT1, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

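// The following opcodes have no fast path; each simply calls through to the
// matching runtime stub.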
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_getter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_getter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_setter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_setter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_method_check(Instruction* currentInstruction)
{
    // Assert that the following instruction is a get_by_id.
    ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);

    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    // Do the method check - check the object & its prototype's structure inline (this is the common case).
    m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
    MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();

    Jump notCell = emitJumpIfNotJSCell(regT0);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT1);
    Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT1, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));

    // This will be relinked to plant the cached function pointer directly, without doing a load.
    DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);

    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);

    Jump match = jump();

    ASSERT_JIT_OFFSET_UNUSED(protoObj, differenceBetween(info.structureToCompare, protoObj), patchOffsetMethodCheckProtoObj);
    ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoStructureToCompare), patchOffsetMethodCheckProtoStruct);
    ASSERT_JIT_OFFSET_UNUSED(putFunction, differenceBetween(info.structureToCompare, putFunction), patchOffsetMethodCheckPutFunction);

    // Link the failure cases here.
    notCell.link(this);
    structureCheck.link(this);
    protoStructureCheck.link(this);

    // Do a regular(ish) get_by_id (the slow case will be linked to
    // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
    compileGetByIdHotPath(baseVReg, ident);

    match.link(this);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(resultVReg);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    currentInstruction += OPCODE_LENGTH(op_method_check);
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true);
    emitValueProfilingSite(SubsequentProfilingSite);

    // We've already generated the following get_by_id, so make sure it's skipped over.
    m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    compileGetByIdHotPath(baseVReg, ident);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(resultVReg);
}

void JIT::compileGetByIdHotPath(int baseVReg, Identifier*)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    DataLabelPtr structureToCompare;
    Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdBranchToSlowCase)

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT0);
    DataLabelCompact displacementLabel = loadPtrWithCompactAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0);
    ASSERT_JIT_OFFSET_UNUSED(displacementLabel, differenceBetween(hotPathBegin, displacementLabel), patchOffsetGetByIdPropertyMapOffset);

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, putResult), patchOffsetGetByIdPutResult);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false);
    emitValueProfilingSite(SubsequentProfilingSite);
}

void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we need only track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here.  To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

#ifndef NDEBUG
    Label coldPathBegin(this);
#endif
    JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(resultVReg);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    ASSERT_JIT_OFFSET(differenceBetween(coldPathBegin, call), patchOffsetGetByIdSlowCaseCall);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    unsigned valueVReg = currentInstruction[3].u.operand;

    // In order to be able to patch both the Structure and the object offset, we store one pointer,
    // 'hotPathBegin', to just after the arguments have been loaded into registers, and we generate
    // code such that the Structure & offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo());
    m_propertyAccessCompilationInfo.last().bytecodeIndex = m_bytecodeOffset;
    m_propertyAccessCompilationInfo.last().hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetPutByIdStructure);

    loadPtr(Address(regT0, JSObject::offsetOfPropertyStorage()), regT2);
    DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT2, patchPutByIdDefaultOffset));

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    ASSERT_JIT_OFFSET_UNUSED(displacementLabel, differenceBetween(hotPathBegin, displacementLabel), patchOffsetPutByIdPropertyMapOffset);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
    unsigned direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(regT1);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].callReturnLocation = call;
}

// Compile a store into an object's property storage.  May overwrite the
// value in base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), base);
    storePtr(value, Address(base, offset));
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset)
{
    int offset = cachedOffset * sizeof(JSValue);
    loadPtr(Address(base, JSObject::offsetOfPropertyStorage()), result);
    loadPtr(Address(result, offset), result);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset)
{
    loadPtr(base->addressOfPropertyStorage(), result);
    loadPtr(Address(result, cachedOffset * sizeof(WriteBarrier<Unknown>)), result);
}

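// Generates the stub for a put_by_id that transitions the base object from
// oldStructure to newStructure: checks the old Structure (and, for non-direct
// puts, that the prototype chain is unchanged), reallocates property storage
// via a stub call if the capacity changed, emits a write barrier, then plants
// the new Structure and stores the value.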
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    JumpList failureCases;
    // Check regT0 is an object of the right Structure.
    failureCases.append(emitJumpIfNotJSCell(regT0));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));

    testPrototype(oldStructure->storedPrototype(), failureCases);

    ASSERT(oldStructure->storedPrototype().isNull() || oldStructure->storedPrototype().asCell()->structure() == chain->head()->get());

    // For a non-direct put, check that no Structure in the prototype chain has changed.
    if (!direct) {
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            ASSERT((*it)->storedPrototype().isNull() || (*it)->storedPrototype().asCell()->structure() == it[1].get());
            testPrototype((*it)->storedPrototype(), failureCases);
        }
    }

    Call callTarget;

    // Emit a call only if storage realloc is needed.
    bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->propertyStorageCapacity()));
        stubCall.addArgument(TrustedImm32(newStructure->propertyStorageCapacity()));
        stubCall.call(regT0);
        emitGetJITStubArg(2, regT1);

        restoreReturnAddressBeforeReturn(regT3);
    }

    // Planting the new structure triggers the write barrier so we need
    // an unconditional barrier here.
    emitWriteBarrier(regT0, regT1, regT2, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    ASSERT(newStructure->classInfo() == oldStructure->classInfo());
    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
    compilePutDirectOffset(regT0, regT1, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_globalData, this);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = patchBuffer.finalizeCode();
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine.code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the Structure to look for, then the offset into the property map to load from.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset), offset);
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    int offset = sizeof(JSValue) * cachedOffset;

    // Patch the Structure to look for, then the offset into the property map to store to.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset), offset);
}

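// Generates the array-length trampoline for a patched get_by_id: if the base
// is a JSArray whose length fits in an int32, the length is returned as an
// immediate; otherwise we jump back to the original slow case.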
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // Check regT0 is an array
    Jump failureCases1 = branchPtr(NotEqual, Address(regT0, JSCell::classInfoOffset()), TrustedImmPtr(&JSArray::s_info));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::storageOffset()), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
    Jump failureCases2 = branch32(LessThan, regT2, TrustedImm32(0));

    emitFastArithIntToImmNoCheck(regT2, regT0);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists, the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    bool needsStubLink = false;

    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();
    LinkBuffer patchBuffer(*m_globalData, this);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = patchBuffer.finalizeCode();

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructures->list[currentIndex - 1].stubRoutine.code());
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();

    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists, the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    move(TrustedImmPtr(protoObject), regT3);
    Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));

    // Checks out okay!
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    MacroAssemblerCodeRef stubCode = patchBuffer.finalizeCode();
    prototypeStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode.code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);
    JumpList bucketsOfFail;

    // Check regT0 is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(regT0, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(prototypeStructures->list[currentIndex - 1].stubRoutine.code());

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    CodeRef stubRoutine = patchBuffer.finalizeCode();

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_globalData, this);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));

    // On success return back to the hot patch code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));

    // Track the stub we have created so that it will be deleted later.
    CodeRef stubRoutine = patchBuffer.finalizeCode();
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

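// Walks 'skip' links down the scope chain (skipping over a top-level activation
// that has not been created yet) and loads the variable's register from the
// resulting JSVariableObject.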
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);

    emitWriteBarrier(regT1, regT0, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    loadPtr(&globalObject->m_registers, regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitValueProfilingSite(FirstProfilingSite);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    move(TrustedImmPtr(globalObject), regT1);
    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
    emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

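// Restores a patched get_by_id to its unoptimized state: relink the stub call,
// reset the Structure check and property-map offset, and point the slow-case
// branch back at the original slow case.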
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), reinterpret_cast<void*>(-1));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetPutByIdPropertyMapOffset), 0);
}

#endif // USE(JSVALUE64)

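// Emits the generational write barrier for a store into 'owner'. Without
// ENABLE(GGC) this only counts barriers when write-barrier profiling is on;
// with GGC it dirties the owner's card so the collector will rescan it,
// optionally skipping the work when the stored value is an immediate, and (in
// the register variant) consulting the block's mark bits as an approximate filter.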
void JIT::emitWriteBarrier(RegisterID owner, RegisterID value, RegisterID scratch, RegisterID scratch2, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(scratch2);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
    ASSERT(owner != scratch);
    ASSERT(owner != scratch2);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    move(owner, scratch);
    andPtr(TrustedImm32(static_cast<int32_t>(MarkedBlock::blockMask)), scratch);
    move(owner, scratch2);
    // consume additional 8 bits as we're using an approximate filter
    rshift32(TrustedImm32(MarkedBlock::atomShift + 8), scratch2);
    andPtr(TrustedImm32(MarkedBlock::atomMask >> 8), scratch2);
    Jump filter = branchTest8(Zero, BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfMarks()));
    move(owner, scratch2);
    rshift32(TrustedImm32(MarkedBlock::cardShift), scratch2);
    andPtr(TrustedImm32(MarkedBlock::cardMask), scratch2);
    store8(TrustedImm32(1), BaseIndex(scratch, scratch2, TimesOne, MarkedBlock::offsetOfCards()));
    filter.link(this);
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}

void JIT::emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif

#if ENABLE(GGC)
    Jump filterCells;
    if (mode == ShouldFilterImmediates)
        filterCells = emitJumpIfNotJSCell(value);
    uint8_t* cardAddress = Heap::addressOfCardFor(owner);
    move(TrustedImmPtr(cardAddress), scratch);
    store8(TrustedImm32(1), Address(scratch));
    if (mode == ShouldFilterImmediates)
        filterCells.link(this);
#endif
}

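// Emits a check that the given prototype's Structure is unchanged, appending
// the failure jump to failureCases. A null prototype terminates the chain and
// needs no check.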
void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());
    move(TrustedImmPtr(prototype.asCell()), regT3);
    failureCases.append(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototype.asCell()->structure())));
}

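// Back-patches a linked method check: plants the cached Structure, prototype
// Structure, prototype object and function into the inline check emitted by
// emit_op_method_check, then relinks the slow-path call to the update stub.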
void JIT::patchMethodCallProto(JSGlobalData& globalData, CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSObject* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    CodeLocationDataLabelPtr structureLocation = methodCallLinkInfo.cachedStructure.location();
    methodCallLinkInfo.cachedStructure.set(globalData, structureLocation, codeBlock->ownerExecutable(), structure);

    Structure* prototypeStructure = proto->structure();
    methodCallLinkInfo.cachedPrototypeStructure.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), codeBlock->ownerExecutable(), prototypeStructure);
    methodCallLinkInfo.cachedPrototype.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), codeBlock->ownerExecutable(), proto);
    methodCallLinkInfo.cachedFunction.set(globalData, structureLocation.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), codeBlock->ownerExecutable(), callee);
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_method_check_update));
}

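// Determines whether a put_by_id stub was compiled for a direct put. For
// replace/generic stubs the access type alone is ambiguous, so we inspect
// which cti function the call is currently linked to.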
bool JIT::isDirectPutById(StructureStubInfo* stubInfo)
{
    switch (stubInfo->accessType) {
    case access_put_by_id_transition_normal:
        return false;
    case access_put_by_id_transition_direct:
        return true;
    case access_put_by_id_replace:
    case access_put_by_id_generic: {
        void* oldCall = MacroAssembler::readCallTarget(stubInfo->callReturnLocation).executableAddress();
        if (oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_generic)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_fail))
            return true;
        ASSERT(oldCall == bitwise_cast<void*>(cti_op_put_by_id)
               || oldCall == bitwise_cast<void*>(cti_op_put_by_id_generic)
               || oldCall == bitwise_cast<void*>(cti_op_put_by_id_fail));
        return false;
    }
    default:
        ASSERT_NOT_REACHED();
        return false;
    }
}

} // namespace JSC

#endif // ENABLE(JIT)